hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 958k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c455a3b6c64198e9f6c6b8b99f938134a554603 | 817 | py | Python | mmelemental/models/forcefield/bonded/bonds/base.py | RlyehAD/mmelemental | 5f0754356fd2c89e9119cf810f1972430dfa75dc | [
"BSD-3-Clause"
] | null | null | null | mmelemental/models/forcefield/bonded/bonds/base.py | RlyehAD/mmelemental | 5f0754356fd2c89e9119cf810f1972430dfa75dc | [
"BSD-3-Clause"
] | null | null | null | mmelemental/models/forcefield/bonded/bonds/base.py | RlyehAD/mmelemental | 5f0754356fd2c89e9119cf810f1972430dfa75dc | [
"BSD-3-Clause"
] | null | null | null | from pydantic import Field
from mmelemental.models.forcefield.params import Params
from typing import Optional, List, Tuple, Union
from cmselemental.types import Array
import os
import pathlib
__all__ = ["Bonds"]
class Bonds(Params):
lengths: Array[float] = Field(
..., description="Equilibrium bond lengths. Default unit is Angstroms."
)
lengths_units: Optional[str] = Field(
"angstroms", description="Equilibrium bond lengths unit."
)
connectivity: List[Tuple[Union[int, str], Union[int, str], float]] = Field( # type: ignore
...,
description="Particle indices or names e.g. types for each bond and the bond order: (index1, index2, order).",
min_items=1,
)
_path = os.path.join(pathlib.Path(__file__).parent.absolute(), "potentials", "*.py")
| 34.041667 | 119 | 0.684211 | from pydantic import Field
from mmelemental.models.forcefield.params import Params
from typing import Optional, List, Tuple, Union
from cmselemental.types import Array
import os
import pathlib
__all__ = ["Bonds"]
class Bonds(Params):
lengths: Array[float] = Field(
..., description="Equilibrium bond lengths. Default unit is Angstroms."
)
lengths_units: Optional[str] = Field(
"angstroms", description="Equilibrium bond lengths unit."
)
connectivity: List[Tuple[Union[int, str], Union[int, str], float]] = Field(
...,
description="Particle indices or names e.g. types for each bond and the bond order: (index1, index2, order).",
min_items=1,
)
_path = os.path.join(pathlib.Path(__file__).parent.absolute(), "potentials", "*.py")
| true | true |
1c455aa0f8a6d073be57b1f48b7b42ece8de7bc8 | 22,104 | py | Python | cirq/ops/three_qubit_gates.py | Hongbo-Miao/Cirq | d6c6f9b1ea282e79db4475e5327d0380e6558ba6 | [
"Apache-2.0"
] | null | null | null | cirq/ops/three_qubit_gates.py | Hongbo-Miao/Cirq | d6c6f9b1ea282e79db4475e5327d0380e6558ba6 | [
"Apache-2.0"
] | null | null | null | cirq/ops/three_qubit_gates.py | Hongbo-Miao/Cirq | d6c6f9b1ea282e79db4475e5327d0380e6558ba6 | [
"Apache-2.0"
] | 1 | 2020-12-24T07:13:40.000Z | 2020-12-24T07:13:40.000Z | # Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common quantum gates that target three qubits."""
from typing import AbstractSet, Any, List, Optional, Tuple, TYPE_CHECKING
import numpy as np
import sympy
from cirq import linalg, protocols, value
from cirq._compat import proper_repr
from cirq._doc import document
from cirq.ops import (
common_gates,
controlled_gate,
eigen_gate,
gate_features,
pauli_gates,
swap_gates,
)
if TYPE_CHECKING:
# pylint: disable=unused-import
import cirq
class CCZPowGate(
eigen_gate.EigenGate, gate_features.ThreeQubitGate, gate_features.InterchangeableQubitsGate
):
"""A doubly-controlled-Z that can be raised to a power.
The matrix of `CCZ**t` is `diag(1, 1, 1, 1, 1, 1, 1, exp(i pi t))`.
"""
def _eigen_components(self):
return [
(0, np.diag([1, 1, 1, 1, 1, 1, 1, 0])),
(1, np.diag([0, 0, 0, 0, 0, 0, 0, 1])),
]
def _trace_distance_bound_(self) -> Optional[float]:
if self._is_parameterized_():
return None
return abs(np.sin(self._exponent * 0.5 * np.pi))
def _pauli_expansion_(self) -> value.LinearDict[str]:
if protocols.is_parameterized(self):
return NotImplemented
global_phase = 1j ** (2 * self._exponent * self._global_shift)
z_phase = 1j ** self._exponent
c = -1j * z_phase * np.sin(np.pi * self._exponent / 2) / 4
return value.LinearDict(
{
'III': global_phase * (1 - c),
'IIZ': global_phase * c,
'IZI': global_phase * c,
'ZII': global_phase * c,
'ZZI': global_phase * -c,
'ZIZ': global_phase * -c,
'IZZ': global_phase * -c,
'ZZZ': global_phase * c,
}
)
def _decompose_(self, qubits):
"""An adjacency-respecting decomposition.
0: ───p───@──────────────@───────@──────────@──────────
│ │ │ │
1: ───p───X───@───p^-1───X───@───X──────@───X──────@───
│ │ │ │
2: ───p───────X───p──────────X───p^-1───X───p^-1───X───
where p = T**self._exponent
"""
if protocols.is_parameterized(self):
return NotImplemented
a, b, c = qubits
# Hacky magic: avoid the non-adjacent edge.
if hasattr(b, 'is_adjacent'):
if not b.is_adjacent(a):
b, c = c, b
elif not b.is_adjacent(c):
a, b = b, a
p = common_gates.T ** self._exponent
sweep_abc = [common_gates.CNOT(a, b), common_gates.CNOT(b, c)]
return [
p(a),
p(b),
p(c),
sweep_abc,
p(b) ** -1,
p(c),
sweep_abc,
p(c) ** -1,
sweep_abc,
p(c) ** -1,
sweep_abc,
]
def _apply_unitary_(self, args: 'protocols.ApplyUnitaryArgs') -> np.ndarray:
if protocols.is_parameterized(self):
return NotImplemented
ooo = args.subspace_index(0b111)
args.target_tensor[ooo] *= np.exp(1j * self.exponent * np.pi)
p = 1j ** (2 * self._exponent * self._global_shift)
if p != 1:
args.target_tensor *= p
return args.target_tensor
def _circuit_diagram_info_(
self, args: 'cirq.CircuitDiagramInfoArgs'
) -> 'cirq.CircuitDiagramInfo':
return protocols.CircuitDiagramInfo(('@', '@', '@'), exponent=self._diagram_exponent(args))
def _qasm_(self, args: 'cirq.QasmArgs', qubits: Tuple['cirq.Qid', ...]) -> Optional[str]:
if self._exponent != 1:
return None
args.validate_version('2.0')
lines = [
args.format('h {0};\n', qubits[2]),
args.format('ccx {0},{1},{2};\n', qubits[0], qubits[1], qubits[2]),
args.format('h {0};\n', qubits[2]),
]
return ''.join(lines)
def _quil_(
self, qubits: Tuple['cirq.Qid', ...], formatter: 'cirq.QuilFormatter'
) -> Optional[str]:
if self._exponent != 1:
return None
lines = [
formatter.format('H {0}\n', qubits[2]),
formatter.format('CCNOT {0} {1} {2}\n', qubits[0], qubits[1], qubits[2]),
formatter.format('H {0}\n', qubits[2]),
]
return ''.join(lines)
def __repr__(self) -> str:
if self._global_shift == 0:
if self._exponent == 1:
return 'cirq.CCZ'
return '(cirq.CCZ**{})'.format(proper_repr(self._exponent))
return 'cirq.CCZPowGate(exponent={}, global_shift={!r})'.format(
proper_repr(self._exponent), self._global_shift
)
def __str__(self) -> str:
if self._exponent == 1:
return 'CCZ'
return 'CCZ**{}'.format(self._exponent)
@value.value_equality()
class ThreeQubitDiagonalGate(gate_features.ThreeQubitGate):
"""A gate given by a diagonal 8x8 matrix."""
def __init__(self, diag_angles_radians: List[value.TParamVal]) -> None:
r"""A three qubit gate with only diagonal elements.
This gate's off-diagonal elements are zero and it's on diagonal
elements are all phases.
Args:
diag_angles_radians: The list of angles on the diagonal in radians.
If these values are $(x_0, x_1, \ldots , x_7)$ then the unitary
has diagonal values $(e^{i x_0}, e^{i x_1}, \ldots, e^{i x_7})$.
"""
self._diag_angles_radians: List[value.TParamVal] = diag_angles_radians
def _is_parameterized_(self) -> bool:
return any(protocols.is_parameterized(angle) for angle in self._diag_angles_radians)
def _parameter_names_(self) -> AbstractSet[str]:
return {
name for angle in self._diag_angles_radians for name in protocols.parameter_names(angle)
}
def _resolve_parameters_(
self, resolver: 'cirq.ParamResolverOrSimilarType'
) -> 'ThreeQubitDiagonalGate':
return self.__class__(
[protocols.resolve_parameters(angle, resolver) for angle in self._diag_angles_radians]
)
def _has_unitary_(self) -> bool:
return not self._is_parameterized_()
def _unitary_(self) -> np.ndarray:
if self._is_parameterized_():
return NotImplemented
return np.diag([np.exp(1j * angle) for angle in self._diag_angles_radians])
def _apply_unitary_(self, args: 'protocols.ApplyUnitaryArgs') -> np.ndarray:
if self._is_parameterized_():
return NotImplemented
for index, angle in enumerate(self._diag_angles_radians):
little_endian_index = 4 * (index & 1) + 2 * ((index >> 1) & 1) + ((index >> 2) & 1)
subspace_index = args.subspace_index(little_endian_index)
args.target_tensor[subspace_index] *= np.exp(1j * angle)
return args.target_tensor
def _circuit_diagram_info_(
self, args: 'cirq.CircuitDiagramInfoArgs'
) -> 'cirq.CircuitDiagramInfo':
rounded_angles = np.array(self._diag_angles_radians)
if args.precision is not None:
rounded_angles = rounded_angles.round(args.precision)
diag_str = 'diag({})'.format(', '.join(proper_repr(angle) for angle in rounded_angles))
return protocols.CircuitDiagramInfo((diag_str, '#2', '#3'))
def __pow__(self, exponent: Any) -> 'ThreeQubitDiagonalGate':
if not isinstance(exponent, (int, float, sympy.Basic)):
return NotImplemented
return ThreeQubitDiagonalGate(
[protocols.mul(angle, exponent, NotImplemented) for angle in self._diag_angles_radians]
)
def _decompose_(self, qubits):
"""An adjacency-respecting decomposition.
0: ───p_0───@──────────────@───────@──────────@──────────
│ │ │ │
1: ───p_1───X───@───p_3────X───@───X──────@───X──────@───
│ │ │ │
2: ───p_2───────X───p_4────────X───p_5────X───p_6────X───
where p_i = T**(4*x_i) and x_i solve the system of equations
[0, 0, 1, 0, 1, 1, 1][x_0] [r_1]
[0, 1, 0, 1, 1, 0, 1][x_1] [r_2]
[0, 1, 1, 1, 0, 1, 0][x_2] [r_3]
[1, 0, 0, 1, 1, 1, 0][x_3] = [r_4]
[1, 0, 1, 1, 0, 0, 1][x_4] [r_5]
[1, 1, 0, 0, 0, 1, 1][x_5] [r_6]
[1, 1, 1, 0, 1, 0, 0][x_6] [r_7]
where r_i is self._diag_angles_radians[i].
The above system was created by equating the composition of the gates
in the circuit diagram to np.diag(self._diag_angles) (shifted by a
global phase of np.exp(-1j * self._diag_angles[0])).
"""
a, b, c = qubits
if hasattr(b, 'is_adjacent'):
if not b.is_adjacent(a):
b, c = c, b
elif not b.is_adjacent(c):
a, b = b, a
sweep_abc = [common_gates.CNOT(a, b), common_gates.CNOT(b, c)]
phase_matrix_inverse = 0.25 * np.array(
[
[-1, -1, -1, 1, 1, 1, 1],
[-1, 1, 1, -1, -1, 1, 1],
[1, -1, 1, -1, 1, -1, 1],
[-1, 1, 1, 1, 1, -1, -1],
[1, 1, -1, 1, -1, -1, 1],
[1, -1, 1, 1, -1, 1, -1],
[1, 1, -1, -1, 1, 1, -1],
]
)
shifted_angles_tail = [
angle - self._diag_angles_radians[0] for angle in self._diag_angles_radians[1:]
]
phase_solutions = phase_matrix_inverse.dot(shifted_angles_tail)
p_gates = [pauli_gates.Z ** (solution / np.pi) for solution in phase_solutions]
return [
p_gates[0](a),
p_gates[1](b),
p_gates[2](c),
sweep_abc,
p_gates[3](b),
p_gates[4](c),
sweep_abc,
p_gates[5](c),
sweep_abc,
p_gates[6](c),
sweep_abc,
]
def _value_equality_values_(self):
return tuple(self._diag_angles_radians)
def _pauli_expansion_(self) -> value.LinearDict[str]:
if protocols.is_parameterized(self):
return NotImplemented
x = [np.exp(1j * angle) for angle in self._diag_angles_radians]
return value.LinearDict(
{
'III': (x[0] + x[1] + x[2] + x[3] + x[4] + x[5] + x[6] + x[7]) / 8,
'IIZ': (x[0] - x[1] + x[2] - x[3] + x[4] - x[5] + x[6] - x[7]) / 8,
'IZI': (x[0] + x[1] - x[2] - x[3] + x[4] + x[5] - x[6] - x[7]) / 8,
'IZZ': (x[0] - x[1] - x[2] + x[3] + x[4] - x[5] - x[6] + x[7]) / 8,
'ZII': (x[0] + x[1] + x[2] + x[3] - x[4] - x[5] - x[6] - x[7]) / 8,
'ZIZ': (x[0] - x[1] + x[2] - x[3] - x[4] + x[5] - x[6] + x[7]) / 8,
'ZZI': (x[0] + x[1] - x[2] - x[3] - x[4] - x[5] + x[6] + x[7]) / 8,
'ZZZ': (x[0] - x[1] - x[2] + x[3] - x[4] + x[5] + x[6] - x[7]) / 8,
}
)
def __repr__(self) -> str:
return 'cirq.ThreeQubitDiagonalGate([{}])'.format(
','.join(proper_repr(angle) for angle in self._diag_angles_radians)
)
class CCXPowGate(
eigen_gate.EigenGate, gate_features.ThreeQubitGate, gate_features.InterchangeableQubitsGate
):
"""A Toffoli (doubly-controlled-NOT) that can be raised to a power.
The matrix of `CCX**t` is an 8x8 identity except the bottom right 2x2 area
is the matrix of `X**t`.
"""
def _eigen_components(self):
return [
(0, linalg.block_diag(np.diag([1, 1, 1, 1, 1, 1]), np.array([[0.5, 0.5], [0.5, 0.5]]))),
(
1,
linalg.block_diag(
np.diag([0, 0, 0, 0, 0, 0]), np.array([[0.5, -0.5], [-0.5, 0.5]])
),
),
]
def _trace_distance_bound_(self) -> Optional[float]:
if self._is_parameterized_():
return None
return abs(np.sin(self._exponent * 0.5 * np.pi))
def _pauli_expansion_(self) -> value.LinearDict[str]:
if protocols.is_parameterized(self):
return NotImplemented
global_phase = 1j ** (2 * self._exponent * self._global_shift)
z_phase = 1j ** self._exponent
c = -1j * z_phase * np.sin(np.pi * self._exponent / 2) / 4
return value.LinearDict(
{
'III': global_phase * (1 - c),
'IIX': global_phase * c,
'IZI': global_phase * c,
'ZII': global_phase * c,
'ZZI': global_phase * -c,
'ZIX': global_phase * -c,
'IZX': global_phase * -c,
'ZZX': global_phase * c,
}
)
def qubit_index_to_equivalence_group_key(self, index):
return index < 2
def _apply_unitary_(self, args: 'protocols.ApplyUnitaryArgs') -> np.ndarray:
if protocols.is_parameterized(self):
return NotImplemented
p = 1j ** (2 * self._exponent * self._global_shift)
if p != 1:
args.target_tensor *= p
return protocols.apply_unitary(
controlled_gate.ControlledGate(
controlled_gate.ControlledGate(pauli_gates.X ** self.exponent)
),
protocols.ApplyUnitaryArgs(args.target_tensor, args.available_buffer, args.axes),
default=NotImplemented,
)
def _decompose_(self, qubits):
c1, c2, t = qubits
yield common_gates.H(t)
yield CCZ(c1, c2, t) ** self._exponent
yield common_gates.H(t)
def _circuit_diagram_info_(
self, args: 'cirq.CircuitDiagramInfoArgs'
) -> 'cirq.CircuitDiagramInfo':
return protocols.CircuitDiagramInfo(('@', '@', 'X'), exponent=self._diagram_exponent(args))
def _qasm_(self, args: 'cirq.QasmArgs', qubits: Tuple['cirq.Qid', ...]) -> Optional[str]:
if self._exponent != 1:
return None
args.validate_version('2.0')
return args.format('ccx {0},{1},{2};\n', qubits[0], qubits[1], qubits[2])
def _quil_(
self, qubits: Tuple['cirq.Qid', ...], formatter: 'cirq.QuilFormatter'
) -> Optional[str]:
if self._exponent != 1:
return None
return formatter.format('CCNOT {0} {1} {2}\n', qubits[0], qubits[1], qubits[2])
def __repr__(self) -> str:
if self._global_shift == 0:
if self._exponent == 1:
return 'cirq.TOFFOLI'
return '(cirq.TOFFOLI**{})'.format(proper_repr(self._exponent))
return 'cirq.CCXPowGate(exponent={}, global_shift={!r})'.format(
proper_repr(self._exponent), self._global_shift
)
def __str__(self) -> str:
if self._exponent == 1:
return 'TOFFOLI'
return 'TOFFOLI**{}'.format(self._exponent)
@value.value_equality()
class CSwapGate(gate_features.ThreeQubitGate, gate_features.InterchangeableQubitsGate):
"""A controlled swap gate. The Fredkin gate."""
def qubit_index_to_equivalence_group_key(self, index):
return 0 if index == 0 else 1
def _pauli_expansion_(self) -> value.LinearDict[str]:
return value.LinearDict(
{
'III': 3 / 4,
'IXX': 1 / 4,
'IYY': 1 / 4,
'IZZ': 1 / 4,
'ZII': 1 / 4,
'ZXX': -1 / 4,
'ZYY': -1 / 4,
'ZZZ': -1 / 4,
}
)
def _trace_distance_bound_(self) -> float:
return 1.0
def _decompose_(self, qubits):
c, t1, t2 = qubits
# Hacky magic: special case based on adjacency.
if hasattr(t1, 'is_adjacent'):
if not t1.is_adjacent(t2):
# Targets separated by control.
return self._decompose_inside_control(t1, c, t2)
if not t1.is_adjacent(c):
# Control separated from t1 by t2.
return self._decompose_outside_control(c, t2, t1)
return self._decompose_outside_control(c, t1, t2)
def _decompose_inside_control(
self, target1: 'cirq.Qid', control: 'cirq.Qid', target2: 'cirq.Qid'
) -> 'cirq.OP_TREE':
"""A decomposition assuming the control separates the targets.
target1: ─@─X───────T──────@────────@─────────X───@─────X^-0.5─
│ │ │ │ │ │
control: ─X─@─X─────@─T^-1─X─@─T────X─@─X^0.5─@─@─X─@──────────
│ │ │ │ │ │
target2: ─────@─H─T─X─T──────X─T^-1───X─T^-1────X───X─H─S^-1───
"""
a, b, c = target1, control, target2
yield common_gates.CNOT(a, b)
yield common_gates.CNOT(b, a)
yield common_gates.CNOT(c, b)
yield common_gates.H(c)
yield common_gates.T(c)
yield common_gates.CNOT(b, c)
yield common_gates.T(a)
yield common_gates.T(b) ** -1
yield common_gates.T(c)
yield common_gates.CNOT(a, b)
yield common_gates.CNOT(b, c)
yield common_gates.T(b)
yield common_gates.T(c) ** -1
yield common_gates.CNOT(a, b)
yield common_gates.CNOT(b, c)
yield pauli_gates.X(b) ** 0.5
yield common_gates.T(c) ** -1
yield common_gates.CNOT(b, a)
yield common_gates.CNOT(b, c)
yield common_gates.CNOT(a, b)
yield common_gates.CNOT(b, c)
yield common_gates.H(c)
yield common_gates.S(c) ** -1
yield pauli_gates.X(a) ** -0.5
def _apply_unitary_(self, args: 'protocols.ApplyUnitaryArgs') -> np.ndarray:
return protocols.apply_unitary(
controlled_gate.ControlledGate(swap_gates.SWAP),
protocols.ApplyUnitaryArgs(args.target_tensor, args.available_buffer, args.axes),
default=NotImplemented,
)
def _decompose_outside_control(
self, control: 'cirq.Qid', near_target: 'cirq.Qid', far_target: 'cirq.Qid'
) -> 'cirq.OP_TREE':
"""A decomposition assuming one of the targets is in the middle.
control: ───T──────@────────@───@────────────@────────────────
│ │ │ │
near: ─X─T──────X─@─T^-1─X─@─X────@─X^0.5─X─@─X^0.5────────
│ │ │ │ │
far: ─@─Y^-0.5─T─X─T──────X─T^-1─X─T^-1────X─S─────X^-0.5─
"""
a, b, c = control, near_target, far_target
t = common_gates.T
sweep_abc = [common_gates.CNOT(a, b), common_gates.CNOT(b, c)]
yield common_gates.CNOT(c, b)
yield pauli_gates.Y(c) ** -0.5
yield t(a), t(b), t(c)
yield sweep_abc
yield t(b) ** -1, t(c)
yield sweep_abc
yield t(c) ** -1
yield sweep_abc
yield t(c) ** -1
yield pauli_gates.X(b) ** 0.5
yield sweep_abc
yield common_gates.S(c)
yield pauli_gates.X(b) ** 0.5
yield pauli_gates.X(c) ** -0.5
def _has_unitary_(self) -> bool:
return True
def _unitary_(self) -> np.ndarray:
return linalg.block_diag(np.diag([1, 1, 1, 1, 1]), np.array([[0, 1], [1, 0]]), np.diag([1]))
def _circuit_diagram_info_(
self, args: 'cirq.CircuitDiagramInfoArgs'
) -> 'cirq.CircuitDiagramInfo':
if not args.use_unicode_characters:
return protocols.CircuitDiagramInfo(('@', 'swap', 'swap'))
return protocols.CircuitDiagramInfo(('@', '×', '×'))
def _qasm_(self, args: 'cirq.QasmArgs', qubits: Tuple['cirq.Qid', ...]) -> Optional[str]:
args.validate_version('2.0')
return args.format('cswap {0},{1},{2};\n', qubits[0], qubits[1], qubits[2])
def _quil_(
self, qubits: Tuple['cirq.Qid', ...], formatter: 'cirq.QuilFormatter'
) -> Optional[str]:
return formatter.format('CSWAP {0} {1} {2}\n', qubits[0], qubits[1], qubits[2])
def _value_equality_values_(self):
return ()
def __str__(self) -> str:
return 'FREDKIN'
def __repr__(self) -> str:
return 'cirq.FREDKIN'
CCZ = CCZPowGate()
document(
CCZ,
"""The Controlled-Controlled-Z gate.
The `exponent=1` instance of `cirq.CCZPowGate`.
Matrix:
```
[[1 . . . . . . .],
[. 1 . . . . . .],
[. . 1 . . . . .],
[. . . 1 . . . .],
[. . . . 1 . . .],
[. . . . . 1 . .],
[. . . . . . 1 .],
[. . . . . . . -1]]
```
""",
)
CCNotPowGate = CCXPowGate
CCX = TOFFOLI = CCNOT = CCXPowGate()
document(
CCX,
"""The TOFFOLI gate.
The `exponent=1` instance of `cirq.CCXPowGate`.
Matrix:
```
[[1 . . . . . . .],
[. 1 . . . . . .],
[. . 1 . . . . .],
[. . . 1 . . . .],
[. . . . 1 . . .],
[. . . . . 1 . .],
[. . . . . . . 1],
[. . . . . . 1 .]]
```
""",
)
CSWAP = FREDKIN = CSwapGate()
document(
CSWAP,
"""The Controlled Swap gate.
An instance of `cirq.CSwapGate`.
Matrix:
```
[[1 . . . . . . .],
[. 1 . . . . . .],
[. . 1 . . . . .],
[. . . 1 . . . .],
[. . . . 1 . . .],
[. . . . . . 1 .],
[. . . . . 1 . .],
[. . . . . . . 1]]
```
""",
)
| 34.864353 | 100 | 0.513346 |
from typing import AbstractSet, Any, List, Optional, Tuple, TYPE_CHECKING
import numpy as np
import sympy
from cirq import linalg, protocols, value
from cirq._compat import proper_repr
from cirq._doc import document
from cirq.ops import (
common_gates,
controlled_gate,
eigen_gate,
gate_features,
pauli_gates,
swap_gates,
)
if TYPE_CHECKING:
import cirq
class CCZPowGate(
eigen_gate.EigenGate, gate_features.ThreeQubitGate, gate_features.InterchangeableQubitsGate
):
def _eigen_components(self):
return [
(0, np.diag([1, 1, 1, 1, 1, 1, 1, 0])),
(1, np.diag([0, 0, 0, 0, 0, 0, 0, 1])),
]
def _trace_distance_bound_(self) -> Optional[float]:
if self._is_parameterized_():
return None
return abs(np.sin(self._exponent * 0.5 * np.pi))
def _pauli_expansion_(self) -> value.LinearDict[str]:
if protocols.is_parameterized(self):
return NotImplemented
global_phase = 1j ** (2 * self._exponent * self._global_shift)
z_phase = 1j ** self._exponent
c = -1j * z_phase * np.sin(np.pi * self._exponent / 2) / 4
return value.LinearDict(
{
'III': global_phase * (1 - c),
'IIZ': global_phase * c,
'IZI': global_phase * c,
'ZII': global_phase * c,
'ZZI': global_phase * -c,
'ZIZ': global_phase * -c,
'IZZ': global_phase * -c,
'ZZZ': global_phase * c,
}
)
def _decompose_(self, qubits):
if protocols.is_parameterized(self):
return NotImplemented
a, b, c = qubits
if hasattr(b, 'is_adjacent'):
if not b.is_adjacent(a):
b, c = c, b
elif not b.is_adjacent(c):
a, b = b, a
p = common_gates.T ** self._exponent
sweep_abc = [common_gates.CNOT(a, b), common_gates.CNOT(b, c)]
return [
p(a),
p(b),
p(c),
sweep_abc,
p(b) ** -1,
p(c),
sweep_abc,
p(c) ** -1,
sweep_abc,
p(c) ** -1,
sweep_abc,
]
def _apply_unitary_(self, args: 'protocols.ApplyUnitaryArgs') -> np.ndarray:
if protocols.is_parameterized(self):
return NotImplemented
ooo = args.subspace_index(0b111)
args.target_tensor[ooo] *= np.exp(1j * self.exponent * np.pi)
p = 1j ** (2 * self._exponent * self._global_shift)
if p != 1:
args.target_tensor *= p
return args.target_tensor
def _circuit_diagram_info_(
self, args: 'cirq.CircuitDiagramInfoArgs'
) -> 'cirq.CircuitDiagramInfo':
return protocols.CircuitDiagramInfo(('@', '@', '@'), exponent=self._diagram_exponent(args))
def _qasm_(self, args: 'cirq.QasmArgs', qubits: Tuple['cirq.Qid', ...]) -> Optional[str]:
if self._exponent != 1:
return None
args.validate_version('2.0')
lines = [
args.format('h {0};\n', qubits[2]),
args.format('ccx {0},{1},{2};\n', qubits[0], qubits[1], qubits[2]),
args.format('h {0};\n', qubits[2]),
]
return ''.join(lines)
def _quil_(
self, qubits: Tuple['cirq.Qid', ...], formatter: 'cirq.QuilFormatter'
) -> Optional[str]:
if self._exponent != 1:
return None
lines = [
formatter.format('H {0}\n', qubits[2]),
formatter.format('CCNOT {0} {1} {2}\n', qubits[0], qubits[1], qubits[2]),
formatter.format('H {0}\n', qubits[2]),
]
return ''.join(lines)
def __repr__(self) -> str:
if self._global_shift == 0:
if self._exponent == 1:
return 'cirq.CCZ'
return '(cirq.CCZ**{})'.format(proper_repr(self._exponent))
return 'cirq.CCZPowGate(exponent={}, global_shift={!r})'.format(
proper_repr(self._exponent), self._global_shift
)
def __str__(self) -> str:
if self._exponent == 1:
return 'CCZ'
return 'CCZ**{}'.format(self._exponent)
@value.value_equality()
class ThreeQubitDiagonalGate(gate_features.ThreeQubitGate):
def __init__(self, diag_angles_radians: List[value.TParamVal]) -> None:
self._diag_angles_radians: List[value.TParamVal] = diag_angles_radians
def _is_parameterized_(self) -> bool:
return any(protocols.is_parameterized(angle) for angle in self._diag_angles_radians)
def _parameter_names_(self) -> AbstractSet[str]:
return {
name for angle in self._diag_angles_radians for name in protocols.parameter_names(angle)
}
def _resolve_parameters_(
self, resolver: 'cirq.ParamResolverOrSimilarType'
) -> 'ThreeQubitDiagonalGate':
return self.__class__(
[protocols.resolve_parameters(angle, resolver) for angle in self._diag_angles_radians]
)
def _has_unitary_(self) -> bool:
return not self._is_parameterized_()
def _unitary_(self) -> np.ndarray:
if self._is_parameterized_():
return NotImplemented
return np.diag([np.exp(1j * angle) for angle in self._diag_angles_radians])
def _apply_unitary_(self, args: 'protocols.ApplyUnitaryArgs') -> np.ndarray:
if self._is_parameterized_():
return NotImplemented
for index, angle in enumerate(self._diag_angles_radians):
little_endian_index = 4 * (index & 1) + 2 * ((index >> 1) & 1) + ((index >> 2) & 1)
subspace_index = args.subspace_index(little_endian_index)
args.target_tensor[subspace_index] *= np.exp(1j * angle)
return args.target_tensor
def _circuit_diagram_info_(
self, args: 'cirq.CircuitDiagramInfoArgs'
) -> 'cirq.CircuitDiagramInfo':
rounded_angles = np.array(self._diag_angles_radians)
if args.precision is not None:
rounded_angles = rounded_angles.round(args.precision)
diag_str = 'diag({})'.format(', '.join(proper_repr(angle) for angle in rounded_angles))
return protocols.CircuitDiagramInfo((diag_str, '#2', '#3'))
def __pow__(self, exponent: Any) -> 'ThreeQubitDiagonalGate':
if not isinstance(exponent, (int, float, sympy.Basic)):
return NotImplemented
return ThreeQubitDiagonalGate(
[protocols.mul(angle, exponent, NotImplemented) for angle in self._diag_angles_radians]
)
def _decompose_(self, qubits):
a, b, c = qubits
if hasattr(b, 'is_adjacent'):
if not b.is_adjacent(a):
b, c = c, b
elif not b.is_adjacent(c):
a, b = b, a
sweep_abc = [common_gates.CNOT(a, b), common_gates.CNOT(b, c)]
phase_matrix_inverse = 0.25 * np.array(
[
[-1, -1, -1, 1, 1, 1, 1],
[-1, 1, 1, -1, -1, 1, 1],
[1, -1, 1, -1, 1, -1, 1],
[-1, 1, 1, 1, 1, -1, -1],
[1, 1, -1, 1, -1, -1, 1],
[1, -1, 1, 1, -1, 1, -1],
[1, 1, -1, -1, 1, 1, -1],
]
)
shifted_angles_tail = [
angle - self._diag_angles_radians[0] for angle in self._diag_angles_radians[1:]
]
phase_solutions = phase_matrix_inverse.dot(shifted_angles_tail)
p_gates = [pauli_gates.Z ** (solution / np.pi) for solution in phase_solutions]
return [
p_gates[0](a),
p_gates[1](b),
p_gates[2](c),
sweep_abc,
p_gates[3](b),
p_gates[4](c),
sweep_abc,
p_gates[5](c),
sweep_abc,
p_gates[6](c),
sweep_abc,
]
def _value_equality_values_(self):
return tuple(self._diag_angles_radians)
def _pauli_expansion_(self) -> value.LinearDict[str]:
if protocols.is_parameterized(self):
return NotImplemented
x = [np.exp(1j * angle) for angle in self._diag_angles_radians]
return value.LinearDict(
{
'III': (x[0] + x[1] + x[2] + x[3] + x[4] + x[5] + x[6] + x[7]) / 8,
'IIZ': (x[0] - x[1] + x[2] - x[3] + x[4] - x[5] + x[6] - x[7]) / 8,
'IZI': (x[0] + x[1] - x[2] - x[3] + x[4] + x[5] - x[6] - x[7]) / 8,
'IZZ': (x[0] - x[1] - x[2] + x[3] + x[4] - x[5] - x[6] + x[7]) / 8,
'ZII': (x[0] + x[1] + x[2] + x[3] - x[4] - x[5] - x[6] - x[7]) / 8,
'ZIZ': (x[0] - x[1] + x[2] - x[3] - x[4] + x[5] - x[6] + x[7]) / 8,
'ZZI': (x[0] + x[1] - x[2] - x[3] - x[4] - x[5] + x[6] + x[7]) / 8,
'ZZZ': (x[0] - x[1] - x[2] + x[3] - x[4] + x[5] + x[6] - x[7]) / 8,
}
)
def __repr__(self) -> str:
return 'cirq.ThreeQubitDiagonalGate([{}])'.format(
','.join(proper_repr(angle) for angle in self._diag_angles_radians)
)
class CCXPowGate(
eigen_gate.EigenGate, gate_features.ThreeQubitGate, gate_features.InterchangeableQubitsGate
):
def _eigen_components(self):
return [
(0, linalg.block_diag(np.diag([1, 1, 1, 1, 1, 1]), np.array([[0.5, 0.5], [0.5, 0.5]]))),
(
1,
linalg.block_diag(
np.diag([0, 0, 0, 0, 0, 0]), np.array([[0.5, -0.5], [-0.5, 0.5]])
),
),
]
def _trace_distance_bound_(self) -> Optional[float]:
if self._is_parameterized_():
return None
return abs(np.sin(self._exponent * 0.5 * np.pi))
def _pauli_expansion_(self) -> value.LinearDict[str]:
if protocols.is_parameterized(self):
return NotImplemented
global_phase = 1j ** (2 * self._exponent * self._global_shift)
z_phase = 1j ** self._exponent
c = -1j * z_phase * np.sin(np.pi * self._exponent / 2) / 4
return value.LinearDict(
{
'III': global_phase * (1 - c),
'IIX': global_phase * c,
'IZI': global_phase * c,
'ZII': global_phase * c,
'ZZI': global_phase * -c,
'ZIX': global_phase * -c,
'IZX': global_phase * -c,
'ZZX': global_phase * c,
}
)
def qubit_index_to_equivalence_group_key(self, index):
return index < 2
def _apply_unitary_(self, args: 'protocols.ApplyUnitaryArgs') -> np.ndarray:
if protocols.is_parameterized(self):
return NotImplemented
p = 1j ** (2 * self._exponent * self._global_shift)
if p != 1:
args.target_tensor *= p
return protocols.apply_unitary(
controlled_gate.ControlledGate(
controlled_gate.ControlledGate(pauli_gates.X ** self.exponent)
),
protocols.ApplyUnitaryArgs(args.target_tensor, args.available_buffer, args.axes),
default=NotImplemented,
)
def _decompose_(self, qubits):
c1, c2, t = qubits
yield common_gates.H(t)
yield CCZ(c1, c2, t) ** self._exponent
yield common_gates.H(t)
def _circuit_diagram_info_(
self, args: 'cirq.CircuitDiagramInfoArgs'
) -> 'cirq.CircuitDiagramInfo':
return protocols.CircuitDiagramInfo(('@', '@', 'X'), exponent=self._diagram_exponent(args))
def _qasm_(self, args: 'cirq.QasmArgs', qubits: Tuple['cirq.Qid', ...]) -> Optional[str]:
if self._exponent != 1:
return None
args.validate_version('2.0')
return args.format('ccx {0},{1},{2};\n', qubits[0], qubits[1], qubits[2])
def _quil_(
self, qubits: Tuple['cirq.Qid', ...], formatter: 'cirq.QuilFormatter'
) -> Optional[str]:
if self._exponent != 1:
return None
return formatter.format('CCNOT {0} {1} {2}\n', qubits[0], qubits[1], qubits[2])
def __repr__(self) -> str:
if self._global_shift == 0:
if self._exponent == 1:
return 'cirq.TOFFOLI'
return '(cirq.TOFFOLI**{})'.format(proper_repr(self._exponent))
return 'cirq.CCXPowGate(exponent={}, global_shift={!r})'.format(
proper_repr(self._exponent), self._global_shift
)
def __str__(self) -> str:
if self._exponent == 1:
return 'TOFFOLI'
return 'TOFFOLI**{}'.format(self._exponent)
@value.value_equality()
class CSwapGate(gate_features.ThreeQubitGate, gate_features.InterchangeableQubitsGate):
    """A controlled swap gate (the Fredkin gate).
    Swaps the last two qubits when the first (control) qubit is on; see the
    block-diagonal matrix in `_unitary_` below.
    """
    def qubit_index_to_equivalence_group_key(self, index):
        # The two swap targets (indices 1 and 2) are interchangeable;
        # the control (index 0) is in a group of its own.
        return 0 if index == 0 else 1
    def _pauli_expansion_(self) -> value.LinearDict[str]:
        """Expansion of the gate's unitary in the three-qubit Pauli basis."""
        return value.LinearDict(
            {
                'III': 3 / 4,
                'IXX': 1 / 4,
                'IYY': 1 / 4,
                'IZZ': 1 / 4,
                'ZII': 1 / 4,
                'ZXX': -1 / 4,
                'ZYY': -1 / 4,
                'ZZZ': -1 / 4,
            }
        )
    def _trace_distance_bound_(self) -> float:
        return 1.0
    def _decompose_(self, qubits):
        # Choose a decomposition based on physical adjacency when the qubit
        # type exposes `is_adjacent` (e.g. line qubits).
        c, t1, t2 = qubits
        if hasattr(t1, 'is_adjacent'):
            if not t1.is_adjacent(t2):
                # Targets not adjacent to each other: use the variant with
                # the control sitting between the two targets.
                return self._decompose_inside_control(t1, c, t2)
            if not t1.is_adjacent(c):
                return self._decompose_outside_control(c, t2, t1)
        return self._decompose_outside_control(c, t1, t2)
    def _decompose_inside_control(
        self, target1: 'cirq.Qid', control: 'cirq.Qid', target2: 'cirq.Qid'
    ) -> 'cirq.OP_TREE':
        """CNOT/T-gate decomposition used when the control lies between the
        two swap targets (see `_decompose_`). Gate order is significant."""
        a, b, c = target1, control, target2
        yield common_gates.CNOT(a, b)
        yield common_gates.CNOT(b, a)
        yield common_gates.CNOT(c, b)
        yield common_gates.H(c)
        yield common_gates.T(c)
        yield common_gates.CNOT(b, c)
        yield common_gates.T(a)
        yield common_gates.T(b) ** -1
        yield common_gates.T(c)
        yield common_gates.CNOT(a, b)
        yield common_gates.CNOT(b, c)
        yield common_gates.T(b)
        yield common_gates.T(c) ** -1
        yield common_gates.CNOT(a, b)
        yield common_gates.CNOT(b, c)
        yield pauli_gates.X(b) ** 0.5
        yield common_gates.T(c) ** -1
        yield common_gates.CNOT(b, a)
        yield common_gates.CNOT(b, c)
        yield common_gates.CNOT(a, b)
        yield common_gates.CNOT(b, c)
        yield common_gates.H(c)
        yield common_gates.S(c) ** -1
        yield pauli_gates.X(a) ** -0.5
    def _apply_unitary_(self, args: 'protocols.ApplyUnitaryArgs') -> np.ndarray:
        # Delegate to the generic controlled-gate machinery wrapping SWAP.
        return protocols.apply_unitary(
            controlled_gate.ControlledGate(swap_gates.SWAP),
            protocols.ApplyUnitaryArgs(args.target_tensor, args.available_buffer, args.axes),
            default=NotImplemented,
        )
    def _decompose_outside_control(
        self, control: 'cirq.Qid', near_target: 'cirq.Qid', far_target: 'cirq.Qid'
    ) -> 'cirq.OP_TREE':
        """CNOT/T-gate decomposition used when the control is not between the
        two swap targets (see `_decompose_`). Gate order is significant."""
        a, b, c = control, near_target, far_target
        t = common_gates.T
        sweep_abc = [common_gates.CNOT(a, b), common_gates.CNOT(b, c)]
        yield common_gates.CNOT(c, b)
        yield pauli_gates.Y(c) ** -0.5
        yield t(a), t(b), t(c)
        yield sweep_abc
        yield t(b) ** -1, t(c)
        yield sweep_abc
        yield t(c) ** -1
        yield sweep_abc
        yield t(c) ** -1
        yield pauli_gates.X(b) ** 0.5
        yield sweep_abc
        yield common_gates.S(c)
        yield pauli_gates.X(b) ** 0.5
        yield pauli_gates.X(c) ** -0.5
    def _has_unitary_(self) -> bool:
        return True
    def _unitary_(self) -> np.ndarray:
        # 8x8 identity except that basis states |101> and |110>
        # (indices 5 and 6) are swapped.
        return linalg.block_diag(np.diag([1, 1, 1, 1, 1]), np.array([[0, 1], [1, 0]]), np.diag([1]))
    def _circuit_diagram_info_(
        self, args: 'cirq.CircuitDiagramInfoArgs'
    ) -> 'cirq.CircuitDiagramInfo':
        # ASCII fallback spells out 'swap'; unicode uses the x-cross symbol.
        if not args.use_unicode_characters:
            return protocols.CircuitDiagramInfo(('@', 'swap', 'swap'))
        return protocols.CircuitDiagramInfo(('@', '×', '×'))
    def _qasm_(self, args: 'cirq.QasmArgs', qubits: Tuple['cirq.Qid', ...]) -> Optional[str]:
        """QASM 2.0 output as a `cswap` instruction."""
        args.validate_version('2.0')
        return args.format('cswap {0},{1},{2};\n', qubits[0], qubits[1], qubits[2])
    def _quil_(
        self, qubits: Tuple['cirq.Qid', ...], formatter: 'cirq.QuilFormatter'
    ) -> Optional[str]:
        """Quil output as a `CSWAP` instruction."""
        return formatter.format('CSWAP {0} {1} {2}\n', qubits[0], qubits[1], qubits[2])
    def _value_equality_values_(self):
        # The gate has no parameters, so all instances compare equal.
        return ()
    def __str__(self) -> str:
        return 'FREDKIN'
    def __repr__(self) -> str:
        return 'cirq.FREDKIN'
CCZ = CCZPowGate()
document(
CCZ,
"""The Controlled-Controlled-Z gate.
The `exponent=1` instance of `cirq.CCZPowGate`.
Matrix:
```
[[1 . . . . . . .],
[. 1 . . . . . .],
[. . 1 . . . . .],
[. . . 1 . . . .],
[. . . . 1 . . .],
[. . . . . 1 . .],
[. . . . . . 1 .],
[. . . . . . . -1]]
```
""",
)
CCNotPowGate = CCXPowGate
CCX = TOFFOLI = CCNOT = CCXPowGate()
document(
CCX,
"""The TOFFOLI gate.
The `exponent=1` instance of `cirq.CCXPowGate`.
Matrix:
```
[[1 . . . . . . .],
[. 1 . . . . . .],
[. . 1 . . . . .],
[. . . 1 . . . .],
[. . . . 1 . . .],
[. . . . . 1 . .],
[. . . . . . . 1],
[. . . . . . 1 .]]
```
""",
)
CSWAP = FREDKIN = CSwapGate()
document(
CSWAP,
"""The Controlled Swap gate.
An instance of `cirq.CSwapGate`.
Matrix:
```
[[1 . . . . . . .],
[. 1 . . . . . .],
[. . 1 . . . . .],
[. . . 1 . . . .],
[. . . . 1 . . .],
[. . . . . . 1 .],
[. . . . . 1 . .],
[. . . . . . . 1]]
```
""",
)
| true | true |
1c455c21b416c77406ef8ef3f269649ffa99767d | 657 | py | Python | tests/while/test_while_class_in_body.py | sco1/pylox | b4820828306c20cee3f8533c2547fafb92c6c1bd | [
"MIT"
] | 2 | 2021-12-18T01:52:50.000Z | 2022-01-17T19:41:52.000Z | tests/while/test_while_class_in_body.py | sco1/pylox | b4820828306c20cee3f8533c2547fafb92c6c1bd | [
"MIT"
] | 18 | 2021-11-30T04:05:53.000Z | 2022-02-01T03:30:04.000Z | tests/while/test_while_class_in_body.py | sco1/pylox | b4820828306c20cee3f8533c2547fafb92c6c1bd | [
"MIT"
] | null | null | null | from textwrap import dedent
import pytest
from pylox.lox import Lox
# Base cases from https://github.com/munificent/craftinginterpreters/blob/master/test/while/class_in_body.lox
TEST_SRC = dedent(
"""\
// [line 2] Error at 'class': Expect expression.
while (true) class Foo {}
"""
)
EXPECTED_STDOUTS = ["2:14: LoxParseError: Expected expression."]
def test_class_in_body(capsys: pytest.CaptureFixture) -> None:
    """A class declaration in a `while` body is a parse error, not a runtime error."""
    lox = Lox()
    lox.run(TEST_SRC)
    assert lox.had_error and not lox.had_runtime_error
    captured_lines = capsys.readouterr().out.splitlines()
    assert captured_lines == EXPECTED_STDOUTS
| 24.333333 | 109 | 0.727549 | from textwrap import dedent
import pytest
from pylox.lox import Lox
TEST_SRC = dedent(
"""\
// [line 2] Error at 'class': Expect expression.
while (true) class Foo {}
"""
)
EXPECTED_STDOUTS = ["2:14: LoxParseError: Expected expression."]
def test_class_in_body(capsys: pytest.CaptureFixture) -> None:
    """A class declaration in a `while` body must fail at parse time, not run time."""
    interpreter = Lox()
    interpreter.run(TEST_SRC)
    # Parse error expected; the interpreter must never reach runtime execution.
    assert interpreter.had_error
    assert not interpreter.had_runtime_error
    all_out = capsys.readouterr().out.splitlines()
    assert all_out == EXPECTED_STDOUTS
| true | true |
1c455d20df340c5bca26c4bb77387d4e69955a1a | 364 | py | Python | tomwhite-hadoop-book/ch17-hive/src/main/python/max_temperature_reduce.py | booknu/study-hadoop-book | 68c9f00d224289c470ba03533c571492979d850f | [
"MIT"
] | null | null | null | tomwhite-hadoop-book/ch17-hive/src/main/python/max_temperature_reduce.py | booknu/study-hadoop-book | 68c9f00d224289c470ba03533c571492979d850f | [
"MIT"
] | 3 | 2021-08-02T17:05:27.000Z | 2022-02-09T22:28:27.000Z | tomwhite-hadoop-book/ch17-hive/src/main/python/max_temperature_reduce.py | booknu/study-hadoop-book | 68c9f00d224289c470ba03533c571492979d850f | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import sys

# Hadoop-streaming reducer: stdin delivers "key<TAB>value" lines grouped by
# key; emit "key<TAB>max_value" once per key group.
#
# BUG FIX: the original seeded every run with max_val = 0 and used
# max(0, int(val)) for the first group, so a group whose values are all
# negative wrongly reported 0. Each group now seeds from its first value.
# (print(...) with a single pre-formatted string works in both Python 2 and 3.)
(last_key, max_val) = (None, None)
for line in sys.stdin:
    (key, val) = line.strip().split("\t")
    if last_key is None:
        # First record overall: start the first group.
        (last_key, max_val) = (key, int(val))
    elif last_key != key:
        # Key changed: flush the finished group, start the next one.
        print("%s\t%s" % (last_key, max_val))
        (last_key, max_val) = (key, int(val))
    else:
        max_val = max(max_val, int(val))
if last_key is not None:
    # Flush the final group.
    print("%s\t%s" % (last_key, max_val))
import sys

# Hadoop-streaming reducer: stdin delivers "key<TAB>value" lines grouped by
# key; emit "key<TAB>max_value" once per key group.
#
# BUG FIX: the original seeded every run with max_val = 0 and used
# max(0, int(val)) for the first group, so a group whose values are all
# negative wrongly reported 0. Each group now seeds from its first value.
# (print(...) with a single pre-formatted string works in both Python 2 and 3.)
(last_key, max_val) = (None, None)
for line in sys.stdin:
    (key, val) = line.strip().split("\t")
    if last_key is None:
        # First record overall: start the first group.
        (last_key, max_val) = (key, int(val))
    elif last_key != key:
        # Key changed: flush the finished group, start the next one.
        print("%s\t%s" % (last_key, max_val))
        (last_key, max_val) = (key, int(val))
    else:
        max_val = max(max_val, int(val))
if last_key is not None:
    # Flush the final group.
    print("%s\t%s" % (last_key, max_val))
1c455d557f5e17a71c8823251f3fd837386f7ace | 2,004 | py | Python | launch/mouse_with_lidar.launch.py | rt-net/raspimouse_ros2_examples | f16aef6c087a6e6325801b9f6a10b272b4d59c91 | [
"Apache-2.0"
] | 30 | 2020-05-08T12:13:03.000Z | 2021-12-27T20:14:43.000Z | launch/mouse_with_lidar.launch.py | rt-net/raspimouse_ros2_examples | f16aef6c087a6e6325801b9f6a10b272b4d59c91 | [
"Apache-2.0"
] | 16 | 2020-05-28T02:35:24.000Z | 2021-12-10T05:41:31.000Z | launch/mouse_with_lidar.launch.py | rt-net/raspimouse_ros2_examples | f16aef6c087a6e6325801b9f6a10b272b4d59c91 | [
"Apache-2.0"
] | 4 | 2020-10-02T23:50:02.000Z | 2021-08-25T14:19:17.000Z | # Copyright 2020 RT Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch.actions import DeclareLaunchArgument
from launch.actions import IncludeLaunchDescription
from launch.actions import OpaqueFunction
from launch.launch_description_sources import PythonLaunchDescriptionSource
from launch_ros.actions import LifecycleNode
def generate_launch_description():
    """Assemble the launch description: LiDAR selection argument, the
    Raspberry Pi Mouse driver node, and (conditionally) the LiDAR driver."""
    lidar_arg = DeclareLaunchArgument(
        'lidar', default_value='lds',
        description='LiDAR: lds only, for now.'
    )

    raspimouse_node = LifecycleNode(
        name='raspimouse',
        package='raspimouse', executable='raspimouse', output='screen',
        parameters=[os.path.join(get_package_share_directory(
            'raspimouse_ros2_examples'), 'config', 'mouse.yml')]
    )

    def launch_lidar(context):
        # Evaluated at launch time; only the 'lds' driver is supported,
        # any other value launches nothing.
        if context.launch_configurations['lidar'] != 'lds':
            return None
        lds_launch_dir = os.path.join(
            get_package_share_directory('hls_lfcd_lds_driver'), 'launch')
        return [IncludeLaunchDescription(
            PythonLaunchDescriptionSource(
                [lds_launch_dir, '/hlds_laser.launch.py']),
        )]

    ld = LaunchDescription()
    for action in (lidar_arg, raspimouse_node,
                   OpaqueFunction(function=launch_lidar)):
        ld.add_action(action)
    return ld
| 35.785714 | 75 | 0.72006 |
import os
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch.actions import DeclareLaunchArgument
from launch.actions import IncludeLaunchDescription
from launch.actions import OpaqueFunction
from launch.launch_description_sources import PythonLaunchDescriptionSource
from launch_ros.actions import LifecycleNode
def generate_launch_description():
    """Build the launch description: LiDAR selection argument, the Raspberry
    Pi Mouse driver node, and (conditionally) the LiDAR driver launch file."""
    # Launch argument selecting the LiDAR driver; only 'lds' is handled below.
    declare_lidar = DeclareLaunchArgument(
        'lidar', default_value='lds',
        description='LiDAR: lds only, for now.'
    )
    # Raspberry Pi Mouse lifecycle driver node, configured from mouse.yml.
    mouse_node = LifecycleNode(
        name='raspimouse',
        package='raspimouse', executable='raspimouse', output='screen',
        parameters=[os.path.join(get_package_share_directory(
            'raspimouse_ros2_examples'), 'config', 'mouse.yml')]
    )
    def func_launch_lidar_node(context):
        # Resolved at launch time: include the HLS-LFCD LDS driver's launch
        # file when 'lidar' is 'lds'; otherwise return None (launch nothing).
        if context.launch_configurations['lidar'] == 'lds':
            return [IncludeLaunchDescription(
                PythonLaunchDescriptionSource([os.path.join(
                    get_package_share_directory('hls_lfcd_lds_driver'),
                    'launch'),
                    '/hlds_laser.launch.py'
                ]),)]
    launch_lidar_node = OpaqueFunction(function=func_launch_lidar_node)
    ld = LaunchDescription()
    ld.add_action(declare_lidar)
    ld.add_action(mouse_node)
    ld.add_action(launch_lidar_node)
    return ld
| true | true |
1c455e490ae1d668471a3db85f33281fb0cacecb | 1,017 | py | Python | CGPA_Claculator.py | DharaneeswaranR/CGPA-Calculator | ce8390288057ac8e1f79b2d76a233e63d6d7402c | [
"BSD-3-Clause"
] | 1 | 2021-11-20T15:42:15.000Z | 2021-11-20T15:42:15.000Z | CGPA_Claculator.py | DharaneeswaranR/CGPA-Calculator | ce8390288057ac8e1f79b2d76a233e63d6d7402c | [
"BSD-3-Clause"
] | null | null | null | CGPA_Claculator.py | DharaneeswaranR/CGPA-Calculator | ce8390288057ac8e1f79b2d76a233e63d6d7402c | [
"BSD-3-Clause"
def calculate_cgpa(grade_points, credits, num):
    """Return the credit-weighted CGPA over the first `num` subjects.

    CGPA = sum(grade_points[i] * credits[i] for i in range(num)) divided by
    the total of all entries in `credits`. Raises ZeroDivisionError if the
    credits sum to zero.
    """
    weighted = []
    for i in range(num):
        # BUG FIX: the original appended to `Mark_sum`, an undefined name
        # (the list is `mark_sum`), raising NameError on the first call.
        weighted.append(grade_points[i] * credits[i])
    return sum(weighted) / sum(credits)
if __name__ == '__main__':
    # Interactive driver: read marks and credits per subject, convert marks
    # to 10-point grade points, then print the credit-weighted CGPA.
    num = int(input("\nEnter number of subjects : "))
    grade_points = list()
    credits = list()
    for i in range(1, num+1):
        mark = int(input("\nEnter marks of Subject " + str(i) + " : "))
        credit = int(input("Enter credit of Subject " + str(i) + " : "))
        # Map the raw mark to a grade point; marks below 50 (or above 100)
        # fall through to 0.
        if 90 <= mark <= 100:
            grade_points.append(10)
        elif 80 <= mark <= 89:
            grade_points.append(9)
        elif 70 <= mark <= 79:
            grade_points.append(8)
        elif 60 <= mark <= 69:
            grade_points.append(7)
        elif 50 <= mark <= 59:
            grade_points.append(6)
        else:
            grade_points.append(0)
        credits.append(credit)
    print("\nCGPA is {:.2f}\n".format(calculate_cgpa(grade_points, credits, num)))
def calculate_cgpa(grade_points, credits, num):
    """Return the credit-weighted CGPA over the first `num` subjects.

    CGPA = sum(grade_points[i] * credits[i] for i in range(num)) divided by
    the total of all entries in `credits`. Raises ZeroDivisionError if the
    credits sum to zero.
    """
    weighted = []
    for i in range(num):
        # BUG FIX: the original appended to `Mark_sum`, an undefined name
        # (the list is `mark_sum`), raising NameError on the first call.
        weighted.append(grade_points[i] * credits[i])
    return sum(weighted) / sum(credits)
if __name__ == '__main__':
    # Interactive driver: read marks and credits per subject, convert marks
    # to 10-point grade points, then print the credit-weighted CGPA.
    num = int(input("\nEnter number of subjects : "))
    grade_points = list()
    credits = list()
    for i in range(1, num+1):
        mark = int(input("\nEnter marks of Subject " + str(i) + " : "))
        credit = int(input("Enter credit of Subject " + str(i) + " : "))
        # Map the raw mark to a grade point; marks below 50 (or above 100)
        # fall through to 0.
        if 90 <= mark <= 100:
            grade_points.append(10)
        elif 80 <= mark <= 89:
            grade_points.append(9)
        elif 70 <= mark <= 79:
            grade_points.append(8)
        elif 60 <= mark <= 69:
            grade_points.append(7)
        elif 50 <= mark <= 59:
            grade_points.append(6)
        else:
            grade_points.append(0)
        credits.append(credit)
    print("\nCGPA is {:.2f}\n".format(calculate_cgpa(grade_points, credits, num)))
| true | true |
1c455ea2754f11157926ef47242fd8393fbd2d15 | 9,560 | py | Python | mlfromscratch/supervised_learning/regression.py | sourcepirate/ML-From-Scratch | c6839bf47c360d6fa48861302fd90ccd4a8c38db | [
"MIT"
] | null | null | null | mlfromscratch/supervised_learning/regression.py | sourcepirate/ML-From-Scratch | c6839bf47c360d6fa48861302fd90ccd4a8c38db | [
"MIT"
] | null | null | null | mlfromscratch/supervised_learning/regression.py | sourcepirate/ML-From-Scratch | c6839bf47c360d6fa48861302fd90ccd4a8c38db | [
"MIT"
] | 2 | 2017-10-03T07:45:16.000Z | 2018-12-21T01:31:21.000Z | from __future__ import print_function, division
import numpy as np
import math
from mlfromscratch.utils import normalize, polynomial_features
class Regression(object):
    """Base class for linear models y ~ X.w, fit either by gradient descent
    or by regularized least squares.

    Parameters:
    -----------
    reg_factor: float
        Strength of the (subclass-defined) regularization penalty.
    n_iterations: float
        Number of gradient-descent updates (unused for the closed form).
    learning_rate: float
        Step size for each gradient-descent update.
    gradient_descent: boolean
        If True, fit iteratively; otherwise solve the regularized normal
        equations via an SVD pseudo-inverse.
    """
    def __init__(self, reg_factor, n_iterations, learning_rate, gradient_descent):
        self.w = None
        self.reg_factor = reg_factor
        self.n_iterations = n_iterations
        self.learning_rate = learning_rate
        self.gradient_descent = gradient_descent

    def initialize_weights(self, n_features):
        """Draw initial weights uniformly from [-1/sqrt(N), 1/sqrt(N)]."""
        bound = 1 / math.sqrt(n_features)
        self.w = np.random.uniform(-bound, bound, (n_features, ))

    def regularization(self):
        """Penalty added to the reported loss; subclasses override (default 0)."""
        return 0

    def regularization_gradient(self):
        """Gradient of the penalty w.r.t. the weights (default 0)."""
        return 0

    def fit(self, X, y):
        """Estimate self.w from training inputs X and targets y."""
        # Prepend a constant column so w[0] serves as the bias term.
        X = np.insert(X, 0, 1, axis=1)
        n_features = np.shape(X)[1]
        if not self.gradient_descent:
            # Closed form: w = (X^T X + r I)^+ X^T y, inverted via SVD.
            U, S, V = np.linalg.svd(
                X.T.dot(X) + self.reg_factor * np.identity(n_features))
            pseudo_inv = V.dot(np.linalg.pinv(np.diag(S))).dot(U.T)
            self.w = pseudo_inv.dot(X.T).dot(y)
            return
        self.training_errors = []
        self.initialize_weights(n_features)
        for _ in range(self.n_iterations):
            residual = y - X.dot(self.w)
            # Track the (regularized) mean squared error per iteration.
            self.training_errors.append(
                np.mean(0.5 * residual ** 2 + self.regularization()))
            grad_w = -residual.dot(X) + self.regularization_gradient()
            self.w -= self.learning_rate * grad_w

    def predict(self, X):
        """Return X.w with the same bias column inserted as in `fit`."""
        X = np.insert(X, 0, 1, axis=1)
        return X.dot(self.w)
class LinearRegression(Regression):
    """Ordinary (unregularized) linear regression.

    Parameters:
    -----------
    n_iterations: float
        Number of gradient-descent updates.
    learning_rate: float
        Step size for each gradient-descent update.
    gradient_descent: boolean
        If True, fit iteratively; otherwise solve by least squares.
    """
    def __init__(self, n_iterations=100, learning_rate=0.001, gradient_descent=True):
        super(LinearRegression, self).__init__(
            reg_factor=0,
            n_iterations=n_iterations,
            learning_rate=learning_rate,
            gradient_descent=gradient_descent,
        )
class PolynomialRegression(Regression):
    """Linear regression on a degree-`degree` polynomial expansion of X,
    enabling non-linear fits with the linear machinery.

    Parameters:
    -----------
    degree: int
        Degree of the polynomial feature expansion applied to X.
    n_iterations: float
        Number of gradient-descent updates.
    learning_rate: float
        Step size for each gradient-descent update.
    gradient_descent: boolean
        If True, fit iteratively; otherwise solve by least squares.
    """
    def __init__(self, degree, n_iterations=3000, learning_rate=0.001, gradient_descent=True):
        self.degree = degree
        super(PolynomialRegression, self).__init__(
            reg_factor=0,
            n_iterations=n_iterations,
            learning_rate=learning_rate,
            gradient_descent=gradient_descent,
        )

    def fit(self, X, y):
        """Expand X to polynomial features, then fit the linear model."""
        super(PolynomialRegression, self).fit(
            polynomial_features(X, degree=self.degree), y)

    def predict(self, X):
        """Predict after applying the same polynomial expansion as `fit`."""
        return super(PolynomialRegression, self).predict(
            polynomial_features(X, degree=self.degree))
class RidgeRegression(Regression):
    """Tikhonov-regularized linear regression (L2 penalty on the weights).
    A larger `reg_factor` shrinks the weights and lowers model variance.

    Parameters:
    -----------
    reg_factor: float
        L2 penalty strength.
    n_iterations: float
        Number of gradient-descent updates.
    learning_rate: float
        Step size for each gradient-descent update.
    gradient_descent: boolean
        If True, fit iteratively; otherwise solve by least squares.
    """
    def __init__(self, reg_factor, n_iterations=1000, learning_rate=0.001, gradient_descent=True):
        super(RidgeRegression, self).__init__(
            reg_factor, n_iterations, learning_rate, gradient_descent)

    def regularization(self):
        # L2 penalty term added to the reported training loss.
        return self.reg_factor * self.w.T.dot(self.w)

    def regularization_gradient(self):
        # NOTE(review): d/dw of reg_factor * w.w is 2*reg_factor*w; the
        # factor of 2 is absorbed into reg_factor here — confirm intentional.
        return self.reg_factor * self.w
class LassoRegression(Regression):
    """L1-regularized polynomial regression (the lasso).

    Fits a linear model on normalized polynomial features of degree `degree`
    with an L1 penalty of strength `reg_factor`, which both regularizes and
    drives some weights toward zero (variable selection).

    Parameters:
    -----------
    degree: int
        Degree of the polynomial feature expansion applied to X.
    reg_factor: float
        L1 penalty strength.
    n_iterations: float
        Number of gradient-descent updates.
    learning_rate: float
        Step size for each gradient-descent update.
    gradient_descent: boolean
        If True, fit iteratively; otherwise solve by least squares.
    """
    def __init__(self, degree, reg_factor, n_iterations=3000, learning_rate=0.01, gradient_descent=True):
        self.degree = degree
        super(LassoRegression, self).__init__(reg_factor, n_iterations, learning_rate, gradient_descent)

    def fit(self, X, y):
        """Fit on normalized polynomial features of X."""
        X_transformed = normalize(polynomial_features(X, degree=self.degree))
        super(LassoRegression, self).fit(X_transformed, y)

    def predict(self, X):
        """Predict using the same feature transformation as `fit`."""
        X_transformed = normalize(polynomial_features(X, degree=self.degree))
        return super(LassoRegression, self).predict(X_transformed)

    def regularization(self):
        # BUG FIX: previously `reg_factor * len(self.w)`, a constant that does
        # not depend on the weights at all. The lasso penalty is
        # reg_factor * ||w||_1, which is what the sign(w) gradient below
        # corresponds to.
        return self.reg_factor * np.linalg.norm(self.w, 1)

    def regularization_gradient(self):
        # Subgradient of the L1 norm.
        return self.reg_factor * np.sign(self.w)
class PolynomialRidgeRegression(Regression):
    """Ridge regression on normalized polynomial features of degree `degree`.

    Parameters:
    -----------
    degree: int
        Degree of the polynomial feature expansion applied to X.
    reg_factor: float
        L2 penalty strength.
    n_iterations: float
        Number of gradient-descent updates.
    learning_rate: float
        Step size for each gradient-descent update.
    gradient_descent: boolean
        If True, fit iteratively; otherwise solve by least squares.
    """
    def __init__(self, degree, reg_factor, n_iterations=3000, learning_rate=0.01, gradient_descent=True):
        self.degree = degree
        super(PolynomialRidgeRegression, self).__init__(
            reg_factor, n_iterations, learning_rate, gradient_descent)

    def fit(self, X, y):
        """Fit on normalized polynomial features of X."""
        super(PolynomialRidgeRegression, self).fit(
            normalize(polynomial_features(X, degree=self.degree)), y)

    def predict(self, X):
        """Predict using the same feature transformation as `fit`."""
        return super(PolynomialRidgeRegression, self).predict(
            normalize(polynomial_features(X, degree=self.degree)))

    def regularization(self):
        # L2 penalty term added to the reported training loss.
        return self.reg_factor * self.w.T.dot(self.w)

    def regularization_gradient(self):
        return self.reg_factor * self.w
| 43.853211 | 114 | 0.688808 | from __future__ import print_function, division
import numpy as np
import math
from mlfromscratch.utils import normalize, polynomial_features
class Regression(object):
    """Base class for linear models y ~ X.w, fit either by gradient descent
    or by regularized least squares (SVD pseudo-inverse)."""
    def __init__(self, reg_factor, n_iterations, learning_rate, gradient_descent):
        self.w = None
        self.reg_factor = reg_factor
        self.n_iterations = n_iterations
        self.learning_rate = learning_rate
        self.gradient_descent = gradient_descent

    def initialize_weights(self, n_features):
        """Draw initial weights uniformly from [-1/sqrt(N), 1/sqrt(N)]."""
        bound = 1 / math.sqrt(n_features)
        self.w = np.random.uniform(-bound, bound, (n_features, ))

    def regularization(self):
        """Penalty added to the reported loss; subclasses override (default 0)."""
        return 0

    def regularization_gradient(self):
        """Gradient of the penalty w.r.t. the weights (default 0)."""
        return 0

    def fit(self, X, y):
        """Estimate self.w from training inputs X and targets y."""
        # Prepend a constant column so w[0] serves as the bias term.
        X = np.insert(X, 0, 1, axis=1)
        n_features = np.shape(X)[1]
        if not self.gradient_descent:
            # Closed form: w = (X^T X + r I)^+ X^T y, inverted via SVD.
            U, S, V = np.linalg.svd(
                X.T.dot(X) + self.reg_factor * np.identity(n_features))
            pseudo_inv = V.dot(np.linalg.pinv(np.diag(S))).dot(U.T)
            self.w = pseudo_inv.dot(X.T).dot(y)
            return
        self.training_errors = []
        self.initialize_weights(n_features)
        for _ in range(self.n_iterations):
            residual = y - X.dot(self.w)
            # Track the (regularized) mean squared error per iteration.
            self.training_errors.append(
                np.mean(0.5 * residual ** 2 + self.regularization()))
            grad_w = -residual.dot(X) + self.regularization_gradient()
            self.w -= self.learning_rate * grad_w

    def predict(self, X):
        """Return X.w with the same bias column inserted as in `fit`."""
        X = np.insert(X, 0, 1, axis=1)
        return X.dot(self.w)
class LinearRegression(Regression):
    """Ordinary (unregularized) linear regression."""
    def __init__(self, n_iterations=100, learning_rate=0.001, gradient_descent=True):
        super(LinearRegression, self).__init__(
            reg_factor=0,
            n_iterations=n_iterations,
            learning_rate=learning_rate,
            gradient_descent=gradient_descent,
        )
class PolynomialRegression(Regression):
    """Linear regression on a degree-`degree` polynomial expansion of X."""
    def __init__(self, degree, n_iterations=3000, learning_rate=0.001, gradient_descent=True):
        self.degree = degree
        super(PolynomialRegression, self).__init__(
            reg_factor=0,
            n_iterations=n_iterations,
            learning_rate=learning_rate,
            gradient_descent=gradient_descent,
        )

    def fit(self, X, y):
        """Expand X to polynomial features, then fit the linear model."""
        super(PolynomialRegression, self).fit(
            polynomial_features(X, degree=self.degree), y)

    def predict(self, X):
        """Predict after applying the same polynomial expansion as `fit`."""
        return super(PolynomialRegression, self).predict(
            polynomial_features(X, degree=self.degree))
class RidgeRegression(Regression):
    """Tikhonov-regularized linear regression (L2 penalty on the weights)."""
    def __init__(self, reg_factor, n_iterations=1000, learning_rate=0.001, gradient_descent=True):
        super(RidgeRegression, self).__init__(
            reg_factor, n_iterations, learning_rate, gradient_descent)

    def regularization(self):
        # L2 penalty term added to the reported training loss.
        return self.reg_factor * self.w.T.dot(self.w)

    def regularization_gradient(self):
        # NOTE(review): d/dw of reg_factor * w.w is 2*reg_factor*w; the
        # factor of 2 is absorbed into reg_factor here — confirm intentional.
        return self.reg_factor * self.w
class LassoRegression(Regression):
    """L1-regularized polynomial regression (the lasso): fits a linear model
    on normalized polynomial features with an L1 penalty that both
    regularizes and drives some weights toward zero."""
    def __init__(self, degree, reg_factor, n_iterations=3000, learning_rate=0.01, gradient_descent=True):
        self.degree = degree
        super(LassoRegression, self).__init__(reg_factor, n_iterations, learning_rate, gradient_descent)

    def fit(self, X, y):
        """Fit on normalized polynomial features of X."""
        X_transformed = normalize(polynomial_features(X, degree=self.degree))
        super(LassoRegression, self).fit(X_transformed, y)

    def predict(self, X):
        """Predict using the same feature transformation as `fit`."""
        X_transformed = normalize(polynomial_features(X, degree=self.degree))
        return super(LassoRegression, self).predict(X_transformed)

    def regularization(self):
        # BUG FIX: previously `reg_factor * len(self.w)`, a weight-independent
        # constant. The lasso penalty is reg_factor * ||w||_1, matching the
        # sign(w) gradient below.
        return self.reg_factor * np.linalg.norm(self.w, 1)

    def regularization_gradient(self):
        # Subgradient of the L1 norm.
        return self.reg_factor * np.sign(self.w)
class PolynomialRidgeRegression(Regression):
    """Ridge regression on normalized polynomial features of degree `degree`."""
    def __init__(self, degree, reg_factor, n_iterations=3000, learning_rate=0.01, gradient_descent=True):
        self.degree = degree
        super(PolynomialRidgeRegression, self).__init__(
            reg_factor, n_iterations, learning_rate, gradient_descent)

    def fit(self, X, y):
        """Fit on normalized polynomial features of X."""
        super(PolynomialRidgeRegression, self).fit(
            normalize(polynomial_features(X, degree=self.degree)), y)

    def predict(self, X):
        """Predict using the same feature transformation as `fit`."""
        return super(PolynomialRidgeRegression, self).predict(
            normalize(polynomial_features(X, degree=self.degree)))

    def regularization(self):
        # L2 penalty term added to the reported training loss.
        return self.reg_factor * self.w.T.dot(self.w)

    def regularization_gradient(self):
        return self.reg_factor * self.w
| true | true |
1c455f55b99a0f9313f7b61d44989bbe51ff3591 | 72,468 | py | Python | intersight/model/virtualization_vmware_datacenter_relationship.py | CiscoDevNet/intersight-python | 04b721f37c3044646a91c185c7259edfb991557a | [
"Apache-2.0"
] | 5 | 2021-12-16T15:13:32.000Z | 2022-03-29T16:09:54.000Z | intersight/model/virtualization_vmware_datacenter_relationship.py | CiscoDevNet/intersight-python | 04b721f37c3044646a91c185c7259edfb991557a | [
"Apache-2.0"
] | 4 | 2022-01-25T19:05:51.000Z | 2022-03-29T20:18:37.000Z | intersight/model/virtualization_vmware_datacenter_relationship.py | CiscoDevNet/intersight-python | 04b721f37c3044646a91c185c7259edfb991557a | [
"Apache-2.0"
] | 2 | 2020-07-07T15:01:08.000Z | 2022-01-31T04:27:35.000Z | """
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document. # noqa: E501
The version of the OpenAPI document: 1.0.9-4950
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from intersight.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
    """Import sibling model classes on demand and publish them into this
    module's globals (generated OpenAPI code; deferred presumably to avoid
    circular imports between model modules — do not edit by hand)."""
    from intersight.model.asset_device_registration_relationship import AssetDeviceRegistrationRelationship
    from intersight.model.display_names import DisplayNames
    from intersight.model.mo_base_mo_relationship import MoBaseMoRelationship
    from intersight.model.mo_mo_ref import MoMoRef
    from intersight.model.mo_tag import MoTag
    from intersight.model.mo_version_context import MoVersionContext
    from intersight.model.virtualization_vmware_datacenter import VirtualizationVmwareDatacenter
    from intersight.model.virtualization_vmware_folder_relationship import VirtualizationVmwareFolderRelationship
    from intersight.model.virtualization_vmware_vcenter_relationship import VirtualizationVmwareVcenterRelationship
    # Make the lazily imported classes resolvable by name at module scope,
    # as the generated model machinery looks them up via globals().
    globals()['AssetDeviceRegistrationRelationship'] = AssetDeviceRegistrationRelationship
    globals()['DisplayNames'] = DisplayNames
    globals()['MoBaseMoRelationship'] = MoBaseMoRelationship
    globals()['MoMoRef'] = MoMoRef
    globals()['MoTag'] = MoTag
    globals()['MoVersionContext'] = MoVersionContext
    globals()['VirtualizationVmwareDatacenter'] = VirtualizationVmwareDatacenter
    globals()['VirtualizationVmwareFolderRelationship'] = VirtualizationVmwareFolderRelationship
    globals()['VirtualizationVmwareVcenterRelationship'] = VirtualizationVmwareVcenterRelationship
class VirtualizationVmwareDatacenterRelationship(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('class_id',): {
'MO.MOREF': "mo.MoRef",
},
('object_type',): {
'AAA.AUDITRECORD': "aaa.AuditRecord",
'AAA.RETENTIONCONFIG': "aaa.RetentionConfig",
'AAA.RETENTIONPOLICY': "aaa.RetentionPolicy",
'ACCESS.POLICY': "access.Policy",
'ADAPTER.CONFIGPOLICY': "adapter.ConfigPolicy",
'ADAPTER.EXTETHINTERFACE': "adapter.ExtEthInterface",
'ADAPTER.HOSTETHINTERFACE': "adapter.HostEthInterface",
'ADAPTER.HOSTFCINTERFACE': "adapter.HostFcInterface",
'ADAPTER.HOSTISCSIINTERFACE': "adapter.HostIscsiInterface",
'ADAPTER.UNIT': "adapter.Unit",
'ADAPTER.UNITEXPANDER': "adapter.UnitExpander",
'APPLIANCE.APPSTATUS': "appliance.AppStatus",
'APPLIANCE.AUTORMAPOLICY': "appliance.AutoRmaPolicy",
'APPLIANCE.BACKUP': "appliance.Backup",
'APPLIANCE.BACKUPPOLICY': "appliance.BackupPolicy",
'APPLIANCE.CERTIFICATESETTING': "appliance.CertificateSetting",
'APPLIANCE.DATAEXPORTPOLICY': "appliance.DataExportPolicy",
'APPLIANCE.DEVICECERTIFICATE': "appliance.DeviceCertificate",
'APPLIANCE.DEVICECLAIM': "appliance.DeviceClaim",
'APPLIANCE.DEVICEUPGRADEPOLICY': "appliance.DeviceUpgradePolicy",
'APPLIANCE.DIAGSETTING': "appliance.DiagSetting",
'APPLIANCE.EXTERNALSYSLOGSETTING': "appliance.ExternalSyslogSetting",
'APPLIANCE.FILEGATEWAY': "appliance.FileGateway",
'APPLIANCE.FILESYSTEMSTATUS': "appliance.FileSystemStatus",
'APPLIANCE.GROUPSTATUS': "appliance.GroupStatus",
'APPLIANCE.IMAGEBUNDLE': "appliance.ImageBundle",
'APPLIANCE.NODEINFO': "appliance.NodeInfo",
'APPLIANCE.NODESTATUS': "appliance.NodeStatus",
'APPLIANCE.RELEASENOTE': "appliance.ReleaseNote",
'APPLIANCE.REMOTEFILEIMPORT': "appliance.RemoteFileImport",
'APPLIANCE.RESTORE': "appliance.Restore",
'APPLIANCE.SETUPINFO': "appliance.SetupInfo",
'APPLIANCE.SYSTEMINFO': "appliance.SystemInfo",
'APPLIANCE.SYSTEMSTATUS': "appliance.SystemStatus",
'APPLIANCE.UPGRADE': "appliance.Upgrade",
'APPLIANCE.UPGRADEPOLICY': "appliance.UpgradePolicy",
'ASSET.CLUSTERMEMBER': "asset.ClusterMember",
'ASSET.DEPLOYMENT': "asset.Deployment",
'ASSET.DEPLOYMENTDEVICE': "asset.DeploymentDevice",
'ASSET.DEVICECLAIM': "asset.DeviceClaim",
'ASSET.DEVICECONFIGURATION': "asset.DeviceConfiguration",
'ASSET.DEVICECONNECTORMANAGER': "asset.DeviceConnectorManager",
'ASSET.DEVICECONTRACTINFORMATION': "asset.DeviceContractInformation",
'ASSET.DEVICECONTRACTNOTIFICATION': "asset.DeviceContractNotification",
'ASSET.DEVICEREGISTRATION': "asset.DeviceRegistration",
'ASSET.SUBSCRIPTION': "asset.Subscription",
'ASSET.SUBSCRIPTIONACCOUNT': "asset.SubscriptionAccount",
'ASSET.SUBSCRIPTIONDEVICECONTRACTINFORMATION': "asset.SubscriptionDeviceContractInformation",
'ASSET.TARGET': "asset.Target",
'BIOS.BOOTDEVICE': "bios.BootDevice",
'BIOS.BOOTMODE': "bios.BootMode",
'BIOS.POLICY': "bios.Policy",
'BIOS.SYSTEMBOOTORDER': "bios.SystemBootOrder",
'BIOS.TOKENSETTINGS': "bios.TokenSettings",
'BIOS.UNIT': "bios.Unit",
'BIOS.VFSELECTMEMORYRASCONFIGURATION': "bios.VfSelectMemoryRasConfiguration",
'BOOT.CDDDEVICE': "boot.CddDevice",
'BOOT.DEVICEBOOTMODE': "boot.DeviceBootMode",
'BOOT.DEVICEBOOTSECURITY': "boot.DeviceBootSecurity",
'BOOT.HDDDEVICE': "boot.HddDevice",
'BOOT.ISCSIDEVICE': "boot.IscsiDevice",
'BOOT.NVMEDEVICE': "boot.NvmeDevice",
'BOOT.PCHSTORAGEDEVICE': "boot.PchStorageDevice",
'BOOT.PRECISIONPOLICY': "boot.PrecisionPolicy",
'BOOT.PXEDEVICE': "boot.PxeDevice",
'BOOT.SANDEVICE': "boot.SanDevice",
'BOOT.SDDEVICE': "boot.SdDevice",
'BOOT.UEFISHELLDEVICE': "boot.UefiShellDevice",
'BOOT.USBDEVICE': "boot.UsbDevice",
'BOOT.VMEDIADEVICE': "boot.VmediaDevice",
'BULK.EXPORT': "bulk.Export",
'BULK.EXPORTEDITEM': "bulk.ExportedItem",
'BULK.MOCLONER': "bulk.MoCloner",
'BULK.MOMERGER': "bulk.MoMerger",
'BULK.REQUEST': "bulk.Request",
'BULK.SUBREQUESTOBJ': "bulk.SubRequestObj",
'CAPABILITY.ADAPTERUNITDESCRIPTOR': "capability.AdapterUnitDescriptor",
'CAPABILITY.CATALOG': "capability.Catalog",
'CAPABILITY.CHASSISDESCRIPTOR': "capability.ChassisDescriptor",
'CAPABILITY.CHASSISMANUFACTURINGDEF': "capability.ChassisManufacturingDef",
'CAPABILITY.CIMCFIRMWAREDESCRIPTOR': "capability.CimcFirmwareDescriptor",
'CAPABILITY.EQUIPMENTPHYSICALDEF': "capability.EquipmentPhysicalDef",
'CAPABILITY.EQUIPMENTSLOTARRAY': "capability.EquipmentSlotArray",
'CAPABILITY.FANMODULEDESCRIPTOR': "capability.FanModuleDescriptor",
'CAPABILITY.FANMODULEMANUFACTURINGDEF': "capability.FanModuleManufacturingDef",
'CAPABILITY.IOCARDCAPABILITYDEF': "capability.IoCardCapabilityDef",
'CAPABILITY.IOCARDDESCRIPTOR': "capability.IoCardDescriptor",
'CAPABILITY.IOCARDMANUFACTURINGDEF': "capability.IoCardManufacturingDef",
'CAPABILITY.PORTGROUPAGGREGATIONDEF': "capability.PortGroupAggregationDef",
'CAPABILITY.PSUDESCRIPTOR': "capability.PsuDescriptor",
'CAPABILITY.PSUMANUFACTURINGDEF': "capability.PsuManufacturingDef",
'CAPABILITY.SERVERMODELSCAPABILITYDEF': "capability.ServerModelsCapabilityDef",
'CAPABILITY.SERVERSCHEMADESCRIPTOR': "capability.ServerSchemaDescriptor",
'CAPABILITY.SIOCMODULECAPABILITYDEF': "capability.SiocModuleCapabilityDef",
'CAPABILITY.SIOCMODULEDESCRIPTOR': "capability.SiocModuleDescriptor",
'CAPABILITY.SIOCMODULEMANUFACTURINGDEF': "capability.SiocModuleManufacturingDef",
'CAPABILITY.SWITCHCAPABILITY': "capability.SwitchCapability",
'CAPABILITY.SWITCHDESCRIPTOR': "capability.SwitchDescriptor",
'CAPABILITY.SWITCHMANUFACTURINGDEF': "capability.SwitchManufacturingDef",
'CERTIFICATEMANAGEMENT.POLICY': "certificatemanagement.Policy",
'CHASSIS.CONFIGCHANGEDETAIL': "chassis.ConfigChangeDetail",
'CHASSIS.CONFIGIMPORT': "chassis.ConfigImport",
'CHASSIS.CONFIGRESULT': "chassis.ConfigResult",
'CHASSIS.CONFIGRESULTENTRY': "chassis.ConfigResultEntry",
'CHASSIS.IOMPROFILE': "chassis.IomProfile",
'CHASSIS.PROFILE': "chassis.Profile",
'CLOUD.AWSBILLINGUNIT': "cloud.AwsBillingUnit",
'CLOUD.AWSKEYPAIR': "cloud.AwsKeyPair",
'CLOUD.AWSNETWORKINTERFACE': "cloud.AwsNetworkInterface",
'CLOUD.AWSORGANIZATIONALUNIT': "cloud.AwsOrganizationalUnit",
'CLOUD.AWSSECURITYGROUP': "cloud.AwsSecurityGroup",
'CLOUD.AWSSUBNET': "cloud.AwsSubnet",
'CLOUD.AWSVIRTUALMACHINE': "cloud.AwsVirtualMachine",
'CLOUD.AWSVOLUME': "cloud.AwsVolume",
'CLOUD.AWSVPC': "cloud.AwsVpc",
'CLOUD.COLLECTINVENTORY': "cloud.CollectInventory",
'CLOUD.REGIONS': "cloud.Regions",
'CLOUD.SKUCONTAINERTYPE': "cloud.SkuContainerType",
'CLOUD.SKUDATABASETYPE': "cloud.SkuDatabaseType",
'CLOUD.SKUINSTANCETYPE': "cloud.SkuInstanceType",
'CLOUD.SKUNETWORKTYPE': "cloud.SkuNetworkType",
'CLOUD.SKUREGIONRATECARDS': "cloud.SkuRegionRateCards",
'CLOUD.SKUVOLUMETYPE': "cloud.SkuVolumeType",
'CLOUD.TFCAGENTPOOL': "cloud.TfcAgentpool",
'CLOUD.TFCORGANIZATION': "cloud.TfcOrganization",
'CLOUD.TFCWORKSPACE': "cloud.TfcWorkspace",
'COMM.HTTPPROXYPOLICY': "comm.HttpProxyPolicy",
'COMPUTE.BIOSPOSTPOLICY': "compute.BiosPostPolicy",
'COMPUTE.BLADE': "compute.Blade",
'COMPUTE.BLADEIDENTITY': "compute.BladeIdentity",
'COMPUTE.BOARD': "compute.Board",
'COMPUTE.MAPPING': "compute.Mapping",
'COMPUTE.PHYSICALSUMMARY': "compute.PhysicalSummary",
'COMPUTE.RACKUNIT': "compute.RackUnit",
'COMPUTE.RACKUNITIDENTITY': "compute.RackUnitIdentity",
'COMPUTE.SERVERPOWERPOLICY': "compute.ServerPowerPolicy",
'COMPUTE.SERVERSETTING': "compute.ServerSetting",
'COMPUTE.VMEDIA': "compute.Vmedia",
'COND.ALARM': "cond.Alarm",
'COND.ALARMAGGREGATION': "cond.AlarmAggregation",
'COND.HCLSTATUS': "cond.HclStatus",
'COND.HCLSTATUSDETAIL': "cond.HclStatusDetail",
'COND.HCLSTATUSJOB': "cond.HclStatusJob",
'CONNECTORPACK.CONNECTORPACKUPGRADE': "connectorpack.ConnectorPackUpgrade",
'CONNECTORPACK.UPGRADEIMPACT': "connectorpack.UpgradeImpact",
'CONVERGEDINFRA.HEALTHCHECKDEFINITION': "convergedinfra.HealthCheckDefinition",
'CONVERGEDINFRA.HEALTHCHECKEXECUTION': "convergedinfra.HealthCheckExecution",
'CONVERGEDINFRA.POD': "convergedinfra.Pod",
'CRD.CUSTOMRESOURCE': "crd.CustomResource",
'DEVICECONNECTOR.POLICY': "deviceconnector.Policy",
'EQUIPMENT.CHASSIS': "equipment.Chassis",
'EQUIPMENT.CHASSISIDENTITY': "equipment.ChassisIdentity",
'EQUIPMENT.CHASSISOPERATION': "equipment.ChassisOperation",
'EQUIPMENT.DEVICESUMMARY': "equipment.DeviceSummary",
'EQUIPMENT.EXPANDERMODULE': "equipment.ExpanderModule",
'EQUIPMENT.FAN': "equipment.Fan",
'EQUIPMENT.FANCONTROL': "equipment.FanControl",
'EQUIPMENT.FANMODULE': "equipment.FanModule",
'EQUIPMENT.FEX': "equipment.Fex",
'EQUIPMENT.FEXIDENTITY': "equipment.FexIdentity",
'EQUIPMENT.FEXOPERATION': "equipment.FexOperation",
'EQUIPMENT.FRU': "equipment.Fru",
'EQUIPMENT.IDENTITYSUMMARY': "equipment.IdentitySummary",
'EQUIPMENT.IOCARD': "equipment.IoCard",
'EQUIPMENT.IOCARDOPERATION': "equipment.IoCardOperation",
'EQUIPMENT.IOEXPANDER': "equipment.IoExpander",
'EQUIPMENT.LOCATORLED': "equipment.LocatorLed",
'EQUIPMENT.PSU': "equipment.Psu",
'EQUIPMENT.PSUCONTROL': "equipment.PsuControl",
'EQUIPMENT.RACKENCLOSURE': "equipment.RackEnclosure",
'EQUIPMENT.RACKENCLOSURESLOT': "equipment.RackEnclosureSlot",
'EQUIPMENT.SHAREDIOMODULE': "equipment.SharedIoModule",
'EQUIPMENT.SWITCHCARD': "equipment.SwitchCard",
'EQUIPMENT.SYSTEMIOCONTROLLER': "equipment.SystemIoController",
'EQUIPMENT.TPM': "equipment.Tpm",
'EQUIPMENT.TRANSCEIVER': "equipment.Transceiver",
'ETHER.HOSTPORT': "ether.HostPort",
'ETHER.NETWORKPORT': "ether.NetworkPort",
'ETHER.PHYSICALPORT': "ether.PhysicalPort",
'ETHER.PORTCHANNEL': "ether.PortChannel",
'EXTERNALSITE.AUTHORIZATION': "externalsite.Authorization",
'FABRIC.APPLIANCEPCROLE': "fabric.AppliancePcRole",
'FABRIC.APPLIANCEROLE': "fabric.ApplianceRole",
'FABRIC.CONFIGCHANGEDETAIL': "fabric.ConfigChangeDetail",
'FABRIC.CONFIGRESULT': "fabric.ConfigResult",
'FABRIC.CONFIGRESULTENTRY': "fabric.ConfigResultEntry",
'FABRIC.ELEMENTIDENTITY': "fabric.ElementIdentity",
'FABRIC.ESTIMATEIMPACT': "fabric.EstimateImpact",
'FABRIC.ETHNETWORKCONTROLPOLICY': "fabric.EthNetworkControlPolicy",
'FABRIC.ETHNETWORKGROUPPOLICY': "fabric.EthNetworkGroupPolicy",
'FABRIC.ETHNETWORKPOLICY': "fabric.EthNetworkPolicy",
'FABRIC.FCNETWORKPOLICY': "fabric.FcNetworkPolicy",
'FABRIC.FCSTORAGEROLE': "fabric.FcStorageRole",
'FABRIC.FCUPLINKPCROLE': "fabric.FcUplinkPcRole",
'FABRIC.FCUPLINKROLE': "fabric.FcUplinkRole",
'FABRIC.FCOEUPLINKPCROLE': "fabric.FcoeUplinkPcRole",
'FABRIC.FCOEUPLINKROLE': "fabric.FcoeUplinkRole",
'FABRIC.FLOWCONTROLPOLICY': "fabric.FlowControlPolicy",
'FABRIC.LINKAGGREGATIONPOLICY': "fabric.LinkAggregationPolicy",
'FABRIC.LINKCONTROLPOLICY': "fabric.LinkControlPolicy",
'FABRIC.MULTICASTPOLICY': "fabric.MulticastPolicy",
'FABRIC.PCMEMBER': "fabric.PcMember",
'FABRIC.PCOPERATION': "fabric.PcOperation",
'FABRIC.PORTMODE': "fabric.PortMode",
'FABRIC.PORTOPERATION': "fabric.PortOperation",
'FABRIC.PORTPOLICY': "fabric.PortPolicy",
'FABRIC.SERVERROLE': "fabric.ServerRole",
'FABRIC.SWITCHCLUSTERPROFILE': "fabric.SwitchClusterProfile",
'FABRIC.SWITCHCONTROLPOLICY': "fabric.SwitchControlPolicy",
'FABRIC.SWITCHPROFILE': "fabric.SwitchProfile",
'FABRIC.SYSTEMQOSPOLICY': "fabric.SystemQosPolicy",
'FABRIC.UPLINKPCROLE': "fabric.UplinkPcRole",
'FABRIC.UPLINKROLE': "fabric.UplinkRole",
'FABRIC.VLAN': "fabric.Vlan",
'FABRIC.VSAN': "fabric.Vsan",
'FAULT.INSTANCE': "fault.Instance",
'FC.PHYSICALPORT': "fc.PhysicalPort",
'FC.PORTCHANNEL': "fc.PortChannel",
'FCPOOL.FCBLOCK': "fcpool.FcBlock",
'FCPOOL.LEASE': "fcpool.Lease",
'FCPOOL.POOL': "fcpool.Pool",
'FCPOOL.POOLMEMBER': "fcpool.PoolMember",
'FCPOOL.UNIVERSE': "fcpool.Universe",
'FEEDBACK.FEEDBACKPOST': "feedback.FeedbackPost",
'FIRMWARE.BIOSDESCRIPTOR': "firmware.BiosDescriptor",
'FIRMWARE.BOARDCONTROLLERDESCRIPTOR': "firmware.BoardControllerDescriptor",
'FIRMWARE.CHASSISUPGRADE': "firmware.ChassisUpgrade",
'FIRMWARE.CIMCDESCRIPTOR': "firmware.CimcDescriptor",
'FIRMWARE.DIMMDESCRIPTOR': "firmware.DimmDescriptor",
'FIRMWARE.DISTRIBUTABLE': "firmware.Distributable",
'FIRMWARE.DISTRIBUTABLEMETA': "firmware.DistributableMeta",
'FIRMWARE.DRIVEDESCRIPTOR': "firmware.DriveDescriptor",
'FIRMWARE.DRIVERDISTRIBUTABLE': "firmware.DriverDistributable",
'FIRMWARE.EULA': "firmware.Eula",
'FIRMWARE.FIRMWARESUMMARY': "firmware.FirmwareSummary",
'FIRMWARE.GPUDESCRIPTOR': "firmware.GpuDescriptor",
'FIRMWARE.HBADESCRIPTOR': "firmware.HbaDescriptor",
'FIRMWARE.IOMDESCRIPTOR': "firmware.IomDescriptor",
'FIRMWARE.MSWITCHDESCRIPTOR': "firmware.MswitchDescriptor",
'FIRMWARE.NXOSDESCRIPTOR': "firmware.NxosDescriptor",
'FIRMWARE.PCIEDESCRIPTOR': "firmware.PcieDescriptor",
'FIRMWARE.PSUDESCRIPTOR': "firmware.PsuDescriptor",
'FIRMWARE.RUNNINGFIRMWARE': "firmware.RunningFirmware",
'FIRMWARE.SASEXPANDERDESCRIPTOR': "firmware.SasExpanderDescriptor",
'FIRMWARE.SERVERCONFIGURATIONUTILITYDISTRIBUTABLE': "firmware.ServerConfigurationUtilityDistributable",
'FIRMWARE.STORAGECONTROLLERDESCRIPTOR': "firmware.StorageControllerDescriptor",
'FIRMWARE.SWITCHUPGRADE': "firmware.SwitchUpgrade",
'FIRMWARE.UNSUPPORTEDVERSIONUPGRADE': "firmware.UnsupportedVersionUpgrade",
'FIRMWARE.UPGRADE': "firmware.Upgrade",
'FIRMWARE.UPGRADEIMPACT': "firmware.UpgradeImpact",
'FIRMWARE.UPGRADEIMPACTSTATUS': "firmware.UpgradeImpactStatus",
'FIRMWARE.UPGRADESTATUS': "firmware.UpgradeStatus",
'FORECAST.CATALOG': "forecast.Catalog",
'FORECAST.DEFINITION': "forecast.Definition",
'FORECAST.INSTANCE': "forecast.Instance",
'GRAPHICS.CARD': "graphics.Card",
'GRAPHICS.CONTROLLER': "graphics.Controller",
'HCL.COMPATIBILITYSTATUS': "hcl.CompatibilityStatus",
'HCL.DRIVERIMAGE': "hcl.DriverImage",
'HCL.EXEMPTEDCATALOG': "hcl.ExemptedCatalog",
'HCL.HYPERFLEXSOFTWARECOMPATIBILITYINFO': "hcl.HyperflexSoftwareCompatibilityInfo",
'HCL.OPERATINGSYSTEM': "hcl.OperatingSystem",
'HCL.OPERATINGSYSTEMVENDOR': "hcl.OperatingSystemVendor",
'HCL.SUPPORTEDDRIVERNAME': "hcl.SupportedDriverName",
'HYPERFLEX.ALARM': "hyperflex.Alarm",
'HYPERFLEX.APPCATALOG': "hyperflex.AppCatalog",
'HYPERFLEX.AUTOSUPPORTPOLICY': "hyperflex.AutoSupportPolicy",
'HYPERFLEX.BACKUPCLUSTER': "hyperflex.BackupCluster",
'HYPERFLEX.CAPABILITYINFO': "hyperflex.CapabilityInfo",
'HYPERFLEX.CLUSTER': "hyperflex.Cluster",
'HYPERFLEX.CLUSTERBACKUPPOLICY': "hyperflex.ClusterBackupPolicy",
'HYPERFLEX.CLUSTERBACKUPPOLICYDEPLOYMENT': "hyperflex.ClusterBackupPolicyDeployment",
'HYPERFLEX.CLUSTERBACKUPPOLICYINVENTORY': "hyperflex.ClusterBackupPolicyInventory",
'HYPERFLEX.CLUSTERHEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.ClusterHealthCheckExecutionSnapshot",
'HYPERFLEX.CLUSTERNETWORKPOLICY': "hyperflex.ClusterNetworkPolicy",
'HYPERFLEX.CLUSTERPROFILE': "hyperflex.ClusterProfile",
'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICY': "hyperflex.ClusterReplicationNetworkPolicy",
'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICYDEPLOYMENT': "hyperflex.ClusterReplicationNetworkPolicyDeployment",
'HYPERFLEX.CLUSTERSTORAGEPOLICY': "hyperflex.ClusterStoragePolicy",
'HYPERFLEX.CONFIGRESULT': "hyperflex.ConfigResult",
'HYPERFLEX.CONFIGRESULTENTRY': "hyperflex.ConfigResultEntry",
'HYPERFLEX.DATAPROTECTIONPEER': "hyperflex.DataProtectionPeer",
'HYPERFLEX.DATASTORESTATISTIC': "hyperflex.DatastoreStatistic",
'HYPERFLEX.DEVICEPACKAGEDOWNLOADSTATE': "hyperflex.DevicePackageDownloadState",
'HYPERFLEX.DRIVE': "hyperflex.Drive",
'HYPERFLEX.EXTFCSTORAGEPOLICY': "hyperflex.ExtFcStoragePolicy",
'HYPERFLEX.EXTISCSISTORAGEPOLICY': "hyperflex.ExtIscsiStoragePolicy",
'HYPERFLEX.FEATURELIMITEXTERNAL': "hyperflex.FeatureLimitExternal",
'HYPERFLEX.FEATURELIMITINTERNAL': "hyperflex.FeatureLimitInternal",
'HYPERFLEX.HEALTH': "hyperflex.Health",
'HYPERFLEX.HEALTHCHECKDEFINITION': "hyperflex.HealthCheckDefinition",
'HYPERFLEX.HEALTHCHECKEXECUTION': "hyperflex.HealthCheckExecution",
'HYPERFLEX.HEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.HealthCheckExecutionSnapshot",
'HYPERFLEX.HEALTHCHECKPACKAGECHECKSUM': "hyperflex.HealthCheckPackageChecksum",
'HYPERFLEX.HXDPVERSION': "hyperflex.HxdpVersion",
'HYPERFLEX.LICENSE': "hyperflex.License",
'HYPERFLEX.LOCALCREDENTIALPOLICY': "hyperflex.LocalCredentialPolicy",
'HYPERFLEX.NODE': "hyperflex.Node",
'HYPERFLEX.NODECONFIGPOLICY': "hyperflex.NodeConfigPolicy",
'HYPERFLEX.NODEPROFILE': "hyperflex.NodeProfile",
'HYPERFLEX.PROTECTEDCLUSTER': "hyperflex.ProtectedCluster",
'HYPERFLEX.PROXYSETTINGPOLICY': "hyperflex.ProxySettingPolicy",
'HYPERFLEX.SERVERFIRMWAREVERSION': "hyperflex.ServerFirmwareVersion",
'HYPERFLEX.SERVERFIRMWAREVERSIONENTRY': "hyperflex.ServerFirmwareVersionEntry",
'HYPERFLEX.SERVERMODEL': "hyperflex.ServerModel",
'HYPERFLEX.SERVICEAUTHTOKEN': "hyperflex.ServiceAuthToken",
'HYPERFLEX.SOFTWAREDISTRIBUTIONCOMPONENT': "hyperflex.SoftwareDistributionComponent",
'HYPERFLEX.SOFTWAREDISTRIBUTIONENTRY': "hyperflex.SoftwareDistributionEntry",
'HYPERFLEX.SOFTWAREDISTRIBUTIONVERSION': "hyperflex.SoftwareDistributionVersion",
'HYPERFLEX.SOFTWAREVERSIONPOLICY': "hyperflex.SoftwareVersionPolicy",
'HYPERFLEX.STORAGECONTAINER': "hyperflex.StorageContainer",
'HYPERFLEX.SYSCONFIGPOLICY': "hyperflex.SysConfigPolicy",
'HYPERFLEX.UCSMCONFIGPOLICY': "hyperflex.UcsmConfigPolicy",
'HYPERFLEX.VCENTERCONFIGPOLICY': "hyperflex.VcenterConfigPolicy",
'HYPERFLEX.VMBACKUPINFO': "hyperflex.VmBackupInfo",
'HYPERFLEX.VMIMPORTOPERATION': "hyperflex.VmImportOperation",
'HYPERFLEX.VMRESTOREOPERATION': "hyperflex.VmRestoreOperation",
'HYPERFLEX.VMSNAPSHOTINFO': "hyperflex.VmSnapshotInfo",
'HYPERFLEX.VOLUME': "hyperflex.Volume",
'HYPERFLEX.WITNESSCONFIGURATION': "hyperflex.WitnessConfiguration",
'IAAS.CONNECTORPACK': "iaas.ConnectorPack",
'IAAS.DEVICESTATUS': "iaas.DeviceStatus",
'IAAS.DIAGNOSTICMESSAGES': "iaas.DiagnosticMessages",
'IAAS.LICENSEINFO': "iaas.LicenseInfo",
'IAAS.MOSTRUNTASKS': "iaas.MostRunTasks",
'IAAS.SERVICEREQUEST': "iaas.ServiceRequest",
'IAAS.UCSDINFO': "iaas.UcsdInfo",
'IAAS.UCSDMANAGEDINFRA': "iaas.UcsdManagedInfra",
'IAAS.UCSDMESSAGES': "iaas.UcsdMessages",
'IAM.ACCOUNT': "iam.Account",
'IAM.ACCOUNTEXPERIENCE': "iam.AccountExperience",
'IAM.APIKEY': "iam.ApiKey",
'IAM.APPREGISTRATION': "iam.AppRegistration",
'IAM.BANNERMESSAGE': "iam.BannerMessage",
'IAM.CERTIFICATE': "iam.Certificate",
'IAM.CERTIFICATEREQUEST': "iam.CertificateRequest",
'IAM.DOMAINGROUP': "iam.DomainGroup",
'IAM.ENDPOINTPRIVILEGE': "iam.EndPointPrivilege",
'IAM.ENDPOINTROLE': "iam.EndPointRole",
'IAM.ENDPOINTUSER': "iam.EndPointUser",
'IAM.ENDPOINTUSERPOLICY': "iam.EndPointUserPolicy",
'IAM.ENDPOINTUSERROLE': "iam.EndPointUserRole",
'IAM.IDP': "iam.Idp",
'IAM.IDPREFERENCE': "iam.IdpReference",
'IAM.IPACCESSMANAGEMENT': "iam.IpAccessManagement",
'IAM.IPADDRESS': "iam.IpAddress",
'IAM.LDAPGROUP': "iam.LdapGroup",
'IAM.LDAPPOLICY': "iam.LdapPolicy",
'IAM.LDAPPROVIDER': "iam.LdapProvider",
'IAM.LOCALUSERPASSWORD': "iam.LocalUserPassword",
'IAM.LOCALUSERPASSWORDPOLICY': "iam.LocalUserPasswordPolicy",
'IAM.OAUTHTOKEN': "iam.OAuthToken",
'IAM.PERMISSION': "iam.Permission",
'IAM.PRIVATEKEYSPEC': "iam.PrivateKeySpec",
'IAM.PRIVILEGE': "iam.Privilege",
'IAM.PRIVILEGESET': "iam.PrivilegeSet",
'IAM.QUALIFIER': "iam.Qualifier",
'IAM.RESOURCELIMITS': "iam.ResourceLimits",
'IAM.RESOURCEPERMISSION': "iam.ResourcePermission",
'IAM.RESOURCEROLES': "iam.ResourceRoles",
'IAM.ROLE': "iam.Role",
'IAM.SECURITYHOLDER': "iam.SecurityHolder",
'IAM.SERVICEPROVIDER': "iam.ServiceProvider",
'IAM.SESSION': "iam.Session",
'IAM.SESSIONLIMITS': "iam.SessionLimits",
'IAM.SYSTEM': "iam.System",
'IAM.TRUSTPOINT': "iam.TrustPoint",
'IAM.USER': "iam.User",
'IAM.USERGROUP': "iam.UserGroup",
'IAM.USERPREFERENCE': "iam.UserPreference",
'INVENTORY.DEVICEINFO': "inventory.DeviceInfo",
'INVENTORY.DNMOBINDING': "inventory.DnMoBinding",
'INVENTORY.GENERICINVENTORY': "inventory.GenericInventory",
'INVENTORY.GENERICINVENTORYHOLDER': "inventory.GenericInventoryHolder",
'INVENTORY.REQUEST': "inventory.Request",
'IPMIOVERLAN.POLICY': "ipmioverlan.Policy",
'IPPOOL.BLOCKLEASE': "ippool.BlockLease",
'IPPOOL.IPLEASE': "ippool.IpLease",
'IPPOOL.POOL': "ippool.Pool",
'IPPOOL.POOLMEMBER': "ippool.PoolMember",
'IPPOOL.SHADOWBLOCK': "ippool.ShadowBlock",
'IPPOOL.SHADOWPOOL': "ippool.ShadowPool",
'IPPOOL.UNIVERSE': "ippool.Universe",
'IQNPOOL.BLOCK': "iqnpool.Block",
'IQNPOOL.LEASE': "iqnpool.Lease",
'IQNPOOL.POOL': "iqnpool.Pool",
'IQNPOOL.POOLMEMBER': "iqnpool.PoolMember",
'IQNPOOL.UNIVERSE': "iqnpool.Universe",
'IWOTENANT.TENANTSTATUS': "iwotenant.TenantStatus",
'KUBERNETES.ACICNIAPIC': "kubernetes.AciCniApic",
'KUBERNETES.ACICNIPROFILE': "kubernetes.AciCniProfile",
'KUBERNETES.ACICNITENANTCLUSTERALLOCATION': "kubernetes.AciCniTenantClusterAllocation",
'KUBERNETES.ADDONDEFINITION': "kubernetes.AddonDefinition",
'KUBERNETES.ADDONPOLICY': "kubernetes.AddonPolicy",
'KUBERNETES.ADDONREPOSITORY': "kubernetes.AddonRepository",
'KUBERNETES.BAREMETALNODEPROFILE': "kubernetes.BaremetalNodeProfile",
'KUBERNETES.CATALOG': "kubernetes.Catalog",
'KUBERNETES.CLUSTER': "kubernetes.Cluster",
'KUBERNETES.CLUSTERADDONPROFILE': "kubernetes.ClusterAddonProfile",
'KUBERNETES.CLUSTERPROFILE': "kubernetes.ClusterProfile",
'KUBERNETES.CONFIGRESULT': "kubernetes.ConfigResult",
'KUBERNETES.CONFIGRESULTENTRY': "kubernetes.ConfigResultEntry",
'KUBERNETES.CONTAINERRUNTIMEPOLICY': "kubernetes.ContainerRuntimePolicy",
'KUBERNETES.DAEMONSET': "kubernetes.DaemonSet",
'KUBERNETES.DEPLOYMENT': "kubernetes.Deployment",
'KUBERNETES.INGRESS': "kubernetes.Ingress",
'KUBERNETES.NETWORKPOLICY': "kubernetes.NetworkPolicy",
'KUBERNETES.NODE': "kubernetes.Node",
'KUBERNETES.NODEGROUPPROFILE': "kubernetes.NodeGroupProfile",
'KUBERNETES.POD': "kubernetes.Pod",
'KUBERNETES.SERVICE': "kubernetes.Service",
'KUBERNETES.STATEFULSET': "kubernetes.StatefulSet",
'KUBERNETES.SYSCONFIGPOLICY': "kubernetes.SysConfigPolicy",
'KUBERNETES.TRUSTEDREGISTRIESPOLICY': "kubernetes.TrustedRegistriesPolicy",
'KUBERNETES.VERSION': "kubernetes.Version",
'KUBERNETES.VERSIONPOLICY': "kubernetes.VersionPolicy",
'KUBERNETES.VIRTUALMACHINEINFRACONFIGPOLICY': "kubernetes.VirtualMachineInfraConfigPolicy",
'KUBERNETES.VIRTUALMACHINEINFRASTRUCTUREPROVIDER': "kubernetes.VirtualMachineInfrastructureProvider",
'KUBERNETES.VIRTUALMACHINEINSTANCETYPE': "kubernetes.VirtualMachineInstanceType",
'KUBERNETES.VIRTUALMACHINENODEPROFILE': "kubernetes.VirtualMachineNodeProfile",
'KVM.POLICY': "kvm.Policy",
'KVM.SESSION': "kvm.Session",
'KVM.TUNNEL': "kvm.Tunnel",
'LICENSE.ACCOUNTLICENSEDATA': "license.AccountLicenseData",
'LICENSE.CUSTOMEROP': "license.CustomerOp",
'LICENSE.IKSCUSTOMEROP': "license.IksCustomerOp",
'LICENSE.IKSLICENSECOUNT': "license.IksLicenseCount",
'LICENSE.IWOCUSTOMEROP': "license.IwoCustomerOp",
'LICENSE.IWOLICENSECOUNT': "license.IwoLicenseCount",
'LICENSE.LICENSEINFO': "license.LicenseInfo",
'LICENSE.LICENSERESERVATIONOP': "license.LicenseReservationOp",
'LICENSE.SMARTLICENSETOKEN': "license.SmartlicenseToken",
'LS.SERVICEPROFILE': "ls.ServiceProfile",
'MACPOOL.IDBLOCK': "macpool.IdBlock",
'MACPOOL.LEASE': "macpool.Lease",
'MACPOOL.POOL': "macpool.Pool",
'MACPOOL.POOLMEMBER': "macpool.PoolMember",
'MACPOOL.UNIVERSE': "macpool.Universe",
'MANAGEMENT.CONTROLLER': "management.Controller",
'MANAGEMENT.ENTITY': "management.Entity",
'MANAGEMENT.INTERFACE': "management.Interface",
'MEMORY.ARRAY': "memory.Array",
'MEMORY.PERSISTENTMEMORYCONFIGRESULT': "memory.PersistentMemoryConfigResult",
'MEMORY.PERSISTENTMEMORYCONFIGURATION': "memory.PersistentMemoryConfiguration",
'MEMORY.PERSISTENTMEMORYNAMESPACE': "memory.PersistentMemoryNamespace",
'MEMORY.PERSISTENTMEMORYNAMESPACECONFIGRESULT': "memory.PersistentMemoryNamespaceConfigResult",
'MEMORY.PERSISTENTMEMORYPOLICY': "memory.PersistentMemoryPolicy",
'MEMORY.PERSISTENTMEMORYREGION': "memory.PersistentMemoryRegion",
'MEMORY.PERSISTENTMEMORYUNIT': "memory.PersistentMemoryUnit",
'MEMORY.UNIT': "memory.Unit",
'META.DEFINITION': "meta.Definition",
'NETWORK.ELEMENT': "network.Element",
'NETWORK.ELEMENTSUMMARY': "network.ElementSummary",
'NETWORK.FCZONEINFO': "network.FcZoneInfo",
'NETWORK.VLANPORTINFO': "network.VlanPortInfo",
'NETWORKCONFIG.POLICY': "networkconfig.Policy",
'NIAAPI.APICCCOPOST': "niaapi.ApicCcoPost",
'NIAAPI.APICFIELDNOTICE': "niaapi.ApicFieldNotice",
'NIAAPI.APICHWEOL': "niaapi.ApicHweol",
'NIAAPI.APICLATESTMAINTAINEDRELEASE': "niaapi.ApicLatestMaintainedRelease",
'NIAAPI.APICRELEASERECOMMEND': "niaapi.ApicReleaseRecommend",
'NIAAPI.APICSWEOL': "niaapi.ApicSweol",
'NIAAPI.DCNMCCOPOST': "niaapi.DcnmCcoPost",
'NIAAPI.DCNMFIELDNOTICE': "niaapi.DcnmFieldNotice",
'NIAAPI.DCNMHWEOL': "niaapi.DcnmHweol",
'NIAAPI.DCNMLATESTMAINTAINEDRELEASE': "niaapi.DcnmLatestMaintainedRelease",
'NIAAPI.DCNMRELEASERECOMMEND': "niaapi.DcnmReleaseRecommend",
'NIAAPI.DCNMSWEOL': "niaapi.DcnmSweol",
'NIAAPI.FILEDOWNLOADER': "niaapi.FileDownloader",
'NIAAPI.NIAMETADATA': "niaapi.NiaMetadata",
'NIAAPI.NIBFILEDOWNLOADER': "niaapi.NibFileDownloader",
'NIAAPI.NIBMETADATA': "niaapi.NibMetadata",
'NIAAPI.VERSIONREGEX': "niaapi.VersionRegex",
'NIATELEMETRY.AAALDAPPROVIDERDETAILS': "niatelemetry.AaaLdapProviderDetails",
'NIATELEMETRY.AAARADIUSPROVIDERDETAILS': "niatelemetry.AaaRadiusProviderDetails",
'NIATELEMETRY.AAATACACSPROVIDERDETAILS': "niatelemetry.AaaTacacsProviderDetails",
'NIATELEMETRY.APICAPPPLUGINDETAILS': "niatelemetry.ApicAppPluginDetails",
'NIATELEMETRY.APICCOREFILEDETAILS': "niatelemetry.ApicCoreFileDetails",
'NIATELEMETRY.APICDBGEXPRSEXPORTDEST': "niatelemetry.ApicDbgexpRsExportDest",
'NIATELEMETRY.APICDBGEXPRSTSSCHEDULER': "niatelemetry.ApicDbgexpRsTsScheduler",
'NIATELEMETRY.APICFANDETAILS': "niatelemetry.ApicFanDetails",
'NIATELEMETRY.APICFEXDETAILS': "niatelemetry.ApicFexDetails",
'NIATELEMETRY.APICFLASHDETAILS': "niatelemetry.ApicFlashDetails",
'NIATELEMETRY.APICNTPAUTH': "niatelemetry.ApicNtpAuth",
'NIATELEMETRY.APICPSUDETAILS': "niatelemetry.ApicPsuDetails",
'NIATELEMETRY.APICREALMDETAILS': "niatelemetry.ApicRealmDetails",
'NIATELEMETRY.APICSNMPCLIENTGRPDETAILS': "niatelemetry.ApicSnmpClientGrpDetails",
'NIATELEMETRY.APICSNMPCOMMUNITYACCESSDETAILS': "niatelemetry.ApicSnmpCommunityAccessDetails",
'NIATELEMETRY.APICSNMPCOMMUNITYDETAILS': "niatelemetry.ApicSnmpCommunityDetails",
'NIATELEMETRY.APICSNMPTRAPDETAILS': "niatelemetry.ApicSnmpTrapDetails",
'NIATELEMETRY.APICSNMPTRAPFWDSERVERDETAILS': "niatelemetry.ApicSnmpTrapFwdServerDetails",
'NIATELEMETRY.APICSNMPVERSIONTHREEDETAILS': "niatelemetry.ApicSnmpVersionThreeDetails",
'NIATELEMETRY.APICSYSLOGGRP': "niatelemetry.ApicSysLogGrp",
'NIATELEMETRY.APICSYSLOGSRC': "niatelemetry.ApicSysLogSrc",
'NIATELEMETRY.APICTRANSCEIVERDETAILS': "niatelemetry.ApicTransceiverDetails",
'NIATELEMETRY.APICUIPAGECOUNTS': "niatelemetry.ApicUiPageCounts",
'NIATELEMETRY.APPDETAILS': "niatelemetry.AppDetails",
'NIATELEMETRY.COMMONPOLICIES': "niatelemetry.CommonPolicies",
'NIATELEMETRY.DCNMFANDETAILS': "niatelemetry.DcnmFanDetails",
'NIATELEMETRY.DCNMFEXDETAILS': "niatelemetry.DcnmFexDetails",
'NIATELEMETRY.DCNMMODULEDETAILS': "niatelemetry.DcnmModuleDetails",
'NIATELEMETRY.DCNMPSUDETAILS': "niatelemetry.DcnmPsuDetails",
'NIATELEMETRY.DCNMTRANSCEIVERDETAILS': "niatelemetry.DcnmTransceiverDetails",
'NIATELEMETRY.EPG': "niatelemetry.Epg",
'NIATELEMETRY.FABRICMODULEDETAILS': "niatelemetry.FabricModuleDetails",
'NIATELEMETRY.FABRICPODPROFILE': "niatelemetry.FabricPodProfile",
'NIATELEMETRY.FABRICPODSS': "niatelemetry.FabricPodSs",
'NIATELEMETRY.FAULT': "niatelemetry.Fault",
'NIATELEMETRY.HTTPSACLCONTRACTDETAILS': "niatelemetry.HttpsAclContractDetails",
'NIATELEMETRY.HTTPSACLCONTRACTFILTERMAP': "niatelemetry.HttpsAclContractFilterMap",
'NIATELEMETRY.HTTPSACLEPGCONTRACTMAP': "niatelemetry.HttpsAclEpgContractMap",
'NIATELEMETRY.HTTPSACLEPGDETAILS': "niatelemetry.HttpsAclEpgDetails",
'NIATELEMETRY.HTTPSACLFILTERDETAILS': "niatelemetry.HttpsAclFilterDetails",
'NIATELEMETRY.LC': "niatelemetry.Lc",
'NIATELEMETRY.MSOCONTRACTDETAILS': "niatelemetry.MsoContractDetails",
'NIATELEMETRY.MSOEPGDETAILS': "niatelemetry.MsoEpgDetails",
'NIATELEMETRY.MSOSCHEMADETAILS': "niatelemetry.MsoSchemaDetails",
'NIATELEMETRY.MSOSITEDETAILS': "niatelemetry.MsoSiteDetails",
'NIATELEMETRY.MSOTENANTDETAILS': "niatelemetry.MsoTenantDetails",
'NIATELEMETRY.NEXUSDASHBOARDCONTROLLERDETAILS': "niatelemetry.NexusDashboardControllerDetails",
'NIATELEMETRY.NEXUSDASHBOARDDETAILS': "niatelemetry.NexusDashboardDetails",
'NIATELEMETRY.NEXUSDASHBOARDMEMORYDETAILS': "niatelemetry.NexusDashboardMemoryDetails",
'NIATELEMETRY.NEXUSDASHBOARDS': "niatelemetry.NexusDashboards",
'NIATELEMETRY.NIAFEATUREUSAGE': "niatelemetry.NiaFeatureUsage",
'NIATELEMETRY.NIAINVENTORY': "niatelemetry.NiaInventory",
'NIATELEMETRY.NIAINVENTORYDCNM': "niatelemetry.NiaInventoryDcnm",
'NIATELEMETRY.NIAINVENTORYFABRIC': "niatelemetry.NiaInventoryFabric",
'NIATELEMETRY.NIALICENSESTATE': "niatelemetry.NiaLicenseState",
'NIATELEMETRY.PASSWORDSTRENGTHCHECK': "niatelemetry.PasswordStrengthCheck",
'NIATELEMETRY.PODCOMMPOLICIES': "niatelemetry.PodCommPolicies",
'NIATELEMETRY.PODSNMPPOLICIES': "niatelemetry.PodSnmpPolicies",
'NIATELEMETRY.PODTIMESERVERPOLICIES': "niatelemetry.PodTimeServerPolicies",
'NIATELEMETRY.SITEINVENTORY': "niatelemetry.SiteInventory",
'NIATELEMETRY.SNMPSRC': "niatelemetry.SnmpSrc",
'NIATELEMETRY.SSHVERSIONTWO': "niatelemetry.SshVersionTwo",
'NIATELEMETRY.SUPERVISORMODULEDETAILS': "niatelemetry.SupervisorModuleDetails",
'NIATELEMETRY.SYSLOGREMOTEDEST': "niatelemetry.SyslogRemoteDest",
'NIATELEMETRY.SYSLOGSYSMSG': "niatelemetry.SyslogSysMsg",
'NIATELEMETRY.SYSLOGSYSMSGFACFILTER': "niatelemetry.SyslogSysMsgFacFilter",
'NIATELEMETRY.SYSTEMCONTROLLERDETAILS': "niatelemetry.SystemControllerDetails",
'NIATELEMETRY.TENANT': "niatelemetry.Tenant",
'NOTIFICATION.ACCOUNTSUBSCRIPTION': "notification.AccountSubscription",
'NTP.POLICY': "ntp.Policy",
'OAUTH.ACCESSTOKEN': "oauth.AccessToken",
'OAUTH.AUTHORIZATION': "oauth.Authorization",
'OPRS.DEPLOYMENT': "oprs.Deployment",
'OPRS.SYNCTARGETLISTMESSAGE': "oprs.SyncTargetListMessage",
'ORGANIZATION.ORGANIZATION': "organization.Organization",
'OS.BULKINSTALLINFO': "os.BulkInstallInfo",
'OS.CATALOG': "os.Catalog",
'OS.CONFIGURATIONFILE': "os.ConfigurationFile",
'OS.DISTRIBUTION': "os.Distribution",
'OS.INSTALL': "os.Install",
'OS.OSSUPPORT': "os.OsSupport",
'OS.SUPPORTEDVERSION': "os.SupportedVersion",
'OS.TEMPLATEFILE': "os.TemplateFile",
'OS.VALIDINSTALLTARGET': "os.ValidInstallTarget",
'PCI.COPROCESSORCARD': "pci.CoprocessorCard",
'PCI.DEVICE': "pci.Device",
'PCI.LINK': "pci.Link",
'PCI.SWITCH': "pci.Switch",
'PORT.GROUP': "port.Group",
'PORT.MACBINDING': "port.MacBinding",
'PORT.SUBGROUP': "port.SubGroup",
'POWER.CONTROLSTATE': "power.ControlState",
'POWER.POLICY': "power.Policy",
'PROCESSOR.UNIT': "processor.Unit",
'RACK.UNITPERSONALITY': "rack.UnitPersonality",
'RECOMMENDATION.CAPACITYRUNWAY': "recommendation.CapacityRunway",
'RECOMMENDATION.PHYSICALITEM': "recommendation.PhysicalItem",
'RECOVERY.BACKUPCONFIGPOLICY': "recovery.BackupConfigPolicy",
'RECOVERY.BACKUPPROFILE': "recovery.BackupProfile",
'RECOVERY.CONFIGRESULT': "recovery.ConfigResult",
'RECOVERY.CONFIGRESULTENTRY': "recovery.ConfigResultEntry",
'RECOVERY.ONDEMANDBACKUP': "recovery.OnDemandBackup",
'RECOVERY.RESTORE': "recovery.Restore",
'RECOVERY.SCHEDULECONFIGPOLICY': "recovery.ScheduleConfigPolicy",
'RESOURCE.GROUP': "resource.Group",
'RESOURCE.GROUPMEMBER': "resource.GroupMember",
'RESOURCE.LICENSERESOURCECOUNT': "resource.LicenseResourceCount",
'RESOURCE.MEMBERSHIP': "resource.Membership",
'RESOURCE.MEMBERSHIPHOLDER': "resource.MembershipHolder",
'RESOURCE.RESERVATION': "resource.Reservation",
'RESOURCEPOOL.LEASE': "resourcepool.Lease",
'RESOURCEPOOL.LEASERESOURCE': "resourcepool.LeaseResource",
'RESOURCEPOOL.POOL': "resourcepool.Pool",
'RESOURCEPOOL.POOLMEMBER': "resourcepool.PoolMember",
'RESOURCEPOOL.UNIVERSE': "resourcepool.Universe",
'RPROXY.REVERSEPROXY': "rproxy.ReverseProxy",
'SDCARD.POLICY': "sdcard.Policy",
'SDWAN.PROFILE': "sdwan.Profile",
'SDWAN.ROUTERNODE': "sdwan.RouterNode",
'SDWAN.ROUTERPOLICY': "sdwan.RouterPolicy",
'SDWAN.VMANAGEACCOUNTPOLICY': "sdwan.VmanageAccountPolicy",
'SEARCH.SEARCHITEM': "search.SearchItem",
'SEARCH.TAGITEM': "search.TagItem",
'SECURITY.UNIT': "security.Unit",
'SERVER.CONFIGCHANGEDETAIL': "server.ConfigChangeDetail",
'SERVER.CONFIGIMPORT': "server.ConfigImport",
'SERVER.CONFIGRESULT': "server.ConfigResult",
'SERVER.CONFIGRESULTENTRY': "server.ConfigResultEntry",
'SERVER.PROFILE': "server.Profile",
'SERVER.PROFILETEMPLATE': "server.ProfileTemplate",
'SMTP.POLICY': "smtp.Policy",
'SNMP.POLICY': "snmp.Policy",
'SOFTWARE.APPLIANCEDISTRIBUTABLE': "software.ApplianceDistributable",
'SOFTWARE.DOWNLOADHISTORY': "software.DownloadHistory",
'SOFTWARE.HCLMETA': "software.HclMeta",
'SOFTWARE.HYPERFLEXBUNDLEDISTRIBUTABLE': "software.HyperflexBundleDistributable",
'SOFTWARE.HYPERFLEXDISTRIBUTABLE': "software.HyperflexDistributable",
'SOFTWARE.RELEASEMETA': "software.ReleaseMeta",
'SOFTWARE.SOLUTIONDISTRIBUTABLE': "software.SolutionDistributable",
'SOFTWARE.UCSDBUNDLEDISTRIBUTABLE': "software.UcsdBundleDistributable",
'SOFTWARE.UCSDDISTRIBUTABLE': "software.UcsdDistributable",
'SOFTWAREREPOSITORY.AUTHORIZATION': "softwarerepository.Authorization",
'SOFTWAREREPOSITORY.CACHEDIMAGE': "softwarerepository.CachedImage",
'SOFTWAREREPOSITORY.CATALOG': "softwarerepository.Catalog",
'SOFTWAREREPOSITORY.CATEGORYMAPPER': "softwarerepository.CategoryMapper",
'SOFTWAREREPOSITORY.CATEGORYMAPPERMODEL': "softwarerepository.CategoryMapperModel",
'SOFTWAREREPOSITORY.CATEGORYSUPPORTCONSTRAINT': "softwarerepository.CategorySupportConstraint",
'SOFTWAREREPOSITORY.DOWNLOADSPEC': "softwarerepository.DownloadSpec",
'SOFTWAREREPOSITORY.OPERATINGSYSTEMFILE': "softwarerepository.OperatingSystemFile",
'SOFTWAREREPOSITORY.RELEASE': "softwarerepository.Release",
'SOL.POLICY': "sol.Policy",
'SSH.POLICY': "ssh.Policy",
'STORAGE.CONTROLLER': "storage.Controller",
'STORAGE.DISKGROUP': "storage.DiskGroup",
'STORAGE.DISKSLOT': "storage.DiskSlot",
'STORAGE.DRIVEGROUP': "storage.DriveGroup",
'STORAGE.ENCLOSURE': "storage.Enclosure",
'STORAGE.ENCLOSUREDISK': "storage.EnclosureDisk",
'STORAGE.ENCLOSUREDISKSLOTEP': "storage.EnclosureDiskSlotEp",
'STORAGE.FLEXFLASHCONTROLLER': "storage.FlexFlashController",
'STORAGE.FLEXFLASHCONTROLLERPROPS': "storage.FlexFlashControllerProps",
'STORAGE.FLEXFLASHPHYSICALDRIVE': "storage.FlexFlashPhysicalDrive",
'STORAGE.FLEXFLASHVIRTUALDRIVE': "storage.FlexFlashVirtualDrive",
'STORAGE.FLEXUTILCONTROLLER': "storage.FlexUtilController",
'STORAGE.FLEXUTILPHYSICALDRIVE': "storage.FlexUtilPhysicalDrive",
'STORAGE.FLEXUTILVIRTUALDRIVE': "storage.FlexUtilVirtualDrive",
'STORAGE.HITACHIARRAY': "storage.HitachiArray",
'STORAGE.HITACHICONTROLLER': "storage.HitachiController",
'STORAGE.HITACHIDISK': "storage.HitachiDisk",
'STORAGE.HITACHIHOST': "storage.HitachiHost",
'STORAGE.HITACHIHOSTLUN': "storage.HitachiHostLun",
'STORAGE.HITACHIPARITYGROUP': "storage.HitachiParityGroup",
'STORAGE.HITACHIPOOL': "storage.HitachiPool",
'STORAGE.HITACHIPORT': "storage.HitachiPort",
'STORAGE.HITACHIVOLUME': "storage.HitachiVolume",
'STORAGE.HYPERFLEXSTORAGECONTAINER': "storage.HyperFlexStorageContainer",
'STORAGE.HYPERFLEXVOLUME': "storage.HyperFlexVolume",
'STORAGE.ITEM': "storage.Item",
'STORAGE.NETAPPAGGREGATE': "storage.NetAppAggregate",
'STORAGE.NETAPPBASEDISK': "storage.NetAppBaseDisk",
'STORAGE.NETAPPCLUSTER': "storage.NetAppCluster",
'STORAGE.NETAPPETHERNETPORT': "storage.NetAppEthernetPort",
'STORAGE.NETAPPEXPORTPOLICY': "storage.NetAppExportPolicy",
'STORAGE.NETAPPFCINTERFACE': "storage.NetAppFcInterface",
'STORAGE.NETAPPFCPORT': "storage.NetAppFcPort",
'STORAGE.NETAPPINITIATORGROUP': "storage.NetAppInitiatorGroup",
'STORAGE.NETAPPIPINTERFACE': "storage.NetAppIpInterface",
'STORAGE.NETAPPLICENSE': "storage.NetAppLicense",
'STORAGE.NETAPPLUN': "storage.NetAppLun",
'STORAGE.NETAPPLUNMAP': "storage.NetAppLunMap",
'STORAGE.NETAPPNODE': "storage.NetAppNode",
'STORAGE.NETAPPNTPSERVER': "storage.NetAppNtpServer",
'STORAGE.NETAPPSENSOR': "storage.NetAppSensor",
'STORAGE.NETAPPSTORAGEVM': "storage.NetAppStorageVm",
'STORAGE.NETAPPVOLUME': "storage.NetAppVolume",
'STORAGE.NETAPPVOLUMESNAPSHOT': "storage.NetAppVolumeSnapshot",
'STORAGE.PHYSICALDISK': "storage.PhysicalDisk",
'STORAGE.PHYSICALDISKEXTENSION': "storage.PhysicalDiskExtension",
'STORAGE.PHYSICALDISKUSAGE': "storage.PhysicalDiskUsage",
'STORAGE.PUREARRAY': "storage.PureArray",
'STORAGE.PURECONTROLLER': "storage.PureController",
'STORAGE.PUREDISK': "storage.PureDisk",
'STORAGE.PUREHOST': "storage.PureHost",
'STORAGE.PUREHOSTGROUP': "storage.PureHostGroup",
'STORAGE.PUREHOSTLUN': "storage.PureHostLun",
'STORAGE.PUREPORT': "storage.PurePort",
'STORAGE.PUREPROTECTIONGROUP': "storage.PureProtectionGroup",
'STORAGE.PUREPROTECTIONGROUPSNAPSHOT': "storage.PureProtectionGroupSnapshot",
'STORAGE.PUREREPLICATIONSCHEDULE': "storage.PureReplicationSchedule",
'STORAGE.PURESNAPSHOTSCHEDULE': "storage.PureSnapshotSchedule",
'STORAGE.PUREVOLUME': "storage.PureVolume",
'STORAGE.PUREVOLUMESNAPSHOT': "storage.PureVolumeSnapshot",
'STORAGE.SASEXPANDER': "storage.SasExpander",
'STORAGE.SASPORT': "storage.SasPort",
'STORAGE.SPAN': "storage.Span",
'STORAGE.STORAGEPOLICY': "storage.StoragePolicy",
'STORAGE.VDMEMBEREP': "storage.VdMemberEp",
'STORAGE.VIRTUALDRIVE': "storage.VirtualDrive",
'STORAGE.VIRTUALDRIVECONTAINER': "storage.VirtualDriveContainer",
'STORAGE.VIRTUALDRIVEEXTENSION': "storage.VirtualDriveExtension",
'STORAGE.VIRTUALDRIVEIDENTITY': "storage.VirtualDriveIdentity",
'SYSLOG.POLICY': "syslog.Policy",
'TAM.ADVISORYCOUNT': "tam.AdvisoryCount",
'TAM.ADVISORYDEFINITION': "tam.AdvisoryDefinition",
'TAM.ADVISORYINFO': "tam.AdvisoryInfo",
'TAM.ADVISORYINSTANCE': "tam.AdvisoryInstance",
'TAM.SECURITYADVISORY': "tam.SecurityAdvisory",
'TASK.HITACHISCOPEDINVENTORY': "task.HitachiScopedInventory",
'TASK.HYPERFLEXSCOPEDINVENTORY': "task.HyperflexScopedInventory",
'TASK.IWESCOPEDINVENTORY': "task.IweScopedInventory",
'TASK.NETAPPSCOPEDINVENTORY': "task.NetAppScopedInventory",
'TASK.PUBLICCLOUDSCOPEDINVENTORY': "task.PublicCloudScopedInventory",
'TASK.PURESCOPEDINVENTORY': "task.PureScopedInventory",
'TASK.SERVERSCOPEDINVENTORY': "task.ServerScopedInventory",
'TECHSUPPORTMANAGEMENT.COLLECTIONCONTROLPOLICY': "techsupportmanagement.CollectionControlPolicy",
'TECHSUPPORTMANAGEMENT.DOWNLOAD': "techsupportmanagement.Download",
'TECHSUPPORTMANAGEMENT.TECHSUPPORTBUNDLE': "techsupportmanagement.TechSupportBundle",
'TECHSUPPORTMANAGEMENT.TECHSUPPORTSTATUS': "techsupportmanagement.TechSupportStatus",
'TERMINAL.AUDITLOG': "terminal.AuditLog",
'TERRAFORM.EXECUTOR': "terraform.Executor",
'THERMAL.POLICY': "thermal.Policy",
'TOP.SYSTEM': "top.System",
'UCSD.BACKUPINFO': "ucsd.BackupInfo",
'UUIDPOOL.BLOCK': "uuidpool.Block",
'UUIDPOOL.POOL': "uuidpool.Pool",
'UUIDPOOL.POOLMEMBER': "uuidpool.PoolMember",
'UUIDPOOL.UNIVERSE': "uuidpool.Universe",
'UUIDPOOL.UUIDLEASE': "uuidpool.UuidLease",
'VIRTUALIZATION.CISCOHYPERVISORMANAGER': "virtualization.CiscoHypervisorManager",
'VIRTUALIZATION.ESXICONSOLE': "virtualization.EsxiConsole",
'VIRTUALIZATION.HOST': "virtualization.Host",
'VIRTUALIZATION.IWECLUSTER': "virtualization.IweCluster",
'VIRTUALIZATION.IWEDATACENTER': "virtualization.IweDatacenter",
'VIRTUALIZATION.IWEDVUPLINK': "virtualization.IweDvUplink",
'VIRTUALIZATION.IWEDVSWITCH': "virtualization.IweDvswitch",
'VIRTUALIZATION.IWEHOST': "virtualization.IweHost",
'VIRTUALIZATION.IWEHOSTINTERFACE': "virtualization.IweHostInterface",
'VIRTUALIZATION.IWEHOSTVSWITCH': "virtualization.IweHostVswitch",
'VIRTUALIZATION.IWENETWORK': "virtualization.IweNetwork",
'VIRTUALIZATION.IWEVIRTUALDISK': "virtualization.IweVirtualDisk",
'VIRTUALIZATION.IWEVIRTUALMACHINE': "virtualization.IweVirtualMachine",
'VIRTUALIZATION.IWEVIRTUALMACHINENETWORKINTERFACE': "virtualization.IweVirtualMachineNetworkInterface",
'VIRTUALIZATION.VIRTUALDISK': "virtualization.VirtualDisk",
'VIRTUALIZATION.VIRTUALMACHINE': "virtualization.VirtualMachine",
'VIRTUALIZATION.VIRTUALNETWORK': "virtualization.VirtualNetwork",
'VIRTUALIZATION.VMWARECLUSTER': "virtualization.VmwareCluster",
'VIRTUALIZATION.VMWAREDATACENTER': "virtualization.VmwareDatacenter",
'VIRTUALIZATION.VMWAREDATASTORE': "virtualization.VmwareDatastore",
'VIRTUALIZATION.VMWAREDATASTORECLUSTER': "virtualization.VmwareDatastoreCluster",
'VIRTUALIZATION.VMWAREDISTRIBUTEDNETWORK': "virtualization.VmwareDistributedNetwork",
'VIRTUALIZATION.VMWAREDISTRIBUTEDSWITCH': "virtualization.VmwareDistributedSwitch",
'VIRTUALIZATION.VMWAREFOLDER': "virtualization.VmwareFolder",
'VIRTUALIZATION.VMWAREHOST': "virtualization.VmwareHost",
'VIRTUALIZATION.VMWAREKERNELNETWORK': "virtualization.VmwareKernelNetwork",
'VIRTUALIZATION.VMWARENETWORK': "virtualization.VmwareNetwork",
'VIRTUALIZATION.VMWAREPHYSICALNETWORKINTERFACE': "virtualization.VmwarePhysicalNetworkInterface",
'VIRTUALIZATION.VMWAREUPLINKPORT': "virtualization.VmwareUplinkPort",
'VIRTUALIZATION.VMWAREVCENTER': "virtualization.VmwareVcenter",
'VIRTUALIZATION.VMWAREVIRTUALDISK': "virtualization.VmwareVirtualDisk",
'VIRTUALIZATION.VMWAREVIRTUALMACHINE': "virtualization.VmwareVirtualMachine",
'VIRTUALIZATION.VMWAREVIRTUALMACHINESNAPSHOT': "virtualization.VmwareVirtualMachineSnapshot",
'VIRTUALIZATION.VMWAREVIRTUALNETWORKINTERFACE': "virtualization.VmwareVirtualNetworkInterface",
'VIRTUALIZATION.VMWAREVIRTUALSWITCH': "virtualization.VmwareVirtualSwitch",
'VMEDIA.POLICY': "vmedia.Policy",
'VMRC.CONSOLE': "vmrc.Console",
'VNC.CONSOLE': "vnc.Console",
'VNIC.ETHADAPTERPOLICY': "vnic.EthAdapterPolicy",
'VNIC.ETHIF': "vnic.EthIf",
'VNIC.ETHNETWORKPOLICY': "vnic.EthNetworkPolicy",
'VNIC.ETHQOSPOLICY': "vnic.EthQosPolicy",
'VNIC.FCADAPTERPOLICY': "vnic.FcAdapterPolicy",
'VNIC.FCIF': "vnic.FcIf",
'VNIC.FCNETWORKPOLICY': "vnic.FcNetworkPolicy",
'VNIC.FCQOSPOLICY': "vnic.FcQosPolicy",
'VNIC.ISCSIADAPTERPOLICY': "vnic.IscsiAdapterPolicy",
'VNIC.ISCSIBOOTPOLICY': "vnic.IscsiBootPolicy",
'VNIC.ISCSISTATICTARGETPOLICY': "vnic.IscsiStaticTargetPolicy",
'VNIC.LANCONNECTIVITYPOLICY': "vnic.LanConnectivityPolicy",
'VNIC.LCPSTATUS': "vnic.LcpStatus",
'VNIC.SANCONNECTIVITYPOLICY': "vnic.SanConnectivityPolicy",
'VNIC.SCPSTATUS': "vnic.ScpStatus",
'VRF.VRF': "vrf.Vrf",
'WORKFLOW.ANSIBLEBATCHEXECUTOR': "workflow.AnsibleBatchExecutor",
'WORKFLOW.BATCHAPIEXECUTOR': "workflow.BatchApiExecutor",
'WORKFLOW.BUILDTASKMETA': "workflow.BuildTaskMeta",
'WORKFLOW.BUILDTASKMETAOWNER': "workflow.BuildTaskMetaOwner",
'WORKFLOW.CATALOG': "workflow.Catalog",
'WORKFLOW.CUSTOMDATATYPEDEFINITION': "workflow.CustomDataTypeDefinition",
'WORKFLOW.ERRORRESPONSEHANDLER': "workflow.ErrorResponseHandler",
'WORKFLOW.PENDINGDYNAMICWORKFLOWINFO': "workflow.PendingDynamicWorkflowInfo",
'WORKFLOW.ROLLBACKWORKFLOW': "workflow.RollbackWorkflow",
'WORKFLOW.SOLUTIONACTIONDEFINITION': "workflow.SolutionActionDefinition",
'WORKFLOW.SOLUTIONACTIONINSTANCE': "workflow.SolutionActionInstance",
'WORKFLOW.SOLUTIONDEFINITION': "workflow.SolutionDefinition",
'WORKFLOW.SOLUTIONINSTANCE': "workflow.SolutionInstance",
'WORKFLOW.SOLUTIONOUTPUT': "workflow.SolutionOutput",
'WORKFLOW.SSHBATCHEXECUTOR': "workflow.SshBatchExecutor",
'WORKFLOW.TASKDEBUGLOG': "workflow.TaskDebugLog",
'WORKFLOW.TASKDEFINITION': "workflow.TaskDefinition",
'WORKFLOW.TASKINFO': "workflow.TaskInfo",
'WORKFLOW.TASKMETADATA': "workflow.TaskMetadata",
'WORKFLOW.TASKNOTIFICATION': "workflow.TaskNotification",
'WORKFLOW.TEMPLATEEVALUATION': "workflow.TemplateEvaluation",
'WORKFLOW.TEMPLATEFUNCTIONMETA': "workflow.TemplateFunctionMeta",
'WORKFLOW.WORKFLOWDEFINITION': "workflow.WorkflowDefinition",
'WORKFLOW.WORKFLOWINFO': "workflow.WorkflowInfo",
'WORKFLOW.WORKFLOWMETA': "workflow.WorkflowMeta",
'WORKFLOW.WORKFLOWMETADATA': "workflow.WorkflowMetadata",
'WORKFLOW.WORKFLOWNOTIFICATION': "workflow.WorkflowNotification",
},
}
validations = {
('uuid',): {
'regex': {
'pattern': r'^$|^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$', # noqa: E501
},
},
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        # Resolve forward-referenced model classes before they are used in
        # the type tuples below.
        lazy_import()
        # Keys are pythonic attribute names; values are tuples of accepted
        # types (none_type in the tuple marks a nullable property).
        return {
            'class_id': (str,),  # noqa: E501
            'moid': (str,),  # noqa: E501
            'selector': (str,),  # noqa: E501
            'link': (str,),  # noqa: E501
            'account_moid': (str,),  # noqa: E501
            'create_time': (datetime,),  # noqa: E501
            'domain_group_moid': (str,),  # noqa: E501
            'mod_time': (datetime,),  # noqa: E501
            'owners': ([str], none_type,),  # noqa: E501
            'shared_scope': (str,),  # noqa: E501
            'tags': ([MoTag], none_type,),  # noqa: E501
            'version_context': (MoVersionContext,),  # noqa: E501
            'ancestors': ([MoBaseMoRelationship], none_type,),  # noqa: E501
            'parent': (MoBaseMoRelationship,),  # noqa: E501
            'permission_resources': ([MoBaseMoRelationship], none_type,),  # noqa: E501
            'display_names': (DisplayNames,),  # noqa: E501
            'registered_device': (AssetDeviceRegistrationRelationship,),  # noqa: E501
            'name': (str,),  # noqa: E501
            'uuid': (str,),  # noqa: E501
            'identity': (str,),  # noqa: E501
            'cluster_count': (int,),  # noqa: E501
            'datastore_count': (int,),  # noqa: E501
            'host_count': (int,),  # noqa: E501
            'inventory_path': (str,),  # noqa: E501
            'network_count': (int,),  # noqa: E501
            'vm_count': (int,),  # noqa: E501
            'vm_template_count': (int,),  # noqa: E501
            'hypervisor_manager': (VirtualizationVmwareVcenterRelationship,),  # noqa: E501
            'parent_folder': (VirtualizationVmwareFolderRelationship,),  # noqa: E501
            'object_type': (str,),  # noqa: E501
        }
@cached_property
def discriminator():
lazy_import()
val = {
'mo.MoRef': MoMoRef,
'virtualization.VmwareDatacenter': VirtualizationVmwareDatacenter,
}
if not val:
return None
return {'class_id': val}
attribute_map = {
'class_id': 'ClassId', # noqa: E501
'moid': 'Moid', # noqa: E501
'selector': 'Selector', # noqa: E501
'link': 'link', # noqa: E501
'account_moid': 'AccountMoid', # noqa: E501
'create_time': 'CreateTime', # noqa: E501
'domain_group_moid': 'DomainGroupMoid', # noqa: E501
'mod_time': 'ModTime', # noqa: E501
'owners': 'Owners', # noqa: E501
'shared_scope': 'SharedScope', # noqa: E501
'tags': 'Tags', # noqa: E501
'version_context': 'VersionContext', # noqa: E501
'ancestors': 'Ancestors', # noqa: E501
'parent': 'Parent', # noqa: E501
'permission_resources': 'PermissionResources', # noqa: E501
'display_names': 'DisplayNames', # noqa: E501
'registered_device': 'RegisteredDevice', # noqa: E501
'name': 'Name', # noqa: E501
'uuid': 'Uuid', # noqa: E501
'identity': 'Identity', # noqa: E501
'cluster_count': 'ClusterCount', # noqa: E501
'datastore_count': 'DatastoreCount', # noqa: E501
'host_count': 'HostCount', # noqa: E501
'inventory_path': 'InventoryPath', # noqa: E501
'network_count': 'NetworkCount', # noqa: E501
'vm_count': 'VmCount', # noqa: E501
'vm_template_count': 'VmTemplateCount', # noqa: E501
'hypervisor_manager': 'HypervisorManager', # noqa: E501
'parent_folder': 'ParentFolder', # noqa: E501
'object_type': 'ObjectType', # noqa: E501
}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
'_composed_instances',
'_var_name_to_model_instances',
'_additional_properties_model_instances',
])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """VirtualizationVmwareDatacenterRelationship - a model defined in OpenAPI

        Args:

        Keyword Args:
            class_id (str): The fully-qualified name of the instantiated, concrete type. This property is used as a discriminator to identify the type of the payload when marshaling and unmarshaling data.. defaults to "mo.MoRef", must be one of ["mo.MoRef", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            moid (str): The Moid of the referenced REST resource.. [optional]  # noqa: E501
            selector (str): An OData $filter expression which describes the REST resource to be referenced. This field may be set instead of 'moid' by clients. 1. If 'moid' is set this field is ignored. 1. If 'selector' is set and 'moid' is empty/absent from the request, Intersight determines the Moid of the resource matching the filter expression and populates it in the MoRef that is part of the object instance being inserted/updated to fulfill the REST request. An error is returned if the filter matches zero or more than one REST resource. An example filter string is: Serial eq '3AA8B7T11'.. [optional]  # noqa: E501
            link (str): A URL to an instance of the 'mo.MoRef' class.. [optional]  # noqa: E501
            account_moid (str): The Account ID for this managed object.. [optional]  # noqa: E501
            create_time (datetime): The time when this managed object was created.. [optional]  # noqa: E501
            domain_group_moid (str): The DomainGroup ID for this managed object.. [optional]  # noqa: E501
            mod_time (datetime): The time when this managed object was last modified.. [optional]  # noqa: E501
            owners ([str], none_type): [optional]  # noqa: E501
            shared_scope (str): Intersight provides pre-built workflows, tasks and policies to end users through global catalogs. Objects that are made available through global catalogs are said to have a 'shared' ownership. Shared objects are either made globally available to all end users or restricted to end users based on their license entitlement. Users can use this property to differentiate the scope (global or a specific license tier) to which a shared MO belongs.. [optional]  # noqa: E501
            tags ([MoTag], none_type): [optional]  # noqa: E501
            version_context (MoVersionContext): [optional]  # noqa: E501
            ancestors ([MoBaseMoRelationship], none_type): An array of relationships to moBaseMo resources.. [optional]  # noqa: E501
            parent (MoBaseMoRelationship): [optional]  # noqa: E501
            permission_resources ([MoBaseMoRelationship], none_type): An array of relationships to moBaseMo resources.. [optional]  # noqa: E501
            display_names (DisplayNames): [optional]  # noqa: E501
            registered_device (AssetDeviceRegistrationRelationship): [optional]  # noqa: E501
            name (str): Name of the virtual machine placement. It is the name of the VPC (Virtual Private Cloud) in case of AWS virtual machine, and datacenter name in case of VMware virtual machine.. [optional]  # noqa: E501
            uuid (str): The uuid of this placement. The uuid is internally generated and not user specified.. [optional]  # noqa: E501
            identity (str): The internally generated identity of this placement. This entity is not manipulated by users. It aids in uniquely identifying the placement object.. [optional]  # noqa: E501
            cluster_count (int): Count of all clusters associated with this DC.. [optional]  # noqa: E501
            datastore_count (int): Count of all datastores associated with this DC.. [optional]  # noqa: E501
            host_count (int): Count of all hosts associated with this DC.. [optional]  # noqa: E501
            inventory_path (str): Inventory path of the DC.. [optional]  # noqa: E501
            network_count (int): Count of all networks associated with this datacenter (DC).. [optional]  # noqa: E501
            vm_count (int): Count of all virtual machines (VMs) associated with this DC.. [optional]  # noqa: E501
            vm_template_count (int): Count of all virtual machines templates associated with this DC.. [optional]  # noqa: E501
            hypervisor_manager (VirtualizationVmwareVcenterRelationship): [optional]  # noqa: E501
            parent_folder (VirtualizationVmwareFolderRelationship): [optional]  # noqa: E501
            object_type (str): The fully-qualified name of the remote type referred by this relationship.. [optional]  # noqa: E501
        """

        # get (not pop): class_id must stay in kwargs so it also flows into
        # model_args below alongside the other schema properties.
        class_id = kwargs.get('class_id', "mo.MoRef")
        # pop the framework-internal arguments so they are never mistaken
        # for schema properties during composed-schema validation.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # all properties must be supplied by keyword; positional arguments
        # are rejected outright.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # record this class so a discriminator loop cannot revisit it
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        constant_args = {
            '_check_type': _check_type,
            '_path_to_item': _path_to_item,
            '_spec_property_naming': _spec_property_naming,
            '_configuration': _configuration,
            '_visited_composed_classes': self._visited_composed_classes,
        }
        required_args = {
            'class_id': class_id,
        }
        model_args = {}
        model_args.update(required_args)
        model_args.update(kwargs)
        # distribute the supplied values across the composed (oneOf)
        # component schemas and collect the resulting instances.
        composed_info = validate_get_composed_info(
            constant_args, model_args, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        unused_args = composed_info[3]

        for var_name, var_value in required_args.items():
            setattr(self, var_name, var_value)
        for var_name, var_value in kwargs.items():
            if var_name in unused_args and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        not self._additional_properties_model_instances:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
@cached_property
def _composed_schemas():
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error beause the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
lazy_import()
return {
'anyOf': [
],
'allOf': [
],
'oneOf': [
MoMoRef,
VirtualizationVmwareDatacenter,
none_type,
],
}
| 63.070496 | 1,678 | 0.659325 |
import re
import sys
from intersight.model_utils import (
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
    """Import the related model classes on demand and publish them into this
    module's globals (deferred, presumably to avoid circular imports among
    the generated model modules -- confirm against the generator templates).
    """
    from intersight.model.asset_device_registration_relationship import AssetDeviceRegistrationRelationship
    from intersight.model.display_names import DisplayNames
    from intersight.model.mo_base_mo_relationship import MoBaseMoRelationship
    from intersight.model.mo_mo_ref import MoMoRef
    from intersight.model.mo_tag import MoTag
    from intersight.model.mo_version_context import MoVersionContext
    from intersight.model.virtualization_vmware_datacenter import VirtualizationVmwareDatacenter
    from intersight.model.virtualization_vmware_folder_relationship import VirtualizationVmwareFolderRelationship
    from intersight.model.virtualization_vmware_vcenter_relationship import VirtualizationVmwareVcenterRelationship
    globals().update(
        AssetDeviceRegistrationRelationship=AssetDeviceRegistrationRelationship,
        DisplayNames=DisplayNames,
        MoBaseMoRelationship=MoBaseMoRelationship,
        MoMoRef=MoMoRef,
        MoTag=MoTag,
        MoVersionContext=MoVersionContext,
        VirtualizationVmwareDatacenter=VirtualizationVmwareDatacenter,
        VirtualizationVmwareFolderRelationship=VirtualizationVmwareFolderRelationship,
        VirtualizationVmwareVcenterRelationship=VirtualizationVmwareVcenterRelationship,
    )
class VirtualizationVmwareDatacenterRelationship(ModelComposed):
allowed_values = {
('class_id',): {
'MO.MOREF': "mo.MoRef",
},
('object_type',): {
'AAA.AUDITRECORD': "aaa.AuditRecord",
'AAA.RETENTIONCONFIG': "aaa.RetentionConfig",
'AAA.RETENTIONPOLICY': "aaa.RetentionPolicy",
'ACCESS.POLICY': "access.Policy",
'ADAPTER.CONFIGPOLICY': "adapter.ConfigPolicy",
'ADAPTER.EXTETHINTERFACE': "adapter.ExtEthInterface",
'ADAPTER.HOSTETHINTERFACE': "adapter.HostEthInterface",
'ADAPTER.HOSTFCINTERFACE': "adapter.HostFcInterface",
'ADAPTER.HOSTISCSIINTERFACE': "adapter.HostIscsiInterface",
'ADAPTER.UNIT': "adapter.Unit",
'ADAPTER.UNITEXPANDER': "adapter.UnitExpander",
'APPLIANCE.APPSTATUS': "appliance.AppStatus",
'APPLIANCE.AUTORMAPOLICY': "appliance.AutoRmaPolicy",
'APPLIANCE.BACKUP': "appliance.Backup",
'APPLIANCE.BACKUPPOLICY': "appliance.BackupPolicy",
'APPLIANCE.CERTIFICATESETTING': "appliance.CertificateSetting",
'APPLIANCE.DATAEXPORTPOLICY': "appliance.DataExportPolicy",
'APPLIANCE.DEVICECERTIFICATE': "appliance.DeviceCertificate",
'APPLIANCE.DEVICECLAIM': "appliance.DeviceClaim",
'APPLIANCE.DEVICEUPGRADEPOLICY': "appliance.DeviceUpgradePolicy",
'APPLIANCE.DIAGSETTING': "appliance.DiagSetting",
'APPLIANCE.EXTERNALSYSLOGSETTING': "appliance.ExternalSyslogSetting",
'APPLIANCE.FILEGATEWAY': "appliance.FileGateway",
'APPLIANCE.FILESYSTEMSTATUS': "appliance.FileSystemStatus",
'APPLIANCE.GROUPSTATUS': "appliance.GroupStatus",
'APPLIANCE.IMAGEBUNDLE': "appliance.ImageBundle",
'APPLIANCE.NODEINFO': "appliance.NodeInfo",
'APPLIANCE.NODESTATUS': "appliance.NodeStatus",
'APPLIANCE.RELEASENOTE': "appliance.ReleaseNote",
'APPLIANCE.REMOTEFILEIMPORT': "appliance.RemoteFileImport",
'APPLIANCE.RESTORE': "appliance.Restore",
'APPLIANCE.SETUPINFO': "appliance.SetupInfo",
'APPLIANCE.SYSTEMINFO': "appliance.SystemInfo",
'APPLIANCE.SYSTEMSTATUS': "appliance.SystemStatus",
'APPLIANCE.UPGRADE': "appliance.Upgrade",
'APPLIANCE.UPGRADEPOLICY': "appliance.UpgradePolicy",
'ASSET.CLUSTERMEMBER': "asset.ClusterMember",
'ASSET.DEPLOYMENT': "asset.Deployment",
'ASSET.DEPLOYMENTDEVICE': "asset.DeploymentDevice",
'ASSET.DEVICECLAIM': "asset.DeviceClaim",
'ASSET.DEVICECONFIGURATION': "asset.DeviceConfiguration",
'ASSET.DEVICECONNECTORMANAGER': "asset.DeviceConnectorManager",
'ASSET.DEVICECONTRACTINFORMATION': "asset.DeviceContractInformation",
'ASSET.DEVICECONTRACTNOTIFICATION': "asset.DeviceContractNotification",
'ASSET.DEVICEREGISTRATION': "asset.DeviceRegistration",
'ASSET.SUBSCRIPTION': "asset.Subscription",
'ASSET.SUBSCRIPTIONACCOUNT': "asset.SubscriptionAccount",
'ASSET.SUBSCRIPTIONDEVICECONTRACTINFORMATION': "asset.SubscriptionDeviceContractInformation",
'ASSET.TARGET': "asset.Target",
'BIOS.BOOTDEVICE': "bios.BootDevice",
'BIOS.BOOTMODE': "bios.BootMode",
'BIOS.POLICY': "bios.Policy",
'BIOS.SYSTEMBOOTORDER': "bios.SystemBootOrder",
'BIOS.TOKENSETTINGS': "bios.TokenSettings",
'BIOS.UNIT': "bios.Unit",
'BIOS.VFSELECTMEMORYRASCONFIGURATION': "bios.VfSelectMemoryRasConfiguration",
'BOOT.CDDDEVICE': "boot.CddDevice",
'BOOT.DEVICEBOOTMODE': "boot.DeviceBootMode",
'BOOT.DEVICEBOOTSECURITY': "boot.DeviceBootSecurity",
'BOOT.HDDDEVICE': "boot.HddDevice",
'BOOT.ISCSIDEVICE': "boot.IscsiDevice",
'BOOT.NVMEDEVICE': "boot.NvmeDevice",
'BOOT.PCHSTORAGEDEVICE': "boot.PchStorageDevice",
'BOOT.PRECISIONPOLICY': "boot.PrecisionPolicy",
'BOOT.PXEDEVICE': "boot.PxeDevice",
'BOOT.SANDEVICE': "boot.SanDevice",
'BOOT.SDDEVICE': "boot.SdDevice",
'BOOT.UEFISHELLDEVICE': "boot.UefiShellDevice",
'BOOT.USBDEVICE': "boot.UsbDevice",
'BOOT.VMEDIADEVICE': "boot.VmediaDevice",
'BULK.EXPORT': "bulk.Export",
'BULK.EXPORTEDITEM': "bulk.ExportedItem",
'BULK.MOCLONER': "bulk.MoCloner",
'BULK.MOMERGER': "bulk.MoMerger",
'BULK.REQUEST': "bulk.Request",
'BULK.SUBREQUESTOBJ': "bulk.SubRequestObj",
'CAPABILITY.ADAPTERUNITDESCRIPTOR': "capability.AdapterUnitDescriptor",
'CAPABILITY.CATALOG': "capability.Catalog",
'CAPABILITY.CHASSISDESCRIPTOR': "capability.ChassisDescriptor",
'CAPABILITY.CHASSISMANUFACTURINGDEF': "capability.ChassisManufacturingDef",
'CAPABILITY.CIMCFIRMWAREDESCRIPTOR': "capability.CimcFirmwareDescriptor",
'CAPABILITY.EQUIPMENTPHYSICALDEF': "capability.EquipmentPhysicalDef",
'CAPABILITY.EQUIPMENTSLOTARRAY': "capability.EquipmentSlotArray",
'CAPABILITY.FANMODULEDESCRIPTOR': "capability.FanModuleDescriptor",
'CAPABILITY.FANMODULEMANUFACTURINGDEF': "capability.FanModuleManufacturingDef",
'CAPABILITY.IOCARDCAPABILITYDEF': "capability.IoCardCapabilityDef",
'CAPABILITY.IOCARDDESCRIPTOR': "capability.IoCardDescriptor",
'CAPABILITY.IOCARDMANUFACTURINGDEF': "capability.IoCardManufacturingDef",
'CAPABILITY.PORTGROUPAGGREGATIONDEF': "capability.PortGroupAggregationDef",
'CAPABILITY.PSUDESCRIPTOR': "capability.PsuDescriptor",
'CAPABILITY.PSUMANUFACTURINGDEF': "capability.PsuManufacturingDef",
'CAPABILITY.SERVERMODELSCAPABILITYDEF': "capability.ServerModelsCapabilityDef",
'CAPABILITY.SERVERSCHEMADESCRIPTOR': "capability.ServerSchemaDescriptor",
'CAPABILITY.SIOCMODULECAPABILITYDEF': "capability.SiocModuleCapabilityDef",
'CAPABILITY.SIOCMODULEDESCRIPTOR': "capability.SiocModuleDescriptor",
'CAPABILITY.SIOCMODULEMANUFACTURINGDEF': "capability.SiocModuleManufacturingDef",
'CAPABILITY.SWITCHCAPABILITY': "capability.SwitchCapability",
'CAPABILITY.SWITCHDESCRIPTOR': "capability.SwitchDescriptor",
'CAPABILITY.SWITCHMANUFACTURINGDEF': "capability.SwitchManufacturingDef",
'CERTIFICATEMANAGEMENT.POLICY': "certificatemanagement.Policy",
'CHASSIS.CONFIGCHANGEDETAIL': "chassis.ConfigChangeDetail",
'CHASSIS.CONFIGIMPORT': "chassis.ConfigImport",
'CHASSIS.CONFIGRESULT': "chassis.ConfigResult",
'CHASSIS.CONFIGRESULTENTRY': "chassis.ConfigResultEntry",
'CHASSIS.IOMPROFILE': "chassis.IomProfile",
'CHASSIS.PROFILE': "chassis.Profile",
'CLOUD.AWSBILLINGUNIT': "cloud.AwsBillingUnit",
'CLOUD.AWSKEYPAIR': "cloud.AwsKeyPair",
'CLOUD.AWSNETWORKINTERFACE': "cloud.AwsNetworkInterface",
'CLOUD.AWSORGANIZATIONALUNIT': "cloud.AwsOrganizationalUnit",
'CLOUD.AWSSECURITYGROUP': "cloud.AwsSecurityGroup",
'CLOUD.AWSSUBNET': "cloud.AwsSubnet",
'CLOUD.AWSVIRTUALMACHINE': "cloud.AwsVirtualMachine",
'CLOUD.AWSVOLUME': "cloud.AwsVolume",
'CLOUD.AWSVPC': "cloud.AwsVpc",
'CLOUD.COLLECTINVENTORY': "cloud.CollectInventory",
'CLOUD.REGIONS': "cloud.Regions",
'CLOUD.SKUCONTAINERTYPE': "cloud.SkuContainerType",
'CLOUD.SKUDATABASETYPE': "cloud.SkuDatabaseType",
'CLOUD.SKUINSTANCETYPE': "cloud.SkuInstanceType",
'CLOUD.SKUNETWORKTYPE': "cloud.SkuNetworkType",
'CLOUD.SKUREGIONRATECARDS': "cloud.SkuRegionRateCards",
'CLOUD.SKUVOLUMETYPE': "cloud.SkuVolumeType",
'CLOUD.TFCAGENTPOOL': "cloud.TfcAgentpool",
'CLOUD.TFCORGANIZATION': "cloud.TfcOrganization",
'CLOUD.TFCWORKSPACE': "cloud.TfcWorkspace",
'COMM.HTTPPROXYPOLICY': "comm.HttpProxyPolicy",
'COMPUTE.BIOSPOSTPOLICY': "compute.BiosPostPolicy",
'COMPUTE.BLADE': "compute.Blade",
'COMPUTE.BLADEIDENTITY': "compute.BladeIdentity",
'COMPUTE.BOARD': "compute.Board",
'COMPUTE.MAPPING': "compute.Mapping",
'COMPUTE.PHYSICALSUMMARY': "compute.PhysicalSummary",
'COMPUTE.RACKUNIT': "compute.RackUnit",
'COMPUTE.RACKUNITIDENTITY': "compute.RackUnitIdentity",
'COMPUTE.SERVERPOWERPOLICY': "compute.ServerPowerPolicy",
'COMPUTE.SERVERSETTING': "compute.ServerSetting",
'COMPUTE.VMEDIA': "compute.Vmedia",
'COND.ALARM': "cond.Alarm",
'COND.ALARMAGGREGATION': "cond.AlarmAggregation",
'COND.HCLSTATUS': "cond.HclStatus",
'COND.HCLSTATUSDETAIL': "cond.HclStatusDetail",
'COND.HCLSTATUSJOB': "cond.HclStatusJob",
'CONNECTORPACK.CONNECTORPACKUPGRADE': "connectorpack.ConnectorPackUpgrade",
'CONNECTORPACK.UPGRADEIMPACT': "connectorpack.UpgradeImpact",
'CONVERGEDINFRA.HEALTHCHECKDEFINITION': "convergedinfra.HealthCheckDefinition",
'CONVERGEDINFRA.HEALTHCHECKEXECUTION': "convergedinfra.HealthCheckExecution",
'CONVERGEDINFRA.POD': "convergedinfra.Pod",
'CRD.CUSTOMRESOURCE': "crd.CustomResource",
'DEVICECONNECTOR.POLICY': "deviceconnector.Policy",
'EQUIPMENT.CHASSIS': "equipment.Chassis",
'EQUIPMENT.CHASSISIDENTITY': "equipment.ChassisIdentity",
'EQUIPMENT.CHASSISOPERATION': "equipment.ChassisOperation",
'EQUIPMENT.DEVICESUMMARY': "equipment.DeviceSummary",
'EQUIPMENT.EXPANDERMODULE': "equipment.ExpanderModule",
'EQUIPMENT.FAN': "equipment.Fan",
'EQUIPMENT.FANCONTROL': "equipment.FanControl",
'EQUIPMENT.FANMODULE': "equipment.FanModule",
'EQUIPMENT.FEX': "equipment.Fex",
'EQUIPMENT.FEXIDENTITY': "equipment.FexIdentity",
'EQUIPMENT.FEXOPERATION': "equipment.FexOperation",
'EQUIPMENT.FRU': "equipment.Fru",
'EQUIPMENT.IDENTITYSUMMARY': "equipment.IdentitySummary",
'EQUIPMENT.IOCARD': "equipment.IoCard",
'EQUIPMENT.IOCARDOPERATION': "equipment.IoCardOperation",
'EQUIPMENT.IOEXPANDER': "equipment.IoExpander",
'EQUIPMENT.LOCATORLED': "equipment.LocatorLed",
'EQUIPMENT.PSU': "equipment.Psu",
'EQUIPMENT.PSUCONTROL': "equipment.PsuControl",
'EQUIPMENT.RACKENCLOSURE': "equipment.RackEnclosure",
'EQUIPMENT.RACKENCLOSURESLOT': "equipment.RackEnclosureSlot",
'EQUIPMENT.SHAREDIOMODULE': "equipment.SharedIoModule",
'EQUIPMENT.SWITCHCARD': "equipment.SwitchCard",
'EQUIPMENT.SYSTEMIOCONTROLLER': "equipment.SystemIoController",
'EQUIPMENT.TPM': "equipment.Tpm",
'EQUIPMENT.TRANSCEIVER': "equipment.Transceiver",
'ETHER.HOSTPORT': "ether.HostPort",
'ETHER.NETWORKPORT': "ether.NetworkPort",
'ETHER.PHYSICALPORT': "ether.PhysicalPort",
'ETHER.PORTCHANNEL': "ether.PortChannel",
'EXTERNALSITE.AUTHORIZATION': "externalsite.Authorization",
'FABRIC.APPLIANCEPCROLE': "fabric.AppliancePcRole",
'FABRIC.APPLIANCEROLE': "fabric.ApplianceRole",
'FABRIC.CONFIGCHANGEDETAIL': "fabric.ConfigChangeDetail",
'FABRIC.CONFIGRESULT': "fabric.ConfigResult",
'FABRIC.CONFIGRESULTENTRY': "fabric.ConfigResultEntry",
'FABRIC.ELEMENTIDENTITY': "fabric.ElementIdentity",
'FABRIC.ESTIMATEIMPACT': "fabric.EstimateImpact",
'FABRIC.ETHNETWORKCONTROLPOLICY': "fabric.EthNetworkControlPolicy",
'FABRIC.ETHNETWORKGROUPPOLICY': "fabric.EthNetworkGroupPolicy",
'FABRIC.ETHNETWORKPOLICY': "fabric.EthNetworkPolicy",
'FABRIC.FCNETWORKPOLICY': "fabric.FcNetworkPolicy",
'FABRIC.FCSTORAGEROLE': "fabric.FcStorageRole",
'FABRIC.FCUPLINKPCROLE': "fabric.FcUplinkPcRole",
'FABRIC.FCUPLINKROLE': "fabric.FcUplinkRole",
'FABRIC.FCOEUPLINKPCROLE': "fabric.FcoeUplinkPcRole",
'FABRIC.FCOEUPLINKROLE': "fabric.FcoeUplinkRole",
'FABRIC.FLOWCONTROLPOLICY': "fabric.FlowControlPolicy",
'FABRIC.LINKAGGREGATIONPOLICY': "fabric.LinkAggregationPolicy",
'FABRIC.LINKCONTROLPOLICY': "fabric.LinkControlPolicy",
'FABRIC.MULTICASTPOLICY': "fabric.MulticastPolicy",
'FABRIC.PCMEMBER': "fabric.PcMember",
'FABRIC.PCOPERATION': "fabric.PcOperation",
'FABRIC.PORTMODE': "fabric.PortMode",
'FABRIC.PORTOPERATION': "fabric.PortOperation",
'FABRIC.PORTPOLICY': "fabric.PortPolicy",
'FABRIC.SERVERROLE': "fabric.ServerRole",
'FABRIC.SWITCHCLUSTERPROFILE': "fabric.SwitchClusterProfile",
'FABRIC.SWITCHCONTROLPOLICY': "fabric.SwitchControlPolicy",
'FABRIC.SWITCHPROFILE': "fabric.SwitchProfile",
'FABRIC.SYSTEMQOSPOLICY': "fabric.SystemQosPolicy",
'FABRIC.UPLINKPCROLE': "fabric.UplinkPcRole",
'FABRIC.UPLINKROLE': "fabric.UplinkRole",
'FABRIC.VLAN': "fabric.Vlan",
'FABRIC.VSAN': "fabric.Vsan",
'FAULT.INSTANCE': "fault.Instance",
'FC.PHYSICALPORT': "fc.PhysicalPort",
'FC.PORTCHANNEL': "fc.PortChannel",
'FCPOOL.FCBLOCK': "fcpool.FcBlock",
'FCPOOL.LEASE': "fcpool.Lease",
'FCPOOL.POOL': "fcpool.Pool",
'FCPOOL.POOLMEMBER': "fcpool.PoolMember",
'FCPOOL.UNIVERSE': "fcpool.Universe",
'FEEDBACK.FEEDBACKPOST': "feedback.FeedbackPost",
'FIRMWARE.BIOSDESCRIPTOR': "firmware.BiosDescriptor",
'FIRMWARE.BOARDCONTROLLERDESCRIPTOR': "firmware.BoardControllerDescriptor",
'FIRMWARE.CHASSISUPGRADE': "firmware.ChassisUpgrade",
'FIRMWARE.CIMCDESCRIPTOR': "firmware.CimcDescriptor",
'FIRMWARE.DIMMDESCRIPTOR': "firmware.DimmDescriptor",
'FIRMWARE.DISTRIBUTABLE': "firmware.Distributable",
'FIRMWARE.DISTRIBUTABLEMETA': "firmware.DistributableMeta",
'FIRMWARE.DRIVEDESCRIPTOR': "firmware.DriveDescriptor",
'FIRMWARE.DRIVERDISTRIBUTABLE': "firmware.DriverDistributable",
'FIRMWARE.EULA': "firmware.Eula",
'FIRMWARE.FIRMWARESUMMARY': "firmware.FirmwareSummary",
'FIRMWARE.GPUDESCRIPTOR': "firmware.GpuDescriptor",
'FIRMWARE.HBADESCRIPTOR': "firmware.HbaDescriptor",
'FIRMWARE.IOMDESCRIPTOR': "firmware.IomDescriptor",
'FIRMWARE.MSWITCHDESCRIPTOR': "firmware.MswitchDescriptor",
'FIRMWARE.NXOSDESCRIPTOR': "firmware.NxosDescriptor",
'FIRMWARE.PCIEDESCRIPTOR': "firmware.PcieDescriptor",
'FIRMWARE.PSUDESCRIPTOR': "firmware.PsuDescriptor",
'FIRMWARE.RUNNINGFIRMWARE': "firmware.RunningFirmware",
'FIRMWARE.SASEXPANDERDESCRIPTOR': "firmware.SasExpanderDescriptor",
'FIRMWARE.SERVERCONFIGURATIONUTILITYDISTRIBUTABLE': "firmware.ServerConfigurationUtilityDistributable",
'FIRMWARE.STORAGECONTROLLERDESCRIPTOR': "firmware.StorageControllerDescriptor",
'FIRMWARE.SWITCHUPGRADE': "firmware.SwitchUpgrade",
'FIRMWARE.UNSUPPORTEDVERSIONUPGRADE': "firmware.UnsupportedVersionUpgrade",
'FIRMWARE.UPGRADE': "firmware.Upgrade",
'FIRMWARE.UPGRADEIMPACT': "firmware.UpgradeImpact",
'FIRMWARE.UPGRADEIMPACTSTATUS': "firmware.UpgradeImpactStatus",
'FIRMWARE.UPGRADESTATUS': "firmware.UpgradeStatus",
'FORECAST.CATALOG': "forecast.Catalog",
'FORECAST.DEFINITION': "forecast.Definition",
'FORECAST.INSTANCE': "forecast.Instance",
'GRAPHICS.CARD': "graphics.Card",
'GRAPHICS.CONTROLLER': "graphics.Controller",
'HCL.COMPATIBILITYSTATUS': "hcl.CompatibilityStatus",
'HCL.DRIVERIMAGE': "hcl.DriverImage",
'HCL.EXEMPTEDCATALOG': "hcl.ExemptedCatalog",
'HCL.HYPERFLEXSOFTWARECOMPATIBILITYINFO': "hcl.HyperflexSoftwareCompatibilityInfo",
'HCL.OPERATINGSYSTEM': "hcl.OperatingSystem",
'HCL.OPERATINGSYSTEMVENDOR': "hcl.OperatingSystemVendor",
'HCL.SUPPORTEDDRIVERNAME': "hcl.SupportedDriverName",
'HYPERFLEX.ALARM': "hyperflex.Alarm",
'HYPERFLEX.APPCATALOG': "hyperflex.AppCatalog",
'HYPERFLEX.AUTOSUPPORTPOLICY': "hyperflex.AutoSupportPolicy",
'HYPERFLEX.BACKUPCLUSTER': "hyperflex.BackupCluster",
'HYPERFLEX.CAPABILITYINFO': "hyperflex.CapabilityInfo",
'HYPERFLEX.CLUSTER': "hyperflex.Cluster",
'HYPERFLEX.CLUSTERBACKUPPOLICY': "hyperflex.ClusterBackupPolicy",
'HYPERFLEX.CLUSTERBACKUPPOLICYDEPLOYMENT': "hyperflex.ClusterBackupPolicyDeployment",
'HYPERFLEX.CLUSTERBACKUPPOLICYINVENTORY': "hyperflex.ClusterBackupPolicyInventory",
'HYPERFLEX.CLUSTERHEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.ClusterHealthCheckExecutionSnapshot",
'HYPERFLEX.CLUSTERNETWORKPOLICY': "hyperflex.ClusterNetworkPolicy",
'HYPERFLEX.CLUSTERPROFILE': "hyperflex.ClusterProfile",
'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICY': "hyperflex.ClusterReplicationNetworkPolicy",
'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICYDEPLOYMENT': "hyperflex.ClusterReplicationNetworkPolicyDeployment",
'HYPERFLEX.CLUSTERSTORAGEPOLICY': "hyperflex.ClusterStoragePolicy",
'HYPERFLEX.CONFIGRESULT': "hyperflex.ConfigResult",
'HYPERFLEX.CONFIGRESULTENTRY': "hyperflex.ConfigResultEntry",
'HYPERFLEX.DATAPROTECTIONPEER': "hyperflex.DataProtectionPeer",
'HYPERFLEX.DATASTORESTATISTIC': "hyperflex.DatastoreStatistic",
'HYPERFLEX.DEVICEPACKAGEDOWNLOADSTATE': "hyperflex.DevicePackageDownloadState",
'HYPERFLEX.DRIVE': "hyperflex.Drive",
'HYPERFLEX.EXTFCSTORAGEPOLICY': "hyperflex.ExtFcStoragePolicy",
'HYPERFLEX.EXTISCSISTORAGEPOLICY': "hyperflex.ExtIscsiStoragePolicy",
'HYPERFLEX.FEATURELIMITEXTERNAL': "hyperflex.FeatureLimitExternal",
'HYPERFLEX.FEATURELIMITINTERNAL': "hyperflex.FeatureLimitInternal",
'HYPERFLEX.HEALTH': "hyperflex.Health",
'HYPERFLEX.HEALTHCHECKDEFINITION': "hyperflex.HealthCheckDefinition",
'HYPERFLEX.HEALTHCHECKEXECUTION': "hyperflex.HealthCheckExecution",
'HYPERFLEX.HEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.HealthCheckExecutionSnapshot",
'HYPERFLEX.HEALTHCHECKPACKAGECHECKSUM': "hyperflex.HealthCheckPackageChecksum",
'HYPERFLEX.HXDPVERSION': "hyperflex.HxdpVersion",
'HYPERFLEX.LICENSE': "hyperflex.License",
'HYPERFLEX.LOCALCREDENTIALPOLICY': "hyperflex.LocalCredentialPolicy",
'HYPERFLEX.NODE': "hyperflex.Node",
'HYPERFLEX.NODECONFIGPOLICY': "hyperflex.NodeConfigPolicy",
'HYPERFLEX.NODEPROFILE': "hyperflex.NodeProfile",
'HYPERFLEX.PROTECTEDCLUSTER': "hyperflex.ProtectedCluster",
'HYPERFLEX.PROXYSETTINGPOLICY': "hyperflex.ProxySettingPolicy",
'HYPERFLEX.SERVERFIRMWAREVERSION': "hyperflex.ServerFirmwareVersion",
'HYPERFLEX.SERVERFIRMWAREVERSIONENTRY': "hyperflex.ServerFirmwareVersionEntry",
'HYPERFLEX.SERVERMODEL': "hyperflex.ServerModel",
'HYPERFLEX.SERVICEAUTHTOKEN': "hyperflex.ServiceAuthToken",
'HYPERFLEX.SOFTWAREDISTRIBUTIONCOMPONENT': "hyperflex.SoftwareDistributionComponent",
'HYPERFLEX.SOFTWAREDISTRIBUTIONENTRY': "hyperflex.SoftwareDistributionEntry",
'HYPERFLEX.SOFTWAREDISTRIBUTIONVERSION': "hyperflex.SoftwareDistributionVersion",
'HYPERFLEX.SOFTWAREVERSIONPOLICY': "hyperflex.SoftwareVersionPolicy",
'HYPERFLEX.STORAGECONTAINER': "hyperflex.StorageContainer",
'HYPERFLEX.SYSCONFIGPOLICY': "hyperflex.SysConfigPolicy",
'HYPERFLEX.UCSMCONFIGPOLICY': "hyperflex.UcsmConfigPolicy",
'HYPERFLEX.VCENTERCONFIGPOLICY': "hyperflex.VcenterConfigPolicy",
'HYPERFLEX.VMBACKUPINFO': "hyperflex.VmBackupInfo",
'HYPERFLEX.VMIMPORTOPERATION': "hyperflex.VmImportOperation",
'HYPERFLEX.VMRESTOREOPERATION': "hyperflex.VmRestoreOperation",
'HYPERFLEX.VMSNAPSHOTINFO': "hyperflex.VmSnapshotInfo",
'HYPERFLEX.VOLUME': "hyperflex.Volume",
'HYPERFLEX.WITNESSCONFIGURATION': "hyperflex.WitnessConfiguration",
'IAAS.CONNECTORPACK': "iaas.ConnectorPack",
'IAAS.DEVICESTATUS': "iaas.DeviceStatus",
'IAAS.DIAGNOSTICMESSAGES': "iaas.DiagnosticMessages",
'IAAS.LICENSEINFO': "iaas.LicenseInfo",
'IAAS.MOSTRUNTASKS': "iaas.MostRunTasks",
'IAAS.SERVICEREQUEST': "iaas.ServiceRequest",
'IAAS.UCSDINFO': "iaas.UcsdInfo",
'IAAS.UCSDMANAGEDINFRA': "iaas.UcsdManagedInfra",
'IAAS.UCSDMESSAGES': "iaas.UcsdMessages",
'IAM.ACCOUNT': "iam.Account",
'IAM.ACCOUNTEXPERIENCE': "iam.AccountExperience",
'IAM.APIKEY': "iam.ApiKey",
'IAM.APPREGISTRATION': "iam.AppRegistration",
'IAM.BANNERMESSAGE': "iam.BannerMessage",
'IAM.CERTIFICATE': "iam.Certificate",
'IAM.CERTIFICATEREQUEST': "iam.CertificateRequest",
'IAM.DOMAINGROUP': "iam.DomainGroup",
'IAM.ENDPOINTPRIVILEGE': "iam.EndPointPrivilege",
'IAM.ENDPOINTROLE': "iam.EndPointRole",
'IAM.ENDPOINTUSER': "iam.EndPointUser",
'IAM.ENDPOINTUSERPOLICY': "iam.EndPointUserPolicy",
'IAM.ENDPOINTUSERROLE': "iam.EndPointUserRole",
'IAM.IDP': "iam.Idp",
'IAM.IDPREFERENCE': "iam.IdpReference",
'IAM.IPACCESSMANAGEMENT': "iam.IpAccessManagement",
'IAM.IPADDRESS': "iam.IpAddress",
'IAM.LDAPGROUP': "iam.LdapGroup",
'IAM.LDAPPOLICY': "iam.LdapPolicy",
'IAM.LDAPPROVIDER': "iam.LdapProvider",
'IAM.LOCALUSERPASSWORD': "iam.LocalUserPassword",
'IAM.LOCALUSERPASSWORDPOLICY': "iam.LocalUserPasswordPolicy",
'IAM.OAUTHTOKEN': "iam.OAuthToken",
'IAM.PERMISSION': "iam.Permission",
'IAM.PRIVATEKEYSPEC': "iam.PrivateKeySpec",
'IAM.PRIVILEGE': "iam.Privilege",
'IAM.PRIVILEGESET': "iam.PrivilegeSet",
'IAM.QUALIFIER': "iam.Qualifier",
'IAM.RESOURCELIMITS': "iam.ResourceLimits",
'IAM.RESOURCEPERMISSION': "iam.ResourcePermission",
'IAM.RESOURCEROLES': "iam.ResourceRoles",
'IAM.ROLE': "iam.Role",
'IAM.SECURITYHOLDER': "iam.SecurityHolder",
'IAM.SERVICEPROVIDER': "iam.ServiceProvider",
'IAM.SESSION': "iam.Session",
'IAM.SESSIONLIMITS': "iam.SessionLimits",
'IAM.SYSTEM': "iam.System",
'IAM.TRUSTPOINT': "iam.TrustPoint",
'IAM.USER': "iam.User",
'IAM.USERGROUP': "iam.UserGroup",
'IAM.USERPREFERENCE': "iam.UserPreference",
'INVENTORY.DEVICEINFO': "inventory.DeviceInfo",
'INVENTORY.DNMOBINDING': "inventory.DnMoBinding",
'INVENTORY.GENERICINVENTORY': "inventory.GenericInventory",
'INVENTORY.GENERICINVENTORYHOLDER': "inventory.GenericInventoryHolder",
'INVENTORY.REQUEST': "inventory.Request",
'IPMIOVERLAN.POLICY': "ipmioverlan.Policy",
'IPPOOL.BLOCKLEASE': "ippool.BlockLease",
'IPPOOL.IPLEASE': "ippool.IpLease",
'IPPOOL.POOL': "ippool.Pool",
'IPPOOL.POOLMEMBER': "ippool.PoolMember",
'IPPOOL.SHADOWBLOCK': "ippool.ShadowBlock",
'IPPOOL.SHADOWPOOL': "ippool.ShadowPool",
'IPPOOL.UNIVERSE': "ippool.Universe",
'IQNPOOL.BLOCK': "iqnpool.Block",
'IQNPOOL.LEASE': "iqnpool.Lease",
'IQNPOOL.POOL': "iqnpool.Pool",
'IQNPOOL.POOLMEMBER': "iqnpool.PoolMember",
'IQNPOOL.UNIVERSE': "iqnpool.Universe",
'IWOTENANT.TENANTSTATUS': "iwotenant.TenantStatus",
'KUBERNETES.ACICNIAPIC': "kubernetes.AciCniApic",
'KUBERNETES.ACICNIPROFILE': "kubernetes.AciCniProfile",
'KUBERNETES.ACICNITENANTCLUSTERALLOCATION': "kubernetes.AciCniTenantClusterAllocation",
'KUBERNETES.ADDONDEFINITION': "kubernetes.AddonDefinition",
'KUBERNETES.ADDONPOLICY': "kubernetes.AddonPolicy",
'KUBERNETES.ADDONREPOSITORY': "kubernetes.AddonRepository",
'KUBERNETES.BAREMETALNODEPROFILE': "kubernetes.BaremetalNodeProfile",
'KUBERNETES.CATALOG': "kubernetes.Catalog",
'KUBERNETES.CLUSTER': "kubernetes.Cluster",
'KUBERNETES.CLUSTERADDONPROFILE': "kubernetes.ClusterAddonProfile",
'KUBERNETES.CLUSTERPROFILE': "kubernetes.ClusterProfile",
'KUBERNETES.CONFIGRESULT': "kubernetes.ConfigResult",
'KUBERNETES.CONFIGRESULTENTRY': "kubernetes.ConfigResultEntry",
'KUBERNETES.CONTAINERRUNTIMEPOLICY': "kubernetes.ContainerRuntimePolicy",
'KUBERNETES.DAEMONSET': "kubernetes.DaemonSet",
'KUBERNETES.DEPLOYMENT': "kubernetes.Deployment",
'KUBERNETES.INGRESS': "kubernetes.Ingress",
'KUBERNETES.NETWORKPOLICY': "kubernetes.NetworkPolicy",
'KUBERNETES.NODE': "kubernetes.Node",
'KUBERNETES.NODEGROUPPROFILE': "kubernetes.NodeGroupProfile",
'KUBERNETES.POD': "kubernetes.Pod",
'KUBERNETES.SERVICE': "kubernetes.Service",
'KUBERNETES.STATEFULSET': "kubernetes.StatefulSet",
'KUBERNETES.SYSCONFIGPOLICY': "kubernetes.SysConfigPolicy",
'KUBERNETES.TRUSTEDREGISTRIESPOLICY': "kubernetes.TrustedRegistriesPolicy",
'KUBERNETES.VERSION': "kubernetes.Version",
'KUBERNETES.VERSIONPOLICY': "kubernetes.VersionPolicy",
'KUBERNETES.VIRTUALMACHINEINFRACONFIGPOLICY': "kubernetes.VirtualMachineInfraConfigPolicy",
'KUBERNETES.VIRTUALMACHINEINFRASTRUCTUREPROVIDER': "kubernetes.VirtualMachineInfrastructureProvider",
'KUBERNETES.VIRTUALMACHINEINSTANCETYPE': "kubernetes.VirtualMachineInstanceType",
'KUBERNETES.VIRTUALMACHINENODEPROFILE': "kubernetes.VirtualMachineNodeProfile",
'KVM.POLICY': "kvm.Policy",
'KVM.SESSION': "kvm.Session",
'KVM.TUNNEL': "kvm.Tunnel",
'LICENSE.ACCOUNTLICENSEDATA': "license.AccountLicenseData",
'LICENSE.CUSTOMEROP': "license.CustomerOp",
'LICENSE.IKSCUSTOMEROP': "license.IksCustomerOp",
'LICENSE.IKSLICENSECOUNT': "license.IksLicenseCount",
'LICENSE.IWOCUSTOMEROP': "license.IwoCustomerOp",
'LICENSE.IWOLICENSECOUNT': "license.IwoLicenseCount",
'LICENSE.LICENSEINFO': "license.LicenseInfo",
'LICENSE.LICENSERESERVATIONOP': "license.LicenseReservationOp",
'LICENSE.SMARTLICENSETOKEN': "license.SmartlicenseToken",
'LS.SERVICEPROFILE': "ls.ServiceProfile",
'MACPOOL.IDBLOCK': "macpool.IdBlock",
'MACPOOL.LEASE': "macpool.Lease",
'MACPOOL.POOL': "macpool.Pool",
'MACPOOL.POOLMEMBER': "macpool.PoolMember",
'MACPOOL.UNIVERSE': "macpool.Universe",
'MANAGEMENT.CONTROLLER': "management.Controller",
'MANAGEMENT.ENTITY': "management.Entity",
'MANAGEMENT.INTERFACE': "management.Interface",
'MEMORY.ARRAY': "memory.Array",
'MEMORY.PERSISTENTMEMORYCONFIGRESULT': "memory.PersistentMemoryConfigResult",
'MEMORY.PERSISTENTMEMORYCONFIGURATION': "memory.PersistentMemoryConfiguration",
'MEMORY.PERSISTENTMEMORYNAMESPACE': "memory.PersistentMemoryNamespace",
'MEMORY.PERSISTENTMEMORYNAMESPACECONFIGRESULT': "memory.PersistentMemoryNamespaceConfigResult",
'MEMORY.PERSISTENTMEMORYPOLICY': "memory.PersistentMemoryPolicy",
'MEMORY.PERSISTENTMEMORYREGION': "memory.PersistentMemoryRegion",
'MEMORY.PERSISTENTMEMORYUNIT': "memory.PersistentMemoryUnit",
'MEMORY.UNIT': "memory.Unit",
'META.DEFINITION': "meta.Definition",
'NETWORK.ELEMENT': "network.Element",
'NETWORK.ELEMENTSUMMARY': "network.ElementSummary",
'NETWORK.FCZONEINFO': "network.FcZoneInfo",
'NETWORK.VLANPORTINFO': "network.VlanPortInfo",
'NETWORKCONFIG.POLICY': "networkconfig.Policy",
'NIAAPI.APICCCOPOST': "niaapi.ApicCcoPost",
'NIAAPI.APICFIELDNOTICE': "niaapi.ApicFieldNotice",
'NIAAPI.APICHWEOL': "niaapi.ApicHweol",
'NIAAPI.APICLATESTMAINTAINEDRELEASE': "niaapi.ApicLatestMaintainedRelease",
'NIAAPI.APICRELEASERECOMMEND': "niaapi.ApicReleaseRecommend",
'NIAAPI.APICSWEOL': "niaapi.ApicSweol",
'NIAAPI.DCNMCCOPOST': "niaapi.DcnmCcoPost",
'NIAAPI.DCNMFIELDNOTICE': "niaapi.DcnmFieldNotice",
'NIAAPI.DCNMHWEOL': "niaapi.DcnmHweol",
'NIAAPI.DCNMLATESTMAINTAINEDRELEASE': "niaapi.DcnmLatestMaintainedRelease",
'NIAAPI.DCNMRELEASERECOMMEND': "niaapi.DcnmReleaseRecommend",
'NIAAPI.DCNMSWEOL': "niaapi.DcnmSweol",
'NIAAPI.FILEDOWNLOADER': "niaapi.FileDownloader",
'NIAAPI.NIAMETADATA': "niaapi.NiaMetadata",
'NIAAPI.NIBFILEDOWNLOADER': "niaapi.NibFileDownloader",
'NIAAPI.NIBMETADATA': "niaapi.NibMetadata",
'NIAAPI.VERSIONREGEX': "niaapi.VersionRegex",
'NIATELEMETRY.AAALDAPPROVIDERDETAILS': "niatelemetry.AaaLdapProviderDetails",
'NIATELEMETRY.AAARADIUSPROVIDERDETAILS': "niatelemetry.AaaRadiusProviderDetails",
'NIATELEMETRY.AAATACACSPROVIDERDETAILS': "niatelemetry.AaaTacacsProviderDetails",
'NIATELEMETRY.APICAPPPLUGINDETAILS': "niatelemetry.ApicAppPluginDetails",
'NIATELEMETRY.APICCOREFILEDETAILS': "niatelemetry.ApicCoreFileDetails",
'NIATELEMETRY.APICDBGEXPRSEXPORTDEST': "niatelemetry.ApicDbgexpRsExportDest",
'NIATELEMETRY.APICDBGEXPRSTSSCHEDULER': "niatelemetry.ApicDbgexpRsTsScheduler",
'NIATELEMETRY.APICFANDETAILS': "niatelemetry.ApicFanDetails",
'NIATELEMETRY.APICFEXDETAILS': "niatelemetry.ApicFexDetails",
'NIATELEMETRY.APICFLASHDETAILS': "niatelemetry.ApicFlashDetails",
'NIATELEMETRY.APICNTPAUTH': "niatelemetry.ApicNtpAuth",
'NIATELEMETRY.APICPSUDETAILS': "niatelemetry.ApicPsuDetails",
'NIATELEMETRY.APICREALMDETAILS': "niatelemetry.ApicRealmDetails",
'NIATELEMETRY.APICSNMPCLIENTGRPDETAILS': "niatelemetry.ApicSnmpClientGrpDetails",
'NIATELEMETRY.APICSNMPCOMMUNITYACCESSDETAILS': "niatelemetry.ApicSnmpCommunityAccessDetails",
'NIATELEMETRY.APICSNMPCOMMUNITYDETAILS': "niatelemetry.ApicSnmpCommunityDetails",
'NIATELEMETRY.APICSNMPTRAPDETAILS': "niatelemetry.ApicSnmpTrapDetails",
'NIATELEMETRY.APICSNMPTRAPFWDSERVERDETAILS': "niatelemetry.ApicSnmpTrapFwdServerDetails",
'NIATELEMETRY.APICSNMPVERSIONTHREEDETAILS': "niatelemetry.ApicSnmpVersionThreeDetails",
'NIATELEMETRY.APICSYSLOGGRP': "niatelemetry.ApicSysLogGrp",
'NIATELEMETRY.APICSYSLOGSRC': "niatelemetry.ApicSysLogSrc",
'NIATELEMETRY.APICTRANSCEIVERDETAILS': "niatelemetry.ApicTransceiverDetails",
'NIATELEMETRY.APICUIPAGECOUNTS': "niatelemetry.ApicUiPageCounts",
'NIATELEMETRY.APPDETAILS': "niatelemetry.AppDetails",
'NIATELEMETRY.COMMONPOLICIES': "niatelemetry.CommonPolicies",
'NIATELEMETRY.DCNMFANDETAILS': "niatelemetry.DcnmFanDetails",
'NIATELEMETRY.DCNMFEXDETAILS': "niatelemetry.DcnmFexDetails",
'NIATELEMETRY.DCNMMODULEDETAILS': "niatelemetry.DcnmModuleDetails",
'NIATELEMETRY.DCNMPSUDETAILS': "niatelemetry.DcnmPsuDetails",
'NIATELEMETRY.DCNMTRANSCEIVERDETAILS': "niatelemetry.DcnmTransceiverDetails",
'NIATELEMETRY.EPG': "niatelemetry.Epg",
'NIATELEMETRY.FABRICMODULEDETAILS': "niatelemetry.FabricModuleDetails",
'NIATELEMETRY.FABRICPODPROFILE': "niatelemetry.FabricPodProfile",
'NIATELEMETRY.FABRICPODSS': "niatelemetry.FabricPodSs",
'NIATELEMETRY.FAULT': "niatelemetry.Fault",
'NIATELEMETRY.HTTPSACLCONTRACTDETAILS': "niatelemetry.HttpsAclContractDetails",
'NIATELEMETRY.HTTPSACLCONTRACTFILTERMAP': "niatelemetry.HttpsAclContractFilterMap",
'NIATELEMETRY.HTTPSACLEPGCONTRACTMAP': "niatelemetry.HttpsAclEpgContractMap",
'NIATELEMETRY.HTTPSACLEPGDETAILS': "niatelemetry.HttpsAclEpgDetails",
'NIATELEMETRY.HTTPSACLFILTERDETAILS': "niatelemetry.HttpsAclFilterDetails",
'NIATELEMETRY.LC': "niatelemetry.Lc",
'NIATELEMETRY.MSOCONTRACTDETAILS': "niatelemetry.MsoContractDetails",
'NIATELEMETRY.MSOEPGDETAILS': "niatelemetry.MsoEpgDetails",
'NIATELEMETRY.MSOSCHEMADETAILS': "niatelemetry.MsoSchemaDetails",
'NIATELEMETRY.MSOSITEDETAILS': "niatelemetry.MsoSiteDetails",
'NIATELEMETRY.MSOTENANTDETAILS': "niatelemetry.MsoTenantDetails",
'NIATELEMETRY.NEXUSDASHBOARDCONTROLLERDETAILS': "niatelemetry.NexusDashboardControllerDetails",
'NIATELEMETRY.NEXUSDASHBOARDDETAILS': "niatelemetry.NexusDashboardDetails",
'NIATELEMETRY.NEXUSDASHBOARDMEMORYDETAILS': "niatelemetry.NexusDashboardMemoryDetails",
'NIATELEMETRY.NEXUSDASHBOARDS': "niatelemetry.NexusDashboards",
'NIATELEMETRY.NIAFEATUREUSAGE': "niatelemetry.NiaFeatureUsage",
'NIATELEMETRY.NIAINVENTORY': "niatelemetry.NiaInventory",
'NIATELEMETRY.NIAINVENTORYDCNM': "niatelemetry.NiaInventoryDcnm",
'NIATELEMETRY.NIAINVENTORYFABRIC': "niatelemetry.NiaInventoryFabric",
'NIATELEMETRY.NIALICENSESTATE': "niatelemetry.NiaLicenseState",
'NIATELEMETRY.PASSWORDSTRENGTHCHECK': "niatelemetry.PasswordStrengthCheck",
'NIATELEMETRY.PODCOMMPOLICIES': "niatelemetry.PodCommPolicies",
'NIATELEMETRY.PODSNMPPOLICIES': "niatelemetry.PodSnmpPolicies",
'NIATELEMETRY.PODTIMESERVERPOLICIES': "niatelemetry.PodTimeServerPolicies",
'NIATELEMETRY.SITEINVENTORY': "niatelemetry.SiteInventory",
'NIATELEMETRY.SNMPSRC': "niatelemetry.SnmpSrc",
'NIATELEMETRY.SSHVERSIONTWO': "niatelemetry.SshVersionTwo",
'NIATELEMETRY.SUPERVISORMODULEDETAILS': "niatelemetry.SupervisorModuleDetails",
'NIATELEMETRY.SYSLOGREMOTEDEST': "niatelemetry.SyslogRemoteDest",
'NIATELEMETRY.SYSLOGSYSMSG': "niatelemetry.SyslogSysMsg",
'NIATELEMETRY.SYSLOGSYSMSGFACFILTER': "niatelemetry.SyslogSysMsgFacFilter",
'NIATELEMETRY.SYSTEMCONTROLLERDETAILS': "niatelemetry.SystemControllerDetails",
'NIATELEMETRY.TENANT': "niatelemetry.Tenant",
'NOTIFICATION.ACCOUNTSUBSCRIPTION': "notification.AccountSubscription",
'NTP.POLICY': "ntp.Policy",
'OAUTH.ACCESSTOKEN': "oauth.AccessToken",
'OAUTH.AUTHORIZATION': "oauth.Authorization",
'OPRS.DEPLOYMENT': "oprs.Deployment",
'OPRS.SYNCTARGETLISTMESSAGE': "oprs.SyncTargetListMessage",
'ORGANIZATION.ORGANIZATION': "organization.Organization",
'OS.BULKINSTALLINFO': "os.BulkInstallInfo",
'OS.CATALOG': "os.Catalog",
'OS.CONFIGURATIONFILE': "os.ConfigurationFile",
'OS.DISTRIBUTION': "os.Distribution",
'OS.INSTALL': "os.Install",
'OS.OSSUPPORT': "os.OsSupport",
'OS.SUPPORTEDVERSION': "os.SupportedVersion",
'OS.TEMPLATEFILE': "os.TemplateFile",
'OS.VALIDINSTALLTARGET': "os.ValidInstallTarget",
'PCI.COPROCESSORCARD': "pci.CoprocessorCard",
'PCI.DEVICE': "pci.Device",
'PCI.LINK': "pci.Link",
'PCI.SWITCH': "pci.Switch",
'PORT.GROUP': "port.Group",
'PORT.MACBINDING': "port.MacBinding",
'PORT.SUBGROUP': "port.SubGroup",
'POWER.CONTROLSTATE': "power.ControlState",
'POWER.POLICY': "power.Policy",
'PROCESSOR.UNIT': "processor.Unit",
'RACK.UNITPERSONALITY': "rack.UnitPersonality",
'RECOMMENDATION.CAPACITYRUNWAY': "recommendation.CapacityRunway",
'RECOMMENDATION.PHYSICALITEM': "recommendation.PhysicalItem",
'RECOVERY.BACKUPCONFIGPOLICY': "recovery.BackupConfigPolicy",
'RECOVERY.BACKUPPROFILE': "recovery.BackupProfile",
'RECOVERY.CONFIGRESULT': "recovery.ConfigResult",
'RECOVERY.CONFIGRESULTENTRY': "recovery.ConfigResultEntry",
'RECOVERY.ONDEMANDBACKUP': "recovery.OnDemandBackup",
'RECOVERY.RESTORE': "recovery.Restore",
'RECOVERY.SCHEDULECONFIGPOLICY': "recovery.ScheduleConfigPolicy",
'RESOURCE.GROUP': "resource.Group",
'RESOURCE.GROUPMEMBER': "resource.GroupMember",
'RESOURCE.LICENSERESOURCECOUNT': "resource.LicenseResourceCount",
'RESOURCE.MEMBERSHIP': "resource.Membership",
'RESOURCE.MEMBERSHIPHOLDER': "resource.MembershipHolder",
'RESOURCE.RESERVATION': "resource.Reservation",
'RESOURCEPOOL.LEASE': "resourcepool.Lease",
'RESOURCEPOOL.LEASERESOURCE': "resourcepool.LeaseResource",
'RESOURCEPOOL.POOL': "resourcepool.Pool",
'RESOURCEPOOL.POOLMEMBER': "resourcepool.PoolMember",
'RESOURCEPOOL.UNIVERSE': "resourcepool.Universe",
'RPROXY.REVERSEPROXY': "rproxy.ReverseProxy",
'SDCARD.POLICY': "sdcard.Policy",
'SDWAN.PROFILE': "sdwan.Profile",
'SDWAN.ROUTERNODE': "sdwan.RouterNode",
'SDWAN.ROUTERPOLICY': "sdwan.RouterPolicy",
'SDWAN.VMANAGEACCOUNTPOLICY': "sdwan.VmanageAccountPolicy",
'SEARCH.SEARCHITEM': "search.SearchItem",
'SEARCH.TAGITEM': "search.TagItem",
'SECURITY.UNIT': "security.Unit",
'SERVER.CONFIGCHANGEDETAIL': "server.ConfigChangeDetail",
'SERVER.CONFIGIMPORT': "server.ConfigImport",
'SERVER.CONFIGRESULT': "server.ConfigResult",
'SERVER.CONFIGRESULTENTRY': "server.ConfigResultEntry",
'SERVER.PROFILE': "server.Profile",
'SERVER.PROFILETEMPLATE': "server.ProfileTemplate",
'SMTP.POLICY': "smtp.Policy",
'SNMP.POLICY': "snmp.Policy",
'SOFTWARE.APPLIANCEDISTRIBUTABLE': "software.ApplianceDistributable",
'SOFTWARE.DOWNLOADHISTORY': "software.DownloadHistory",
'SOFTWARE.HCLMETA': "software.HclMeta",
'SOFTWARE.HYPERFLEXBUNDLEDISTRIBUTABLE': "software.HyperflexBundleDistributable",
'SOFTWARE.HYPERFLEXDISTRIBUTABLE': "software.HyperflexDistributable",
'SOFTWARE.RELEASEMETA': "software.ReleaseMeta",
'SOFTWARE.SOLUTIONDISTRIBUTABLE': "software.SolutionDistributable",
'SOFTWARE.UCSDBUNDLEDISTRIBUTABLE': "software.UcsdBundleDistributable",
'SOFTWARE.UCSDDISTRIBUTABLE': "software.UcsdDistributable",
'SOFTWAREREPOSITORY.AUTHORIZATION': "softwarerepository.Authorization",
'SOFTWAREREPOSITORY.CACHEDIMAGE': "softwarerepository.CachedImage",
'SOFTWAREREPOSITORY.CATALOG': "softwarerepository.Catalog",
'SOFTWAREREPOSITORY.CATEGORYMAPPER': "softwarerepository.CategoryMapper",
'SOFTWAREREPOSITORY.CATEGORYMAPPERMODEL': "softwarerepository.CategoryMapperModel",
'SOFTWAREREPOSITORY.CATEGORYSUPPORTCONSTRAINT': "softwarerepository.CategorySupportConstraint",
'SOFTWAREREPOSITORY.DOWNLOADSPEC': "softwarerepository.DownloadSpec",
'SOFTWAREREPOSITORY.OPERATINGSYSTEMFILE': "softwarerepository.OperatingSystemFile",
'SOFTWAREREPOSITORY.RELEASE': "softwarerepository.Release",
'SOL.POLICY': "sol.Policy",
'SSH.POLICY': "ssh.Policy",
'STORAGE.CONTROLLER': "storage.Controller",
'STORAGE.DISKGROUP': "storage.DiskGroup",
'STORAGE.DISKSLOT': "storage.DiskSlot",
'STORAGE.DRIVEGROUP': "storage.DriveGroup",
'STORAGE.ENCLOSURE': "storage.Enclosure",
'STORAGE.ENCLOSUREDISK': "storage.EnclosureDisk",
'STORAGE.ENCLOSUREDISKSLOTEP': "storage.EnclosureDiskSlotEp",
'STORAGE.FLEXFLASHCONTROLLER': "storage.FlexFlashController",
'STORAGE.FLEXFLASHCONTROLLERPROPS': "storage.FlexFlashControllerProps",
'STORAGE.FLEXFLASHPHYSICALDRIVE': "storage.FlexFlashPhysicalDrive",
'STORAGE.FLEXFLASHVIRTUALDRIVE': "storage.FlexFlashVirtualDrive",
'STORAGE.FLEXUTILCONTROLLER': "storage.FlexUtilController",
'STORAGE.FLEXUTILPHYSICALDRIVE': "storage.FlexUtilPhysicalDrive",
'STORAGE.FLEXUTILVIRTUALDRIVE': "storage.FlexUtilVirtualDrive",
'STORAGE.HITACHIARRAY': "storage.HitachiArray",
'STORAGE.HITACHICONTROLLER': "storage.HitachiController",
'STORAGE.HITACHIDISK': "storage.HitachiDisk",
'STORAGE.HITACHIHOST': "storage.HitachiHost",
'STORAGE.HITACHIHOSTLUN': "storage.HitachiHostLun",
'STORAGE.HITACHIPARITYGROUP': "storage.HitachiParityGroup",
'STORAGE.HITACHIPOOL': "storage.HitachiPool",
'STORAGE.HITACHIPORT': "storage.HitachiPort",
'STORAGE.HITACHIVOLUME': "storage.HitachiVolume",
'STORAGE.HYPERFLEXSTORAGECONTAINER': "storage.HyperFlexStorageContainer",
'STORAGE.HYPERFLEXVOLUME': "storage.HyperFlexVolume",
'STORAGE.ITEM': "storage.Item",
'STORAGE.NETAPPAGGREGATE': "storage.NetAppAggregate",
'STORAGE.NETAPPBASEDISK': "storage.NetAppBaseDisk",
'STORAGE.NETAPPCLUSTER': "storage.NetAppCluster",
'STORAGE.NETAPPETHERNETPORT': "storage.NetAppEthernetPort",
'STORAGE.NETAPPEXPORTPOLICY': "storage.NetAppExportPolicy",
'STORAGE.NETAPPFCINTERFACE': "storage.NetAppFcInterface",
'STORAGE.NETAPPFCPORT': "storage.NetAppFcPort",
'STORAGE.NETAPPINITIATORGROUP': "storage.NetAppInitiatorGroup",
'STORAGE.NETAPPIPINTERFACE': "storage.NetAppIpInterface",
'STORAGE.NETAPPLICENSE': "storage.NetAppLicense",
'STORAGE.NETAPPLUN': "storage.NetAppLun",
'STORAGE.NETAPPLUNMAP': "storage.NetAppLunMap",
'STORAGE.NETAPPNODE': "storage.NetAppNode",
'STORAGE.NETAPPNTPSERVER': "storage.NetAppNtpServer",
'STORAGE.NETAPPSENSOR': "storage.NetAppSensor",
'STORAGE.NETAPPSTORAGEVM': "storage.NetAppStorageVm",
'STORAGE.NETAPPVOLUME': "storage.NetAppVolume",
'STORAGE.NETAPPVOLUMESNAPSHOT': "storage.NetAppVolumeSnapshot",
'STORAGE.PHYSICALDISK': "storage.PhysicalDisk",
'STORAGE.PHYSICALDISKEXTENSION': "storage.PhysicalDiskExtension",
'STORAGE.PHYSICALDISKUSAGE': "storage.PhysicalDiskUsage",
'STORAGE.PUREARRAY': "storage.PureArray",
'STORAGE.PURECONTROLLER': "storage.PureController",
'STORAGE.PUREDISK': "storage.PureDisk",
'STORAGE.PUREHOST': "storage.PureHost",
'STORAGE.PUREHOSTGROUP': "storage.PureHostGroup",
'STORAGE.PUREHOSTLUN': "storage.PureHostLun",
'STORAGE.PUREPORT': "storage.PurePort",
'STORAGE.PUREPROTECTIONGROUP': "storage.PureProtectionGroup",
'STORAGE.PUREPROTECTIONGROUPSNAPSHOT': "storage.PureProtectionGroupSnapshot",
'STORAGE.PUREREPLICATIONSCHEDULE': "storage.PureReplicationSchedule",
'STORAGE.PURESNAPSHOTSCHEDULE': "storage.PureSnapshotSchedule",
'STORAGE.PUREVOLUME': "storage.PureVolume",
'STORAGE.PUREVOLUMESNAPSHOT': "storage.PureVolumeSnapshot",
'STORAGE.SASEXPANDER': "storage.SasExpander",
'STORAGE.SASPORT': "storage.SasPort",
'STORAGE.SPAN': "storage.Span",
'STORAGE.STORAGEPOLICY': "storage.StoragePolicy",
'STORAGE.VDMEMBEREP': "storage.VdMemberEp",
'STORAGE.VIRTUALDRIVE': "storage.VirtualDrive",
'STORAGE.VIRTUALDRIVECONTAINER': "storage.VirtualDriveContainer",
'STORAGE.VIRTUALDRIVEEXTENSION': "storage.VirtualDriveExtension",
'STORAGE.VIRTUALDRIVEIDENTITY': "storage.VirtualDriveIdentity",
'SYSLOG.POLICY': "syslog.Policy",
'TAM.ADVISORYCOUNT': "tam.AdvisoryCount",
'TAM.ADVISORYDEFINITION': "tam.AdvisoryDefinition",
'TAM.ADVISORYINFO': "tam.AdvisoryInfo",
'TAM.ADVISORYINSTANCE': "tam.AdvisoryInstance",
'TAM.SECURITYADVISORY': "tam.SecurityAdvisory",
'TASK.HITACHISCOPEDINVENTORY': "task.HitachiScopedInventory",
'TASK.HYPERFLEXSCOPEDINVENTORY': "task.HyperflexScopedInventory",
'TASK.IWESCOPEDINVENTORY': "task.IweScopedInventory",
'TASK.NETAPPSCOPEDINVENTORY': "task.NetAppScopedInventory",
'TASK.PUBLICCLOUDSCOPEDINVENTORY': "task.PublicCloudScopedInventory",
'TASK.PURESCOPEDINVENTORY': "task.PureScopedInventory",
'TASK.SERVERSCOPEDINVENTORY': "task.ServerScopedInventory",
'TECHSUPPORTMANAGEMENT.COLLECTIONCONTROLPOLICY': "techsupportmanagement.CollectionControlPolicy",
'TECHSUPPORTMANAGEMENT.DOWNLOAD': "techsupportmanagement.Download",
'TECHSUPPORTMANAGEMENT.TECHSUPPORTBUNDLE': "techsupportmanagement.TechSupportBundle",
'TECHSUPPORTMANAGEMENT.TECHSUPPORTSTATUS': "techsupportmanagement.TechSupportStatus",
'TERMINAL.AUDITLOG': "terminal.AuditLog",
'TERRAFORM.EXECUTOR': "terraform.Executor",
'THERMAL.POLICY': "thermal.Policy",
'TOP.SYSTEM': "top.System",
'UCSD.BACKUPINFO': "ucsd.BackupInfo",
'UUIDPOOL.BLOCK': "uuidpool.Block",
'UUIDPOOL.POOL': "uuidpool.Pool",
'UUIDPOOL.POOLMEMBER': "uuidpool.PoolMember",
'UUIDPOOL.UNIVERSE': "uuidpool.Universe",
'UUIDPOOL.UUIDLEASE': "uuidpool.UuidLease",
'VIRTUALIZATION.CISCOHYPERVISORMANAGER': "virtualization.CiscoHypervisorManager",
'VIRTUALIZATION.ESXICONSOLE': "virtualization.EsxiConsole",
'VIRTUALIZATION.HOST': "virtualization.Host",
'VIRTUALIZATION.IWECLUSTER': "virtualization.IweCluster",
'VIRTUALIZATION.IWEDATACENTER': "virtualization.IweDatacenter",
'VIRTUALIZATION.IWEDVUPLINK': "virtualization.IweDvUplink",
'VIRTUALIZATION.IWEDVSWITCH': "virtualization.IweDvswitch",
'VIRTUALIZATION.IWEHOST': "virtualization.IweHost",
'VIRTUALIZATION.IWEHOSTINTERFACE': "virtualization.IweHostInterface",
'VIRTUALIZATION.IWEHOSTVSWITCH': "virtualization.IweHostVswitch",
'VIRTUALIZATION.IWENETWORK': "virtualization.IweNetwork",
'VIRTUALIZATION.IWEVIRTUALDISK': "virtualization.IweVirtualDisk",
'VIRTUALIZATION.IWEVIRTUALMACHINE': "virtualization.IweVirtualMachine",
'VIRTUALIZATION.IWEVIRTUALMACHINENETWORKINTERFACE': "virtualization.IweVirtualMachineNetworkInterface",
'VIRTUALIZATION.VIRTUALDISK': "virtualization.VirtualDisk",
'VIRTUALIZATION.VIRTUALMACHINE': "virtualization.VirtualMachine",
'VIRTUALIZATION.VIRTUALNETWORK': "virtualization.VirtualNetwork",
'VIRTUALIZATION.VMWARECLUSTER': "virtualization.VmwareCluster",
'VIRTUALIZATION.VMWAREDATACENTER': "virtualization.VmwareDatacenter",
'VIRTUALIZATION.VMWAREDATASTORE': "virtualization.VmwareDatastore",
'VIRTUALIZATION.VMWAREDATASTORECLUSTER': "virtualization.VmwareDatastoreCluster",
'VIRTUALIZATION.VMWAREDISTRIBUTEDNETWORK': "virtualization.VmwareDistributedNetwork",
'VIRTUALIZATION.VMWAREDISTRIBUTEDSWITCH': "virtualization.VmwareDistributedSwitch",
'VIRTUALIZATION.VMWAREFOLDER': "virtualization.VmwareFolder",
'VIRTUALIZATION.VMWAREHOST': "virtualization.VmwareHost",
'VIRTUALIZATION.VMWAREKERNELNETWORK': "virtualization.VmwareKernelNetwork",
'VIRTUALIZATION.VMWARENETWORK': "virtualization.VmwareNetwork",
'VIRTUALIZATION.VMWAREPHYSICALNETWORKINTERFACE': "virtualization.VmwarePhysicalNetworkInterface",
'VIRTUALIZATION.VMWAREUPLINKPORT': "virtualization.VmwareUplinkPort",
'VIRTUALIZATION.VMWAREVCENTER': "virtualization.VmwareVcenter",
'VIRTUALIZATION.VMWAREVIRTUALDISK': "virtualization.VmwareVirtualDisk",
'VIRTUALIZATION.VMWAREVIRTUALMACHINE': "virtualization.VmwareVirtualMachine",
'VIRTUALIZATION.VMWAREVIRTUALMACHINESNAPSHOT': "virtualization.VmwareVirtualMachineSnapshot",
'VIRTUALIZATION.VMWAREVIRTUALNETWORKINTERFACE': "virtualization.VmwareVirtualNetworkInterface",
'VIRTUALIZATION.VMWAREVIRTUALSWITCH': "virtualization.VmwareVirtualSwitch",
'VMEDIA.POLICY': "vmedia.Policy",
'VMRC.CONSOLE': "vmrc.Console",
'VNC.CONSOLE': "vnc.Console",
'VNIC.ETHADAPTERPOLICY': "vnic.EthAdapterPolicy",
'VNIC.ETHIF': "vnic.EthIf",
'VNIC.ETHNETWORKPOLICY': "vnic.EthNetworkPolicy",
'VNIC.ETHQOSPOLICY': "vnic.EthQosPolicy",
'VNIC.FCADAPTERPOLICY': "vnic.FcAdapterPolicy",
'VNIC.FCIF': "vnic.FcIf",
'VNIC.FCNETWORKPOLICY': "vnic.FcNetworkPolicy",
'VNIC.FCQOSPOLICY': "vnic.FcQosPolicy",
'VNIC.ISCSIADAPTERPOLICY': "vnic.IscsiAdapterPolicy",
'VNIC.ISCSIBOOTPOLICY': "vnic.IscsiBootPolicy",
'VNIC.ISCSISTATICTARGETPOLICY': "vnic.IscsiStaticTargetPolicy",
'VNIC.LANCONNECTIVITYPOLICY': "vnic.LanConnectivityPolicy",
'VNIC.LCPSTATUS': "vnic.LcpStatus",
'VNIC.SANCONNECTIVITYPOLICY': "vnic.SanConnectivityPolicy",
'VNIC.SCPSTATUS': "vnic.ScpStatus",
'VRF.VRF': "vrf.Vrf",
'WORKFLOW.ANSIBLEBATCHEXECUTOR': "workflow.AnsibleBatchExecutor",
'WORKFLOW.BATCHAPIEXECUTOR': "workflow.BatchApiExecutor",
'WORKFLOW.BUILDTASKMETA': "workflow.BuildTaskMeta",
'WORKFLOW.BUILDTASKMETAOWNER': "workflow.BuildTaskMetaOwner",
'WORKFLOW.CATALOG': "workflow.Catalog",
'WORKFLOW.CUSTOMDATATYPEDEFINITION': "workflow.CustomDataTypeDefinition",
'WORKFLOW.ERRORRESPONSEHANDLER': "workflow.ErrorResponseHandler",
'WORKFLOW.PENDINGDYNAMICWORKFLOWINFO': "workflow.PendingDynamicWorkflowInfo",
'WORKFLOW.ROLLBACKWORKFLOW': "workflow.RollbackWorkflow",
'WORKFLOW.SOLUTIONACTIONDEFINITION': "workflow.SolutionActionDefinition",
'WORKFLOW.SOLUTIONACTIONINSTANCE': "workflow.SolutionActionInstance",
'WORKFLOW.SOLUTIONDEFINITION': "workflow.SolutionDefinition",
'WORKFLOW.SOLUTIONINSTANCE': "workflow.SolutionInstance",
'WORKFLOW.SOLUTIONOUTPUT': "workflow.SolutionOutput",
'WORKFLOW.SSHBATCHEXECUTOR': "workflow.SshBatchExecutor",
'WORKFLOW.TASKDEBUGLOG': "workflow.TaskDebugLog",
'WORKFLOW.TASKDEFINITION': "workflow.TaskDefinition",
'WORKFLOW.TASKINFO': "workflow.TaskInfo",
'WORKFLOW.TASKMETADATA': "workflow.TaskMetadata",
'WORKFLOW.TASKNOTIFICATION': "workflow.TaskNotification",
'WORKFLOW.TEMPLATEEVALUATION': "workflow.TemplateEvaluation",
'WORKFLOW.TEMPLATEFUNCTIONMETA': "workflow.TemplateFunctionMeta",
'WORKFLOW.WORKFLOWDEFINITION': "workflow.WorkflowDefinition",
'WORKFLOW.WORKFLOWINFO': "workflow.WorkflowInfo",
'WORKFLOW.WORKFLOWMETA': "workflow.WorkflowMeta",
'WORKFLOW.WORKFLOWMETADATA': "workflow.WorkflowMetadata",
'WORKFLOW.WORKFLOWNOTIFICATION': "workflow.WorkflowNotification",
},
}
validations = {
('uuid',): {
'regex': {
'pattern': r'^$|^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$',
},
},
}
@cached_property
def additional_properties_type():
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,)
_nullable = False
    @cached_property
    def openapi_types():
        """
        Map each python attribute name to a tuple of its allowed types.

        This must be a method (resolved lazily) because some property types
        refer to classes that are only importable after module load.
        """
        lazy_import()
        return {
            'class_id': (str,),
            'moid': (str,),
            'selector': (str,),
            'link': (str,),
            'account_moid': (str,),
            'create_time': (datetime,),
            'domain_group_moid': (str,),
            'mod_time': (datetime,),
            'owners': ([str], none_type,),
            'shared_scope': (str,),
            'tags': ([MoTag], none_type,),
            'version_context': (MoVersionContext,),
            'ancestors': ([MoBaseMoRelationship], none_type,),
            'parent': (MoBaseMoRelationship,),
            'permission_resources': ([MoBaseMoRelationship], none_type,),
            'display_names': (DisplayNames,),
            'registered_device': (AssetDeviceRegistrationRelationship,),
            'name': (str,),
            'uuid': (str,),
            'identity': (str,),
            'cluster_count': (int,),
            'datastore_count': (int,),
            'host_count': (int,),
            'inventory_path': (str,),
            'network_count': (int,),
            'vm_count': (int,),
            'vm_template_count': (int,),
            'hypervisor_manager': (VirtualizationVmwareVcenterRelationship,),
            'parent_folder': (VirtualizationVmwareFolderRelationship,),
            'object_type': (str,),
        }
@cached_property
def discriminator():
lazy_import()
val = {
'mo.MoRef': MoMoRef,
'virtualization.VmwareDatacenter': VirtualizationVmwareDatacenter,
}
if not val:
return None
return {'class_id': val}
attribute_map = {
'class_id': 'ClassId',
'moid': 'Moid',
'selector': 'Selector',
'link': 'link',
'account_moid': 'AccountMoid',
'create_time': 'CreateTime',
'domain_group_moid': 'DomainGroupMoid',
'mod_time': 'ModTime',
'owners': 'Owners',
'shared_scope': 'SharedScope',
'tags': 'Tags',
'version_context': 'VersionContext',
'ancestors': 'Ancestors',
'parent': 'Parent',
'permission_resources': 'PermissionResources',
'display_names': 'DisplayNames',
'registered_device': 'RegisteredDevice',
'name': 'Name',
'uuid': 'Uuid',
'identity': 'Identity',
'cluster_count': 'ClusterCount',
'datastore_count': 'DatastoreCount',
'host_count': 'HostCount',
'inventory_path': 'InventoryPath',
'network_count': 'NetworkCount',
'vm_count': 'VmCount',
'vm_template_count': 'VmTemplateCount',
'hypervisor_manager': 'HypervisorManager',
'parent_folder': 'ParentFolder',
'object_type': 'ObjectType',
}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
'_composed_instances',
'_var_name_to_model_instances',
'_additional_properties_model_instances',
])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """
        Construct the composed model from keyword arguments only.

        ``class_id`` defaults to ``"mo.MoRef"``; the remaining underscore
        kwargs are validation/deserialization controls consumed here and
        not stored as model properties.  Positional arguments are rejected.
        """
        # Keep 'class_id' in kwargs (get, not pop) so composed validation
        # still sees it; pop only the private control arguments.
        class_id = kwargs.get('class_id', "mo.MoRef")
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Track visited classes to break recursion through composed schemas.
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        constant_args = {
            '_check_type': _check_type,
            '_path_to_item': _path_to_item,
            '_spec_property_naming': _spec_property_naming,
            '_configuration': _configuration,
            '_visited_composed_classes': self._visited_composed_classes,
        }
        required_args = {
            'class_id': class_id,
        }
        model_args = {}
        model_args.update(required_args)
        model_args.update(kwargs)
        # Validate against the oneOf/anyOf/allOf composition and get back
        # the instantiated component models.
        composed_info = validate_get_composed_info(
            constant_args, model_args, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        unused_args = composed_info[3]

        for var_name, var_value in required_args.items():
            setattr(self, var_name, var_value)
        for var_name, var_value in kwargs.items():
            # Optionally drop keys that no composed schema recognized.
            if var_name in unused_args and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        not self._additional_properties_model_instances:
                continue
            setattr(self, var_name, var_value)
@cached_property
def _composed_schemas():
# loading
lazy_import()
return {
'anyOf': [
],
'allOf': [
],
'oneOf': [
MoMoRef,
VirtualizationVmwareDatacenter,
none_type,
],
}
| true | true |
1c455fa5c65ef452e853463256b07757be1b7bac | 41,048 | py | Python | lib/matplotlib/lines.py | SoftwareDev/mat-plot-lib | abaf94859d5ef6e653a4d8a7ce2c59cea1724a57 | [
"MIT",
"BSD-3-Clause"
] | 3 | 2015-02-25T21:51:26.000Z | 2020-01-05T14:11:52.000Z | lib/matplotlib/lines.py | SoftwareDev/mat-plot-lib | abaf94859d5ef6e653a4d8a7ce2c59cea1724a57 | [
"MIT",
"BSD-3-Clause"
] | 7 | 2015-05-08T19:36:25.000Z | 2015-06-30T15:32:17.000Z | lib/matplotlib/lines.py | OceanWolf/matplotlib | a429c415bdb6e54ccfe004a48fdc034ea8e9d329 | [
"MIT",
"BSD-3-Clause"
] | 6 | 2015-06-05T03:34:06.000Z | 2022-01-25T09:07:10.000Z | """
This module contains all the 2D line class which can draw with a
variety of line styles, markers and colors.
"""
# TODO: expose cap and join style attrs
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import warnings
import numpy as np
from numpy import ma
from matplotlib import verbose
from . import artist
from .artist import Artist
from .cbook import iterable, is_string_like, is_numlike, ls_mapper
from .colors import colorConverter
from .path import Path
from .transforms import Bbox, TransformedPath, IdentityTransform
from matplotlib import rcParams
from .artist import allow_rasterization
from matplotlib import docstring
from matplotlib.markers import MarkerStyle
# Imported here for backward compatibility, even though they don't
# really belong.
from matplotlib.markers import TICKLEFT, TICKRIGHT, TICKUP, TICKDOWN
from matplotlib.markers import CARETLEFT, CARETRIGHT, CARETUP, CARETDOWN
def segment_hits(cx, cy, x, y, radius):
    """
    Return the indices of vertices/segments of the polyline (*x*, *y*)
    that lie within *radius* of the point (*cx*, *cy*).

    Parameters
    ----------
    cx, cy : float
        Coordinates of the test point (same units as *x*, *y*; typically
        pixels).
    x, y : ndarray
        1D vertex coordinates of the polyline.
    radius : float
        Hit-test distance.

    Returns
    -------
    ndarray
        Indices of vertex hits followed by segment hits (may repeat a
        given index once per category).
    """
    # A single vertex cannot form a segment; fall back to a point test.
    if len(x) < 2:
        res, = np.nonzero((cx - x) ** 2 + (cy - y) ** 2 <= radius ** 2)
        return res

    # Segment start points: every vertex except the last.
    xr, yr = x[:-1], y[:-1]

    # Only consider segments whose closest point to C falls within the
    # segment itself, i.e. the projection parameter u lies in [0, 1].
    dx, dy = x[1:] - xr, y[1:] - yr
    Lnorm_sq = dx ** 2 + dy ** 2  # Possibly want to eliminate Lnorm==0
    u = ((cx - xr) * dx + (cy - yr) * dy) / Lnorm_sq
    candidates = (u >= 0) & (u <= 1)

    # Note that there is a little area near one side of each point
    # which will be near neither segment, and another which will
    # be near both, depending on the angle of the lines.  The
    # following radius test eliminates these ambiguities.
    point_hits = (cx - x) ** 2 + (cy - y) ** 2 <= radius ** 2
    candidates = candidates & ~(point_hits[:-1] | point_hits[1:])

    # For those candidates which remain, determine how far they lie away
    # from the line.
    px, py = xr + u * dx, yr + u * dy
    line_hits = (cx - px) ** 2 + (cy - py) ** 2 <= radius ** 2
    line_hits = line_hits & candidates

    points, = point_hits.ravel().nonzero()
    lines, = line_hits.ravel().nonzero()
    return np.concatenate((points, lines))
class Line2D(Artist):
    """
    A line - the line can have both a solid linestyle connecting all
    the vertices, and a marker at each vertex.  Additionally, the
    drawing of the solid line is influenced by the drawstyle, e.g., one
    can create "stepped" lines in various styles.
    """
    # Map linestyle strings to the name of the draw method used for them.
    lineStyles = _lineStyles = {  # hidden names deprecated
        '-': '_draw_solid',
        '--': '_draw_dashed',
        '-.': '_draw_dash_dot',
        ':': '_draw_dotted',
        'None': '_draw_nothing',
        ' ': '_draw_nothing',
        '': '_draw_nothing',
    }

    # Long-form and short-form drawstyle names, merged into drawStyles below.
    _drawStyles_l = {
        'default': '_draw_lines',
        'steps-mid': '_draw_steps_mid',
        'steps-pre': '_draw_steps_pre',
        'steps-post': '_draw_steps_post',
    }

    _drawStyles_s = {
        'steps': '_draw_steps_pre',
    }

    drawStyles = {}
    drawStyles.update(_drawStyles_l)
    drawStyles.update(_drawStyles_s)
    # Need a list ordered with long names first:
    drawStyleKeys = (list(six.iterkeys(_drawStyles_l)) +
                     list(six.iterkeys(_drawStyles_s)))

    # Referenced here to maintain API. These are defined in
    # MarkerStyle
    markers = MarkerStyle.markers
    filled_markers = MarkerStyle.filled_markers
    fillStyles = MarkerStyle.fillstyles

    # Default z-order for lines and the valid cap/join style names.
    zorder = 2
    validCap = ('butt', 'round', 'projecting')
    validJoin = ('miter', 'round', 'bevel')
def __str__(self):
if self._label != "":
return "Line2D(%s)" % (self._label)
elif hasattr(self, '_x') and len(self._x) > 3:
return "Line2D((%g,%g),(%g,%g),...,(%g,%g))"\
% (self._x[0], self._y[0], self._x[0],
self._y[0], self._x[-1], self._y[-1])
elif hasattr(self, '_x'):
return "Line2D(%s)"\
% (",".join(["(%g,%g)" % (x, y) for x, y
in zip(self._x, self._y)]))
else:
return "Line2D()"
    def __init__(self, xdata, ydata,
                 linewidth=None,  # all Nones default to rc
                 linestyle=None,
                 color=None,
                 marker=None,
                 markersize=None,
                 markeredgewidth=None,
                 markeredgecolor=None,
                 markerfacecolor=None,
                 markerfacecoloralt='none',
                 fillstyle='full',
                 antialiased=None,
                 dash_capstyle=None,
                 solid_capstyle=None,
                 dash_joinstyle=None,
                 solid_joinstyle=None,
                 pickradius=5,
                 drawstyle=None,
                 markevery=None,
                 **kwargs
                 ):
        """
        Create a :class:`~matplotlib.lines.Line2D` instance with *x*
        and *y* data in sequences *xdata*, *ydata*.

        The kwargs are :class:`~matplotlib.lines.Line2D` properties:

        %(Line2D)s

        See :meth:`set_linestyle` for a description of the line styles,
        :meth:`set_marker` for a description of the markers, and
        :meth:`set_drawstyle` for a description of the draw styles.

        """
        Artist.__init__(self)

        #convert sequences to numpy arrays
        if not iterable(xdata):
            raise RuntimeError('xdata must be a sequence')
        if not iterable(ydata):
            raise RuntimeError('ydata must be a sequence')

        # Fill in rc defaults for any style argument left as None.
        if linewidth is None:
            linewidth = rcParams['lines.linewidth']

        if linestyle is None:
            linestyle = rcParams['lines.linestyle']
        if marker is None:
            marker = rcParams['lines.marker']
        if color is None:
            color = rcParams['lines.color']

        if markersize is None:
            markersize = rcParams['lines.markersize']
        if antialiased is None:
            antialiased = rcParams['lines.antialiased']
        if dash_capstyle is None:
            dash_capstyle = rcParams['lines.dash_capstyle']
        if dash_joinstyle is None:
            dash_joinstyle = rcParams['lines.dash_joinstyle']
        if solid_capstyle is None:
            solid_capstyle = rcParams['lines.solid_capstyle']
        if solid_joinstyle is None:
            solid_joinstyle = rcParams['lines.solid_joinstyle']

        if drawstyle is None:
            drawstyle = 'default'

        self.set_dash_capstyle(dash_capstyle)
        self.set_dash_joinstyle(dash_joinstyle)
        self.set_solid_capstyle(solid_capstyle)
        self.set_solid_joinstyle(solid_joinstyle)

        self.set_linestyle(linestyle)
        self.set_drawstyle(drawstyle)
        self.set_linewidth(linewidth)
        self.set_color(color)
        # _marker must exist before set_marker/set_fillstyle can run.
        self._marker = MarkerStyle()
        self.set_marker(marker)
        self.set_markevery(markevery)
        self.set_antialiased(antialiased)
        self.set_markersize(markersize)

        self._dashSeq = None

        self.set_markerfacecolor(markerfacecolor)
        self.set_markerfacecoloralt(markerfacecoloralt)
        self.set_markeredgecolor(markeredgecolor)
        self.set_markeredgewidth(markeredgewidth)
        self.set_fillstyle(fillstyle)

        self.verticalOffset = None

        # update kwargs before updating data to give the caller a
        # chance to init axes (and hence unit support)
        self.update(kwargs)
        self.pickradius = pickradius
        self.ind_offset = 0
        # A numeric picker doubles as the pick radius.
        if is_numlike(self._picker):
            self.pickradius = self._picker

        self._xorig = np.asarray([])
        self._yorig = np.asarray([])
        self._invalidx = True
        self._invalidy = True
        self.set_data(xdata, ydata)
def contains(self, mouseevent):
"""
Test whether the mouse event occurred on the line. The pick
radius determines the precision of the location test (usually
within five points of the value). Use
:meth:`~matplotlib.lines.Line2D.get_pickradius` or
:meth:`~matplotlib.lines.Line2D.set_pickradius` to view or
modify it.
Returns *True* if any values are within the radius along with
``{'ind': pointlist}``, where *pointlist* is the set of points
within the radius.
TODO: sort returned indices by distance
"""
if six.callable(self._contains):
return self._contains(self, mouseevent)
if not is_numlike(self.pickradius):
raise ValueError("pick radius should be a distance")
# Make sure we have data to plot
if self._invalidy or self._invalidx:
self.recache()
if len(self._xy) == 0:
return False, {}
# Convert points to pixels
transformed_path = self._get_transformed_path()
path, affine = transformed_path.get_transformed_path_and_affine()
path = affine.transform_path(path)
xy = path.vertices
xt = xy[:, 0]
yt = xy[:, 1]
# Convert pick radius from points to pixels
if self.figure is None:
warnings.warn('no figure set when check if mouse is on line')
pixels = self.pickradius
else:
pixels = self.figure.dpi / 72. * self.pickradius
# the math involved in checking for containment (here and inside of
# segment_hits) assumes that it is OK to overflow. In case the
# application has set the error flags such that an exception is raised
# on overflow, we temporarily set the appropriate error flags here and
# set them back when we are finished.
olderrflags = np.seterr(all='ignore')
try:
# Check for collision
if self._linestyle in ['None', None]:
# If no line, return the nearby point(s)
d = (xt - mouseevent.x) ** 2 + (yt - mouseevent.y) ** 2
ind, = np.nonzero(np.less_equal(d, pixels ** 2))
else:
# If line, return the nearby segment(s)
ind = segment_hits(mouseevent.x, mouseevent.y, xt, yt, pixels)
finally:
np.seterr(**olderrflags)
ind += self.ind_offset
# Debugging message
if False and self._label != '':
print("Checking line", self._label,
"at", mouseevent.x, mouseevent.y)
print('xt', xt)
print('yt', yt)
#print 'dx,dy', (xt-mouseevent.x)**2., (yt-mouseevent.y)**2.
print('ind', ind)
# Return the point(s) within radius
return len(ind) > 0, dict(ind=ind)
def get_pickradius(self):
"""return the pick radius used for containment tests"""
return self.pickradius
def set_pickradius(self, d):
"""Sets the pick radius used for containment tests
ACCEPTS: float distance in points
"""
self.pickradius = d
def get_fillstyle(self):
"""
return the marker fillstyle
"""
return self._marker.get_fillstyle()
def set_fillstyle(self, fs):
"""
Set the marker fill style; 'full' means fill the whole marker.
'none' means no filling; other options are for half-filled markers.
ACCEPTS: ['full' | 'left' | 'right' | 'bottom' | 'top' | 'none']
"""
self._marker.set_fillstyle(fs)
def set_markevery(self, every):
"""
Set the markevery property to subsample the plot when using
markers. e.g., if ``markevery=5``, every 5-th marker will be
plotted. *every* can be
None
Every point will be plotted
an integer N
Every N-th marker will be plotted starting with marker 0
A length-2 tuple of integers
every=(start, N) will start at point start and plot every N-th
marker
ACCEPTS: None | integer | (startind, stride)
"""
self._markevery = every
def get_markevery(self):
"""return the markevery setting"""
return self._markevery
def set_picker(self, p):
"""Sets the event picker details for the line.
ACCEPTS: float distance in points or callable pick function
``fn(artist, event)``
"""
if six.callable(p):
self._contains = p
else:
self.pickradius = p
self._picker = p
    def get_window_extent(self, renderer):
        """
        Return the bounding :class:`~matplotlib.transforms.Bbox` of the
        line in display (pixel) space, padded by half the marker size
        when markers are drawn.
        """
        bbox = Bbox([[0, 0], [0, 0]])
        trans_data_to_xy = self.get_transform().transform
        bbox.update_from_data_xy(trans_data_to_xy(self.get_xydata()),
                                 ignore=True)
        # correct for marker size, if any
        if self._marker:
            # markersize is in points; convert to pixels via the figure dpi.
            ms = (self._markersize / 72.0 * self.figure.dpi) * 0.5
            bbox = bbox.padded(ms)
        return bbox
    def set_axes(self, ax):
        Artist.set_axes(self, ax)
        # Re-run unit conversion (recache) whenever either axis changes
        # its units.
        if ax.xaxis is not None:
            self._xcid = ax.xaxis.callbacks.connect('units',
                                                    self.recache_always)
        if ax.yaxis is not None:
            self._ycid = ax.yaxis.callbacks.connect('units',
                                                    self.recache_always)
    # Inherit the docstring of the base-class implementation.
    set_axes.__doc__ = Artist.set_axes.__doc__
def set_data(self, *args):
"""
Set the x and y data
ACCEPTS: 2D array (rows are x, y) or two 1D arrays
"""
if len(args) == 1:
x, y = args[0]
else:
x, y = args
self.set_xdata(x)
self.set_ydata(y)
def recache_always(self):
self.recache(always=True)
    def recache(self, always=False):
        """
        Rebuild the cached, unit-converted ``_xy`` array and the internal
        :class:`~matplotlib.path.Path` from ``_xorig``/``_yorig``.

        When *always* is False, only the axes flagged invalid
        (``_invalidx``/``_invalidy``) are reconverted.
        """
        if always or self._invalidx:
            xconv = self.convert_xunits(self._xorig)
            # Preserve maskedness through the unit conversion.
            if ma.isMaskedArray(self._xorig):
                x = ma.asarray(xconv, np.float_)
            else:
                x = np.asarray(xconv, np.float_)
            x = x.ravel()
        else:
            x = self._x
        if always or self._invalidy:
            yconv = self.convert_yunits(self._yorig)
            if ma.isMaskedArray(self._yorig):
                y = ma.asarray(yconv, np.float_)
            else:
                y = np.asarray(yconv, np.float_)
            y = y.ravel()
        else:
            y = self._y

        # Broadcast a scalar coordinate against the other axis.
        if len(x) == 1 and len(y) > 1:
            x = x * np.ones(y.shape, np.float_)
        if len(y) == 1 and len(x) > 1:
            y = y * np.ones(x.shape, np.float_)

        if len(x) != len(y):
            raise RuntimeError('xdata and ydata must be the same length')

        x = x.reshape((len(x), 1))
        y = y.reshape((len(y), 1))

        if ma.isMaskedArray(x) or ma.isMaskedArray(y):
            self._xy = ma.concatenate((x, y), 1)
        else:
            self._xy = np.concatenate((x, y), 1)
        self._x = self._xy[:, 0]  # just a view
        self._y = self._xy[:, 1]  # just a view

        # Large, sorted data on a plain linear rectilinear axes can be
        # drawn from a slice of the data only (see draw()).
        self._subslice = False
        if (self.axes and len(x) > 100 and self._is_sorted(x) and
                self.axes.name == 'rectilinear' and
                self.axes.get_xscale() == 'linear' and
                self._markevery is None and
                self.get_clip_on() is True):
            self._subslice = True
        # Keep any interpolation step count from a previous path.
        if hasattr(self, '_path'):
            interpolation_steps = self._path._interpolation_steps
        else:
            interpolation_steps = 1
        self._path = Path(self._xy, None, interpolation_steps)
        self._transformed_path = None
        self._invalidx = False
        self._invalidy = False
    def _transform_path(self, subslice=None):
        """
        Puts a TransformedPath instance at self._transformed_path,
        all invalidation of the transform is then handled by the
        TransformedPath instance.
        """
        # Masked arrays are now handled by the Path class itself
        if subslice is not None:
            # Build a path from just the requested slice of the data
            # (used by draw() when subslicing is enabled).
            _path = Path(self._xy[subslice, :])
        else:
            _path = self._path
        self._transformed_path = TransformedPath(_path, self.get_transform())

    def _get_transformed_path(self):
        """
        Return the :class:`~matplotlib.transforms.TransformedPath` instance
        of this line.
        """
        # Built lazily; reset to None by recache() and set_transform().
        if self._transformed_path is None:
            self._transform_path()
        return self._transformed_path
def set_transform(self, t):
"""
set the Transformation instance used by this artist
ACCEPTS: a :class:`matplotlib.transforms.Transform` instance
"""
Artist.set_transform(self, t)
self._invalidx = True
self._invalidy = True
def _is_sorted(self, x):
"""return true if x is sorted"""
if len(x) < 2:
return 1
return np.amin(x[1:] - x[0:-1]) >= 0
    @allow_rasterization
    def draw(self, renderer):
        """
        Draw the Line with `renderer` unless visibility is False.

        Renders the line itself (dispatching through the linestyle and
        drawstyle tables) and then the markers, each with its own
        graphics context.
        """
        if not self.get_visible():
            return

        if self._invalidy or self._invalidx:
            self.recache()
        self.ind_offset = 0  # Needed for contains() method.
        if self._subslice and self.axes:
            # Restrict drawing to the visible x-range; recache() only
            # enables _subslice for long, x-sorted data.
            # Need to handle monotonically decreasing case also...
            x0, x1 = self.axes.get_xbound()
            i0, = self._x.searchsorted([x0], 'left')
            i1, = self._x.searchsorted([x1], 'right')
            subslice = slice(max(i0 - 1, 0), i1 + 1)
            self.ind_offset = subslice.start
            self._transform_path(subslice)

        transf_path = self._get_transformed_path()

        if self.get_path_effects():
            from matplotlib.patheffects import PathEffectRenderer
            renderer = PathEffectRenderer(self.get_path_effects(), renderer)

        renderer.open_group('line2d', self.get_gid())
        gc = renderer.new_gc()
        self._set_gc_clip(gc)

        ln_color_rgba = self._get_rgba_ln_color()
        gc.set_foreground(ln_color_rgba, isRGBA=True)
        gc.set_alpha(ln_color_rgba[3])

        gc.set_antialiased(self._antialiased)
        gc.set_linewidth(self._linewidth)

        # Dashed and solid segments use independently configurable
        # cap/join styles.
        if self.is_dashed():
            cap = self._dashcapstyle
            join = self._dashjoinstyle
        else:
            cap = self._solidcapstyle
            join = self._solidjoinstyle
        gc.set_joinstyle(join)
        gc.set_capstyle(cap)
        gc.set_snap(self.get_snap())
        if self.get_sketch_params() is not None:
            gc.set_sketch_params(*self.get_sketch_params())

        # Dispatch to _draw_solid/_draw_dashed/... via the linestyle
        # table, then through the drawstyle (steps) table.
        funcname = self._lineStyles.get(self._linestyle, '_draw_nothing')
        if funcname != '_draw_nothing':
            tpath, affine = transf_path.get_transformed_path_and_affine()
            if len(tpath.vertices):
                self._lineFunc = getattr(self, funcname)
                funcname = self.drawStyles.get(self._drawstyle, '_draw_lines')
                drawFunc = getattr(self, funcname)
                drawFunc(renderer, gc, tpath, affine.frozen())

        # Markers are drawn on top of the line with their own gc.
        if self._marker:
            gc = renderer.new_gc()
            self._set_gc_clip(gc)
            rgbaFace = self._get_rgba_face()
            rgbaFaceAlt = self._get_rgba_face(alt=True)
            edgecolor = self.get_markeredgecolor()
            if is_string_like(edgecolor) and edgecolor.lower() == 'none':
                gc.set_linewidth(0)
                gc.set_foreground(rgbaFace, isRGBA=True)
            else:
                gc.set_foreground(edgecolor)
                gc.set_linewidth(self._markeredgewidth)

            marker = self._marker
            tpath, affine = transf_path.get_transformed_points_and_affine()
            if len(tpath.vertices):
                # subsample the markers if markevery is not None
                markevery = self.get_markevery()
                if markevery is not None:
                    # markevery may be a stride or a (startind, stride)
                    # pair.
                    if iterable(markevery):
                        startind, stride = markevery
                    else:
                        startind, stride = 0, markevery
                    if tpath.codes is not None:
                        codes = tpath.codes[startind::stride]
                    else:
                        codes = None
                    vertices = tpath.vertices[startind::stride]
                    subsampled = Path(vertices, codes)
                else:
                    subsampled = tpath

                # A float snap threshold means "snap when markers are at
                # least this many pixels".
                snap = marker.get_snap_threshold()
                if type(snap) == float:
                    snap = renderer.points_to_pixels(self._markersize) >= snap
                gc.set_snap(snap)
                gc.set_joinstyle(marker.get_joinstyle())
                gc.set_capstyle(marker.get_capstyle())
                marker_path = marker.get_path()
                marker_trans = marker.get_transform()
                w = renderer.points_to_pixels(self._markersize)
                if marker.get_marker() != ',':
                    # Don't scale for pixels, and don't stroke them
                    marker_trans = marker_trans.scale(w)
                else:
                    gc.set_linewidth(0)
                if rgbaFace is not None:
                    gc.set_alpha(rgbaFace[3])
                renderer.draw_markers(gc, marker_path, marker_trans,
                                      subsampled, affine.frozen(),
                                      rgbaFace)
                # Half-filled markers use an alternate path/face color.
                alt_marker_path = marker.get_alt_path()
                if alt_marker_path:
                    if rgbaFaceAlt is not None:
                        gc.set_alpha(rgbaFaceAlt[3])
                    alt_marker_trans = marker.get_alt_transform()
                    alt_marker_trans = alt_marker_trans.scale(w)
                    renderer.draw_markers(
                        gc, alt_marker_path, alt_marker_trans, subsampled,
                        affine.frozen(), rgbaFaceAlt)

            gc.restore()

        gc.restore()
        renderer.close_group('line2d')
    def get_antialiased(self):
        """Return whether antialiased rendering is used."""
        return self._antialiased

    def get_color(self):
        """Return the line color."""
        return self._color

    def get_drawstyle(self):
        """Return the drawstyle ('default' or a 'steps' variant)."""
        return self._drawstyle

    def get_linestyle(self):
        """Return the linestyle, e.g. '-', '--', ':' or 'None'."""
        return self._linestyle

    def get_linewidth(self):
        """Return the line width in points."""
        return self._linewidth

    def get_marker(self):
        """Return the marker (delegates to the MarkerStyle)."""
        return self._marker.get_marker()

    def get_markeredgecolor(self):
        """
        Return the marker edge color, resolving the 'auto' placeholder
        based on the marker type and fill style.
        """
        mec = self._markeredgecolor
        if (is_string_like(mec) and mec == 'auto'):
            # Pixel/point markers take the line color directly.
            if self._marker.get_marker() in ('.', ','):
                return self._color
            if self._marker.is_filled() and self.get_fillstyle() != 'none':
                return 'k'  # Bad hard-wired default...
            else:
                return self._color
        else:
            return mec

    def get_markeredgewidth(self):
        """Return the marker edge width in points."""
        return self._markeredgewidth

    def _get_markerfacecolor(self, alt=False):
        """
        Return the primary (or, with *alt*, the alternate) marker face
        color, resolving 'auto' to 'none' or the line color depending
        on the fill style.
        """
        if alt:
            fc = self._markerfacecoloralt
        else:
            fc = self._markerfacecolor

        if (is_string_like(fc) and fc.lower() == 'auto'):
            if self.get_fillstyle() == 'none':
                return 'none'
            else:
                return self._color
        else:
            return fc

    def get_markerfacecolor(self):
        """Return the marker face color."""
        return self._get_markerfacecolor(alt=False)

    def get_markerfacecoloralt(self):
        """Return the alternate marker face color."""
        return self._get_markerfacecolor(alt=True)

    def get_markersize(self):
        """Return the marker size in points."""
        return self._markersize

    def get_data(self, orig=True):
        """
        Return the xdata, ydata.

        If *orig* is *True*, return the original data.
        """
        return self.get_xdata(orig=orig), self.get_ydata(orig=orig)

    def get_xdata(self, orig=True):
        """
        Return the xdata.

        If *orig* is *True*, return the original data, else the
        processed (unit-converted, recached) data.
        """
        if orig:
            return self._xorig
        if self._invalidx:
            self.recache()
        return self._x

    def get_ydata(self, orig=True):
        """
        Return the ydata.

        If *orig* is *True*, return the original data, else the
        processed (unit-converted, recached) data.
        """
        if orig:
            return self._yorig
        if self._invalidy:
            self.recache()
        return self._y

    def get_path(self):
        """
        Return the :class:`~matplotlib.path.Path` object associated
        with this line, recaching first if the data is stale.
        """
        if self._invalidy or self._invalidx:
            self.recache()
        return self._path

    def get_xydata(self):
        """
        Return the *xy* data as a Nx2 numpy array, recaching first if
        the data is stale.
        """
        if self._invalidy or self._invalidx:
            self.recache()
        return self._xy
    def set_antialiased(self, b):
        """
        True if line should be drawn with antialiased rendering

        ACCEPTS: [True | False]
        """
        self._antialiased = b

    def set_color(self, color):
        """
        Set the color of the line

        ACCEPTS: any matplotlib color
        """
        self._color = color

    def set_drawstyle(self, drawstyle):
        """
        Set the drawstyle of the plot

        'default' connects the points with lines. The steps variants
        produce step-plots. 'steps' is equivalent to 'steps-pre' and
        is maintained for backward-compatibility.

        ACCEPTS: ['default' | 'steps' | 'steps-pre' | 'steps-mid' |
                  'steps-post']
        """
        self._drawstyle = drawstyle

    def set_linewidth(self, w):
        """
        Set the line width in points

        ACCEPTS: float value in points
        """
        self._linewidth = w
def set_linestyle(self, linestyle):
"""
Set the linestyle of the line (also accepts drawstyles)
================ =================
linestyle description
================ =================
``'-'`` solid
``'--'`` dashed
``'-.'`` dash_dot
``':'`` dotted
``'None'`` draw nothing
``' '`` draw nothing
``''`` draw nothing
================ =================
'steps' is equivalent to 'steps-pre' and is maintained for
backward-compatibility.
.. seealso::
:meth:`set_drawstyle`
To set the drawing style (stepping) of the plot.
ACCEPTS: [``'-'`` | ``'--'`` | ``'-.'`` | ``':'`` | ``'None'`` |
``' '`` | ``''``]
and any drawstyle in combination with a linestyle, e.g., ``'steps--'``.
"""
for ds in self.drawStyleKeys: # long names are first in the list
if linestyle.startswith(ds):
self.set_drawstyle(ds)
if len(linestyle) > len(ds):
linestyle = linestyle[len(ds):]
else:
linestyle = '-'
break
if linestyle not in self._lineStyles:
if linestyle in ls_mapper:
linestyle = ls_mapper[linestyle]
else:
verbose.report('Unrecognized line style %s, %s' %
(linestyle, type(linestyle)))
if linestyle in [' ', '']:
linestyle = 'None'
self._linestyle = linestyle
    @docstring.dedent_interpd
    def set_marker(self, marker):
        """
        Set the line marker.

        Parameters
        ----------
        marker : marker style
            See `~matplotlib.markers` for a full description of the
            possible arguments.
        """
        self._marker.set_marker(marker)
    def set_markeredgecolor(self, ec):
        """
        Set the marker edge color

        ACCEPTS: any matplotlib color
        """
        # None means 'auto', resolved lazily by get_markeredgecolor().
        if ec is None:
            ec = 'auto'
        self._markeredgecolor = ec

    def set_markeredgewidth(self, ew):
        """
        Set the marker edge width in points

        ACCEPTS: float value in points
        """
        # None falls back to the rc default.
        if ew is None:
            ew = rcParams['lines.markeredgewidth']
        self._markeredgewidth = ew

    def set_markerfacecolor(self, fc):
        """
        Set the marker face color.

        ACCEPTS: any matplotlib color
        """
        # None means 'auto', resolved lazily by _get_markerfacecolor().
        if fc is None:
            fc = 'auto'
        self._markerfacecolor = fc

    def set_markerfacecoloralt(self, fc):
        """
        Set the alternate marker face color.

        ACCEPTS: any matplotlib color
        """
        if fc is None:
            fc = 'auto'
        self._markerfacecoloralt = fc

    def set_markersize(self, sz):
        """
        Set the marker size in points

        ACCEPTS: float
        """
        self._markersize = sz

    def set_xdata(self, x):
        """
        Set the data np.array for x

        ACCEPTS: 1D array
        """
        # Store the original; unit conversion happens lazily in recache().
        self._xorig = x
        self._invalidx = True

    def set_ydata(self, y):
        """
        Set the data np.array for y

        ACCEPTS: 1D array
        """
        self._yorig = y
        self._invalidy = True
def set_dashes(self, seq):
"""
Set the dash sequence, sequence of dashes with on off ink in
points. If seq is empty or if seq = (None, None), the
linestyle will be set to solid.
ACCEPTS: sequence of on/off ink in points
"""
if seq == (None, None) or len(seq) == 0:
self.set_linestyle('-')
else:
self.set_linestyle('--')
self._dashSeq = seq # TODO: offset ignored for now
    def _draw_lines(self, renderer, gc, path, trans):
        """Draw *path* using the currently selected line-style function."""
        self._lineFunc(renderer, gc, path, trans)

    def _draw_steps_pre(self, renderer, gc, path, trans):
        """
        Draw a step plot where the y value changes before the x position:
        an intermediate vertex with the previous x and the next y is
        inserted between each pair of data points.
        """
        vertices = self._xy
        steps = ma.zeros((2 * len(vertices) - 1, 2), np.float_)

        steps[0::2, 0], steps[1::2, 0] = vertices[:, 0], vertices[:-1, 0]
        steps[0::2, 1], steps[1:-1:2, 1] = vertices[:, 1], vertices[1:, 1]

        path = Path(steps)
        # Step vertices were built in data space, so transform here and
        # hand an identity transform to the line function.
        path = path.transformed(self.get_transform())
        self._lineFunc(renderer, gc, path, IdentityTransform())

    def _draw_steps_post(self, renderer, gc, path, trans):
        """
        Draw a step plot where the y value changes after the x position:
        the intermediate vertex keeps the previous y at the next x.
        """
        vertices = self._xy
        steps = ma.zeros((2 * len(vertices) - 1, 2), np.float_)

        steps[::2, 0], steps[1:-1:2, 0] = vertices[:, 0], vertices[1:, 0]
        steps[0::2, 1], steps[1::2, 1] = vertices[:, 1], vertices[:-1, 1]

        path = Path(steps)
        path = path.transformed(self.get_transform())
        self._lineFunc(renderer, gc, path, IdentityTransform())

    def _draw_steps_mid(self, renderer, gc, path, trans):
        """
        Draw a step plot where the y value changes midway between
        successive x positions.
        """
        vertices = self._xy
        steps = ma.zeros((2 * len(vertices), 2), np.float_)

        steps[1:-1:2, 0] = 0.5 * (vertices[:-1, 0] + vertices[1:, 0])
        steps[2::2, 0] = 0.5 * (vertices[:-1, 0] + vertices[1:, 0])
        steps[0, 0] = vertices[0, 0]
        steps[-1, 0] = vertices[-1, 0]
        steps[0::2, 1], steps[1::2, 1] = vertices[:, 1], vertices[:, 1]

        path = Path(steps)
        path = path.transformed(self.get_transform())
        self._lineFunc(renderer, gc, path, IdentityTransform())
    def _draw_solid(self, renderer, gc, path, trans):
        """Render *path* with a solid line."""
        gc.set_linestyle('solid')
        renderer.draw_path(gc, path, trans)

    def _draw_dashed(self, renderer, gc, path, trans):
        """
        Render *path* with a dashed line, honoring any custom dash
        sequence set via set_dashes().
        """
        gc.set_linestyle('dashed')
        if self._dashSeq is not None:
            gc.set_dashes(0, self._dashSeq)
        renderer.draw_path(gc, path, trans)

    def _draw_dash_dot(self, renderer, gc, path, trans):
        """Render *path* with a dash-dot line."""
        gc.set_linestyle('dashdot')
        renderer.draw_path(gc, path, trans)

    def _draw_dotted(self, renderer, gc, path, trans):
        """Render *path* with a dotted line."""
        gc.set_linestyle('dotted')
        renderer.draw_path(gc, path, trans)
def update_from(self, other):
"""copy properties from other to self"""
Artist.update_from(self, other)
self._linestyle = other._linestyle
self._linewidth = other._linewidth
self._color = other._color
self._markersize = other._markersize
self._markerfacecolor = other._markerfacecolor
self._markerfacecoloralt = other._markerfacecoloralt
self._markeredgecolor = other._markeredgecolor
self._markeredgewidth = other._markeredgewidth
self._dashSeq = other._dashSeq
self._dashcapstyle = other._dashcapstyle
self._dashjoinstyle = other._dashjoinstyle
self._solidcapstyle = other._solidcapstyle
self._solidjoinstyle = other._solidjoinstyle
self._linestyle = other._linestyle
self._marker = MarkerStyle(other._marker.get_marker(),
other._marker.get_fillstyle())
self._drawstyle = other._drawstyle
    def _get_rgb_face(self, alt=False):
        """
        Return the marker face color as an RGB tuple, or None if the
        face color resolves to 'none'.
        """
        facecolor = self._get_markerfacecolor(alt=alt)
        if is_string_like(facecolor) and facecolor.lower() == 'none':
            rgbFace = None
        else:
            rgbFace = colorConverter.to_rgb(facecolor)
        return rgbFace

    def _get_rgba_face(self, alt=False):
        """
        Return the marker face color as an RGBA tuple (using the line's
        alpha), or None if the face color resolves to 'none'.
        """
        facecolor = self._get_markerfacecolor(alt=alt)
        if is_string_like(facecolor) and facecolor.lower() == 'none':
            rgbaFace = None
        else:
            rgbaFace = colorConverter.to_rgba(facecolor, self._alpha)
        return rgbaFace

    def _get_rgba_ln_color(self, alt=False):
        """Return the line color as an RGBA tuple using the line's alpha."""
        # NOTE(review): *alt* is accepted for signature symmetry with the
        # face-color helpers above but is currently unused.
        return colorConverter.to_rgba(self._color, self._alpha)

    # some aliases....
    def set_aa(self, val):
        """alias for set_antialiased"""
        self.set_antialiased(val)

    def set_c(self, val):
        """alias for set_color"""
        self.set_color(val)

    def set_ls(self, val):
        """alias for set_linestyle"""
        self.set_linestyle(val)

    def set_lw(self, val):
        """alias for set_linewidth"""
        self.set_linewidth(val)

    def set_mec(self, val):
        """alias for set_markeredgecolor"""
        self.set_markeredgecolor(val)

    def set_mew(self, val):
        """alias for set_markeredgewidth"""
        self.set_markeredgewidth(val)

    def set_mfc(self, val):
        """alias for set_markerfacecolor"""
        self.set_markerfacecolor(val)

    def set_mfcalt(self, val):
        """alias for set_markerfacecoloralt"""
        self.set_markerfacecoloralt(val)

    def set_ms(self, val):
        """alias for set_markersize"""
        self.set_markersize(val)

    def get_aa(self):
        """alias for get_antialiased"""
        return self.get_antialiased()

    def get_c(self):
        """alias for get_color"""
        return self.get_color()

    def get_ls(self):
        """alias for get_linestyle"""
        return self.get_linestyle()

    def get_lw(self):
        """alias for get_linewidth"""
        return self.get_linewidth()

    def get_mec(self):
        """alias for get_markeredgecolor"""
        return self.get_markeredgecolor()

    def get_mew(self):
        """alias for get_markeredgewidth"""
        return self.get_markeredgewidth()

    def get_mfc(self):
        """alias for get_markerfacecolor"""
        return self.get_markerfacecolor()

    def get_mfcalt(self, alt=False):
        """alias for get_markerfacecoloralt"""
        # NOTE(review): the *alt* parameter is unused; the call below
        # always returns the alternate face color.
        return self.get_markerfacecoloralt()

    def get_ms(self):
        """alias for get_markersize"""
        return self.get_markersize()
def set_dash_joinstyle(self, s):
"""
Set the join style for dashed linestyles
ACCEPTS: ['miter' | 'round' | 'bevel']
"""
s = s.lower()
if s not in self.validJoin:
raise ValueError('set_dash_joinstyle passed "%s";\n' % (s,)
+ 'valid joinstyles are %s' % (self.validJoin,))
self._dashjoinstyle = s
def set_solid_joinstyle(self, s):
"""
Set the join style for solid linestyles
ACCEPTS: ['miter' | 'round' | 'bevel']
"""
s = s.lower()
if s not in self.validJoin:
raise ValueError('set_solid_joinstyle passed "%s";\n' % (s,)
+ 'valid joinstyles are %s' % (self.validJoin,))
self._solidjoinstyle = s
def get_dash_joinstyle(self):
"""
Get the join style for dashed linestyles
"""
return self._dashjoinstyle
def get_solid_joinstyle(self):
"""
Get the join style for solid linestyles
"""
return self._solidjoinstyle
def set_dash_capstyle(self, s):
"""
Set the cap style for dashed linestyles
ACCEPTS: ['butt' | 'round' | 'projecting']
"""
s = s.lower()
if s not in self.validCap:
raise ValueError('set_dash_capstyle passed "%s";\n' % (s,)
+ 'valid capstyles are %s' % (self.validCap,))
self._dashcapstyle = s
def set_solid_capstyle(self, s):
"""
Set the cap style for solid linestyles
ACCEPTS: ['butt' | 'round' | 'projecting']
"""
s = s.lower()
if s not in self.validCap:
raise ValueError('set_solid_capstyle passed "%s";\n' % (s,)
+ 'valid capstyles are %s' % (self.validCap,))
self._solidcapstyle = s
def get_dash_capstyle(self):
"""
Get the cap style for dashed linestyles
"""
return self._dashcapstyle
def get_solid_capstyle(self):
"""
Get the cap style for solid linestyles
"""
return self._solidcapstyle
def is_dashed(self):
'return True if line is dashstyle'
return self._linestyle in ('--', '-.', ':')
class VertexSelector:
    """
    Manage the callbacks to maintain a list of selected vertices for
    :class:`matplotlib.lines.Line2D`.  Derived classes should override
    :meth:`~matplotlib.lines.VertexSelector.process_selected` to do
    something with the picks.

    Here is an example which highlights the selected verts with red
    circles::

        import numpy as np
        import matplotlib.pyplot as plt
        import matplotlib.lines as lines

        class HighlightSelected(lines.VertexSelector):
            def __init__(self, line, fmt='ro', **kwargs):
                lines.VertexSelector.__init__(self, line)
                self.markers, = self.axes.plot([], [], fmt, **kwargs)

            def process_selected(self, ind, xs, ys):
                self.markers.set_data(xs, ys)
                self.canvas.draw()

        fig = plt.figure()
        ax = fig.add_subplot(111)
        x, y = np.random.rand(2, 30)
        line, = ax.plot(x, y, 'bs-', picker=5)

        selector = HighlightSelected(line)
        plt.show()
    """
    def __init__(self, line):
        """
        Initialize from a :class:`matplotlib.lines.Line2D` instance.

        The line must already belong to a
        :class:`matplotlib.axes.Axes` and must have its picker property
        set; otherwise a RuntimeError is raised.
        """
        if not hasattr(line, 'axes'):
            raise RuntimeError('You must first add the line to the Axes')

        if line.get_picker() is None:
            raise RuntimeError('You must first set the picker property '
                               'of the line')

        self.line = line
        self.axes = line.axes
        self.canvas = line.axes.figure.canvas
        self.ind = set()
        # Listen for pick events on the line's canvas.
        self.cid = self.canvas.mpl_connect('pick_event', self.onpick)

    def process_selected(self, ind, xs, ys):
        """
        Default "do nothing" implementation of the
        :meth:`process_selected` method.

        *ind* are the indices of the selected vertices.  *xs* and *ys*
        are the coordinates of the selected vertices.
        """
        pass

    def onpick(self, event):
        """When the line is picked, update the set of selected indices."""
        if event.artist is not self.line:
            return
        # Toggle each picked vertex in or out of the selection.
        for idx in event.ind:
            if idx in self.ind:
                self.ind.discard(idx)
            else:
                self.ind.add(idx)

        selected = sorted(self.ind)
        xdata, ydata = self.line.get_data()
        self.process_selected(selected, xdata[selected], ydata[selected])
# Module-level aliases kept for backward compatibility with code that
# imported these names directly from matplotlib.lines.
lineStyles = Line2D._lineStyles
lineMarkers = MarkerStyle.markers
drawStyles = Line2D.drawStyles
fillStyles = MarkerStyle.fillstyles

# Make the Line2D kwarg table available for docstring interpolation.
docstring.interpd.update(Line2D=artist.kwdoc(Line2D))

# You can not set the docstring of an instancemethod,
# but you can on the underlying function. Go figure.
docstring.dedent_interpd(Line2D.__init__)
| 32.474684 | 79 | 0.568018 |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import warnings
import numpy as np
from numpy import ma
from matplotlib import verbose
from . import artist
from .artist import Artist
from .cbook import iterable, is_string_like, is_numlike, ls_mapper
from .colors import colorConverter
from .path import Path
from .transforms import Bbox, TransformedPath, IdentityTransform
from matplotlib import rcParams
from .artist import allow_rasterization
from matplotlib import docstring
from matplotlib.markers import MarkerStyle
# really belong.
from matplotlib.markers import TICKLEFT, TICKRIGHT, TICKUP, TICKDOWN
from matplotlib.markers import CARETLEFT, CARETRIGHT, CARETUP, CARETDOWN
def segment_hits(cx, cy, x, y, radius):
    """
    Determine which vertices and segments of the polyline *x*, *y* lie
    within *radius* of the point (*cx*, *cy*).

    Parameters
    ----------
    cx, cy : float
        The test point, in the same coordinates as *x* and *y*.
    x, y : 1D arrays
        Vertices of the polyline.
    radius : float
        Pick radius.

    Returns
    -------
    1D integer array
        Indices of hit vertices followed by indices of hit segments.
    """
    # Process single points specially
    if len(x) < 2:
        res, = np.nonzero((cx - x) ** 2 + (cy - y) ** 2 <= radius ** 2)
        return res

    # Zero-length segments (repeated points) produce 0/0 -> nan in the
    # projection below, which correctly drops out of `candidates`;
    # silence the warnings so this function is also safe to call
    # outside of the caller's np.seterr(all='ignore') guard.
    with np.errstate(divide='ignore', invalid='ignore'):
        # We need to lop the last element off a lot.
        xr, yr = x[:-1], y[:-1]

        # Only look at line segments whose nearest point to C on the line
        # lies within the segment.
        dx, dy = x[1:] - xr, y[1:] - yr
        Lnorm_sq = dx ** 2 + dy ** 2
        u = ((cx - xr) * dx + (cy - yr) * dy) / Lnorm_sq
        candidates = (u >= 0) & (u <= 1)

        # Note that there is a little area near one side of each point
        # which will be near neither segment, and another which will
        # be near both, depending on the angle of the lines.  The
        # following radius test eliminates these ambiguities.
        point_hits = (cx - x) ** 2 + (cy - y) ** 2 <= radius ** 2
        candidates = candidates & ~(point_hits[:-1] | point_hits[1:])

        # For those candidates which remain, determine how far they lie
        # away from the line.
        px, py = xr + u * dx, yr + u * dy
        line_hits = (cx - px) ** 2 + (cy - py) ** 2 <= radius ** 2
        line_hits = line_hits & candidates

    points, = point_hits.ravel().nonzero()
    lines, = line_hits.ravel().nonzero()
    return np.concatenate((points, lines))
class Line2D(Artist):
    """
    A line whose appearance is controlled by a linestyle (with optional
    custom dash sequence), a drawstyle ('default' or 'steps' variants),
    and a marker drawn at each vertex.
    """
    # Maps linestyle strings to the bound-method name that renders them.
    lineStyles = _lineStyles = {  # hidden names deprecated
        '-': '_draw_solid',
        '--': '_draw_dashed',
        '-.': '_draw_dash_dot',
        ':': '_draw_dotted',
        'None': '_draw_nothing',
        ' ': '_draw_nothing',
        '': '_draw_nothing',
    }
    # Long- and short-form drawstyle names, merged into drawStyles below.
    _drawStyles_l = {
        'default': '_draw_lines',
        'steps-mid': '_draw_steps_mid',
        'steps-pre': '_draw_steps_pre',
        'steps-post': '_draw_steps_post',
    }
    _drawStyles_s = {
        'steps': '_draw_steps_pre',
    }
    drawStyles = {}
    drawStyles.update(_drawStyles_l)
    drawStyles.update(_drawStyles_s)
    # Need a list ordered with long names first:
    drawStyleKeys = (list(six.iterkeys(_drawStyles_l)) +
                     list(six.iterkeys(_drawStyles_s)))
    # Referenced here to maintain API. These are defined in
    # MarkerStyle
    markers = MarkerStyle.markers
    filled_markers = MarkerStyle.filled_markers
    fillStyles = MarkerStyle.fillstyles
    zorder = 2
    validCap = ('butt', 'round', 'projecting')
    validJoin = ('miter', 'round', 'bevel')
def __str__(self):
if self._label != "":
return "Line2D(%s)" % (self._label)
elif hasattr(self, '_x') and len(self._x) > 3:
return "Line2D((%g,%g),(%g,%g),...,(%g,%g))"\
% (self._x[0], self._y[0], self._x[0],
self._y[0], self._x[-1], self._y[-1])
elif hasattr(self, '_x'):
return "Line2D(%s)"\
% (",".join(["(%g,%g)" % (x, y) for x, y
in zip(self._x, self._y)]))
else:
return "Line2D()"
    def __init__(self, xdata, ydata,
                 linewidth=None,  # all Nones default to rc
                 linestyle=None,
                 color=None,
                 marker=None,
                 markersize=None,
                 markeredgewidth=None,
                 markeredgecolor=None,
                 markerfacecolor=None,
                 markerfacecoloralt='none',
                 fillstyle='full',
                 antialiased=None,
                 dash_capstyle=None,
                 solid_capstyle=None,
                 dash_joinstyle=None,
                 solid_joinstyle=None,
                 pickradius=5,
                 drawstyle=None,
                 markevery=None,
                 **kwargs
                 ):
        """
        Create a Line2D instance with *x* and *y* data in the sequences
        *xdata*, *ydata*.

        Style arguments left as None fall back to the corresponding rc
        parameter.  Additional keyword arguments are Line2D properties
        and are applied via update() before the data is set.
        """
        Artist.__init__(self)
        #convert sequences to numpy arrays
        if not iterable(xdata):
            raise RuntimeError('xdata must be a sequence')
        if not iterable(ydata):
            raise RuntimeError('ydata must be a sequence')
        # Resolve rc defaults for any style argument left as None.
        if linewidth is None:
            linewidth = rcParams['lines.linewidth']
        if linestyle is None:
            linestyle = rcParams['lines.linestyle']
        if marker is None:
            marker = rcParams['lines.marker']
        if color is None:
            color = rcParams['lines.color']
        if markersize is None:
            markersize = rcParams['lines.markersize']
        if antialiased is None:
            antialiased = rcParams['lines.antialiased']
        if dash_capstyle is None:
            dash_capstyle = rcParams['lines.dash_capstyle']
        if dash_joinstyle is None:
            dash_joinstyle = rcParams['lines.dash_joinstyle']
        if solid_capstyle is None:
            solid_capstyle = rcParams['lines.solid_capstyle']
        if solid_joinstyle is None:
            solid_joinstyle = rcParams['lines.solid_joinstyle']
        if drawstyle is None:
            drawstyle = 'default'
        self.set_dash_capstyle(dash_capstyle)
        self.set_dash_joinstyle(dash_joinstyle)
        self.set_solid_capstyle(solid_capstyle)
        self.set_solid_joinstyle(solid_joinstyle)
        self.set_linestyle(linestyle)
        self.set_drawstyle(drawstyle)
        self.set_linewidth(linewidth)
        self.set_color(color)
        # The MarkerStyle must exist before set_marker/set_fillstyle.
        self._marker = MarkerStyle()
        self.set_marker(marker)
        self.set_markevery(markevery)
        self.set_antialiased(antialiased)
        self.set_markersize(markersize)
        self._dashSeq = None
        self.set_markerfacecolor(markerfacecolor)
        self.set_markerfacecoloralt(markerfacecoloralt)
        self.set_markeredgecolor(markeredgecolor)
        self.set_markeredgewidth(markeredgewidth)
        self.set_fillstyle(fillstyle)
        self.verticalOffset = None
        # update kwargs before updating data to give the caller a
        # chance to init axes (and hence unit support)
        self.update(kwargs)
        self.pickradius = pickradius
        self.ind_offset = 0
        # A numeric picker doubles as the pick radius.
        if is_numlike(self._picker):
            self.pickradius = self._picker
        self._xorig = np.asarray([])
        self._yorig = np.asarray([])
        self._invalidx = True
        self._invalidy = True
        self.set_data(xdata, ydata)
    def contains(self, mouseevent):
        """
        Test whether the mouse event occurred on the line.

        A custom callable picker, if set, takes precedence.  Otherwise
        the test is within ``pickradius`` (converted from points to
        pixels) of the transformed vertices (no linestyle) or segments.

        Returns ``(bool, {'ind': indices})`` where *indices* are the
        hit vertex/segment indices.
        """
        if six.callable(self._contains):
            return self._contains(self, mouseevent)
        if not is_numlike(self.pickradius):
            raise ValueError("pick radius should be a distance")
        # Make sure we have data to plot
        if self._invalidy or self._invalidx:
            self.recache()
        if len(self._xy) == 0:
            return False, {}
        # Convert points to pixels
        transformed_path = self._get_transformed_path()
        path, affine = transformed_path.get_transformed_path_and_affine()
        path = affine.transform_path(path)
        xy = path.vertices
        xt = xy[:, 0]
        yt = xy[:, 1]
        # Convert pick radius from points to pixels
        if self.figure is None:
            warnings.warn('no figure set when check if mouse is on line')
            pixels = self.pickradius
        else:
            pixels = self.figure.dpi / 72. * self.pickradius
        # the math involved in checking for containment (here and inside of
        # segment_hits) assumes that it is OK to overflow.  In case the
        # application has set the error flags such that an exception is
        # raised on overflow, we temporarily set the appropriate error
        # flags here and set them back when we are finished.
        olderrflags = np.seterr(all='ignore')
        try:
            # Check for collision
            if self._linestyle in ['None', None]:
                # If no line, return the nearby point(s)
                d = (xt - mouseevent.x) ** 2 + (yt - mouseevent.y) ** 2
                ind, = np.nonzero(np.less_equal(d, pixels ** 2))
            else:
                # If line, return the nearby segment(s)
                ind = segment_hits(mouseevent.x, mouseevent.y, xt, yt, pixels)
        finally:
            np.seterr(**olderrflags)
        # Account for the subslicing offset applied during draw().
        ind += self.ind_offset
        # Debugging message
        if False and self._label != '':
            print("Checking line", self._label,
                  "at", mouseevent.x, mouseevent.y)
            print('xt', xt)
            print('yt', yt)
            #print 'dx,dy', (xt-mouseevent.x)**2., (yt-mouseevent.y)**2.
            print('ind', ind)
        # Return the point(s) within radius
        return len(ind) > 0, dict(ind=ind)
    def get_pickradius(self):
        """Return the pick radius used for containment tests, in points."""
        return self.pickradius
    def set_pickradius(self, d):
        """Set the pick radius used for containment tests, in points."""
        self.pickradius = d
    def get_fillstyle(self):
        """Return the marker fill style (delegates to the MarkerStyle)."""
        return self._marker.get_fillstyle()
    def set_fillstyle(self, fs):
        """Set the marker fill style; one of MarkerStyle.fillstyles."""
        self._marker.set_fillstyle(fs)
    def set_markevery(self, every):
        """
        Set marker subsampling: None, an int stride, or a
        (startind, stride) pair (see draw()).
        """
        self._markevery = every
    def get_markevery(self):
        """Return the markevery setting."""
        return self._markevery
    def set_picker(self, p):
        """
        Set the picker: a callable ``fn(artist, event)`` used as the
        containment test, or a numeric pick radius in points.
        """
        if six.callable(p):
            self._contains = p
        else:
            self.pickradius = p
        self._picker = p
def get_window_extent(self, renderer):
bbox = Bbox([[0, 0], [0, 0]])
trans_data_to_xy = self.get_transform().transform
bbox.update_from_data_xy(trans_data_to_xy(self.get_xydata()),
ignore=True)
# correct for marker size, if any
if self._marker:
ms = (self._markersize / 72.0 * self.figure.dpi) * 0.5
bbox = bbox.padded(ms)
return bbox
    # Docstring is copied from Artist.set_axes below (cannot use a normal
    # docstring here, as the assignment after the def would overwrite it).
    def set_axes(self, ax):
        Artist.set_axes(self, ax)
        # Re-derive cached data whenever either axis changes units.
        if ax.xaxis is not None:
            self._xcid = ax.xaxis.callbacks.connect('units',
                                                    self.recache_always)
        if ax.yaxis is not None:
            self._ycid = ax.yaxis.callbacks.connect('units',
                                                    self.recache_always)
    set_axes.__doc__ = Artist.set_axes.__doc__
def set_data(self, *args):
if len(args) == 1:
x, y = args[0]
else:
x, y = args
self.set_xdata(x)
self.set_ydata(y)
    def recache_always(self):
        # Unit-change callback: force a recache even if the invalidation
        # flags are clean (the converted values may have changed).
        self.recache(always=True)
    def recache(self, always=False):
        """
        Rebuild the internal ``_x``/``_y``/``_xy`` arrays and the cached
        Path from the original data, applying unit conversion.

        Only the invalidated coordinate is re-converted unless *always*
        is True.
        """
        if always or self._invalidx:
            xconv = self.convert_xunits(self._xorig)
            # Preserve maskedness of the original data.
            if ma.isMaskedArray(self._xorig):
                x = ma.asarray(xconv, np.float_)
            else:
                x = np.asarray(xconv, np.float_)
            x = x.ravel()
        else:
            x = self._x
        if always or self._invalidy:
            yconv = self.convert_yunits(self._yorig)
            if ma.isMaskedArray(self._yorig):
                y = ma.asarray(yconv, np.float_)
            else:
                y = np.asarray(yconv, np.float_)
            y = y.ravel()
        else:
            y = self._y
        # Broadcast a length-1 x or y against the other coordinate.
        if len(x) == 1 and len(y) > 1:
            x = x * np.ones(y.shape, np.float_)
        if len(y) == 1 and len(x) > 1:
            y = y * np.ones(x.shape, np.float_)
        if len(x) != len(y):
            raise RuntimeError('xdata and ydata must be the same length')
        x = x.reshape((len(x), 1))
        y = y.reshape((len(y), 1))
        if ma.isMaskedArray(x) or ma.isMaskedArray(y):
            self._xy = ma.concatenate((x, y), 1)
        else:
            self._xy = np.concatenate((x, y), 1)
        self._x = self._xy[:, 0]  # just a view
        self._y = self._xy[:, 1]  # just a view
        self._subslice = False
        # Enable x-range subslicing in draw() only for long, x-sorted
        # data on a linear rectilinear axes with clipping on.
        if (self.axes and len(x) > 100 and self._is_sorted(x) and
            self.axes.name == 'rectilinear' and
            self.axes.get_xscale() == 'linear' and
            self._markevery is None and
            self.get_clip_on() is True):
            self._subslice = True
        # Preserve the interpolation steps of any pre-existing path.
        if hasattr(self, '_path'):
            interpolation_steps = self._path._interpolation_steps
        else:
            interpolation_steps = 1
        self._path = Path(self._xy, None, interpolation_steps)
        self._transformed_path = None
        self._invalidx = False
        self._invalidy = False
def _transform_path(self, subslice=None):
# Masked arrays are now handled by the Path class itself
if subslice is not None:
_path = Path(self._xy[subslice, :])
else:
_path = self._path
self._transformed_path = TransformedPath(_path, self.get_transform())
def _get_transformed_path(self):
if self._transformed_path is None:
self._transform_path()
return self._transformed_path
def set_transform(self, t):
Artist.set_transform(self, t)
self._invalidx = True
self._invalidy = True
def _is_sorted(self, x):
if len(x) < 2:
return 1
return np.amin(x[1:] - x[0:-1]) >= 0
@allow_rasterization
def draw(self, renderer):
if not self.get_visible():
return
if self._invalidy or self._invalidx:
self.recache()
self.ind_offset = 0 # Needed for contains() method.
if self._subslice and self.axes:
# Need to handle monotonically decreasing case also...
x0, x1 = self.axes.get_xbound()
i0, = self._x.searchsorted([x0], 'left')
i1, = self._x.searchsorted([x1], 'right')
subslice = slice(max(i0 - 1, 0), i1 + 1)
self.ind_offset = subslice.start
self._transform_path(subslice)
transf_path = self._get_transformed_path()
if self.get_path_effects():
from matplotlib.patheffects import PathEffectRenderer
renderer = PathEffectRenderer(self.get_path_effects(), renderer)
renderer.open_group('line2d', self.get_gid())
gc = renderer.new_gc()
self._set_gc_clip(gc)
ln_color_rgba = self._get_rgba_ln_color()
gc.set_foreground(ln_color_rgba, isRGBA=True)
gc.set_alpha(ln_color_rgba[3])
gc.set_antialiased(self._antialiased)
gc.set_linewidth(self._linewidth)
if self.is_dashed():
cap = self._dashcapstyle
join = self._dashjoinstyle
else:
cap = self._solidcapstyle
join = self._solidjoinstyle
gc.set_joinstyle(join)
gc.set_capstyle(cap)
gc.set_snap(self.get_snap())
if self.get_sketch_params() is not None:
gc.set_sketch_params(*self.get_sketch_params())
funcname = self._lineStyles.get(self._linestyle, '_draw_nothing')
if funcname != '_draw_nothing':
tpath, affine = transf_path.get_transformed_path_and_affine()
if len(tpath.vertices):
self._lineFunc = getattr(self, funcname)
funcname = self.drawStyles.get(self._drawstyle, '_draw_lines')
drawFunc = getattr(self, funcname)
drawFunc(renderer, gc, tpath, affine.frozen())
if self._marker:
gc = renderer.new_gc()
self._set_gc_clip(gc)
rgbaFace = self._get_rgba_face()
rgbaFaceAlt = self._get_rgba_face(alt=True)
edgecolor = self.get_markeredgecolor()
if is_string_like(edgecolor) and edgecolor.lower() == 'none':
gc.set_linewidth(0)
gc.set_foreground(rgbaFace, isRGBA=True)
else:
gc.set_foreground(edgecolor)
gc.set_linewidth(self._markeredgewidth)
marker = self._marker
tpath, affine = transf_path.get_transformed_points_and_affine()
if len(tpath.vertices):
# subsample the markers if markevery is not None
markevery = self.get_markevery()
if markevery is not None:
if iterable(markevery):
startind, stride = markevery
else:
startind, stride = 0, markevery
if tpath.codes is not None:
codes = tpath.codes[startind::stride]
else:
codes = None
vertices = tpath.vertices[startind::stride]
subsampled = Path(vertices, codes)
else:
subsampled = tpath
snap = marker.get_snap_threshold()
if type(snap) == float:
snap = renderer.points_to_pixels(self._markersize) >= snap
gc.set_snap(snap)
gc.set_joinstyle(marker.get_joinstyle())
gc.set_capstyle(marker.get_capstyle())
marker_path = marker.get_path()
marker_trans = marker.get_transform()
w = renderer.points_to_pixels(self._markersize)
if marker.get_marker() != ',':
# Don't scale for pixels, and don't stroke them
marker_trans = marker_trans.scale(w)
else:
gc.set_linewidth(0)
if rgbaFace is not None:
gc.set_alpha(rgbaFace[3])
renderer.draw_markers(gc, marker_path, marker_trans,
subsampled, affine.frozen(),
rgbaFace)
alt_marker_path = marker.get_alt_path()
if alt_marker_path:
if rgbaFaceAlt is not None:
gc.set_alpha(rgbaFaceAlt[3])
alt_marker_trans = marker.get_alt_transform()
alt_marker_trans = alt_marker_trans.scale(w)
renderer.draw_markers(
gc, alt_marker_path, alt_marker_trans, subsampled,
affine.frozen(), rgbaFaceAlt)
gc.restore()
gc.restore()
renderer.close_group('line2d')
def get_antialiased(self):
return self._antialiased
def get_color(self):
return self._color
def get_drawstyle(self):
return self._drawstyle
def get_linestyle(self):
return self._linestyle
def get_linewidth(self):
return self._linewidth
def get_marker(self):
return self._marker.get_marker()
def get_markeredgecolor(self):
mec = self._markeredgecolor
if (is_string_like(mec) and mec == 'auto'):
if self._marker.get_marker() in ('.', ','):
return self._color
if self._marker.is_filled() and self.get_fillstyle() != 'none':
return 'k' # Bad hard-wired default...
else:
return self._color
else:
return mec
def get_markeredgewidth(self):
return self._markeredgewidth
def _get_markerfacecolor(self, alt=False):
if alt:
fc = self._markerfacecoloralt
else:
fc = self._markerfacecolor
if (is_string_like(fc) and fc.lower() == 'auto'):
if self.get_fillstyle() == 'none':
return 'none'
else:
return self._color
else:
return fc
def get_markerfacecolor(self):
return self._get_markerfacecolor(alt=False)
def get_markerfacecoloralt(self):
return self._get_markerfacecolor(alt=True)
def get_markersize(self):
return self._markersize
def get_data(self, orig=True):
return self.get_xdata(orig=orig), self.get_ydata(orig=orig)
def get_xdata(self, orig=True):
if orig:
return self._xorig
if self._invalidx:
self.recache()
return self._x
def get_ydata(self, orig=True):
if orig:
return self._yorig
if self._invalidy:
self.recache()
return self._y
def get_path(self):
if self._invalidy or self._invalidx:
self.recache()
return self._path
def get_xydata(self):
if self._invalidy or self._invalidx:
self.recache()
return self._xy
def set_antialiased(self, b):
self._antialiased = b
def set_color(self, color):
self._color = color
def set_drawstyle(self, drawstyle):
self._drawstyle = drawstyle
def set_linewidth(self, w):
self._linewidth = w
    def set_linestyle(self, linestyle):
        """Set the line style.

        If *linestyle* starts with one of the draw-style prefixes in
        ``self.drawStyleKeys``, that prefix is stripped and applied via
        `set_drawstyle`; the remainder (or ``'-'`` if nothing remains) is
        treated as the actual line style.
        """
        for ds in self.drawStyleKeys:  # long names are first in the list
            if linestyle.startswith(ds):
                self.set_drawstyle(ds)
                if len(linestyle) > len(ds):
                    # Whatever follows the draw-style prefix is the real
                    # line style string.
                    linestyle = linestyle[len(ds):]
                else:
                    linestyle = '-'
                break
        if linestyle not in self._lineStyles:
            if linestyle in ls_mapper:
                # ls_mapper presumably maps alias names to the canonical
                # short forms used as _lineStyles keys -- TODO confirm.
                linestyle = ls_mapper[linestyle]
            else:
                verbose.report('Unrecognized line style %s, %s' %
                               (linestyle, type(linestyle)))
        if linestyle in [' ', '']:
            # Blank styles are normalised to the explicit 'None' marker.
            linestyle = 'None'
        self._linestyle = linestyle
@docstring.dedent_interpd
def set_marker(self, marker):
self._marker.set_marker(marker)
def set_markeredgecolor(self, ec):
if ec is None:
ec = 'auto'
self._markeredgecolor = ec
def set_markeredgewidth(self, ew):
if ew is None:
ew = rcParams['lines.markeredgewidth']
self._markeredgewidth = ew
def set_markerfacecolor(self, fc):
if fc is None:
fc = 'auto'
self._markerfacecolor = fc
def set_markerfacecoloralt(self, fc):
if fc is None:
fc = 'auto'
self._markerfacecoloralt = fc
def set_markersize(self, sz):
self._markersize = sz
def set_xdata(self, x):
self._xorig = x
self._invalidx = True
def set_ydata(self, y):
self._yorig = y
self._invalidy = True
def set_dashes(self, seq):
if seq == (None, None) or len(seq) == 0:
self.set_linestyle('-')
else:
self.set_linestyle('--')
self._dashSeq = seq # TODO: offset ignored for now
def _draw_lines(self, renderer, gc, path, trans):
self._lineFunc(renderer, gc, path, trans)
    def _draw_steps_pre(self, renderer, gc, path, trans):
        """Draw the line in the 'steps-pre' drawstyle by inserting a corner
        vertex between every pair of data points.
        """
        vertices = self._xy
        # 2*N-1 vertices: the N data points interleaved with N-1 corners.
        steps = ma.zeros((2 * len(vertices) - 1, 2), np.float_)
        # Corner points take the x of the previous point and the y of the
        # next point.
        steps[0::2, 0], steps[1::2, 0] = vertices[:, 0], vertices[:-1, 0]
        steps[0::2, 1], steps[1:-1:2, 1] = vertices[:, 1], vertices[1:, 1]
        path = Path(steps)
        # Transform to display coordinates here, so the renderer is handed
        # an identity transform.
        path = path.transformed(self.get_transform())
        self._lineFunc(renderer, gc, path, IdentityTransform())
    def _draw_steps_post(self, renderer, gc, path, trans):
        """Draw the line in the 'steps-post' drawstyle by inserting a corner
        vertex between every pair of data points.
        """
        vertices = self._xy
        # 2*N-1 vertices: the N data points interleaved with N-1 corners.
        steps = ma.zeros((2 * len(vertices) - 1, 2), np.float_)
        # Corner points take the x of the next point and the y of the
        # previous point (the mirror of the 'steps-pre' construction).
        steps[::2, 0], steps[1:-1:2, 0] = vertices[:, 0], vertices[1:, 0]
        steps[0::2, 1], steps[1::2, 1] = vertices[:, 1], vertices[:-1, 1]
        path = Path(steps)
        # Transform to display coordinates here, so the renderer is handed
        # an identity transform.
        path = path.transformed(self.get_transform())
        self._lineFunc(renderer, gc, path, IdentityTransform())
    def _draw_steps_mid(self, renderer, gc, path, trans):
        """Draw the line in the 'steps-mid' drawstyle: each step changes
        halfway between neighbouring x values.
        """
        vertices = self._xy
        # 2*N vertices: every data point is flanked by step corners placed
        # at the x midpoints between neighbours.
        steps = ma.zeros((2 * len(vertices), 2), np.float_)
        steps[1:-1:2, 0] = 0.5 * (vertices[:-1, 0] + vertices[1:, 0])
        steps[2::2, 0] = 0.5 * (vertices[:-1, 0] + vertices[1:, 0])
        # End points keep the original first/last x so the line spans the
        # full data range.
        steps[0, 0] = vertices[0, 0]
        steps[-1, 0] = vertices[-1, 0]
        # Each y value is simply duplicated for its pair of step vertices.
        steps[0::2, 1], steps[1::2, 1] = vertices[:, 1], vertices[:, 1]
        path = Path(steps)
        # Transform to display coordinates here, so the renderer is handed
        # an identity transform.
        path = path.transformed(self.get_transform())
        self._lineFunc(renderer, gc, path, IdentityTransform())
def _draw_solid(self, renderer, gc, path, trans):
gc.set_linestyle('solid')
renderer.draw_path(gc, path, trans)
    def _draw_dashed(self, renderer, gc, path, trans):
        """Stroke *path* with the dashed ('--') line style."""
        gc.set_linestyle('dashed')
        if self._dashSeq is not None:
            # A custom on/off sequence was supplied via set_dashes();
            # the dash offset is fixed at 0.
            gc.set_dashes(0, self._dashSeq)
        renderer.draw_path(gc, path, trans)
def _draw_dash_dot(self, renderer, gc, path, trans):
gc.set_linestyle('dashdot')
renderer.draw_path(gc, path, trans)
def _draw_dotted(self, renderer, gc, path, trans):
gc.set_linestyle('dotted')
renderer.draw_path(gc, path, trans)
def update_from(self, other):
Artist.update_from(self, other)
self._linestyle = other._linestyle
self._linewidth = other._linewidth
self._color = other._color
self._markersize = other._markersize
self._markerfacecolor = other._markerfacecolor
self._markerfacecoloralt = other._markerfacecoloralt
self._markeredgecolor = other._markeredgecolor
self._markeredgewidth = other._markeredgewidth
self._dashSeq = other._dashSeq
self._dashcapstyle = other._dashcapstyle
self._dashjoinstyle = other._dashjoinstyle
self._solidcapstyle = other._solidcapstyle
self._solidjoinstyle = other._solidjoinstyle
self._linestyle = other._linestyle
self._marker = MarkerStyle(other._marker.get_marker(),
other._marker.get_fillstyle())
self._drawstyle = other._drawstyle
def _get_rgb_face(self, alt=False):
facecolor = self._get_markerfacecolor(alt=alt)
if is_string_like(facecolor) and facecolor.lower() == 'none':
rgbFace = None
else:
rgbFace = colorConverter.to_rgb(facecolor)
return rgbFace
def _get_rgba_face(self, alt=False):
facecolor = self._get_markerfacecolor(alt=alt)
if is_string_like(facecolor) and facecolor.lower() == 'none':
rgbaFace = None
else:
rgbaFace = colorConverter.to_rgba(facecolor, self._alpha)
return rgbaFace
def _get_rgba_ln_color(self, alt=False):
return colorConverter.to_rgba(self._color, self._alpha)
# some aliases....
def set_aa(self, val):
self.set_antialiased(val)
def set_c(self, val):
self.set_color(val)
def set_ls(self, val):
self.set_linestyle(val)
def set_lw(self, val):
self.set_linewidth(val)
def set_mec(self, val):
self.set_markeredgecolor(val)
def set_mew(self, val):
self.set_markeredgewidth(val)
def set_mfc(self, val):
self.set_markerfacecolor(val)
def set_mfcalt(self, val):
self.set_markerfacecoloralt(val)
def set_ms(self, val):
self.set_markersize(val)
def get_aa(self):
return self.get_antialiased()
def get_c(self):
return self.get_color()
def get_ls(self):
return self.get_linestyle()
def get_lw(self):
return self.get_linewidth()
def get_mec(self):
return self.get_markeredgecolor()
def get_mew(self):
return self.get_markeredgewidth()
def get_mfc(self):
return self.get_markerfacecolor()
def get_mfcalt(self, alt=False):
return self.get_markerfacecoloralt()
def get_ms(self):
return self.get_markersize()
def set_dash_joinstyle(self, s):
s = s.lower()
if s not in self.validJoin:
raise ValueError('set_dash_joinstyle passed "%s";\n' % (s,)
+ 'valid joinstyles are %s' % (self.validJoin,))
self._dashjoinstyle = s
def set_solid_joinstyle(self, s):
s = s.lower()
if s not in self.validJoin:
raise ValueError('set_solid_joinstyle passed "%s";\n' % (s,)
+ 'valid joinstyles are %s' % (self.validJoin,))
self._solidjoinstyle = s
def get_dash_joinstyle(self):
return self._dashjoinstyle
def get_solid_joinstyle(self):
return self._solidjoinstyle
def set_dash_capstyle(self, s):
s = s.lower()
if s not in self.validCap:
raise ValueError('set_dash_capstyle passed "%s";\n' % (s,)
+ 'valid capstyles are %s' % (self.validCap,))
self._dashcapstyle = s
def set_solid_capstyle(self, s):
s = s.lower()
if s not in self.validCap:
raise ValueError('set_solid_capstyle passed "%s";\n' % (s,)
+ 'valid capstyles are %s' % (self.validCap,))
self._solidcapstyle = s
def get_dash_capstyle(self):
return self._dashcapstyle
def get_solid_capstyle(self):
return self._solidcapstyle
def is_dashed(self):
return self._linestyle in ('--', '-.', ':')
class VertexSelector:
    """Manage an interactive set of selected vertices on a pickable Line2D.

    Each pick event on the line toggles the picked indices in/out of the
    selection; subclasses override `process_selected` to react to changes.
    """
    def __init__(self, line):
        """Connect to pick events for *line*.

        Raises RuntimeError if the line is not attached to an Axes or has
        no picker set (both are required for pick events to fire).
        """
        if not hasattr(line, 'axes'):
            raise RuntimeError('You must first add the line to the Axes')
        if line.get_picker() is None:
            raise RuntimeError('You must first set the picker property '
                               'of the line')
        self.axes = line.axes
        self.line = line
        self.canvas = self.axes.figure.canvas
        # cid can be used by callers to disconnect the pick handler.
        self.cid = self.canvas.mpl_connect('pick_event', self.onpick)
        # Set of currently selected vertex indices.
        self.ind = set()
    def process_selected(self, ind, xs, ys):
        """Hook called after every selection change with the sorted indices
        and their x/y data; default implementation does nothing.
        """
        pass
    def onpick(self, event):
        """Toggle the picked indices and notify `process_selected`."""
        if event.artist is not self.line:
            return
        for i in event.ind:
            if i in self.ind:
                self.ind.remove(i)
            else:
                self.ind.add(i)
        ind = list(self.ind)
        ind.sort()
        # Fancy indexing with a list -- assumes get_data() returns numpy
        # arrays (TODO confirm).
        xdata, ydata = self.line.get_data()
        self.process_selected(ind, xdata[ind], ydata[ind])
# Module-level aliases exposing the style tables for external use.
lineStyles = Line2D._lineStyles
lineMarkers = MarkerStyle.markers
drawStyles = Line2D.drawStyles
fillStyles = MarkerStyle.fillstyles
# Make the Line2D keyword documentation available for %(Line2D)s
# interpolation in other docstrings.
docstring.interpd.update(Line2D=artist.kwdoc(Line2D))
# You can not set the docstring of an instancemethod,
# but you can on the underlying function. Go figure.
docstring.dedent_interpd(Line2D.__init__)
| true | true |
1c456142bbc95af7e87173cb0cb84afd5f28b013 | 929 | py | Python | interprete/src/models/gpt/example.py | serjtroshin/PLBART | 58e5de3041a2fc8b98e54648c6489fb3c23db9cb | [
"MIT"
] | null | null | null | interprete/src/models/gpt/example.py | serjtroshin/PLBART | 58e5de3041a2fc8b98e54648c6489fb3c23db9cb | [
"MIT"
] | null | null | null | interprete/src/models/gpt/example.py | serjtroshin/PLBART | 58e5de3041a2fc8b98e54648c6489fb3c23db9cb | [
"MIT"
] | null | null | null | # from transformers import pipeline
# generator = pipeline('text-generation', model='EleutherAI/gpt-neo-2.7B')
# generator("EleutherAI has", do_sample=True, min_length=50)
# [{'generated_text': 'EleutherAI has made a commitment to create new software packages for each of its major clients and has'}]
from transformers import GPT2Tokenizer, GPT2Model
model_name = "microsoft/CodeGPT-small-java-adaptedGPT2"
# model_name = "./CodeGPT-small-java-adaptedGPT2"
tokenizer = GPT2Tokenizer.from_pretrained(model_name) # CodeGPT-small-java-adaptedGPT2
model = GPT2Model.from_pretrained(model_name)
# tokenizer.save_pretrained(f"./{model_name}")
# model.save_pretrained(f"./{model_name}")
text = "Replace me by any text you'd like."
encoded_input = tokenizer(text, return_tensors='pt')
print(model)
output = model(**encoded_input, output_hidden_states=True)
print(len(output["hidden_states"]))
print(output["hidden_states"][0].shape) | 42.227273 | 128 | 0.779333 |
from transformers import GPT2Tokenizer, GPT2Model
model_name = "microsoft/CodeGPT-small-java-adaptedGPT2"
tokenizer = GPT2Tokenizer.from_pretrained(model_name)
model = GPT2Model.from_pretrained(model_name)
text = "Replace me by any text you'd like."
encoded_input = tokenizer(text, return_tensors='pt')
print(model)
output = model(**encoded_input, output_hidden_states=True)
print(len(output["hidden_states"]))
print(output["hidden_states"][0].shape) | true | true |
1c456237e48e7b21db5e6e1bb2ccae546249bad4 | 7,590 | py | Python | prov/constants.py | AndreiFrunze/wrangler | 076a07de00fc966dcf18ca6b6a6e804be5245ed9 | [
"Apache-2.0"
] | 2 | 2017-09-07T04:33:18.000Z | 2019-01-07T13:32:15.000Z | prov/constants.py | AndreiFrunze/wrangler | 076a07de00fc966dcf18ca6b6a6e804be5245ed9 | [
"Apache-2.0"
] | 2 | 2016-10-06T13:07:05.000Z | 2017-12-20T09:47:08.000Z | prov/constants.py | AndreiFrunze/wrangler | 076a07de00fc966dcf18ca6b6a6e804be5245ed9 | [
"Apache-2.0"
] | 5 | 2016-09-01T08:38:20.000Z | 2018-08-28T12:08:39.000Z | from __future__ import (absolute_import, division, print_function,
unicode_literals)
__author__ = 'Trung Dong Huynh'
__email__ = 'trungdong@donggiang.com'
import six
# # PROV record constants - PROV-DM
# Built-in namespaces
from prov.identifier import Namespace
XSD = Namespace('xsd', 'http://www.w3.org/2001/XMLSchema#')
PROV = Namespace('prov', 'http://www.w3.org/ns/prov#')
XSI = Namespace('xsi', 'http://www.w3.org/2001/XMLSchema-instance')
# C1. Entities/Activities
PROV_ENTITY = PROV['Entity']
PROV_ACTIVITY = PROV['Activity']
PROV_GENERATION = PROV['Generation']
PROV_USAGE = PROV['Usage']
PROV_COMMUNICATION = PROV['Communication']
PROV_START = PROV['Start']
PROV_END = PROV['End']
PROV_INVALIDATION = PROV['Invalidation']
# C2. Derivations
PROV_DERIVATION = PROV['Derivation']
# C3. Agents/Responsibility
PROV_AGENT = PROV['Agent']
PROV_ATTRIBUTION = PROV['Attribution']
PROV_ASSOCIATION = PROV['Association']
PROV_DELEGATION = PROV['Delegation']
PROV_INFLUENCE = PROV['Influence']
# C4. Bundles
PROV_BUNDLE = PROV['Bundle']
# C5. Alternate
PROV_ALTERNATE = PROV['Alternate']
PROV_SPECIALIZATION = PROV['Specialization']
PROV_MENTION = PROV['Mention']
# C6. Collections
PROV_MEMBERSHIP = PROV['Membership']
PROV_N_MAP = {
PROV_ENTITY: u'entity',
PROV_ACTIVITY: u'activity',
PROV_GENERATION: u'wasGeneratedBy',
PROV_USAGE: u'used',
PROV_COMMUNICATION: u'wasInformedBy',
PROV_START: u'wasStartedBy',
PROV_END: u'wasEndedBy',
PROV_INVALIDATION: u'wasInvalidatedBy',
PROV_DERIVATION: u'wasDerivedFrom',
PROV_AGENT: u'agent',
PROV_ATTRIBUTION: u'wasAttributedTo',
PROV_ASSOCIATION: u'wasAssociatedWith',
PROV_DELEGATION: u'actedOnBehalfOf',
PROV_INFLUENCE: u'wasInfluencedBy',
PROV_ALTERNATE: u'alternateOf',
PROV_SPECIALIZATION: u'specializationOf',
PROV_MENTION: u'mentionOf',
PROV_MEMBERSHIP: u'hadMember',
PROV_BUNDLE: u'bundle',
}
# Records defined as subtypes in PROV-N but top level types in for example
# PROV XML also need a mapping.
ADDITIONAL_N_MAP = {
PROV['Revision']: u'wasRevisionOf',
PROV['Quotation']: u'wasQuotedFrom',
PROV['PrimarySource']: u'hadPrimarySource',
PROV['SoftwareAgent']: u'softwareAgent',
PROV['Person']: u'person',
PROV['Organization']: u'organization',
PROV['Plan']: u'plan',
PROV['Collection']: u'collection',
PROV['EmptyCollection']: u'emptyCollection',
}
# Maps qualified names from the PROV namespace to their base class. If it
# has no baseclass it maps to itsself. This is needed for example for PROV
# XML (de)serializer where extended types are used a lot.
PROV_BASE_CLS = {
PROV_ENTITY: PROV_ENTITY,
PROV_ACTIVITY: PROV_ACTIVITY,
PROV_GENERATION: PROV_GENERATION,
PROV_USAGE: PROV_USAGE,
PROV_COMMUNICATION: PROV_COMMUNICATION,
PROV_START: PROV_START,
PROV_END: PROV_END,
PROV_INVALIDATION: PROV_INVALIDATION,
PROV_DERIVATION: PROV_DERIVATION,
PROV['Revision']: PROV_DERIVATION,
PROV['Quotation']: PROV_DERIVATION,
PROV['PrimarySource']: PROV_DERIVATION,
PROV_AGENT: PROV_AGENT,
PROV['SoftwareAgent']: PROV_AGENT,
PROV['Person']: PROV_AGENT,
PROV['Organization']: PROV_AGENT,
PROV_ATTRIBUTION: PROV_ATTRIBUTION,
PROV_ASSOCIATION: PROV_ASSOCIATION,
PROV['Plan']: PROV_ENTITY,
PROV_DELEGATION: PROV_DELEGATION,
PROV_INFLUENCE: PROV_INFLUENCE,
PROV_ALTERNATE: PROV_ALTERNATE,
PROV_SPECIALIZATION: PROV_SPECIALIZATION,
PROV_MENTION: PROV_MENTION,
PROV['Collection']: PROV_ENTITY,
PROV['EmptyCollection']: PROV_ENTITY,
PROV_MEMBERSHIP: PROV_MEMBERSHIP,
PROV_BUNDLE: PROV_ENTITY
}
# Identifiers for PROV's attributes
PROV_ATTR_ENTITY = PROV['entity']
PROV_ATTR_ACTIVITY = PROV['activity']
PROV_ATTR_TRIGGER = PROV['trigger']
PROV_ATTR_INFORMED = PROV['informed']
PROV_ATTR_INFORMANT = PROV['informant']
PROV_ATTR_STARTER = PROV['starter']
PROV_ATTR_ENDER = PROV['ender']
PROV_ATTR_AGENT = PROV['agent']
PROV_ATTR_PLAN = PROV['plan']
PROV_ATTR_DELEGATE = PROV['delegate']
PROV_ATTR_RESPONSIBLE = PROV['responsible']
PROV_ATTR_GENERATED_ENTITY = PROV['generatedEntity']
PROV_ATTR_USED_ENTITY = PROV['usedEntity']
PROV_ATTR_GENERATION = PROV['generation']
PROV_ATTR_USAGE = PROV['usage']
PROV_ATTR_SPECIFIC_ENTITY = PROV['specificEntity']
PROV_ATTR_GENERAL_ENTITY = PROV['generalEntity']
PROV_ATTR_ALTERNATE1 = PROV['alternate1']
PROV_ATTR_ALTERNATE2 = PROV['alternate2']
PROV_ATTR_BUNDLE = PROV['bundle']
PROV_ATTR_INFLUENCEE = PROV['influencee']
PROV_ATTR_INFLUENCER = PROV['influencer']
PROV_ATTR_COLLECTION = PROV['collection']
# Literal properties
PROV_ATTR_TIME = PROV['time']
PROV_ATTR_STARTTIME = PROV['startTime']
PROV_ATTR_ENDTIME = PROV['endTime']
PROV_ATTRIBUTE_QNAMES = {
PROV_ATTR_ENTITY,
PROV_ATTR_ACTIVITY,
PROV_ATTR_TRIGGER,
PROV_ATTR_INFORMED,
PROV_ATTR_INFORMANT,
PROV_ATTR_STARTER,
PROV_ATTR_ENDER,
PROV_ATTR_AGENT,
PROV_ATTR_PLAN,
PROV_ATTR_DELEGATE,
PROV_ATTR_RESPONSIBLE,
PROV_ATTR_GENERATED_ENTITY,
PROV_ATTR_USED_ENTITY,
PROV_ATTR_GENERATION,
PROV_ATTR_USAGE,
PROV_ATTR_SPECIFIC_ENTITY,
PROV_ATTR_GENERAL_ENTITY,
PROV_ATTR_ALTERNATE1,
PROV_ATTR_ALTERNATE2,
PROV_ATTR_BUNDLE,
PROV_ATTR_INFLUENCEE,
PROV_ATTR_INFLUENCER,
PROV_ATTR_COLLECTION
}
PROV_ATTRIBUTE_LITERALS = {
PROV_ATTR_TIME, PROV_ATTR_STARTTIME, PROV_ATTR_ENDTIME
}
# Set of formal attributes of PROV records
PROV_ATTRIBUTES = PROV_ATTRIBUTE_QNAMES | PROV_ATTRIBUTE_LITERALS
PROV_RECORD_ATTRIBUTES = list((attr, six.text_type(attr)) for attr in
PROV_ATTRIBUTES)
PROV_RECORD_IDS_MAP = dict(
(PROV_N_MAP[rec_type_id], rec_type_id) for rec_type_id in PROV_N_MAP
)
PROV_ID_ATTRIBUTES_MAP = dict(
(prov_id, attribute) for (prov_id, attribute) in PROV_RECORD_ATTRIBUTES
)
PROV_ATTRIBUTES_ID_MAP = dict(
(attribute, prov_id) for (prov_id, attribute) in PROV_RECORD_ATTRIBUTES
)
# Extra definition for convenience
PROV_TYPE = PROV['type']
PROV_LABEL = PROV['label']
PROV_VALUE = PROV['value']
PROV_LOCATION = PROV['location']
PROV_ROLE = PROV['role']
PROV_QUALIFIEDNAME = PROV['QUALIFIED_NAME']
# XSD DATA TYPES
XSD_ANYURI = XSD['anyURI']
XSD_QNAME = XSD['QName']
XSD_DATETIME = XSD['dateTime']
XSD_TIME = XSD['time']
XSD_DATE = XSD['date']
XSD_STRING = XSD['string']
XSD_BOOLEAN = XSD['boolean']
# All XSD Integer types
XSD_INTEGER = XSD['integer']
XSD_LONG = XSD['long']
XSD_INT = XSD['int']
XSD_SHORT = XSD['short']
XSD_BYTE = XSD['byte']
XSD_NONNEGATIVEINTEGER = XSD['nonNegativeInteger']
XSD_UNSIGNEDLONG = XSD['unsignedLong']
XSD_UNSIGNEDINT = XSD['unsignedInt']
XSD_UNSIGNEDSHORT = XSD['unsignedShort']
XSD_UNSIGNEDBYTE = XSD['unsignedByte']
XSD_POSITIVEINTEGER = XSD['positiveInteger']
XSD_NONPOSITIVEINTEGER = XSD['nonPositiveInteger']
XSD_NEGATIVEINTEGER = XSD['negativeInteger']
# All XSD real number types
XSD_FLOAT = XSD['float']
XSD_DOUBLE = XSD['double']
XSD_DECIMAL = XSD['decimal']
| 33.584071 | 75 | 0.691304 | from __future__ import (absolute_import, division, print_function,
unicode_literals)
__author__ = 'Trung Dong Huynh'
__email__ = 'trungdong@donggiang.com'
import six
mespace
XSD = Namespace('xsd', 'http://www.w3.org/2001/XMLSchema#')
PROV = Namespace('prov', 'http://www.w3.org/ns/prov#')
XSI = Namespace('xsi', 'http://www.w3.org/2001/XMLSchema-instance')
PROV_ENTITY = PROV['Entity']
PROV_ACTIVITY = PROV['Activity']
PROV_GENERATION = PROV['Generation']
PROV_USAGE = PROV['Usage']
PROV_COMMUNICATION = PROV['Communication']
PROV_START = PROV['Start']
PROV_END = PROV['End']
PROV_INVALIDATION = PROV['Invalidation']
PROV_DERIVATION = PROV['Derivation']
PROV_AGENT = PROV['Agent']
PROV_ATTRIBUTION = PROV['Attribution']
PROV_ASSOCIATION = PROV['Association']
PROV_DELEGATION = PROV['Delegation']
PROV_INFLUENCE = PROV['Influence']
PROV_BUNDLE = PROV['Bundle']
PROV_ALTERNATE = PROV['Alternate']
PROV_SPECIALIZATION = PROV['Specialization']
PROV_MENTION = PROV['Mention']
PROV_MEMBERSHIP = PROV['Membership']
PROV_N_MAP = {
PROV_ENTITY: u'entity',
PROV_ACTIVITY: u'activity',
PROV_GENERATION: u'wasGeneratedBy',
PROV_USAGE: u'used',
PROV_COMMUNICATION: u'wasInformedBy',
PROV_START: u'wasStartedBy',
PROV_END: u'wasEndedBy',
PROV_INVALIDATION: u'wasInvalidatedBy',
PROV_DERIVATION: u'wasDerivedFrom',
PROV_AGENT: u'agent',
PROV_ATTRIBUTION: u'wasAttributedTo',
PROV_ASSOCIATION: u'wasAssociatedWith',
PROV_DELEGATION: u'actedOnBehalfOf',
PROV_INFLUENCE: u'wasInfluencedBy',
PROV_ALTERNATE: u'alternateOf',
PROV_SPECIALIZATION: u'specializationOf',
PROV_MENTION: u'mentionOf',
PROV_MEMBERSHIP: u'hadMember',
PROV_BUNDLE: u'bundle',
}
ADDITIONAL_N_MAP = {
PROV['Revision']: u'wasRevisionOf',
PROV['Quotation']: u'wasQuotedFrom',
PROV['PrimarySource']: u'hadPrimarySource',
PROV['SoftwareAgent']: u'softwareAgent',
PROV['Person']: u'person',
PROV['Organization']: u'organization',
PROV['Plan']: u'plan',
PROV['Collection']: u'collection',
PROV['EmptyCollection']: u'emptyCollection',
}
PROV_BASE_CLS = {
PROV_ENTITY: PROV_ENTITY,
PROV_ACTIVITY: PROV_ACTIVITY,
PROV_GENERATION: PROV_GENERATION,
PROV_USAGE: PROV_USAGE,
PROV_COMMUNICATION: PROV_COMMUNICATION,
PROV_START: PROV_START,
PROV_END: PROV_END,
PROV_INVALIDATION: PROV_INVALIDATION,
PROV_DERIVATION: PROV_DERIVATION,
PROV['Revision']: PROV_DERIVATION,
PROV['Quotation']: PROV_DERIVATION,
PROV['PrimarySource']: PROV_DERIVATION,
PROV_AGENT: PROV_AGENT,
PROV['SoftwareAgent']: PROV_AGENT,
PROV['Person']: PROV_AGENT,
PROV['Organization']: PROV_AGENT,
PROV_ATTRIBUTION: PROV_ATTRIBUTION,
PROV_ASSOCIATION: PROV_ASSOCIATION,
PROV['Plan']: PROV_ENTITY,
PROV_DELEGATION: PROV_DELEGATION,
PROV_INFLUENCE: PROV_INFLUENCE,
PROV_ALTERNATE: PROV_ALTERNATE,
PROV_SPECIALIZATION: PROV_SPECIALIZATION,
PROV_MENTION: PROV_MENTION,
PROV['Collection']: PROV_ENTITY,
PROV['EmptyCollection']: PROV_ENTITY,
PROV_MEMBERSHIP: PROV_MEMBERSHIP,
PROV_BUNDLE: PROV_ENTITY
}
PROV_ATTR_ENTITY = PROV['entity']
PROV_ATTR_ACTIVITY = PROV['activity']
PROV_ATTR_TRIGGER = PROV['trigger']
PROV_ATTR_INFORMED = PROV['informed']
PROV_ATTR_INFORMANT = PROV['informant']
PROV_ATTR_STARTER = PROV['starter']
PROV_ATTR_ENDER = PROV['ender']
PROV_ATTR_AGENT = PROV['agent']
PROV_ATTR_PLAN = PROV['plan']
PROV_ATTR_DELEGATE = PROV['delegate']
PROV_ATTR_RESPONSIBLE = PROV['responsible']
PROV_ATTR_GENERATED_ENTITY = PROV['generatedEntity']
PROV_ATTR_USED_ENTITY = PROV['usedEntity']
PROV_ATTR_GENERATION = PROV['generation']
PROV_ATTR_USAGE = PROV['usage']
PROV_ATTR_SPECIFIC_ENTITY = PROV['specificEntity']
PROV_ATTR_GENERAL_ENTITY = PROV['generalEntity']
PROV_ATTR_ALTERNATE1 = PROV['alternate1']
PROV_ATTR_ALTERNATE2 = PROV['alternate2']
PROV_ATTR_BUNDLE = PROV['bundle']
PROV_ATTR_INFLUENCEE = PROV['influencee']
PROV_ATTR_INFLUENCER = PROV['influencer']
PROV_ATTR_COLLECTION = PROV['collection']
# Literal properties
PROV_ATTR_TIME = PROV['time']
PROV_ATTR_STARTTIME = PROV['startTime']
PROV_ATTR_ENDTIME = PROV['endTime']
PROV_ATTRIBUTE_QNAMES = {
PROV_ATTR_ENTITY,
PROV_ATTR_ACTIVITY,
PROV_ATTR_TRIGGER,
PROV_ATTR_INFORMED,
PROV_ATTR_INFORMANT,
PROV_ATTR_STARTER,
PROV_ATTR_ENDER,
PROV_ATTR_AGENT,
PROV_ATTR_PLAN,
PROV_ATTR_DELEGATE,
PROV_ATTR_RESPONSIBLE,
PROV_ATTR_GENERATED_ENTITY,
PROV_ATTR_USED_ENTITY,
PROV_ATTR_GENERATION,
PROV_ATTR_USAGE,
PROV_ATTR_SPECIFIC_ENTITY,
PROV_ATTR_GENERAL_ENTITY,
PROV_ATTR_ALTERNATE1,
PROV_ATTR_ALTERNATE2,
PROV_ATTR_BUNDLE,
PROV_ATTR_INFLUENCEE,
PROV_ATTR_INFLUENCER,
PROV_ATTR_COLLECTION
}
PROV_ATTRIBUTE_LITERALS = {
PROV_ATTR_TIME, PROV_ATTR_STARTTIME, PROV_ATTR_ENDTIME
}
# Set of formal attributes of PROV records
PROV_ATTRIBUTES = PROV_ATTRIBUTE_QNAMES | PROV_ATTRIBUTE_LITERALS
PROV_RECORD_ATTRIBUTES = list((attr, six.text_type(attr)) for attr in
PROV_ATTRIBUTES)
PROV_RECORD_IDS_MAP = dict(
(PROV_N_MAP[rec_type_id], rec_type_id) for rec_type_id in PROV_N_MAP
)
PROV_ID_ATTRIBUTES_MAP = dict(
(prov_id, attribute) for (prov_id, attribute) in PROV_RECORD_ATTRIBUTES
)
PROV_ATTRIBUTES_ID_MAP = dict(
(attribute, prov_id) for (prov_id, attribute) in PROV_RECORD_ATTRIBUTES
)
# Extra definition for convenience
PROV_TYPE = PROV['type']
PROV_LABEL = PROV['label']
PROV_VALUE = PROV['value']
PROV_LOCATION = PROV['location']
PROV_ROLE = PROV['role']
PROV_QUALIFIEDNAME = PROV['QUALIFIED_NAME']
# XSD DATA TYPES
XSD_ANYURI = XSD['anyURI']
XSD_QNAME = XSD['QName']
XSD_DATETIME = XSD['dateTime']
XSD_TIME = XSD['time']
XSD_DATE = XSD['date']
XSD_STRING = XSD['string']
XSD_BOOLEAN = XSD['boolean']
# All XSD Integer types
XSD_INTEGER = XSD['integer']
XSD_LONG = XSD['long']
XSD_INT = XSD['int']
XSD_SHORT = XSD['short']
XSD_BYTE = XSD['byte']
XSD_NONNEGATIVEINTEGER = XSD['nonNegativeInteger']
XSD_UNSIGNEDLONG = XSD['unsignedLong']
XSD_UNSIGNEDINT = XSD['unsignedInt']
XSD_UNSIGNEDSHORT = XSD['unsignedShort']
XSD_UNSIGNEDBYTE = XSD['unsignedByte']
XSD_POSITIVEINTEGER = XSD['positiveInteger']
XSD_NONPOSITIVEINTEGER = XSD['nonPositiveInteger']
XSD_NEGATIVEINTEGER = XSD['negativeInteger']
# All XSD real number types
XSD_FLOAT = XSD['float']
XSD_DOUBLE = XSD['double']
XSD_DECIMAL = XSD['decimal']
| true | true |
1c45639a0e721d5529cabef77a05e804e7bcec90 | 3,371 | py | Python | application/app.py | StephenSpicer/airbnb_app | ea9b92dbd35d508d2f819255d026b299d63285a8 | [
"MIT"
] | null | null | null | application/app.py | StephenSpicer/airbnb_app | ea9b92dbd35d508d2f819255d026b299d63285a8 | [
"MIT"
] | null | null | null | application/app.py | StephenSpicer/airbnb_app | ea9b92dbd35d508d2f819255d026b299d63285a8 | [
"MIT"
] | null | null | null | # Import Statements
import pandas as pd
from flask import Flask, render_template, request
from joblib import load
from .predict import get_prediction
# Instantiate Application
# def create_app():
# app = Flask(__name__)
# @app.route('/')
# def hello_heroku():
# return "hello heroku"
# return app
def create_app():
    """Create and configure the Flask application for Airbnb price prediction.

    Routes:
        ``/``          -- renders the listing-attributes input form.
        ``/run_model`` -- accepts the submitted form (POST), builds the
                          model input vector and renders the predicted
                          price; a direct GET returns an error message.
    """
    app = Flask(__name__)

    # Trained regression model, persisted with joblib and loaded once at
    # application start-up.
    load_model = load('application/finalized_model.sav')

    @app.route('/')
    def form():
        """Render the input form."""
        return render_template('form.html')

    @app.route('/run_model', methods=['POST', 'GET'])
    def data():
        """Handle a form submission and return the rendered prediction."""
        # A direct GET bypasses the form, so there is nothing to score.
        if request.method == 'GET':
            return ("ERROR: The URL /run_model is accessed directly. "
                    "Try going to home page '/' to submit form")
        # POST: gather the form fields in the order the model expects.
        property_type = str(request.values["prop"])
        room_type = str(request.values["room_type"])
        bathrooms = float(request.values["bathrooms"])
        cancellation_policy = str(request.values["cancellation"])
        city = str(request.values["city"])
        host_since = str(request.values["host_since"])
        review_scores_rating = int(request.values["review_rating"])
        bedrooms = int(request.values["bedrooms"])
        beds = int(request.values["beds"])
        # Values of the amenity checkboxes the user ticked.
        amenities = request.form.getlist('feature_checkbox')
        to_predict = [property_type, room_type, bathrooms,
                      cancellation_policy, city, host_since,
                      review_scores_rating, bedrooms, beds,
                      amenities]
        return model_output(to_predict)

    def model_output(user_input):
        """Build the full feature vector from *user_input* and render the
        predicted nightly price.

        *user_input* is the 9 scalar features followed by the list of
        selected amenity names (see ``data`` above).
        """
        all_amenities = [
            "instant_bookable",
            "host_has_profile_pic",
            "host_identity_verified",
            "cleaning_fee",
            "Wireless Internet",
            "Air conditioning",
            "Kitchen",
            "Heating",
            "Family/kid friendly",
            "Hair dryer",
            "Iron",
            "Shampoo",
            "Fire extinguisher",
            "Laptop friendly workspace",
            "Indoor fireplace",
            "TV",
            "Cable TV"]
        # Scalar features first, then a 0/1 flag per known amenity.
        feature_vector = list(user_input[:9])
        selected = user_input[9]
        # NOTE(review): assumes each checkbox value matches an entry of
        # all_amenities exactly -- verify against form.html. The previous
        # substring test (`any(option in s for s in ...)`) wrongly marked
        # "TV" as present whenever only "Cable TV" was selected.
        for option in all_amenities:
            feature_vector.append(1 if option in selected else 0)
        price = '${}'.format(get_prediction(feature_vector, load_model))
        return render_template('results.html', prediction=price)

    return app
| 34.752577 | 114 | 0.589143 |
import pandas as pd
from flask import Flask, render_template, request
from joblib import load
from .predict import get_prediction
def create_app():
app = Flask(__name__)
load_model = load('application/finalized_model.sav')
@app.route('/')
def form():
return render_template('form.html')
@app.route('/run_model', methods=['POST', 'GET'])
def data():
if request.method == 'GET':
message = f"ERROR: The URL /run_model is accessed directly. Try going to home page '/' to submit form"
if request.method == 'POST':
property_type = str(request.values["prop"])
room_type = str(request.values["room_type"])
bathrooms = float(request.values["bathrooms"])
cancellation_policy = str(request.values["cancellation"])
city = str(request.values["city"])
host_since = str(request.values["host_since"])
review_scores_rating = int(request.values["review_rating"])
bedrooms = int(request.values["bedrooms"])
beds = int(request.values["beds"])
amenities = request.form.getlist('feature_checkbox')
to_predict = [property_type, room_type, bathrooms,
cancellation_policy, city, host_since,
review_scores_rating, bedrooms, beds,
amenities]
message = model_output(to_predict)
return message
def model_output(user_input):
mod_input = []
all_amenities = [
"instant_bookable",
"host_has_profile_pic",
"host_identity_verified",
"cleaning_fee",
"Wireless Internet",
"Air conditioning",
"Kitchen",
"Heating",
"Family/kid friendly",
"Hair dryer",
"Iron",
"Shampoo",
"Fire extinguisher",
"Laptop friendly workspace",
"Indoor fireplace",
"TV",
"Cable TV"]
mod_input.extend(user_input[:9])
input = user_input[9]
for option in all_amenities:
if any(option in s for s in input):
mod_input.append(1)
else:
mod_input.append(0)
price='${}'.format(get_prediction(mod_input, load_model))
return render_template('results.html', prediction=price)
return app
| true | true |
1c456470d18841deab9b9d0f37410c49dd5f194b | 8,894 | py | Python | dragonfly/opt/multiobjective_optimiser.py | hase1128/dragonfly | 4be7e4c539d3edccc4d243ab9f972b1ffb0d9a5c | [
"MIT"
] | null | null | null | dragonfly/opt/multiobjective_optimiser.py | hase1128/dragonfly | 4be7e4c539d3edccc4d243ab9f972b1ffb0d9a5c | [
"MIT"
] | null | null | null | dragonfly/opt/multiobjective_optimiser.py | hase1128/dragonfly | 4be7e4c539d3edccc4d243ab9f972b1ffb0d9a5c | [
"MIT"
] | null | null | null | """
Defines a class for Multi-objective Blackbox Optimisation.
-- bparia@cs.cmu.edu
-- kandasamy@cs.cmu.edu
"""
# NB: In this file, the acronym MOO/moo refers to multi-objective optimisation. --KK
# pylint: disable=abstract-class-little-used
# pylint: disable=invalid-name
from __future__ import division
from argparse import Namespace
import numpy as np
# Local imports
from ..exd.exd_core import ExperimentDesigner, exd_core_args
from ..exd.experiment_caller import MultiFunctionCaller, FunctionCaller
from ..utils.general_utils import update_pareto_set
# Multi-objective optimisation accepts exactly the generic experiment-design
# core arguments.
multiobjective_opt_args = exd_core_args
# Shared error message raised wherever multi-fidelity execution is requested
# for multi-objective optimisation (not implemented yet).
_NO_MF_FOR_MOO_ERR_MSG = 'Multi-fidelity support has not been implemented yet' + \
                         ' for multi-objective optimisation.'
class MultiObjectiveOptimiser(ExperimentDesigner):
  """ Blackbox Multi-objective Optimiser.

  Maintains a Pareto front of the observed (possibly noisy) objective values and
  of the true objective values while the parent ExperimentDesigner drives the
  evaluation loop.  Multi-fidelity evaluations are not supported yet; every
  multi-fidelity code path raises NotImplementedError.
  """
  # pylint: disable=attribute-defined-outside-init
  def __init__(self, multi_func_caller, worker_manager, model=None, options=None,
               reporter=None):
    """ Constructor.

    multi_func_caller must be a MultiFunctionCaller -- and not a plain
    FunctionCaller -- since several objectives are optimised jointly.
    """
    assert isinstance(multi_func_caller, MultiFunctionCaller) and not \
           isinstance(multi_func_caller, FunctionCaller)
    self.multi_func_caller = multi_func_caller
    self.domain = self.multi_func_caller.domain
    super(MultiObjectiveOptimiser, self).__init__(multi_func_caller, worker_manager,
                                                  model, options, reporter)
  def _exd_child_set_up(self):
    """ Set up for the optimisation. """
    if self.multi_func_caller.is_mf():
      raise NotImplementedError(_NO_MF_FOR_MOO_ERR_MSG)
    self._moo_set_up()
    self._multi_opt_method_set_up()
    self.prev_eval_vals = []  # values of evaluations supplied before optimisation
  def _moo_set_up(self):
    """ Initialise the Pareto sets and the history fields that track them. """
    # A Pareto *set* (not a single optimum) is maintained for both the observed
    # and the true objective values.
    self.curr_pareto_vals = []
    self.curr_pareto_points = []
    self.curr_true_pareto_vals = []
    self.curr_true_pareto_points = []
    # Per-query history.
    self.history.query_vals = []
    self.history.query_true_vals = []
    self.history.curr_pareto_vals = []
    self.history.curr_pareto_points = []
    self.history.curr_true_pareto_vals = []
    self.history.curr_true_pareto_points = []
    if self.multi_func_caller.is_mf():
      raise NotImplementedError(_NO_MF_FOR_MOO_ERR_MSG)
    # Attributes copied from each qinfo into the history by the parent class.
    self.to_copy_from_qinfo_to_history['val'] = 'query_vals'
    self.to_copy_from_qinfo_to_history['true_val'] = 'query_true_vals'
    # Previous evaluations; the history holds references to the same lists.
    self.prev_eval_vals = []
    self.prev_eval_true_vals = []
    self.history.prev_eval_vals = self.prev_eval_vals
    self.history.prev_eval_true_vals = self.prev_eval_true_vals
  def _multi_opt_method_set_up(self):
    """ Any set up for the specific optimisation method. """
    raise NotImplementedError('Implement in Optimisation Method class.')
  def _get_problem_str(self):
    """ Description of the problem. """
    return 'Multi-objective Optimisation'
  # Book-keeping ----------------------------------------------------------------
  def _exd_child_update_history(self, qinfo):
    """ Updates to the history specific to multi-objective optimisation. """
    # Update the Pareto sets with the new query.
    if self.multi_func_caller.is_mf():
      raise NotImplementedError(_NO_MF_FOR_MOO_ERR_MSG)
    else:
      self._update_opt_point_and_val(qinfo)
    # Record a snapshot of the current Pareto sets.
    self.history.curr_pareto_vals.append(self.curr_pareto_vals)
    self.history.curr_pareto_points.append(self.curr_pareto_points)
    self.history.curr_true_pareto_vals.append(self.curr_true_pareto_vals)
    self.history.curr_true_pareto_points.append(self.curr_true_pareto_points)
    # Any method specific updating.
    self._multi_opt_method_update_history(qinfo)
  def _update_opt_point_and_val(self, qinfo, query_is_at_fidel_to_opt=None):
    """ Updates the Pareto sets according to the data in qinfo.
        Can be overridden by a child class if you want to do anything differently.
    """
    if query_is_at_fidel_to_opt is not None:
      if not query_is_at_fidel_to_opt:
        # if the fidelity queried at is not fidel_to_opt, then return
        return
    # Update both the observed-value and the true-value Pareto sets.
    self.curr_pareto_vals, self.curr_pareto_points = update_pareto_set(
        self.curr_pareto_vals, self.curr_pareto_points, qinfo.val, qinfo.point)
    self.curr_true_pareto_vals, self.curr_true_pareto_points = update_pareto_set(
        self.curr_true_pareto_vals, self.curr_true_pareto_points, qinfo.true_val,
        qinfo.point)
  def _multi_opt_method_update_history(self, qinfo):
    """ Any updates to the history specific to the method. """
    pass  # Pass by default. Not necessary to override.
  def _get_exd_child_header_str(self):
    """ Header for multi-objective optimisation progress reports. """
    ret = '#Pareto=<num_pareto_optimal_points_found>'
    ret += self._get_opt_method_header_str()
    return ret
  @classmethod
  def _get_opt_method_header_str(cls):
    """ Header for optimisation method. """
    return ''
  def _get_exd_child_report_results_str(self):
    """ Returns a string describing the progress in optimisation. """
    best_val_str = '#Pareto: %d'%(len(self.curr_pareto_vals))
    opt_method_str = self._get_opt_method_report_results_str()
    return best_val_str + opt_method_str + ', '
  def _get_opt_method_report_results_str(self):
    """ Any details to include in a child method when reporting results.
        Can be overridden by a child class.
    """
    #pylint: disable=no-self-use
    return ''
  def _exd_child_handle_prev_evals_in_options(self):
    """ Handles evaluations supplied via options; returns how many were added. """
    ret = 0
    for qinfo in self.options.prev_evaluations.qinfos:
      if not hasattr(qinfo, 'true_val'):
        # Unknown true values are marked with -inf placeholders.
        qinfo.true_val = [-np.inf] * len(qinfo.val)
      if self.multi_func_caller.is_mf():
        raise NotImplementedError(_NO_MF_FOR_MOO_ERR_MSG)
      else:
        self._update_opt_point_and_val(qinfo)
      self.prev_eval_points.append(qinfo.point)
      self.prev_eval_vals.append(qinfo.val)
      self.prev_eval_true_vals.append(qinfo.true_val)
      ret += 1
    return ret
  def _child_handle_data_loaded_from_file(self, loaded_data_from_file):
    """ Handles evaluations loaded from file; returns how many were loaded. """
    query_points = loaded_data_from_file['points']
    num_pts_in_file = len(query_points)
    query_vals = loaded_data_from_file['vals']
    assert num_pts_in_file == len(query_vals)
    if 'true_vals' in loaded_data_from_file:
      query_true_vals = loaded_data_from_file['true_vals']
      # Bug fix: the original re-checked len(query_vals) here (copy-paste);
      # the loaded true values are what must match the number of points.
      assert num_pts_in_file == len(query_true_vals)
    else:
      # Bug fix: build one fresh placeholder list per point.  The original used
      # [inner] * n, which creates n aliases of a single mutable inner list.
      query_true_vals = [[-np.inf] * self.multi_func_caller.num_funcs
                         for _ in range(num_pts_in_file)]
    # Multi-fidelity
    if self.multi_func_caller.is_mf():
      raise NotImplementedError('Not implemented multi-fidelity MOO yet.')
    # Now iterate through each point.
    for pt, val, true_val in zip(query_points, query_vals, query_true_vals):
      qinfo = Namespace(point=pt, val=val, true_val=true_val)
      if self.multi_func_caller.is_mf():
        raise NotImplementedError('Not implemented multi-fidelity MOO yet.')
      else:
        self._update_opt_point_and_val(qinfo)
      self.prev_eval_points.append(qinfo.point)
      self.prev_eval_vals.append(qinfo.val)
      self.prev_eval_true_vals.append(qinfo.true_val)
    return num_pts_in_file
  def _exd_child_get_data_to_save(self):
    """ Return data to save along with the number of saved records. """
    ret = {'points': self.prev_eval_points + self.history.query_points,
           'vals': self.prev_eval_vals + self.history.query_vals,
           'true_vals': self.prev_eval_true_vals + self.history.query_true_vals}
    if self.multi_func_caller.is_mf():
      raise NotImplementedError('Not implemented multi-fidelity MOO yet.')
    num_data_saved = len(ret['points'])
    return ret, num_data_saved
  def _child_run_experiments_initialise(self):
    """ Handles any initialisation before running experiments. """
    self._opt_method_optimise_initalise()
  def _opt_method_optimise_initalise(self):
    """ Any routine to run for a method just before the optimisation routine. """
    pass  # Pass by default. Not necessary to override.
  def optimise(self, max_capital):
    """ Optimise the function(s).  A wrapper for run_experiments from
        the parent ExperimentDesigner. """
    ret = self.run_experiments(max_capital)
    return ret
  def _get_final_return_quantities(self):
    """ Return the current Pareto values, Pareto points and the history. """
    return self.curr_pareto_vals, self.curr_pareto_points, self.history
| 40.798165 | 88 | 0.723746 |
from __future__ import division
from argparse import Namespace
import numpy as np
from ..exd.exd_core import ExperimentDesigner, exd_core_args
from ..exd.experiment_caller import MultiFunctionCaller, FunctionCaller
from ..utils.general_utils import update_pareto_set
multiobjective_opt_args = exd_core_args
_NO_MF_FOR_MOO_ERR_MSG = 'Multi-fidelity support has not been implemented yet' + \
' for multi-objective optimisation.'
class MultiObjectiveOptimiser(ExperimentDesigner):
def __init__(self, multi_func_caller, worker_manager, model=None, options=None,
reporter=None):
assert isinstance(multi_func_caller, MultiFunctionCaller) and not \
isinstance(multi_func_caller, FunctionCaller)
self.multi_func_caller = multi_func_caller
self.domain = self.multi_func_caller.domain
super(MultiObjectiveOptimiser, self).__init__(multi_func_caller, worker_manager,
model, options, reporter)
def _exd_child_set_up(self):
if self.multi_func_caller.is_mf():
raise NotImplementedError(_NO_MF_FOR_MOO_ERR_MSG)
self._moo_set_up()
self._multi_opt_method_set_up()
self.prev_eval_vals = []
def _moo_set_up(self):
self.curr_pareto_vals = []
self.curr_pareto_points = []
self.curr_true_pareto_vals = []
self.curr_true_pareto_points = []
self.history.query_vals = []
self.history.query_true_vals = []
self.history.curr_pareto_vals = []
self.history.curr_pareto_points = []
self.history.curr_true_pareto_vals = []
self.history.curr_true_pareto_points = []
if self.multi_func_caller.is_mf():
raise NotImplementedError(_NO_MF_FOR_MOO_ERR_MSG)
self.to_copy_from_qinfo_to_history['val'] = 'query_vals'
self.to_copy_from_qinfo_to_history['true_val'] = 'query_true_vals'
self.prev_eval_vals = []
self.prev_eval_true_vals = []
self.history.prev_eval_vals = self.prev_eval_vals
self.history.prev_eval_true_vals = self.prev_eval_true_vals
def _multi_opt_method_set_up(self):
raise NotImplementedError('Implement in Optimisation Method class.')
def _get_problem_str(self):
return 'Multi-objective Optimisation'
def _exd_child_update_history(self, qinfo):
if self.multi_func_caller.is_mf():
raise NotImplementedError(_NO_MF_FOR_MOO_ERR_MSG)
else:
self._update_opt_point_and_val(qinfo)
self.history.curr_pareto_vals.append(self.curr_pareto_vals)
self.history.curr_pareto_points.append(self.curr_pareto_points)
self.history.curr_true_pareto_vals.append(self.curr_true_pareto_vals)
self.history.curr_true_pareto_points.append(self.curr_true_pareto_points)
self._multi_opt_method_update_history(qinfo)
def _update_opt_point_and_val(self, qinfo, query_is_at_fidel_to_opt=None):
if query_is_at_fidel_to_opt is not None:
if not query_is_at_fidel_to_opt:
return
self.curr_pareto_vals, self.curr_pareto_points = update_pareto_set(
self.curr_pareto_vals, self.curr_pareto_points, qinfo.val, qinfo.point)
self.curr_true_pareto_vals, self.curr_true_pareto_points = update_pareto_set(
self.curr_true_pareto_vals, self.curr_true_pareto_points, qinfo.true_val,
qinfo.point)
def _multi_opt_method_update_history(self, qinfo):
pass
def _get_exd_child_header_str(self):
ret = '#Pareto=<num_pareto_optimal_points_found>'
ret += self._get_opt_method_header_str()
return ret
@classmethod
def _get_opt_method_header_str(cls):
return ''
def _get_exd_child_report_results_str(self):
best_val_str = '#Pareto: %d'%(len(self.curr_pareto_vals))
opt_method_str = self._get_opt_method_report_results_str()
return best_val_str + opt_method_str + ', '
def _get_opt_method_report_results_str(self):
return ''
def _exd_child_handle_prev_evals_in_options(self):
ret = 0
for qinfo in self.options.prev_evaluations.qinfos:
if not hasattr(qinfo, 'true_val'):
qinfo.true_val = [-np.inf] * len(qinfo.val)
if self.multi_func_caller.is_mf():
raise NotImplementedError(_NO_MF_FOR_MOO_ERR_MSG)
else:
self._update_opt_point_and_val(qinfo)
self.prev_eval_points.append(qinfo.point)
self.prev_eval_vals.append(qinfo.val)
self.prev_eval_true_vals.append(qinfo.true_val)
ret += 1
return ret
def _child_handle_data_loaded_from_file(self, loaded_data_from_file):
query_points = loaded_data_from_file['points']
num_pts_in_file = len(query_points)
query_vals = loaded_data_from_file['vals']
assert num_pts_in_file == len(query_vals)
if 'true_vals' in loaded_data_from_file:
query_true_vals = loaded_data_from_file['true_vals']
assert num_pts_in_file == len(query_vals)
else:
query_true_vals = [[-np.inf] * self.multi_func_caller.num_funcs] * len(query_vals)
if self.multi_func_caller.is_mf():
raise NotImplementedError('Not implemented multi-fidelity MOO yet.')
for pt, val, true_val in zip(query_points, query_vals, query_true_vals):
qinfo = Namespace(point=pt, val=val, true_val=true_val)
if self.multi_func_caller.is_mf():
raise NotImplementedError('Not implemented multi-fidelity MOO yet.')
else:
self._update_opt_point_and_val(qinfo)
self.prev_eval_points.append(qinfo.point)
self.prev_eval_vals.append(qinfo.val)
self.prev_eval_true_vals.append(qinfo.true_val)
return num_pts_in_file
def _exd_child_get_data_to_save(self):
ret = {'points': self.prev_eval_points + self.history.query_points,
'vals': self.prev_eval_vals + self.history.query_vals,
'true_vals': self.prev_eval_true_vals + self.history.query_true_vals}
if self.multi_func_caller.is_mf():
raise NotImplementedError('Not implemented multi-fidelity MOO yet.')
num_data_saved = len(ret['points'])
return ret, num_data_saved
def _child_run_experiments_initialise(self):
self._opt_method_optimise_initalise()
def _opt_method_optimise_initalise(self):
pass
def optimise(self, max_capital):
ret = self.run_experiments(max_capital)
return ret
def _get_final_return_quantities(self):
return self.curr_pareto_vals, self.curr_pareto_points, self.history
| true | true |
1c4564ebef61049797c74cdf7ecdeb24f8731ca0 | 2,471 | py | Python | tests/test_marmiton.py | squat-house/recipe-scrapers | 72d2f69196f95210d2ea248f3b5cb446f94fd2b2 | [
"MIT"
] | 2 | 2020-07-28T15:12:10.000Z | 2020-07-30T18:10:33.000Z | tests/test_marmiton.py | bfcarpio/recipe-scrapers | 827ec444bc9d422a98c84c05cc4e4bcd3d084d51 | [
"MIT"
] | 1 | 2022-01-08T10:49:17.000Z | 2022-01-08T10:49:30.000Z | tests/test_marmiton.py | AlexRogalskiy/recipe-scrapers | ff378b3ba4ae7ff4cbc113ca13991f887c1c70e7 | [
"MIT"
] | 1 | 2022-01-08T10:49:09.000Z | 2022-01-08T10:49:09.000Z | from recipe_scrapers.marmiton import Marmiton
from tests import ScraperTest
class TestMarmitonScraper(ScraperTest):
scraper_class = Marmiton
def test_host(self):
self.assertEqual("marmiton.org", self.harvester_class.host())
def test_canonical_url(self):
self.assertEqual(
"https://www.marmiton.org/recettes/recette_ratatouille_23223.aspx",
self.harvester_class.canonical_url(),
)
def test_title(self):
self.assertEqual(self.harvester_class.title(), "Ratatouille")
def test_total_time(self):
self.assertEqual(80, self.harvester_class.total_time())
def test_yields(self):
self.assertEqual("4 personnes", self.harvester_class.yields())
def test_ingredients(self):
self.assertCountEqual(
[
"350 g d'aubergine",
"350 g de courgette",
"350 g de poivron de couleur rouge et vert",
"350 g d'oignon",
"500 g de tomate bien mûres",
"3 gousses d'ail",
"6 cuillères à soupe d'huile d'olive",
"1 brin de thym",
"1 feuille de laurier",
"poivre",
"sel",
],
self.harvester_class.ingredients(),
)
def test_instructions(self):
return self.assertEqual(
"Coupez les tomates pelées en quartiers,\n"
"les aubergines et les courgettes en rondelles.\n"
"Emincez les poivrons en lamelles\n"
"et l'oignon en rouelles.\n"
"Chauffez 2 cuillères à soupe d'huile dans une poêle\n"
"et faites-y fondre les oignons et les poivrons.\n"
"Lorsqu'ils sont tendres, ajoutez les tomates, l'ail haché, le thym et le laurier.\n"
"Salez, poivrez et laissez mijoter doucement à couvert durant 45 minutes.\n"
"Pendant ce temps, préparez les aubergines et les courgettes. "
"Faites les cuire séparemment ou non dans l'huile d'olive pendant 15 minutes.\n"
"Vérifiez la cuisson des légumes pour qu'ils ne soient plus fermes. "
"Ajoutez les alors au mélange de tomates et prolongez la cuisson sur tout petit feu pendant 10 min.\n"
"Salez et poivrez si besoin.",
self.harvester_class.instructions(),
)
def test_ratings(self):
self.assertEqual(4.8, self.harvester_class.ratings())
| 38.015385 | 114 | 0.607446 | from recipe_scrapers.marmiton import Marmiton
from tests import ScraperTest
class TestMarmitonScraper(ScraperTest):
scraper_class = Marmiton
def test_host(self):
self.assertEqual("marmiton.org", self.harvester_class.host())
def test_canonical_url(self):
self.assertEqual(
"https://www.marmiton.org/recettes/recette_ratatouille_23223.aspx",
self.harvester_class.canonical_url(),
)
def test_title(self):
self.assertEqual(self.harvester_class.title(), "Ratatouille")
def test_total_time(self):
self.assertEqual(80, self.harvester_class.total_time())
def test_yields(self):
self.assertEqual("4 personnes", self.harvester_class.yields())
def test_ingredients(self):
self.assertCountEqual(
[
"350 g d'aubergine",
"350 g de courgette",
"350 g de poivron de couleur rouge et vert",
"350 g d'oignon",
"500 g de tomate bien mûres",
"3 gousses d'ail",
"6 cuillères à soupe d'huile d'olive",
"1 brin de thym",
"1 feuille de laurier",
"poivre",
"sel",
],
self.harvester_class.ingredients(),
)
def test_instructions(self):
return self.assertEqual(
"Coupez les tomates pelées en quartiers,\n"
"les aubergines et les courgettes en rondelles.\n"
"Emincez les poivrons en lamelles\n"
"et l'oignon en rouelles.\n"
"Chauffez 2 cuillères à soupe d'huile dans une poêle\n"
"et faites-y fondre les oignons et les poivrons.\n"
"Lorsqu'ils sont tendres, ajoutez les tomates, l'ail haché, le thym et le laurier.\n"
"Salez, poivrez et laissez mijoter doucement à couvert durant 45 minutes.\n"
"Pendant ce temps, préparez les aubergines et les courgettes. "
"Faites les cuire séparemment ou non dans l'huile d'olive pendant 15 minutes.\n"
"Vérifiez la cuisson des légumes pour qu'ils ne soient plus fermes. "
"Ajoutez les alors au mélange de tomates et prolongez la cuisson sur tout petit feu pendant 10 min.\n"
"Salez et poivrez si besoin.",
self.harvester_class.instructions(),
)
def test_ratings(self):
self.assertEqual(4.8, self.harvester_class.ratings())
| true | true |
1c4564ec45018750930b24c198003bce822d9fed | 2,333 | py | Python | script.py | jornix/Stavanger-school-learning-results | 9974cac4ebb91ea51b0437f8b7750feac3049804 | [
"MIT"
] | null | null | null | script.py | jornix/Stavanger-school-learning-results | 9974cac4ebb91ea51b0437f8b7750feac3049804 | [
"MIT"
] | null | null | null | script.py | jornix/Stavanger-school-learning-results | 9974cac4ebb91ea51b0437f8b7750feac3049804 | [
"MIT"
] | null | null | null | import pandas as pd
from matplotlib import pyplot as plt
import seaborn as sns
# Read csv and store in dataframe df
df = pd.read_csv("results.csv")
df.drop(["index"], axis=1).reset_index(drop=True)
# Separate fifth grade tests
femte_trinn = df[
(df["statistikk"] == "Nasjonale prøver 5. trinn") & (pd.isna(df["verdi"]) == False)
].reset_index(drop=True)
# Separate the different tests for fifth grade (engelsk, lesing, regning)
femte_trinn_engelsk = femte_trinn[
(femte_trinn["indikator_delskar"] == "Engelsk")
& (femte_trinn["kjonn"] == "Begge kjønn")
].reset_index(drop=True)
femte_trinn_lesing = femte_trinn[
(femte_trinn["indikator_delskar"] == "Lesing")
& (femte_trinn["kjonn"] == "Begge kjønn")
].reset_index(drop=True)
femte_trinn_regning = femte_trinn[
(femte_trinn["indikator_delskar"] == "Regning")
& (femte_trinn["kjonn"] == "Begge kjønn")
].reset_index(drop=True)
# Set some seaborn estethic variables
sns.set_theme(style="ticks", color_codes=True)
sns.set_style("darkgrid")
sns.set_context("paper")
sns.set_palette("PiYG")
# calculate and print boxplots to files
fig, axes = plt.subplots(figsize=(10, 15))
fig.suptitle("Nasjonale prøver 5. trinn, spredning i resultater")
sns.boxplot(data=femte_trinn_engelsk, x="verdi", y="enhetsnavn", palette="RdYlBu")
sns.stripplot(
data=femte_trinn_engelsk,
x="verdi",
y="enhetsnavn",
palette="PRGn",
hue="periode",
)
axes.set_title("Engelsk")
plt.savefig("plots/boxplot_femte_trinn_engelsk.png")
fig, axes = plt.subplots(figsize=(10, 15))
fig.suptitle("Nasjonale prøver 5. trinn, spredning i resultater")
sns.boxplot(data=femte_trinn_lesing, x="verdi", y="enhetsnavn", palette="RdYlBu")
sns.stripplot(
data=femte_trinn_lesing,
x="verdi",
y="enhetsnavn",
palette="PRGn",
hue="periode",
)
axes.set_title("Lesing")
plt.savefig("plots/boxplot_femte_trinn_lesing.png")
fig, axes = plt.subplots(figsize=(10, 15))
fig.suptitle("Nasjonale prøver 5. trinn, spredning i resultater")
sns.boxplot(data=femte_trinn_regning, x="verdi", y="enhetsnavn", palette="RdYlBu")
sns.stripplot(
data=femte_trinn_regning,
x="verdi",
y="enhetsnavn",
palette="PRGn",
hue="periode",
)
axes.set_title("Regning")
plt.savefig("plots/boxplot_femte_trinn_regning.png")
# sns.despine(offset=10, trim=True)
plt.show()
| 29.910256 | 87 | 0.717531 | import pandas as pd
from matplotlib import pyplot as plt
import seaborn as sns
df = pd.read_csv("results.csv")
df.drop(["index"], axis=1).reset_index(drop=True)
femte_trinn = df[
(df["statistikk"] == "Nasjonale prøver 5. trinn") & (pd.isna(df["verdi"]) == False)
].reset_index(drop=True)
femte_trinn_engelsk = femte_trinn[
(femte_trinn["indikator_delskar"] == "Engelsk")
& (femte_trinn["kjonn"] == "Begge kjønn")
].reset_index(drop=True)
femte_trinn_lesing = femte_trinn[
(femte_trinn["indikator_delskar"] == "Lesing")
& (femte_trinn["kjonn"] == "Begge kjønn")
].reset_index(drop=True)
femte_trinn_regning = femte_trinn[
(femte_trinn["indikator_delskar"] == "Regning")
& (femte_trinn["kjonn"] == "Begge kjønn")
].reset_index(drop=True)
sns.set_theme(style="ticks", color_codes=True)
sns.set_style("darkgrid")
sns.set_context("paper")
sns.set_palette("PiYG")
fig, axes = plt.subplots(figsize=(10, 15))
fig.suptitle("Nasjonale prøver 5. trinn, spredning i resultater")
sns.boxplot(data=femte_trinn_engelsk, x="verdi", y="enhetsnavn", palette="RdYlBu")
sns.stripplot(
data=femte_trinn_engelsk,
x="verdi",
y="enhetsnavn",
palette="PRGn",
hue="periode",
)
axes.set_title("Engelsk")
plt.savefig("plots/boxplot_femte_trinn_engelsk.png")
fig, axes = plt.subplots(figsize=(10, 15))
fig.suptitle("Nasjonale prøver 5. trinn, spredning i resultater")
sns.boxplot(data=femte_trinn_lesing, x="verdi", y="enhetsnavn", palette="RdYlBu")
sns.stripplot(
data=femte_trinn_lesing,
x="verdi",
y="enhetsnavn",
palette="PRGn",
hue="periode",
)
axes.set_title("Lesing")
plt.savefig("plots/boxplot_femte_trinn_lesing.png")
fig, axes = plt.subplots(figsize=(10, 15))
fig.suptitle("Nasjonale prøver 5. trinn, spredning i resultater")
sns.boxplot(data=femte_trinn_regning, x="verdi", y="enhetsnavn", palette="RdYlBu")
sns.stripplot(
data=femte_trinn_regning,
x="verdi",
y="enhetsnavn",
palette="PRGn",
hue="periode",
)
axes.set_title("Regning")
plt.savefig("plots/boxplot_femte_trinn_regning.png")
plt.show()
| true | true |
1c4565b7ff36d1855854ec42200d3c17a32a5c55 | 1,561 | py | Python | apps/wagtail/myblog/migrations/0002_auto_20210425_1951.py | aadrm/breakoutwagtail | cf4ce09153adf2b5e14f15ffbc82bda754d427b3 | [
"MIT"
] | null | null | null | apps/wagtail/myblog/migrations/0002_auto_20210425_1951.py | aadrm/breakoutwagtail | cf4ce09153adf2b5e14f15ffbc82bda754d427b3 | [
"MIT"
] | null | null | null | apps/wagtail/myblog/migrations/0002_auto_20210425_1951.py | aadrm/breakoutwagtail | cf4ce09153adf2b5e14f15ffbc82bda754d427b3 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.4 on 2021-04-25 19:51
import apps.wagtail.streams.blocks
from django.db import migrations
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.images.blocks
class Migration(migrations.Migration):
dependencies = [
('myblog', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='blogpage',
name='body_de',
field=wagtail.core.fields.StreamField([('paragraph', wagtail.core.blocks.RichTextBlock()), ('image_text', wagtail.core.blocks.StructBlock([('reverse', wagtail.core.blocks.BooleanBlock(required=False)), ('text', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock())])), ('mymaps', apps.wagtail.streams.blocks.MyMapsBlock()), ('table', apps.wagtail.streams.blocks.CustomTableBlock()), ('spacer', apps.wagtail.streams.blocks.SpacerBlock())], blank=True, null=True),
),
migrations.AddField(
model_name='blogpage',
name='body_en',
field=wagtail.core.fields.StreamField([('paragraph', wagtail.core.blocks.RichTextBlock()), ('image_text', wagtail.core.blocks.StructBlock([('reverse', wagtail.core.blocks.BooleanBlock(required=False)), ('text', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock())])), ('mymaps', apps.wagtail.streams.blocks.MyMapsBlock()), ('table', apps.wagtail.streams.blocks.CustomTableBlock()), ('spacer', apps.wagtail.streams.blocks.SpacerBlock())], blank=True, null=True),
),
]
| 55.75 | 511 | 0.695708 |
import apps.wagtail.streams.blocks
from django.db import migrations
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.images.blocks
class Migration(migrations.Migration):
dependencies = [
('myblog', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='blogpage',
name='body_de',
field=wagtail.core.fields.StreamField([('paragraph', wagtail.core.blocks.RichTextBlock()), ('image_text', wagtail.core.blocks.StructBlock([('reverse', wagtail.core.blocks.BooleanBlock(required=False)), ('text', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock())])), ('mymaps', apps.wagtail.streams.blocks.MyMapsBlock()), ('table', apps.wagtail.streams.blocks.CustomTableBlock()), ('spacer', apps.wagtail.streams.blocks.SpacerBlock())], blank=True, null=True),
),
migrations.AddField(
model_name='blogpage',
name='body_en',
field=wagtail.core.fields.StreamField([('paragraph', wagtail.core.blocks.RichTextBlock()), ('image_text', wagtail.core.blocks.StructBlock([('reverse', wagtail.core.blocks.BooleanBlock(required=False)), ('text', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock())])), ('mymaps', apps.wagtail.streams.blocks.MyMapsBlock()), ('table', apps.wagtail.streams.blocks.CustomTableBlock()), ('spacer', apps.wagtail.streams.blocks.SpacerBlock())], blank=True, null=True),
),
]
| true | true |
1c4565b95bce497665b69de99a26e49f66b582e5 | 416 | py | Python | simple-backend/stave_backend/migrations/0002_document_ontology.py | mylibrar/stave | 43145015253d0577dfc757419ad8b4fa06a04042 | [
"Apache-2.0"
] | 35 | 2020-01-29T04:21:10.000Z | 2021-12-13T01:44:28.000Z | simple-backend/stave_backend/migrations/0002_document_ontology.py | mylibrar/stave | 43145015253d0577dfc757419ad8b4fa06a04042 | [
"Apache-2.0"
] | 86 | 2020-04-17T16:36:13.000Z | 2022-03-25T22:51:34.000Z | simple-backend/stave_backend/migrations/0002_document_ontology.py | mylibrar/stave | 43145015253d0577dfc757419ad8b4fa06a04042 | [
"Apache-2.0"
] | 18 | 2020-02-04T17:40:02.000Z | 2021-06-17T07:11:42.000Z | # Generated by Django 3.0.2 on 2020-01-13 18:22
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('stave_backend', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='document',
name='ontology',
field=models.TextField(default=''),
preserve_default=False,
),
]
| 20.8 | 47 | 0.586538 |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('stave_backend', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='document',
name='ontology',
field=models.TextField(default=''),
preserve_default=False,
),
]
| true | true |
1c4565d7e382b201d1238aef00abd33b71ffbf9f | 3,402 | py | Python | src/tests/config_test.py | dixonwhitmire/lib-kafka | a29ec606278ad1ed8c5ada418e45593fe71dbaec | [
"Apache-2.0"
] | null | null | null | src/tests/config_test.py | dixonwhitmire/lib-kafka | a29ec606278ad1ed8c5ada418e45593fe71dbaec | [
"Apache-2.0"
] | 6 | 2021-11-03T15:17:29.000Z | 2021-11-05T14:10:56.000Z | src/tests/config_test.py | dixonwhitmire/lib-kafka | a29ec606278ad1ed8c5ada418e45593fe71dbaec | [
"Apache-2.0"
] | 1 | 2021-11-01T12:21:11.000Z | 2021-11-01T12:21:11.000Z | import os
import pytest
from pydantic import ValidationError
from lib_kafka import config as configuration
import importlib
from tests import resources_directory
@pytest.fixture(autouse=True)
def reset():
    """Autouse fixture: clear the Kafka config env vars before every test."""
    reset_env_vars()
def reset_env_vars():
    """Remove the Kafka config env vars so each test starts from a clean slate."""
    for env_var in ("KAFKA_BROKER_CONFIG_FILE", "KAFKA_TOPIC_CONFIG_FILE"):
        # pop() with a default is a no-op when the variable is absent,
        # replacing the original `if env_var in os.environ: del` dance.
        os.environ.pop(env_var, None)
def get_sample_config_path(file_name):
    """Return the path of *file_name* inside the test resources directory."""
    return os.path.join(resources_directory, file_name)
def test_kafka_settings_success_object():
    """KafkaSettings builds successfully when every required field is supplied."""
    valid_kwargs = {
        'bootstrap_servers': 'test_server',
        'group_id': 'test_group_id',
        'security_protocol': 'test_protocol',
        'enable_auto_commit': False,
        'ssl_ca_location': 'test-location',
    }
    assert configuration.KafkaSettings(**valid_kwargs) is not None
def test_kafka_settings_success_env_file():
    """KafkaSettings loads its values from the file named by KAFKA_BROKER_CONFIG_FILE."""
    os.environ["KAFKA_BROKER_CONFIG_FILE"] = get_sample_config_path('kafka.env')
    # Reload so the module-level settings machinery picks up the new env var.
    importlib.reload(configuration)
    settings = configuration.KafkaSettings()
    assert settings is not None
    settings_dict = settings.dict(by_alias=True)
    expected = {
        'bootstrap.servers': 'localhost:9093',
        'group.id': 'kafka-listener',
        'security.protocol': 'PLAINTEXT',
        'ssl.ca.location': '/var/app/certs/kafka/tls.crt',
    }
    for key, value in expected.items():
        assert key in settings_dict.keys() and settings_dict[key] == value
    # Auto-commit is disabled in the sample env file.
    assert 'enable.auto.commit' in settings_dict.keys() and not settings_dict['enable.auto.commit']
def test_kafka_settings_failures():
    """KafkaSettings raises ValidationError for missing or mistyped fields.

    The original repeated the `with pytest.raises` boilerplate three times and
    bound each constructor result to an unused `test_object`; this version is
    data-driven and drops the dead bindings.
    """
    invalid_kwargs_cases = [
        # enable_auto_commit and ssl_ca_location missing
        dict(bootstrap_servers='test_server',
             group_id='test_group_id',
             security_protocol='test_protocol'),
        # ssl_ca_location missing
        dict(bootstrap_servers='test_server',
             group_id='test_group_id',
             security_protocol='test_protocol',
             enable_auto_commit=False),
        # group_id has the wrong type
        dict(bootstrap_servers='test_server',
             group_id=32,
             security_protocol='test_protocol',
             enable_auto_commit=False,
             ssl_ca_location='test-location'),
    ]
    for kwargs in invalid_kwargs_cases:
        with pytest.raises(ValidationError):
            configuration.KafkaSettings(**kwargs)
def test_kafka_topics_success_parse():
    """The sample kafka-topic.json parses into a KafkaTopics with three entries."""
    parsed = configuration.KafkaTopics.parse_file(get_sample_config_path('kafka-topic.json'))
    assert parsed is not None
    root_entries = parsed.dict()['__root__']
    assert len(root_entries) == 3
def test_kafka_topic_success_object():
    """A fully specified KafkaTopic is constructed and keeps its name."""
    topic_attrs = dict(
        name='test-topic',
        replication_factor=2,
        partitions=2,
        recreate_topic=False,
        operation=configuration.OperationEnum.create,
    )
    topic = configuration.KafkaTopic(**topic_attrs)
    assert topic is not None
    assert topic.name == 'test-topic'
def test_kafka_topic_failure_object():
    """A non-integer replication_factor is rejected.

    The original bound the constructor result to an unused `topic` variable;
    the call is made directly inside the raises context instead.
    NOTE(review): pydantic models normally raise ValidationError on bad input;
    confirm that KafkaTopic really surfaces a KeyError here.
    """
    with pytest.raises(KeyError):
        configuration.KafkaTopic(
            name='test-topic',
            replication_factor='non-int',
            partitions=2,
            recreate_topic=False,
            operation=configuration.OperationEnum.create
        )
| 32.4 | 113 | 0.683422 | import os
import pytest
from pydantic import ValidationError
from lib_kafka import config as configuration
import importlib
from tests import resources_directory
@pytest.fixture(autouse=True)
def reset():
reset_env_vars()
def reset_env_vars():
for env_var in ("KAFKA_BROKER_CONFIG_FILE", "KAFKA_TOPIC_CONFIG_FILE"):
if env_var in os.environ:
del os.environ[env_var]
def get_sample_config_path(file_name):
return os.path.join(resources_directory, file_name)
def test_kafka_settings_success_object():
settings = configuration.KafkaSettings(
bootstrap_servers='test_server',
group_id='test_group_id',
security_protocol='test_protocol',
enable_auto_commit=False,
ssl_ca_location='test-location'
)
assert settings is not None
def test_kafka_settings_success_env_file():
os.environ["KAFKA_BROKER_CONFIG_FILE"] = get_sample_config_path('kafka.env')
importlib.reload(configuration)
settings = configuration.KafkaSettings()
assert settings is not None
settings_dict = settings.dict(by_alias=True)
assert 'bootstrap.servers' in settings_dict.keys() and settings_dict['bootstrap.servers'] == 'localhost:9093'
assert 'group.id' in settings_dict.keys() and settings_dict['group.id'] == 'kafka-listener'
assert 'security.protocol' in settings_dict.keys() and settings_dict['security.protocol'] == 'PLAINTEXT'
assert 'ssl.ca.location' in settings_dict.keys() and settings_dict[
'ssl.ca.location'] == '/var/app/certs/kafka/tls.crt'
assert 'enable.auto.commit' in settings_dict.keys() and not settings_dict['enable.auto.commit']
def test_kafka_settings_failures():
with pytest.raises(ValidationError):
test_object = configuration.KafkaSettings(
bootstrap_servers='test_server',
group_id='test_group_id',
security_protocol='test_protocol',
)
with pytest.raises(ValidationError):
test_object = configuration.KafkaSettings(
bootstrap_servers='test_server',
group_id='test_group_id',
security_protocol='test_protocol',
enable_auto_commit=False
)
with pytest.raises(ValidationError):
test_object = configuration.KafkaSettings(
bootstrap_servers='test_server',
group_id=32,
security_protocol='test_protocol',
enable_auto_commit=False,
ssl_ca_location='test-location'
)
def test_kafka_topics_success_parse():
topics = configuration.KafkaTopics.parse_file(get_sample_config_path('kafka-topic.json'))
assert topics is not None
assert len(topics.dict()['__root__']) == 3
def test_kafka_topic_success_object():
topic = configuration.KafkaTopic(
name='test-topic',
replication_factor=2,
partitions=2,
recreate_topic=False,
operation=configuration.OperationEnum.create
)
assert topic is not None
assert topic.name == 'test-topic'
def test_kafka_topic_failure_object():
    """Constructing a KafkaTopic with a non-integer replication_factor raises KeyError."""
    # the constructor result was previously bound to an unused `topic` local
    # (flake8 F841); only the raised exception matters here
    with pytest.raises(KeyError):
        configuration.KafkaTopic(
            name='test-topic',
            replication_factor='non-int',
            partitions=2,
            recreate_topic=False,
            operation=configuration.OperationEnum.create
        )
| true | true |
1c4565e0dbfa4053de453a8497b1789732532024 | 1,386 | py | Python | beorn_lib/version.py | PAntoine/beorn_lib | a5bb8859acfb136f33559b6ddbf3bb20f61bd310 | [
"MIT"
] | null | null | null | beorn_lib/version.py | PAntoine/beorn_lib | a5bb8859acfb136f33559b6ddbf3bb20f61bd310 | [
"MIT"
] | null | null | null | beorn_lib/version.py | PAntoine/beorn_lib | a5bb8859acfb136f33559b6ddbf3bb20f61bd310 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#---------------------------------------------------------------------------------
#
# ,--.
# | |-. ,---. ,---. ,--.--.,--,--,
# | .-. '| .-. :| .-. || .--'| \
# | `-' |\ --.' '-' '| | | || |
# `---' `----' `---' `--' `--''--'
#
# file: version
# desc: This file contains the versions of the library.
#
# author: Peter Antoine
# date: 23/05/2015
#---------------------------------------------------------------------------------
# Copyright (c) 2015 Peter Antoine
# All rights Reserved.
# Released Under the MIT Licence
#---------------------------------------------------------------------------------
#---------------------------------------------------------------------------------
# Python Standard Header
#---------------------------------------------------------------------------------
# Package metadata, exposed as the conventional module-level dunder attributes.
__author__ = "Peter Antoine"
__copyright__ = "Copyright 2014-2021, Peter Antoine"
__credits__ = ["Peter Antoine"]
__license__ = "MIT"
__version__ = "1.5.0"
__maintainer__ = "Peter Antoine"
__email__ = "github@peterantoine.me.uk"
__url__ = "https://github.com/PAntoine/BeornLib"
__status__ = "Development"
# vim: ts=4 sw=4 noexpandtab nocin ai
| 37.459459 | 82 | 0.328283 |
# `---' `----' `---' `--' `--''--'
__author__ = "Peter Antoine"
__copyright__ = "Copyright 2014-2021, Peter Antoine"
__credits__ = ["Peter Antoine"]
__license__ = "MIT"
__version__ = "1.5.0"
__maintainer__ = "Peter Antoine"
__email__ = "github@peterantoine.me.uk"
__url__ = "https://github.com/PAntoine/BeornLib"
__status__ = "Development"
| true | true |
1c456664268f1c7c8973969844fb625e7b99c69c | 6,122 | py | Python | dmwmclient/restclient.py | FernandoGarzon/dmwmclient | aa69978b430f6626864718ddfa21951f2bb41d06 | [
"BSD-3-Clause"
] | 1 | 2022-02-01T16:06:15.000Z | 2022-02-01T16:06:15.000Z | dmwmclient/restclient.py | FernandoGarzon/dmwmclient | aa69978b430f6626864718ddfa21951f2bb41d06 | [
"BSD-3-Clause"
] | 3 | 2020-03-04T23:34:39.000Z | 2020-04-03T22:19:44.000Z | dmwmclient/restclient.py | nsmith-/dmwmclient | 726a9400f5270e0521dc9d2c1bffed3c0af410a2 | [
"BSD-3-Clause"
] | 4 | 2020-03-17T15:39:10.000Z | 2021-06-10T22:51:11.000Z | import os
import logging
import json
import httpx
import asyncio
from lxml import etree
from . import __version__
logger = logging.getLogger(__name__)
def locate_proxycert():
    """Return the path of a user grid proxy, or None when none can be found.

    The X509_USER_PROXY environment variable wins; otherwise the
    conventional /tmp/x509up_u<uid> location is probed.
    """
    env_path = os.getenv("X509_USER_PROXY")
    if env_path is not None:
        return env_path
    candidate = "/tmp/x509up_u%d" % os.getuid()
    return candidate if os.path.exists(candidate) else None
def _defaultcert():
"""Find a suitable user certificate from the usual locations
Preference is given to original user certificate over a proxy
as this is necessary for use with CERN SSO.
"""
path = (
os.path.expanduser("~/.globus/usercert.pem"),
os.path.expanduser("~/.globus/userkey.pem"),
)
if os.path.exists(path[0]) and os.path.exists(path[1]):
return path
path = locate_proxycert()
if path is not None:
return path
raise RuntimeError("Could not identify an appropriate default user certificate")
class RESTClient:
    """
    Asynchronous HTTP client that authenticates with a user x509 certificate
    and transparently performs the CERN SSO sign-in dance when a request is
    redirected to login.cern.ch.
    """

    defaults = {
        # Location of user x509 certificate, key pair
        "usercert": _defaultcert(),
        # Location of trusted x509 certificates
        "certdir": os.getenv("X509_CERT_DIR", "/etc/grid-security/certificates"),
    }

    def __init__(self, usercert=None, certdir=None):
        """
        :param usercert: proxy path or (cert, key) tuple; defaults to the
            certificate located at class-definition time
        :param certdir: directory of trusted CA certificates
        """
        if usercert is None:
            usercert = RESTClient.defaults["usercert"]
        if certdir is None:
            certdir = RESTClient.defaults["certdir"]
        certdir = os.path.expanduser(certdir)
        # per-host asyncio.Event used to serialize concurrent SSO sign-ins
        self._ssoevents = {}
        self._client = httpx.AsyncClient(
            cert=usercert,
            verify=certdir,
            timeout=httpx.Timeout(10.0, read_timeout=30.0),
            headers=httpx.Headers({"User-Agent": f"python-dmwmclient/{__version__}"}),
        )

    async def cern_sso_check(self, host):
        """Check if this host already has an SSO action in progress, and wait for it"""
        try:
            await self._ssoevents[host].wait()
            return True
        except KeyError:
            pass
        return False

    async def cern_sso_follow(self, result, host):
        """Follow CERN SSO redirect, returning the result of the original request"""
        html = etree.HTML(result.content)
        links = [
            link
            for link in html.xpath("//a")
            if link.text == "Sign in using your CERN Certificate"
        ]
        if len(links) == 1:
            # first-time sign-in: follow the certificate link, then post the
            # auto-submit form back to the original host
            link = links.pop()
            logger.debug("Running first-time CERN SSO sign-in routine")
            self._ssoevents[host] = asyncio.Event()
            url = result.url.join(link.attrib["href"])
            result = await self._client.get(url)
            if not result.status_code == 200:
                logger.debug("Return content:\n" + result.text)
                raise IOError(
                    "HTTP status code %d received while following SSO link to %r"
                    % (result.status_code, url)
                )
            html = etree.HTML(result.content)
            url = result.url.join(html.xpath("body/form")[0].attrib["action"])
            data = {
                el.attrib["name"]: el.attrib["value"]
                for el in html.xpath("body/form/input")
            }
            result = await self._client.post(url, data=data)
            if not result.status_code == 200:
                logger.debug("Return content:\n" + result.text)
                raise IOError(
                    "HTTP status code %d received while posting to SSO link %r"
                    % (result.status_code, url)
                )
            logger.debug(
                "Received SSO cookie for %s: %r"
                % (host, dict(result.history[0].cookies))
            )
            # release any coroutines waiting in cern_sso_check()
            self._ssoevents[host].set()
            del self._ssoevents[host]
            return result
        form = html.xpath("body/form")
        if len(form) == 1:
            # session already established elsewhere: just submit the redirect form
            logger.debug("Following CERN SSO redirect")
            url = result.url.join(form[0].attrib["action"])
            data = {
                el.attrib["name"]: el.attrib["value"]
                for el in html.xpath("body/form/input")
            }
            result = await self._client.post(url, data=data)
            logger.debug(
                "Received SSO cookie for %s: %r"
                % (host, dict(result.history[0].cookies))
            )
            return result
        logger.debug("Invalid SSO login page content:\n" + result.text)
        raise RuntimeError(
            "Could not parse CERN SSO login page (no sign-in link or auto-redirect found)"
        )

    def build_request(self, **params):
        """Build an httpx request from keyword parameters (method, url, params, ...)."""
        return self._client.build_request(**params)

    async def send(self, request, timeout=None, retries=1):
        """
        Send a prepared request, transparently handling CERN SSO sign-in.

        :param request: request built via `build_request`
        :param timeout: per-attempt timeout handed to httpx
        :param retries: number of attempts before giving up
        :raises IOError: when every attempt timed out
        """
        await self.cern_sso_check(request.url.host)
        # Looking forward to https://github.com/encode/httpx/pull/784
        attempts = retries  # keep the original budget for the error message
        while retries > 0:
            try:
                result = await self._client.send(request, timeout=timeout)
                if result.status_code == 200 and result.url.host == "login.cern.ch":
                    # another coroutine may have completed the sign-in meanwhile
                    if await self.cern_sso_check(request.url.host):
                        self._client.cookies.set_cookie_header(request)
                        continue
                    result = await self.cern_sso_follow(result, request.url.host)
                return result
            except httpx.TimeoutException:
                # use the module logger (was the root logger via logging.warning)
                logger.warning(
                    "Timeout encountered while executing request %r" % request
                )
                retries -= 1
        # previously this interpolated the decremented counter (always 0);
        # report the actual number of attempts instead
        raise IOError(
            "Exhausted %d retries while executing request %r" % (attempts, request)
        )

    async def getjson(self, url, params=None, timeout=None, retries=1):
        """GET `url` and decode the response body as JSON."""
        request = self.build_request(method="GET", url=url, params=params)
        result = await self.send(request, timeout=timeout, retries=retries)
        try:
            return result.json()
        except json.JSONDecodeError:
            # the f-prefix was missing here, logging the literal placeholder text
            logger.debug(f"Result content: {result.text}")
            raise IOError(f"Failed to decode json for request {request}")
| 37.329268 | 90 | 0.571219 | import os
import logging
import json
import httpx
import asyncio
from lxml import etree
from . import __version__
logger = logging.getLogger(__name__)
def locate_proxycert():
path = os.getenv("X509_USER_PROXY")
if path is not None:
return path
path = "/tmp/x509up_u%d" % os.getuid()
if os.path.exists(path):
return path
return None
def _defaultcert():
path = (
os.path.expanduser("~/.globus/usercert.pem"),
os.path.expanduser("~/.globus/userkey.pem"),
)
if os.path.exists(path[0]) and os.path.exists(path[1]):
return path
path = locate_proxycert()
if path is not None:
return path
raise RuntimeError("Could not identify an appropriate default user certificate")
class RESTClient:
defaults = {
"usercert": _defaultcert(),
"certdir": os.getenv("X509_CERT_DIR", "/etc/grid-security/certificates"),
}
def __init__(self, usercert=None, certdir=None):
if usercert is None:
usercert = RESTClient.defaults["usercert"]
if certdir is None:
certdir = RESTClient.defaults["certdir"]
certdir = os.path.expanduser(certdir)
self._ssoevents = {}
self._client = httpx.AsyncClient(
cert=usercert,
verify=certdir,
timeout=httpx.Timeout(10.0, read_timeout=30.0),
headers=httpx.Headers({"User-Agent": f"python-dmwmclient/{__version__}"}),
)
async def cern_sso_check(self, host):
try:
await self._ssoevents[host].wait()
return True
except KeyError:
pass
return False
async def cern_sso_follow(self, result, host):
html = etree.HTML(result.content)
links = [
link
for link in html.xpath("//a")
if link.text == "Sign in using your CERN Certificate"
]
if len(links) == 1:
link = links.pop()
logger.debug("Running first-time CERN SSO sign-in routine")
self._ssoevents[host] = asyncio.Event()
url = result.url.join(link.attrib["href"])
result = await self._client.get(url)
if not result.status_code == 200:
logger.debug("Return content:\n" + result.text)
raise IOError(
"HTTP status code %d received while following SSO link to %r"
% (result.status_code, url)
)
html = etree.HTML(result.content)
url = result.url.join(html.xpath("body/form")[0].attrib["action"])
data = {
el.attrib["name"]: el.attrib["value"]
for el in html.xpath("body/form/input")
}
result = await self._client.post(url, data=data)
if not result.status_code == 200:
logger.debug("Return content:\n" + result.text)
raise IOError(
"HTTP status code %d received while posting to SSO link %r"
% (result.status_code, url)
)
logger.debug(
"Received SSO cookie for %s: %r"
% (host, dict(result.history[0].cookies))
)
self._ssoevents[host].set()
del self._ssoevents[host]
return result
form = html.xpath("body/form")
if len(form) == 1:
logger.debug("Following CERN SSO redirect")
url = result.url.join(form[0].attrib["action"])
data = {
el.attrib["name"]: el.attrib["value"]
for el in html.xpath("body/form/input")
}
result = await self._client.post(url, data=data)
logger.debug(
"Received SSO cookie for %s: %r"
% (host, dict(result.history[0].cookies))
)
return result
logger.debug("Invalid SSO login page content:\n" + result.text)
raise RuntimeError(
"Could not parse CERN SSO login page (no sign-in link or auto-redirect found)"
)
def build_request(self, **params):
return self._client.build_request(**params)
async def send(self, request, timeout=None, retries=1):
await self.cern_sso_check(request.url.host)
while retries > 0:
try:
result = await self._client.send(request, timeout=timeout)
if result.status_code == 200 and result.url.host == "login.cern.ch":
if await self.cern_sso_check(request.url.host):
self._client.cookies.set_cookie_header(request)
continue
result = await self.cern_sso_follow(result, request.url.host)
return result
except httpx.TimeoutException:
logging.warning(
"Timeout encountered while executing request %r" % request
)
retries -= 1
raise IOError(
"Exhausted %d retries while executing request %r" % (retries, request)
)
async def getjson(self, url, params=None, timeout=None, retries=1):
request = self.build_request(method="GET", url=url, params=params)
result = await self.send(request, timeout=timeout, retries=retries)
try:
return result.json()
except json.JSONDecodeError:
logging.debug("Result content: {result.text}")
raise IOError(f"Failed to decode json for request {request}")
| true | true |
1c4566b1e1aee1b8f0aa8bfb88df64b92879f477 | 7,636 | py | Python | sdk/python/pulumi_azure_native/network/v20180301/get_profile.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/network/v20180301/get_profile.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/network/v20180301/get_profile.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetProfileResult',
'AwaitableGetProfileResult',
'get_profile',
]
@pulumi.output_type
class GetProfileResult:
    """
    Class representing a Traffic Manager profile.

    Auto-generated by the Pulumi SDK Generator (see the file header).
    Each constructor argument is type-checked only when truthy (falsy
    values such as None skip validation) and stored via ``pulumi.set``
    for retrieval through the matching ``@property`` getters below.
    """
    def __init__(__self__, dns_config=None, endpoints=None, id=None, location=None, monitor_config=None, name=None, profile_status=None, tags=None, traffic_routing_method=None, traffic_view_enrollment_status=None, type=None):
        if dns_config and not isinstance(dns_config, dict):
            raise TypeError("Expected argument 'dns_config' to be a dict")
        pulumi.set(__self__, "dns_config", dns_config)
        if endpoints and not isinstance(endpoints, list):
            raise TypeError("Expected argument 'endpoints' to be a list")
        pulumi.set(__self__, "endpoints", endpoints)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if location and not isinstance(location, str):
            raise TypeError("Expected argument 'location' to be a str")
        pulumi.set(__self__, "location", location)
        if monitor_config and not isinstance(monitor_config, dict):
            raise TypeError("Expected argument 'monitor_config' to be a dict")
        pulumi.set(__self__, "monitor_config", monitor_config)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if profile_status and not isinstance(profile_status, str):
            raise TypeError("Expected argument 'profile_status' to be a str")
        pulumi.set(__self__, "profile_status", profile_status)
        if tags and not isinstance(tags, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", tags)
        if traffic_routing_method and not isinstance(traffic_routing_method, str):
            raise TypeError("Expected argument 'traffic_routing_method' to be a str")
        pulumi.set(__self__, "traffic_routing_method", traffic_routing_method)
        if traffic_view_enrollment_status and not isinstance(traffic_view_enrollment_status, str):
            raise TypeError("Expected argument 'traffic_view_enrollment_status' to be a str")
        pulumi.set(__self__, "traffic_view_enrollment_status", traffic_view_enrollment_status)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter(name="dnsConfig")
    def dns_config(self) -> Optional['outputs.DnsConfigResponse']:
        """
        The DNS settings of the Traffic Manager profile.
        """
        return pulumi.get(self, "dns_config")

    @property
    @pulumi.getter
    def endpoints(self) -> Optional[Sequence['outputs.EndpointResponse']]:
        """
        The list of endpoints in the Traffic Manager profile.
        """
        return pulumi.get(self, "endpoints")

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """
        Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/trafficManagerProfiles/{resourceName}
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def location(self) -> Optional[str]:
        """
        The Azure Region where the resource lives
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter(name="monitorConfig")
    def monitor_config(self) -> Optional['outputs.MonitorConfigResponse']:
        """
        The endpoint monitoring settings of the Traffic Manager profile.
        """
        return pulumi.get(self, "monitor_config")

    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """
        The name of the resource
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="profileStatus")
    def profile_status(self) -> Optional[str]:
        """
        The status of the Traffic Manager profile.
        """
        return pulumi.get(self, "profile_status")

    @property
    @pulumi.getter
    def tags(self) -> Optional[Mapping[str, str]]:
        """
        Resource tags.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter(name="trafficRoutingMethod")
    def traffic_routing_method(self) -> Optional[str]:
        """
        The traffic routing method of the Traffic Manager profile.
        """
        return pulumi.get(self, "traffic_routing_method")

    @property
    @pulumi.getter(name="trafficViewEnrollmentStatus")
    def traffic_view_enrollment_status(self) -> Optional[str]:
        """
        Indicates whether Traffic View is 'Enabled' or 'Disabled' for the Traffic Manager profile. Null, indicates 'Disabled'. Enabling this feature will increase the cost of the Traffic Manage profile.
        """
        return pulumi.get(self, "traffic_view_enrollment_status")

    @property
    @pulumi.getter
    def type(self) -> Optional[str]:
        """
        The type of the resource. Ex- Microsoft.Network/trafficManagerProfiles.
        """
        return pulumi.get(self, "type")
class AwaitableGetProfileResult(GetProfileResult):
    """Awaitable wrapper so the (already resolved) result can be used with `await`."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` makes this method a generator, which is what
        # `await` requires; it then immediately returns the resolved result.
        if False:
            yield self
        return GetProfileResult(
            dns_config=self.dns_config,
            endpoints=self.endpoints,
            id=self.id,
            location=self.location,
            monitor_config=self.monitor_config,
            name=self.name,
            profile_status=self.profile_status,
            tags=self.tags,
            traffic_routing_method=self.traffic_routing_method,
            traffic_view_enrollment_status=self.traffic_view_enrollment_status,
            type=self.type)
def get_profile(profile_name: Optional[str] = None,
                resource_group_name: Optional[str] = None,
                opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetProfileResult:
    """
    Fetch a Traffic Manager profile.

    :param str profile_name: The name of the Traffic Manager profile.
    :param str resource_group_name: The name of the resource group containing the Traffic Manager profile.
    """
    __args__ = {
        'profileName': profile_name,
        'resourceGroupName': resource_group_name,
    }
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-native:network/v20180301:getProfile', __args__, opts=opts, typ=GetProfileResult).value

    # forward every result field to the awaitable wrapper by name
    _fields = ('dns_config', 'endpoints', 'id', 'location', 'monitor_config',
               'name', 'profile_status', 'tags', 'traffic_routing_method',
               'traffic_view_enrollment_status', 'type')
    return AwaitableGetProfileResult(**{field: getattr(__ret__, field) for field in _fields})
| 38.761421 | 225 | 0.667234 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetProfileResult',
'AwaitableGetProfileResult',
'get_profile',
]
@pulumi.output_type
class GetProfileResult:
def __init__(__self__, dns_config=None, endpoints=None, id=None, location=None, monitor_config=None, name=None, profile_status=None, tags=None, traffic_routing_method=None, traffic_view_enrollment_status=None, type=None):
if dns_config and not isinstance(dns_config, dict):
raise TypeError("Expected argument 'dns_config' to be a dict")
pulumi.set(__self__, "dns_config", dns_config)
if endpoints and not isinstance(endpoints, list):
raise TypeError("Expected argument 'endpoints' to be a list")
pulumi.set(__self__, "endpoints", endpoints)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if monitor_config and not isinstance(monitor_config, dict):
raise TypeError("Expected argument 'monitor_config' to be a dict")
pulumi.set(__self__, "monitor_config", monitor_config)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if profile_status and not isinstance(profile_status, str):
raise TypeError("Expected argument 'profile_status' to be a str")
pulumi.set(__self__, "profile_status", profile_status)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if traffic_routing_method and not isinstance(traffic_routing_method, str):
raise TypeError("Expected argument 'traffic_routing_method' to be a str")
pulumi.set(__self__, "traffic_routing_method", traffic_routing_method)
if traffic_view_enrollment_status and not isinstance(traffic_view_enrollment_status, str):
raise TypeError("Expected argument 'traffic_view_enrollment_status' to be a str")
pulumi.set(__self__, "traffic_view_enrollment_status", traffic_view_enrollment_status)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="dnsConfig")
def dns_config(self) -> Optional['outputs.DnsConfigResponse']:
return pulumi.get(self, "dns_config")
@property
@pulumi.getter
def endpoints(self) -> Optional[Sequence['outputs.EndpointResponse']]:
return pulumi.get(self, "endpoints")
@property
@pulumi.getter
def id(self) -> Optional[str]:
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> Optional[str]:
return pulumi.get(self, "location")
@property
@pulumi.getter(name="monitorConfig")
def monitor_config(self) -> Optional['outputs.MonitorConfigResponse']:
return pulumi.get(self, "monitor_config")
@property
@pulumi.getter
def name(self) -> Optional[str]:
return pulumi.get(self, "name")
@property
@pulumi.getter(name="profileStatus")
def profile_status(self) -> Optional[str]:
return pulumi.get(self, "profile_status")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="trafficRoutingMethod")
def traffic_routing_method(self) -> Optional[str]:
return pulumi.get(self, "traffic_routing_method")
@property
@pulumi.getter(name="trafficViewEnrollmentStatus")
def traffic_view_enrollment_status(self) -> Optional[str]:
return pulumi.get(self, "traffic_view_enrollment_status")
@property
@pulumi.getter
def type(self) -> Optional[str]:
return pulumi.get(self, "type")
class AwaitableGetProfileResult(GetProfileResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetProfileResult(
dns_config=self.dns_config,
endpoints=self.endpoints,
id=self.id,
location=self.location,
monitor_config=self.monitor_config,
name=self.name,
profile_status=self.profile_status,
tags=self.tags,
traffic_routing_method=self.traffic_routing_method,
traffic_view_enrollment_status=self.traffic_view_enrollment_status,
type=self.type)
def get_profile(profile_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetProfileResult:
__args__ = dict()
__args__['profileName'] = profile_name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:network/v20180301:getProfile', __args__, opts=opts, typ=GetProfileResult).value
return AwaitableGetProfileResult(
dns_config=__ret__.dns_config,
endpoints=__ret__.endpoints,
id=__ret__.id,
location=__ret__.location,
monitor_config=__ret__.monitor_config,
name=__ret__.name,
profile_status=__ret__.profile_status,
tags=__ret__.tags,
traffic_routing_method=__ret__.traffic_routing_method,
traffic_view_enrollment_status=__ret__.traffic_view_enrollment_status,
type=__ret__.type)
| true | true |
1c45678f9f69f1c3e03546028ce022e51687f2b5 | 48,532 | py | Python | hytra/core/hypothesesgraph.py | m-novikov/hytra | 0dc28deaa2571fa8bea63ca178f0e53cc1cd7508 | [
"MIT"
] | null | null | null | hytra/core/hypothesesgraph.py | m-novikov/hytra | 0dc28deaa2571fa8bea63ca178f0e53cc1cd7508 | [
"MIT"
] | null | null | null | hytra/core/hypothesesgraph.py | m-novikov/hytra | 0dc28deaa2571fa8bea63ca178f0e53cc1cd7508 | [
"MIT"
] | null | null | null | import logging
import copy
import networkx as nx
import numpy as np
from sklearn.neighbors import KDTree
import hytra.core.jsongraph
from hytra.core.jsongraph import negLog, listify
from hytra.util.progressbar import DefaultProgressVisitor
logger = logging.getLogger(__name__)
def getTraxelFeatureVector(traxel, featureName, maxNumDimensions=3):
    """
    Extract up to `maxNumDimensions` values of feature `featureName` from a traxel.

    :param traxel: object exposing `get_feature_value(name, index)`
    :param featureName: name of the feature to read
    :param maxNumDimensions: number of feature components to extract
    :return: list of feature values
    :raises Exception: if the feature cannot be read at all, or has fewer
        than `maxNumDimensions` components
    """
    result = []
    for i in range(maxNumDimensions):
        try:
            result.append(traxel.get_feature_value(str(featureName), i))
        # was a bare `except:` which would also swallow KeyboardInterrupt/SystemExit
        except Exception:
            if i == 0:
                logger.error(
                    f"Error when accessing feature {featureName}[{i}] for "
                    f"traxel (Id={traxel.Id},Timestep={traxel.Timestep})"
                )
                logger.error(traxel.print_available_features())
                raise Exception(
                    f"Could not read feature {featureName} of traxel "
                    f"(Id={traxel.Id},Timestep={traxel.Timestep})"
                )
            else:
                logger.error(
                    f"Error: Classifier was trained with less merger than maxNumObjects {maxNumDimensions}."
                )
                raise Exception(
                    f"Feature {featureName} has fewer than {maxNumDimensions} dimensions"
                )
    return result
class NodeMap:
    """
    Dict-like read access to one per-node attribute of a hypotheses graph,
    mimicking the interface of pgmlink's NodeMaps.
    """

    def __init__(self, graph: "nx.DiGraph", attributeName):
        assert isinstance(graph, nx.DiGraph), "Expecting the graph to be directed"
        self.__graph = graph
        self.__attributeName = attributeName

    def __getitem__(self, key):
        node_attributes = self.__graph.nodes[key]
        return node_attributes[self.__attributeName]
class HypothesesGraph:
"""
Replacement for pgmlink's hypotheses graph,
with a similar API so it can be used as drop-in replacement.
Internally it uses [networkx](http://networkx.github.io/) to construct the graph.
Use the insertEnergies() method to populate the nodes and arcs with the energies for different
configurations (according to DPCT's JSON style'), derived from given probability generation functions.
**Notes:** `self._graph.node`'s are indexed by tuples (int(timestep), int(id)), and contain either a
single `'traxel'` attribute, or a list of traxels in `'tracklet'`.
Nodes also get a unique ID assigned once they are added to the graph.
"""
    def __init__(self):
        # underlying directed networkx graph holding detections (nodes) and
        # possible transitions (edges)
        self._graph = nx.DiGraph()
        # True once nodes carry 'tracklet' lists instead of single 'traxel's
        # (see generateTrackletGraph)
        self.withTracklets = False
        self.allowLengthOneTracks = True
        # next unique id handed out to newly inserted nodes
        self._nextNodeUuid = 0
        self.progressVisitor = DefaultProgressVisitor()
    def nodeIterator(self):
        """Iterate over all (timestep, id) nodes of the graph."""
        return self._graph.nodes()
    def arcIterator(self):
        """Iterate over all (source, target) edges of the graph."""
        return self._graph.edges()
    def countNodes(self):
        """Return the number of nodes in the graph."""
        return self._graph.number_of_nodes()
    def countArcs(self):
        """Return the number of edges in the graph."""
        return self._graph.number_of_edges()
    def hasNode(self, node):
        """Return True if the given (timestep, id) node exists."""
        return self._graph.has_node(node)
    def hasEdge(self, u, v):
        """Return True if an edge from node u to node v exists."""
        return self._graph.has_edge(u, v)
    @staticmethod
    def source(edge):
        """Return the source node of a (source, target) edge tuple."""
        return edge[0]
    @staticmethod
    def target(edge):
        """Return the target node of a (source, target) edge tuple."""
        return edge[1]
    def _findNearestNeighbors(
        self, kdtreeObjectPair, traxel, numNeighbors, maxNeighborDist
    ):
        """
        Return a list of object IDs which are the 'numNeighbors' closest elements
        in the kdtree less than maxNeighborDist away of the traxel.
        """
        kdtree, objectIdList = kdtreeObjectPair
        # NOTE(review): shortcut — when the frame holds at most numNeighbors
        # objects, ALL of them are returned WITHOUT applying the
        # maxNeighborDist cut-off, unlike the query branch below. Confirm
        # whether this asymmetry is intended.
        if len(objectIdList) <= numNeighbors:
            return objectIdList
        distances, neighbors = kdtree.query(
            [self._extractCenter(traxel)], k=numNeighbors, return_distance=True
        )
        # keep only the neighbors within the distance threshold
        return [
            objectIdList[index]
            for distance, index in zip(distances[0], neighbors[0])
            if distance < maxNeighborDist
        ]
def _extractCenter(self, traxel):
try:
# python probabilityGenerator
if "com" in traxel.Features:
return traxel.Features["com"]
else:
return traxel.Features["RegionCenter"]
except:
# C++ pgmlink probabilityGenerator
try:
return getTraxelFeatureVector(traxel, "com")
except:
try:
return getTraxelFeatureVector(traxel, "RegionCenter")
except:
raise ValueError(
"given traxel (t={},id={}) does not have "
'"com" or "RegionCenter"'.format(traxel.Timestep, traxel.Id)
)
def _traxelMightDivide(self, traxel, divisionThreshold):
assert "divProb" in traxel.Features
return traxel.Features["divProb"][0] > divisionThreshold
def _buildFrameKdTree(self, traxelDict):
"""
Collect the centers of all traxels and their ids of this frame's traxels.
Then build a kdtree and return (kdtree, listOfObjectIdsInFrame), where the second argument
is needed to decode the object id of the nearest neighbors in _findNearestNeighbors().
"""
objectIdList = []
features = []
for obj, traxel in traxelDict.items():
if obj == 0:
continue
objectIdList.append(obj)
features.append(list(self._extractCenter(traxel)))
return (KDTree(features, metric="euclidean"), objectIdList)
def _addNodesForFrame(self, frame, traxelDict):
"""
Insert nodes for all objects in this frame, with the attribute "traxel"
"""
for obj, traxel in traxelDict.items():
if obj == 0:
continue
self._graph.add_node((frame, obj), traxel=traxel, id=self._nextNodeUuid)
self._nextNodeUuid += 1
def addNodeFromTraxel(self, traxel, **kwargs):
"""
Insert a single node specified by a traxel.
All keyword arguments are passed to the node as well.
"""
assert traxel is not None
assert not self.withTracklets
self._graph.add_node(
(traxel.Timestep, traxel.Id), traxel=traxel, id=self._nextNodeUuid, **kwargs
)
self._nextNodeUuid += 1
    def buildFromProbabilityGenerator(
        self,
        probabilityGenerator,
        maxNeighborDist=200,
        numNearestNeighbors=1,
        forwardBackwardCheck=True,
        withDivisions=True,
        divisionThreshold=0.1,
        skipLinks=1,
    ):
        """
        Takes a python probabilityGenerator containing traxel features and finds probable links between frames.
        Builds a kdTree with the 'numNearestneighbors' for each frame and adds the nodes. In the same iteration, it adds
        a number of 'skipLinks' between the nodes separated by 'skipLinks' frames.

        :param probabilityGenerator: provides `TraxelsPerFrame`, a dict
            {frame: {objectId: traxel}}; frames without objects have no key
        :param maxNeighborDist: maximum center distance for a candidate link
        :param numNearestNeighbors: neighbors queried per object (raised to 2
            for possibly dividing objects when `withDivisions` is set)
        :param forwardBackwardCheck: when True, also search links backwards in
            time so every object can find a predecessor
        :param withDivisions: whether to treat high-'divProb' objects as
            division candidates
        :param divisionThreshold: minimum 'divProb' for an object to count as
            possibly dividing
        :param skipLinks: maximum temporal gap (in frames) bridged by links
        """
        assert probabilityGenerator is not None
        assert len(probabilityGenerator.TraxelsPerFrame) > 0
        assert skipLinks > 0

        def checkNodeWhileAddingLinks(frame, obj):
            # links should only connect nodes that were inserted above
            if (frame, obj) not in self._graph:
                logger.warning(
                    "Adding node ({}, {}) when setting up links".format(frame, obj)
                )

        # sliding window of kd-trees: index 0 is the current frame,
        # indices 1..skipLinks are the following frames
        kdTreeFrames = [None] * (skipLinks + 1)
        # len(probabilityGenerator.TraxelsPerFrame.keys()) is NOT an indicator for the total number of frames,
        # because an empty frame does not create a key in the dictionary. E.g. for one frame in the middle of the
        # dataset, we won't access the last one.
        # Idea: take the max key in the dict. Remember, frame numbering starts with 0.
        frameMax = max(probabilityGenerator.TraxelsPerFrame.keys())
        frameMin = min(probabilityGenerator.TraxelsPerFrame.keys())
        numFrames = frameMax - frameMin + 1
        self.progressVisitor.showState("Probability Generator")
        countFrames = 0
        for frame in range(numFrames):
            countFrames += 1
            self.progressVisitor.showProgress(countFrames / float(numFrames))
            if frame > 0:
                # advance the sliding window: drop the old current frame and
                # append the kd-tree of the newly visible future frame
                del kdTreeFrames[0]  # this is the current frame
                if (
                    frame + skipLinks < numFrames
                    and frameMin + frame + skipLinks
                    in probabilityGenerator.TraxelsPerFrame.keys()
                ):
                    kdTreeFrames.append(
                        self._buildFrameKdTree(
                            probabilityGenerator.TraxelsPerFrame[
                                frameMin + frame + skipLinks
                            ]
                        )
                    )
                    self._addNodesForFrame(
                        frameMin + frame + skipLinks,
                        probabilityGenerator.TraxelsPerFrame[
                            frameMin + frame + skipLinks
                        ],
                    )
            else:
                # first iteration: fill the whole window at once
                for i in range(0, skipLinks + 1):
                    if (
                        frameMin + frame + i
                        in probabilityGenerator.TraxelsPerFrame.keys()
                    ):  # empty frame
                        kdTreeFrames[i] = self._buildFrameKdTree(
                            probabilityGenerator.TraxelsPerFrame[frameMin + frame + i]
                        )
                        self._addNodesForFrame(
                            frameMin + frame + i,
                            probabilityGenerator.TraxelsPerFrame[frameMin + frame + i],
                        )
            # find forward links
            if (
                frameMin + frame in probabilityGenerator.TraxelsPerFrame.keys()
            ):  # 'frame' could be empty
                for obj, traxel in probabilityGenerator.TraxelsPerFrame[
                    frameMin + frame
                ].items():
                    # possibly dividing objects need at least two successor candidates
                    divisionPreservingNumNearestNeighbors = numNearestNeighbors
                    if (
                        divisionPreservingNumNearestNeighbors < 2
                        and withDivisions
                        and self._traxelMightDivide(traxel, divisionThreshold)
                    ):
                        divisionPreservingNumNearestNeighbors = 2
                    for i in range(1, skipLinks + 1):
                        if (
                            frame + i < numFrames
                            and frameMin + frame + i
                            in probabilityGenerator.TraxelsPerFrame.keys()
                        ):
                            neighbors = self._findNearestNeighbors(
                                kdTreeFrames[i],
                                traxel,
                                divisionPreservingNumNearestNeighbors,
                                maxNeighborDist,
                            )
                            # type(neighbors) is list
                            for n in neighbors:
                                edge_start = (frameMin + frame, obj)
                                edge_end = (frameMin + frame + i, n)
                                checkNodeWhileAddingLinks(*edge_start)
                                checkNodeWhileAddingLinks(*edge_end)
                                self._graph.add_edge(edge_start, edge_end)
                                # store the unique node ids on the edge for export
                                self._graph.edges[edge_start, edge_end][
                                    "src"
                                ] = self._graph.nodes[edge_start]["id"]
                                self._graph.edges[edge_start, edge_end][
                                    "dest"
                                ] = self._graph.nodes[edge_end]["id"]
            # find backward links
            if forwardBackwardCheck:
                for i in range(1, skipLinks + 1):
                    if frame + i < numFrames:
                        if (
                            frameMin + frame + i
                            in probabilityGenerator.TraxelsPerFrame.keys()
                        ):  # empty frame
                            for obj, traxel in probabilityGenerator.TraxelsPerFrame[
                                frameMin + frame + i
                            ].items():
                                if kdTreeFrames[0] is not None:
                                    neighbors = self._findNearestNeighbors(
                                        kdTreeFrames[0],
                                        traxel,
                                        numNearestNeighbors,
                                        maxNeighborDist,
                                    )
                                    for n in neighbors:
                                        edge_start = (frameMin + frame, n)
                                        edge_end = (frameMin + frame + i, obj)
                                        checkNodeWhileAddingLinks(*edge_start)
                                        checkNodeWhileAddingLinks(*edge_end)
                                        self._graph.add_edge(edge_start, edge_end)
                                        self._graph.edges[edge_start, edge_end][
                                            "src"
                                        ] = self._graph.nodes[edge_start]["id"]
                                        self._graph.edges[edge_start, edge_end][
                                            "dest"
                                        ] = self._graph.nodes[edge_end]["id"]
def generateTrackletGraph(self):
    """
    **Return** a new hypotheses graph where chains of detections with only one possible
    incoming/outgoing transition are contracted into one node in the graph.
    The returned graph will have `withTracklets` set to `True`!
    The `'tracklet'` node map contains a list of traxels that each node represents.
    """
    logger.info("generating tracklet graph...")
    # shallow-copy self, but give the copy its own graph object so contraction
    # does not mutate the original hypotheses graph
    tracklet_graph = copy.copy(self)
    tracklet_graph._graph = tracklet_graph._graph.copy()
    tracklet_graph.withTracklets = True
    tracklet_graph.referenceTraxelGraph = self
    tracklet_graph.progressVisitor = self.progressVisitor

    self.progressVisitor.showState("Initializing Tracklet Graph")

    # initialize tracklet map to contain a list of only one traxel per node
    countNodes = 0
    numNodes = tracklet_graph.countNodes()
    for node in tracklet_graph._graph.nodes():
        countNodes += 1
        self.progressVisitor.showProgress(countNodes / float(numNodes))
        tracklet_graph._graph.nodes[node]["tracklet"] = [
            tracklet_graph._graph.nodes[node]["traxel"]
        ]
        del tracklet_graph._graph.nodes[node]["traxel"]

    # set up a list of links that indicates whether the target's in- and source's out-degree
    # are one, meaning the edge can be contracted
    links_to_be_contracted = []
    node_remapping = {}

    self.progressVisitor.showState("Finding Tracklets in Graph")
    countEdges = 0
    numEdges = tracklet_graph.countArcs()
    for edge in tracklet_graph._graph.edges():
        countEdges += 1
        self.progressVisitor.showProgress(countEdges / float(numEdges))
        if (
            tracklet_graph._graph.out_degree(edge[0]) == 1
            and tracklet_graph._graph.in_degree(edge[1]) == 1
        ):
            links_to_be_contracted.append(edge)
            # initially every contractible endpoint maps to itself; updated below
            # as chains collapse into their first node
            for i in [0, 1]:
                node_remapping[edge[i]] = edge[i]

    # apply edge contraction
    self.progressVisitor.showState("Contracting Edges in Tracklet Graph")
    countLinks = 0
    numLinks = len(links_to_be_contracted)
    for edge in links_to_be_contracted:
        countLinks += 1
        self.progressVisitor.showProgress(countLinks / float(numLinks))
        src = node_remapping[edge[0]]
        dest = node_remapping[edge[1]]
        if (
            tracklet_graph._graph.in_degree(src) == 0
            and tracklet_graph._graph.out_degree(dest) == 0
        ):
            # if this tracklet would contract to a single node without incoming or outgoing edges,
            # then do NOT contract, as our tracking cannot handle length-one-tracks
            continue
        # append the destination's traxels to the surviving source node
        tracklet_graph._graph.nodes[src]["tracklet"].extend(
            tracklet_graph._graph.nodes[dest]["tracklet"]
        )
        # duplicate out arcs with new source
        for out_edge in tracklet_graph._graph.out_edges(dest):
            tracklet_graph._graph.add_edge(src, out_edge[1])
        # adjust node remapping to point to new source for all contracted traxels
        for t in tracklet_graph._graph.nodes[dest]["tracklet"]:
            node_remapping[(t.Timestep, t.Id)] = src
        tracklet_graph._graph.remove_node(dest)

    logger.info(
        "tracklet graph has {} nodes and {} edges (before {},{})".format(
            tracklet_graph.countNodes(),
            tracklet_graph.countArcs(),
            self.countNodes(),
            self.countArcs(),
        )
    )
    return tracklet_graph
def getNodeTraxelMap(self):
    """Return a :class:`NodeMap` exposing each node's ``'traxel'`` attribute."""
    attribute_name = "traxel"
    return NodeMap(self._graph, attribute_name)
def getNodeTrackletMap(self):
    """Return a :class:`NodeMap` exposing each node's ``'tracklet'`` attribute."""
    attribute_name = "tracklet"
    return NodeMap(self._graph, attribute_name)
def insertEnergies(
    self,
    maxNumObjects,
    detectionProbabilityFunc,
    transitionProbabilityFunc,
    boundaryCostMultiplierFunc,
    divisionProbabilityFunc,
    skipLinksBias,
):
    """
    Insert energies for detections, divisions and links into the hypotheses graph,
    by transforming the probabilities for certain
    events (given by the `*ProbabilityFunc`-functions per traxel) into energies. If the given graph
    contained tracklets (`self.withTracklets is True`), then also the probabilities over all contained traxels will be
    accumulated for those nodes in the graph.

    The energies are stored in the networkx graph under the following attribute names (to match the format for solvers):
    * detection energies: `self._graph.nodes[n]['features']`
    * division energies: `self._graph.nodes[n]['divisionFeatures']`
    * appearance energies: `self._graph.nodes[n]['appearanceFeatures']`
    * disappearance energies: `self._graph.nodes[n]['disappearanceFeatures']`
    * transition energies: `self._graph.edges[src][dest]['features']`
    * additionally we also store the timestep (range for traxels) per node as `timestep` attribute

    ** Parameters: **
    * `maxNumObjects`: the max number of objects per detections
    * `detectionProbabilityFunc`: should take a traxel and return its detection probabilities
      ([prob0objects, prob1object,...])
    * `transitionProbabilityFunc`: should take two traxels and return this link's probabilities
      ([prob0objectsInTransition, prob1objectsInTransition,...])
    * `boundaryCostMultiplierFunc`: should take a traxel and a boolean that is true if we are seeking for an appearance cost multiplier,
      false for disappearance, and return a scalar multiplier between 0 and 1 for the
      appearance/disappearance cost that depends on the traxel's distance to the spacial and time boundary
    * `divisionProbabilityFunc`: should take a traxel and return its division probabilities ([probNoDiv, probDiv])
    * `skipLinksBias`: extra per-frame energy added to links that skip frames (see below)
    """
    numElements = self._graph.number_of_nodes() + self._graph.number_of_edges()
    self.progressVisitor.showState("Inserting energies")

    # insert detection probabilities for all detections (and some also get a div probability)
    countElements = 0
    for n in self._graph.nodes():
        countElements += 1
        if not self.withTracklets:
            # only one traxel, but make it a list so everything below works the same
            traxels = [self._graph.nodes[n]["traxel"]]
        else:
            traxels = self._graph.nodes[n]["tracklet"]

        # accumulate features over all contained traxels
        previousTraxel = None
        detectionFeatures = np.zeros(maxNumObjects + 1)
        for t in traxels:
            detectionFeatures += np.array(negLog(detectionProbabilityFunc(t)))
            if previousTraxel is not None:
                # tracklet-internal transitions contribute their energy to the detection
                detectionFeatures += np.array(
                    negLog(transitionProbabilityFunc(previousTraxel, t))
                )
            previousTraxel = t
        detectionFeatures = listify(list(detectionFeatures))

        # division only if probability is big enough
        divisionFeatures = divisionProbabilityFunc(traxels[-1])
        if divisionFeatures is not None:
            divisionFeatures = listify(negLog(divisionFeatures))

        # appearance/disappearance: scaled by the boundary multiplier of the
        # first (appearance) resp. last (disappearance) traxel of the tracklet
        appearanceFeatures = listify(
            [0.0] + [boundaryCostMultiplierFunc(traxels[0], True)] * maxNumObjects
        )
        disappearanceFeatures = listify(
            [0.0] + [boundaryCostMultiplierFunc(traxels[-1], False)] * maxNumObjects
        )

        self._graph.nodes[n]["features"] = detectionFeatures
        if divisionFeatures is not None:
            self._graph.nodes[n]["divisionFeatures"] = divisionFeatures
        self._graph.nodes[n]["appearanceFeatures"] = appearanceFeatures
        self._graph.nodes[n]["disappearanceFeatures"] = disappearanceFeatures
        self._graph.nodes[n]["timestep"] = [
            traxels[0].Timestep,
            traxels[-1].Timestep,
        ]

        self.progressVisitor.showProgress(countElements / float(numElements))

    # insert transition probabilities for all links
    for a in self._graph.edges():
        countElements += 1
        self.progressVisitor.showProgress(countElements / float(numElements))
        if not self.withTracklets:
            srcTraxel = self._graph.nodes[self.source(a)]["traxel"]
            destTraxel = self._graph.nodes[self.target(a)]["traxel"]
        else:
            srcTraxel = self._graph.nodes[self.source(a)]["tracklet"][
                -1
            ]  # src is last of the traxels in source tracklet
            destTraxel = self._graph.nodes[self.target(a)]["tracklet"][
                0
            ]  # dest is first of traxels in destination tracklet
        features = listify(negLog(transitionProbabilityFunc(srcTraxel, destTraxel)))

        # add feature for additional Frames. Since we do not want these edges to be primarily taken, we add a bias to the edge. Now: hard coded, future: parameter
        frame_gap = destTraxel.Timestep - srcTraxel.Timestep

        # 1. method
        if frame_gap > 1:
            features[1][0] = features[1][0] + skipLinksBias * frame_gap

        # # 2. method
        # # introduce a new energies like: [[6], [15]] -> [[6, 23], [15, 23]] for first links and
        # # [[6], [15]] -> [[23, 6], [23, 15]] for second links, and so on for 3rd order links
        # # !!! this will introduce a new weight in the weight.json file. For the 2nd link, comes in 2nd row and so on.
        # # drawback: did not manage to adjust parameter to get sensible results.
        # for feat in features:
        #     for i in range(frame_gap):
        #         feat.append(23)
        #     if frame_gap > 1:
        #         feat[frame_gap-1], feat[0] = feat[0], feat[frame_gap-1]

        self._graph.edges[a[0], a[1]]["src"] = self._graph.nodes[a[0]]["id"]
        self._graph.edges[a[0], a[1]]["dest"] = self._graph.nodes[a[1]]["id"]
        self._graph.edges[a[0], a[1]]["features"] = features
def getMappingsBetweenUUIDsAndTraxels(self):
    """
    Extract the mapping from UUID to traxel and vice versa from the networkx graph.

    ** Returns: a tuple of **
    * `traxelIdPerTimestepToUniqueIdMap`: a dictionary of the structure `{str(timestep):{str(labelimageId):int(uuid),
      str(labelimageId):int(uuid), ...}, str(nextTimestep):{}, ...}`
    * `uuidToTraxelMap`: a dictionary with keys = int(uuid), values = list(of timestep-Id-tuples (int(Timestep), int(Id)))
    """
    uuid_to_traxels = {}
    timestep_to_uuid = {}
    for node in self._graph.nodes():
        node_uuid = self._graph.nodes[node]["id"]
        # a tracklet node holds several traxels; a plain node holds exactly one
        if self.withTracklets:
            contained = self._graph.nodes[node]["tracklet"]
        else:
            contained = [self._graph.nodes[node]["traxel"]]
        pairs = [(t.Timestep, t.Id) for t in contained]
        uuid_to_traxels[node_uuid] = pairs
        for timestep, label_id in pairs:
            timestep_to_uuid.setdefault(str(timestep), {})[str(label_id)] = node_uuid

    # sort the list of traxels per UUID by their timesteps
    for pairs in uuid_to_traxels.values():
        pairs.sort(key=lambda pair: pair[0])

    return timestep_to_uuid, uuid_to_traxels
def toTrackingGraph(self, noFeatures=False):
    """
    Create a dictionary representation of this graph which can be passed to the solvers directly.
    The resulting graph (=model) is wrapped within a `hytra.jsongraph.JsonTrackingGraph` structure for convenience.
    If `noFeatures` is `True`, then only the structure of the graph will be exported.
    """
    requiredNodeAttribs = ["id"]
    requiredLinkAttribs = ["src", "dest"]

    # features become mandatory attributes unless the caller asked for structure only
    if not noFeatures:
        requiredNodeAttribs.append("features")
        requiredLinkAttribs.append("features")

    def translateNodeToDict(n):
        # copy the solver-relevant node attributes; optional ones are skipped
        # when missing, required ones raise
        result = {}
        attrs = self._graph.nodes[n]
        for k in [
            "id",
            "features",
            "appearanceFeatures",
            "disappearanceFeatures",
            "divisionFeatures",
            "timestep",
        ]:
            if k in attrs:
                result[k] = attrs[k]
            elif k in requiredNodeAttribs:
                raise ValueError(
                    "Cannot use graph nodes without assigned ID and features, run insertEnergies() first"
                )
        return result

    def translateLinkToDict(l):
        # same as translateNodeToDict, but for an edge (l is a (src, dest) tuple)
        result = {}
        attrs = self._graph.edges[l[0], l[1]]
        for k in ["src", "dest", "features"]:
            if k in attrs:
                result[k] = attrs[k]
            elif k in requiredLinkAttribs:
                raise ValueError(
                    "Cannot use graph links without source, target, and features, run insertEnergies() first"
                )
        return result

    traxelIdPerTimestepToUniqueIdMap, _ = self.getMappingsBetweenUUIDsAndTraxels()

    model = {
        "segmentationHypotheses": [
            translateNodeToDict(n) for n in self._graph.nodes()
        ],
        "linkingHypotheses": [translateLinkToDict(e) for e in self._graph.edges()],
        "divisionHypotheses": [],
        "traxelToUniqueId": traxelIdPerTimestepToUniqueIdMap,
        "settings": {
            "statesShareWeights": True,
            "allowPartialMergerAppearance": False,
            "requireSeparateChildrenOfDivision": True,
            "optimizerEpGap": 0.01,
            "optimizerVerbose": True,
            "optimizerNumThreads": 1,
        },
    }

    # extract exclusion sets:
    exclusions = set([])
    for n in self._graph.nodes():
        if self.withTracklets:
            traxel = self._graph.nodes[n]["tracklet"][0]
        else:
            traxel = self._graph.nodes[n]["traxel"]
        if traxel.conflictingTraxelIds is not None:
            if self.withTracklets:
                logger.error(
                    "Exclusion constraints do not work with tracklets yet!"
                )
            conflictingIds = [
                traxelIdPerTimestepToUniqueIdMap[str(traxel.Timestep)][str(i)]
                for i in traxel.conflictingTraxelIds
            ]
            myId = traxelIdPerTimestepToUniqueIdMap[str(traxel.Timestep)][
                str(traxel.Id)
            ]
            for ci in conflictingIds:
                # insert pairwise exclusion constraints only, and always put the lower id first
                if ci < myId:
                    exclusions.add((ci, myId))
                else:
                    exclusions.add((myId, ci))
    model["exclusions"] = [list(t) for t in exclusions]

    # TODO: this recomputes the uuidToTraxelMap even though we have it already...
    trackingGraph = hytra.core.jsongraph.JsonTrackingGraph(
        model=model, progressVisitor=self.progressVisitor
    )
    return trackingGraph
def insertSolution(self, resultDictionary):
    """
    Add solution values to nodes and arcs from dictionary representation of solution.
    The resulting graph (=model) gets an additional property "value" that represents the number of objects inside a detection/arc
    Additionally a division indicator is saved in the node property "divisionValue".
    The link also gets a new attribute: the gap that is covered. E.g. 1, if consecutive timeframes, 2 if link skipping one timeframe.
    """
    assert isinstance(self._graph, nx.DiGraph), "Expecting the graph to be directed"
    _, uuidToTraxelMap = self.getMappingsBetweenUUIDsAndTraxels()

    # the solution is always written into the underlying traxel graph
    if self.withTracklets:
        traxelgraph = self.referenceTraxelGraph
    else:
        traxelgraph = self

    # reset all values
    for n in traxelgraph._graph.nodes():
        traxelgraph._graph.nodes[n]["value"] = 0
        traxelgraph._graph.nodes[n]["divisionValue"] = False
    for e in traxelgraph._graph.edges():
        traxelgraph._graph.edges[e[0], e[1]]["value"] = 0

    # store values from dict
    for detection in resultDictionary["detectionResults"]:
        traxels = uuidToTraxelMap[detection["id"]]
        for traxel in traxels:
            traxelgraph._graph.nodes[traxel]["value"] = detection["value"]
        # tracklet-internal links carry the same value as their detection
        for internal_edge in zip(traxels, traxels[1:]):
            traxelgraph._graph.edges[internal_edge[0], internal_edge[1]][
                "value"
            ] = detection["value"]

    if (
        "linkingResults" in resultDictionary
        and resultDictionary["linkingResults"] is not None
    ):
        for link in resultDictionary["linkingResults"]:
            # a link connects the last traxel of the source tracklet
            # with the first traxel of the destination tracklet
            source, dest = (
                uuidToTraxelMap[link["src"]][-1],
                uuidToTraxelMap[link["dest"]][0],
            )
            # only transfer links that actually exist in the traxel graph
            if (source in traxelgraph._graph.predecessors(dest)) and (
                dest in traxelgraph._graph.neighbors(source)
            ):
                traxelgraph._graph.edges[source, dest]["value"] = link["value"]
                # gap = timestep difference (node keys are (timestep, id) tuples)
                traxelgraph._graph.edges[source, dest]["gap"] = dest[0] - source[0]

    if (
        "divisionResults" in resultDictionary
        and resultDictionary["divisionResults"] is not None
    ):
        for division in resultDictionary["divisionResults"]:
            # divisions are attributed to the last traxel of the tracklet
            traxelgraph._graph.nodes[uuidToTraxelMap[division["id"]][-1]][
                "divisionValue"
            ] = division["value"]
def getSolutionDictionary(self):
    """
    Return the solution encoded in the `value` and `divisionValue` attributes of nodes and edges
    as a python dictionary in the style that can be saved to JSON or sent to our solvers as ground truths.
    """
    solution = {}
    # the solution lives in the underlying traxel graph
    graph_holder = self.referenceTraxelGraph if self.withTracklets else self

    detections = []
    divisions = []
    links = []

    def attribute_or_default(attributes, name, fallback):
        # mirror dict.get semantics for the attribute dictionaries
        if name in attributes:
            return attributes[name]
        return fallback

    for node in graph_holder._graph.nodes():
        node_attrs = graph_holder._graph.nodes[node]
        detections.append(
            {
                "id": node_attrs["id"],
                "value": attribute_or_default(node_attrs, "value", 0),
            }
        )
        if "divisionValue" in node_attrs:
            divisions.append(
                {
                    "id": node_attrs["id"],
                    "value": attribute_or_default(node_attrs, "divisionValue", False),
                }
            )

    for arc in graph_holder.arcIterator():
        u = self.source(arc)
        v = self.target(arc)
        edge_attrs = graph_holder._graph.edges[u, v]
        links.append(
            {
                "src": graph_holder._graph.nodes[u]["id"],
                "dest": graph_holder._graph.nodes[v]["id"],
                "value": attribute_or_default(edge_attrs, "value", 0),
                "gap": attribute_or_default(edge_attrs, "gap", 1),
            }
        )

    solution["detectionResults"] = detections
    solution["linkingResults"] = links
    solution["divisionResults"] = divisions
    return solution
def countIncomingObjects(self, node):
    """
    Once a solution was written to the graph, this returns the number of
    incoming objects of a node, and the number of active incoming edges.
    If the latter is greater than 1, this shows that we have a merger.
    """
    total_objects = 0
    labeled_edges = 0
    for predecessor, _ in self._graph.in_edges(node):
        edge_attrs = self._graph.edges[predecessor, node]
        # only edges that carry a "value" attribute count as labeled
        if "value" in edge_attrs:
            total_objects += edge_attrs["value"]
            labeled_edges += 1
    return total_objects, labeled_edges
def countOutgoingObjects(self, node):
    """
    Once a solution was written to the graph, this returns the number of
    outgoing objects of a node, and the number of active outgoing edges.
    If the latter is greater than 1, this shows that we have a merger splitting up, or a division.
    """
    total_objects = 0
    active_edges = 0
    for _, successor in self._graph.out_edges(node):
        edge_attrs = self._graph.edges[node, successor]
        # unlike countIncomingObjects, only strictly positive values count here
        if "value" in edge_attrs and edge_attrs["value"] > 0:
            total_objects += edge_attrs["value"]
            active_edges += 1
    return total_objects, active_edges
def computeLineage(self, firstTrackId=2, firstLineageId=2, skipLinks=1):
    """
    computes lineage and track id for every node in the graph

    Writes the node attributes `lineageId` and `trackId` (or `None` for nodes
    that are not part of the solution), plus `children`/`parent` at divisions
    and `gap`/`gap_parent` along skip links.
    """
    update_queue = []
    # start lineages / tracks at 2, because 0 means background=black, 1 means misdetection in ilastik
    max_lineage_id = firstLineageId
    max_track_id = firstTrackId
    if self.withTracklets:
        traxelgraph = self.referenceTraxelGraph
    else:
        traxelgraph = self

    self.progressVisitor.showState("Compute lineage")

    # find start of lineages: active nodes without incoming objects
    numElements = 2 * traxelgraph.countNodes()
    countElements = 0
    for n in traxelgraph.nodeIterator():
        countElements += 1
        self.progressVisitor.showProgress(countElements / float(numElements))
        if (
            traxelgraph.countIncomingObjects(n)[0] == 0
            and "value" in traxelgraph._graph.nodes[n]
            and traxelgraph._graph.nodes[n]["value"] > 0
            and (
                self.allowLengthOneTracks
                or traxelgraph.countOutgoingObjects(n)[0] > 0
            )
        ):
            # found start of a track
            update_queue.append((n, max_lineage_id, max_track_id))
            max_lineage_id += 1
            max_track_id += 1
        else:
            traxelgraph._graph.nodes[n]["lineageId"] = None
            traxelgraph._graph.nodes[n]["trackId"] = None

    # propagate lineage/track ids through the solution (depth-first via pop())
    while len(update_queue) > 0:
        countElements += 1
        current_node, lineage_id, track_id = update_queue.pop()
        self.progressVisitor.showProgress(countElements / float(numElements))
        # if we did not run merger resolving, it can happen that we reach a node several times,
        # and would propagate the new lineage+track IDs to all descendants again! We simply
        # stop propagating in that case and just use the lineageID that reached the node first.
        if (
            traxelgraph._graph.nodes[current_node].get("lineageId", None)
            is not None
            and traxelgraph._graph.nodes[current_node].get("trackId", None)
            is not None
        ):
            logger.debug("Several tracks are merging here, stopping a later one")
            continue

        # set a new trackID
        traxelgraph._graph.nodes[current_node]["lineageId"] = lineage_id
        traxelgraph._graph.nodes[current_node]["trackId"] = track_id

        numberOfOutgoingObject, numberOfOutgoingEdges = traxelgraph.countOutgoingObjects(
            current_node
        )
        if numberOfOutgoingObject != numberOfOutgoingEdges:
            logger.warning(
                "running lineage computation on unresolved graphs depends on a race condition"
            )

        if (
            "divisionValue" in traxelgraph._graph.nodes[current_node]
            and traxelgraph._graph.nodes[current_node]["divisionValue"]
        ):
            # division: each child starts a new track within the same lineage
            assert traxelgraph.countOutgoingObjects(current_node)[1] == 2
            traxelgraph._graph.nodes[current_node]["children"] = []
            for a in traxelgraph._graph.out_edges(current_node):
                if (
                    "value" in traxelgraph._graph.edges[current_node, a[1]]
                    and traxelgraph._graph.edges[current_node, a[1]]["value"] > 0
                ):
                    traxelgraph._graph.nodes[a[1]]["gap"] = skipLinks
                    traxelgraph._graph.nodes[current_node]["children"].append(a[1])
                    traxelgraph._graph.nodes[a[1]]["parent"] = current_node
                    update_queue.append(
                        (traxelgraph.target(a), lineage_id, max_track_id)
                    )
                    max_track_id += 1
        else:
            if traxelgraph.countOutgoingObjects(current_node)[1] > 1:
                logger.debug(
                    "Found merger splitting into several objects, propagating lineage and track to all descendants!"
                )
            for a in traxelgraph._graph.out_edges(current_node):
                if (
                    "value" in traxelgraph._graph.edges[current_node, a[1]]
                    and traxelgraph._graph.edges[current_node, a[1]]["value"] > 0
                ):
                    # consecutive-frame link (or no gap recorded): track id continues
                    if (
                        "gap" in traxelgraph._graph.edges[current_node, a[1]]
                        and traxelgraph._graph.edges[current_node, a[1]]["gap"] == 1
                    ) or "gap" not in traxelgraph._graph.edges[current_node, a[1]]:
                        traxelgraph._graph.nodes[a[1]]["gap"] = 1
                        update_queue.append(
                            (traxelgraph.target(a), lineage_id, track_id)
                        )
                    # skip link: descendant starts a new track id in the same lineage
                    if (
                        "gap" in traxelgraph._graph.edges[current_node, a[1]]
                        and traxelgraph._graph.edges[current_node, a[1]]["gap"] > 1
                    ):
                        traxelgraph._graph.nodes[a[1]]["gap"] = skipLinks
                        traxelgraph._graph.nodes[a[1]]["gap_parent"] = current_node
                        update_queue.append(
                            (traxelgraph.target(a), lineage_id, max_track_id)
                        )
                        max_track_id += 1
def pruneGraphToSolution(self, distanceToSolution=0):
    """
    Create a new pruned HypothesesGraph around the result. Assumes that value==0
    corresponds to unlabeled parts of the graph.

    `distanceToSolution` determines how many negative examples are included:
    * distanceToSolution = 0: only include negative edges that connect used objects
    * distanceToSolution = 1: additionally include edges that connect used objects with unlabeled objects

    Additionally cleans up *this* graph: nodes that are not appearances (resp.
    disappearances) and whose incoming (resp. outgoing) edges all have value 0
    get those edges removed and their appearance (resp. disappearance) features
    zeroed — but only when all nodes carry consistently sized features.
    """
    prunedGraph = HypothesesGraph()

    # copy all nodes that are part of the solution (value > 0)
    for n in self.nodeIterator():
        if "value" in self._graph.nodes[n] and self._graph.nodes[n]["value"] > 0:
            prunedGraph._graph.add_node(n, **self._graph.nodes[n])

    # copy edges between solution nodes
    for e in self.arcIterator():
        src = self.source(e)
        dest = self.target(e)
        if distanceToSolution == 0:
            if src in prunedGraph._graph and dest in prunedGraph._graph:
                prunedGraph._graph.add_edge(
                    src, dest, **self._graph.edges[src, dest]
                )

    # grow the pruned graph ring by ring around the solution
    # TODO: can be optimized by looping over the pruned graph nodes(might sacrifice readability)
    for distance in range(1, distanceToSolution + 1):
        for e in self.arcIterator():
            src = self.source(e)
            dest = self.target(e)
            if src in prunedGraph._graph or dest in prunedGraph._graph:
                prunedGraph._graph.add_node(src, **self._graph.nodes[src])
                prunedGraph._graph.add_node(dest, **self._graph.nodes[dest])
                prunedGraph._graph.add_edge(
                    src, dest, **self._graph.edges[src, dest]
                )

    # in case a node is NOT an appearance and
    # has all the incoming edges with value 0, we remove all these incoming edges
    #
    # in case a node is NOT a disappearance and
    # has all the outgoing edges with value 0, we remove all these outgoing edges
    withAppearanceFeatures = True
    withDisappearanceFeatures = True
    withFeatures = True
    correctAppearanceFeatureLength = True
    correctDisappearanceFeatureLength = True
    correctFeatureLength = True
    maxNumObjects = None
    maxNumObjectsAppearance = None
    maxNumObjectsDisappearance = None
    for n in self.nodeIterator():
        try:
            maxNumObjectsApp = len(self._graph.nodes[n]["appearanceFeatures"]) - 1
            if maxNumObjectsAppearance is None:
                maxNumObjectsAppearance = maxNumObjectsApp
            elif not maxNumObjectsApp == maxNumObjectsAppearance:
                correctAppearanceFeatureLength = False
                logger.info(
                    "Appearance/disappearance features have different lengths!"
                )
        except Exception:  # was a bare `except:` — a missing key means "no features"
            withAppearanceFeatures = False
            logger.info("There are no appearance features in node properties!")
            break
        try:
            maxNumObjectsDis = (
                len(self._graph.nodes[n]["disappearanceFeatures"]) - 1
            )
            if maxNumObjectsDisappearance is None:
                maxNumObjectsDisappearance = maxNumObjectsDis
            elif not maxNumObjectsDis == maxNumObjectsDisappearance:
                correctDisappearanceFeatureLength = False
                logger.info("Disappearance features have different lengths!")
        except Exception:
            withDisappearanceFeatures = False
            logger.info("There are no disappearance features in node properties!")
            break

    if withAppearanceFeatures and withDisappearanceFeatures:
        if (
            correctAppearanceFeatureLength
            and correctDisappearanceFeatureLength
            and maxNumObjectsAppearance == maxNumObjectsDisappearance
        ):
            maxNumObjects = maxNumObjectsAppearance
        else:
            correctFeatureLength = False
            logger.info(
                "Appearance and disappearance features have different lengths!"
            )
    else:
        withFeatures = False

    if withFeatures and correctFeatureLength:
        for n in self.nodeIterator():
            if not (
                "appearance" in self._graph.nodes[n].keys()
                and self._graph.nodes[n]["appearance"]
            ):
                allArcsWithValueZero = True
                in_edges = self._graph.in_edges(n)
                for edge in list(in_edges):
                    # BUGFIX: was `self._graph.edges[edge[0]][edge[1]]`, which is not
                    # a valid networkx edge-attribute lookup; use the (u, v) form
                    # that the rest of this module uses.
                    if (
                        "value" in self._graph.edges[edge[0], edge[1]].keys()
                        and not self._graph.edges[edge[0], edge[1]]["value"] == 0
                    ):
                        allArcsWithValueZero = False
                        break

                # NOTE(review): the features are zeroed whenever the node is not
                # flagged as an appearance, regardless of allArcsWithValueZero —
                # confirm this is intended.
                self._graph.nodes[n]["appearanceFeatures"] = listify(
                    [0.0] + [0.0] * maxNumObjects
                )
                if allArcsWithValueZero:
                    # materialize the view before removal to avoid mutating the
                    # graph while iterating its own edge view
                    self._graph.remove_edges_from(list(in_edges))

            if not (
                "disappearance" in self._graph.nodes[n].keys()
                and self._graph.nodes[n]["disappearance"]
            ):
                allArcsWithValueZero = True
                out_edges = self._graph.out_edges(n)
                for edge in list(out_edges):
                    if (
                        "value" in self._graph.edges[edge[0], edge[1]].keys()
                        and not self._graph.edges[edge[0], edge[1]]["value"] == 0
                    ):
                        allArcsWithValueZero = False
                        break

                self._graph.nodes[n]["disappearanceFeatures"] = listify(
                    [0.0] + [0.0] * maxNumObjects
                )
                if allArcsWithValueZero:
                    self._graph.remove_edges_from(list(out_edges))

    return prunedGraph
def _getNodeAttribute(self, timestep, objectId, attribute):
"""
return some attribute of a certain node specified by timestep and objectId
"""
try:
return self._graph.nodes[(int(timestep), int(objectId))][attribute]
except KeyError:
logger.error(
attribute
+ " not found in graph node properties, call computeLineage() first!"
)
raise
def getLineageId(self, timestep, objectId):
    """
    return the lineage Id of a certain node specified by timestep and objectId
    """
    # a tracklet graph delegates to its underlying traxel graph
    target = self.referenceTraxelGraph if self.withTracklets else self
    return target._getNodeAttribute(timestep, objectId, "lineageId")
def getTrackId(self, timestep, objectId):
    """
    return the track Id of a certain node specified by timestep and objectId
    """
    # a tracklet graph delegates to its underlying traxel graph
    target = self.referenceTraxelGraph if self.withTracklets else self
    return target._getNodeAttribute(timestep, objectId, "trackId")
| 43.643885 | 166 | 0.557879 | import logging
import copy
import networkx as nx
import numpy as np
from sklearn.neighbors import KDTree
import hytra.core.jsongraph
from hytra.core.jsongraph import negLog, listify
from hytra.util.progressbar import DefaultProgressVisitor
logger = logging.getLogger(__name__)
def getTraxelFeatureVector(traxel, featureName, maxNumDimensions=3):
    """
    Extract up to `maxNumDimensions` components of feature `featureName` from `traxel`.

    * `traxel`: object providing `get_feature_value(name, index)` plus `Id`/`Timestep`
    * `featureName`: name of the feature to read (converted to `str`)
    * `maxNumDimensions`: number of components to extract

    Returns the list of component values. Raises `Exception` (with a message and
    the original error chained) when the feature is missing or has fewer
    components than requested.
    """
    result = []
    for i in range(maxNumDimensions):
        try:
            result.append(traxel.get_feature_value(str(featureName), i))
        except Exception as err:  # was a bare `except:`
            if i == 0:
                # the feature is missing entirely
                logger.error(
                    f"Error when accessing feature {featureName}[{i}] for "
                    f"traxel (Id={traxel.Id},Timestep={traxel.Timestep})"
                )
                logger.error(traxel.print_available_features())
                raise Exception(
                    f"Feature {featureName} not present in traxel"
                ) from err
            else:
                # the feature exists but has fewer components than requested
                logger.error(
                    f"Error: Classifier was trained with less merger than maxNumObjects {maxNumDimensions}."
                )
                raise Exception(
                    f"Feature {featureName} has fewer than {maxNumDimensions} components"
                ) from err
    return result
class NodeMap:
    """Read-only mapping from a graph node to one of its stored attributes."""

    def __init__(self, graph: nx.DiGraph, attributeName):
        # only directed graphs are supported
        assert isinstance(graph, nx.DiGraph), "Expecting the graph to be directed"
        self.__attributeName = attributeName
        self.__graph = graph

    def __getitem__(self, key):
        """Return the configured attribute of node `key`."""
        node_attributes = self.__graph.nodes[key]
        return node_attributes[self.__attributeName]
class HypothesesGraph:
def __init__(self):
    """Create an empty hypotheses graph."""
    # the underlying directed networkx graph holding all hypotheses
    self._graph = nx.DiGraph()
    # becomes True once generateTrackletGraph() contracted chains into tracklets
    self.withTracklets = False
    # whether computeLineage() may start tracks that have no outgoing link
    self.allowLengthOneTracks = True
    # next free unique id assigned to newly added nodes
    self._nextNodeUuid = 0
    # progress reporting hook used by the long-running methods
    self.progressVisitor = DefaultProgressVisitor()
def nodeIterator(self):
    """Return an iterable over all nodes of the underlying graph."""
    graph = self._graph
    return graph.nodes()
def arcIterator(self):
    """Return an iterable over all edges of the underlying graph."""
    graph = self._graph
    return graph.edges()
def countNodes(self):
    """Return the number of nodes in the underlying graph."""
    graph = self._graph
    return graph.number_of_nodes()
def countArcs(self):
    """Return the number of edges in the underlying graph."""
    graph = self._graph
    return graph.number_of_edges()
def hasNode(self, node):
    """Return True iff `node` is present in the underlying graph."""
    graph = self._graph
    return graph.has_node(node)
def hasEdge(self, u, v):
    """Return True iff the directed edge (u, v) exists in the underlying graph."""
    graph = self._graph
    return graph.has_edge(u, v)
@staticmethod
def source(edge):
return edge[0]
@staticmethod
def target(edge):
return edge[1]
def _findNearestNeighbors(
self, kdtreeObjectPair, traxel, numNeighbors, maxNeighborDist
):
kdtree, objectIdList = kdtreeObjectPair
if len(objectIdList) <= numNeighbors:
return objectIdList
distances, neighbors = kdtree.query(
[self._extractCenter(traxel)], k=numNeighbors, return_distance=True
)
return [
objectIdList[index]
for distance, index in zip(distances[0], neighbors[0])
if distance < maxNeighborDist
]
def _extractCenter(self, traxel):
try:
if "com" in traxel.Features:
return traxel.Features["com"]
else:
return traxel.Features["RegionCenter"]
except:
try:
return getTraxelFeatureVector(traxel, "com")
except:
try:
return getTraxelFeatureVector(traxel, "RegionCenter")
except:
raise ValueError(
"given traxel (t={},id={}) does not have "
'"com" or "RegionCenter"'.format(traxel.Timestep, traxel.Id)
)
def _traxelMightDivide(self, traxel, divisionThreshold):
assert "divProb" in traxel.Features
return traxel.Features["divProb"][0] > divisionThreshold
def _buildFrameKdTree(self, traxelDict):
    """
    Build a KD-tree over the region centers of all traxels of one frame.

    Returns a (KDTree, objectIdList) pair where objectIdList[i] is the object
    id of the i-th point in the tree. Object id 0 (background) is skipped.
    """
    ids = []
    centers = []
    for object_id, traxel in traxelDict.items():
        if object_id == 0:
            continue
        ids.append(object_id)
        centers.append(list(self._extractCenter(traxel)))
    return (KDTree(centers, metric="euclidean"), ids)
def _addNodesForFrame(self, frame, traxelDict):
for obj, traxel in traxelDict.items():
if obj == 0:
continue
self._graph.add_node((frame, obj), traxel=traxel, id=self._nextNodeUuid)
self._nextNodeUuid += 1
def addNodeFromTraxel(self, traxel, **kwargs):
    """
    Add a single node for `traxel`, keyed by (Timestep, Id), with a fresh UUID.
    Extra keyword arguments become additional node attributes.
    Only valid on a plain traxel graph (not a tracklet graph).
    """
    assert traxel is not None
    assert not self.withTracklets
    node_key = (traxel.Timestep, traxel.Id)
    self._graph.add_node(
        node_key, traxel=traxel, id=self._nextNodeUuid, **kwargs
    )
    self._nextNodeUuid += 1
def buildFromProbabilityGenerator(
    self,
    probabilityGenerator,
    maxNeighborDist=200,
    numNearestNeighbors=1,
    forwardBackwardCheck=True,
    withDivisions=True,
    divisionThreshold=0.1,
    skipLinks=1,
):
    """
    Build the hypotheses graph from the traxels of `probabilityGenerator.TraxelsPerFrame`.

    Adds one node per traxel (via `_addNodesForFrame`) and links each traxel to
    its nearest neighbors in the following frame(s), found with per-frame
    KD-trees over the traxel centers.

    * `maxNeighborDist`: maximum center distance for a link candidate
    * `numNearestNeighbors`: number of neighbors to link per traxel
    * `forwardBackwardCheck`: if True, additionally search links backward in time
    * `withDivisions`: potential divisions get at least 2 forward neighbors
    * `divisionThreshold`: minimum division probability to count as potentially dividing
      (see `_traxelMightDivide`)
    * `skipLinks`: also create links that skip up to this many frames
      (1 = consecutive frames only); must be > 0
    """
    assert probabilityGenerator is not None
    assert len(probabilityGenerator.TraxelsPerFrame) > 0
    assert skipLinks > 0

    def checkNodeWhileAddingLinks(frame, obj):
        # warn when a link endpoint was never added as a node
        if (frame, obj) not in self._graph:
            logger.warning(
                "Adding node ({}, {}) when setting up links".format(frame, obj)
            )

    # kdTreeFrames[i] holds the KD-tree of frame `current + i` (or None for empty frames)
    kdTreeFrames = [None] * (skipLinks + 1)
    # Idea: take the max key in the dict. Remember, frame numbering starts with 0.
    frameMax = max(probabilityGenerator.TraxelsPerFrame.keys())
    frameMin = min(probabilityGenerator.TraxelsPerFrame.keys())
    numFrames = frameMax - frameMin + 1

    self.progressVisitor.showState("Probability Generator")

    countFrames = 0
    for frame in range(numFrames):
        countFrames += 1
        self.progressVisitor.showProgress(countFrames / float(numFrames))

        if frame > 0:
            # slide the KD-tree window: drop the current frame, append the newest one
            del kdTreeFrames[0]  # this is the current frame
            if (
                frame + skipLinks < numFrames
                and frameMin + frame + skipLinks
                in probabilityGenerator.TraxelsPerFrame.keys()
            ):
                kdTreeFrames.append(
                    self._buildFrameKdTree(
                        probabilityGenerator.TraxelsPerFrame[
                            frameMin + frame + skipLinks
                        ]
                    )
                )
                self._addNodesForFrame(
                    frameMin + frame + skipLinks,
                    probabilityGenerator.TraxelsPerFrame[
                        frameMin + frame + skipLinks
                    ],
                )
        else:
            # first iteration: build trees and nodes for the initial window
            for i in range(0, skipLinks + 1):
                if (
                    frameMin + frame + i
                    in probabilityGenerator.TraxelsPerFrame.keys()
                ):  # empty frame
                    kdTreeFrames[i] = self._buildFrameKdTree(
                        probabilityGenerator.TraxelsPerFrame[frameMin + frame + i]
                    )
                    self._addNodesForFrame(
                        frameMin + frame + i,
                        probabilityGenerator.TraxelsPerFrame[frameMin + frame + i],
                    )

        # find forward links
        if (
            frameMin + frame in probabilityGenerator.TraxelsPerFrame.keys()
        ):  # 'frame' could be empty
            for obj, traxel in probabilityGenerator.TraxelsPerFrame[
                frameMin + frame
            ].items():
                divisionPreservingNumNearestNeighbors = numNearestNeighbors
                if (
                    divisionPreservingNumNearestNeighbors < 2
                    and withDivisions
                    and self._traxelMightDivide(traxel, divisionThreshold)
                ):
                    divisionPreservingNumNearestNeighbors = 2
                for i in range(1, skipLinks + 1):
                    if (
                        frame + i < numFrames
                        and frameMin + frame + i
                        in probabilityGenerator.TraxelsPerFrame.keys()
                    ):
                        neighbors = self._findNearestNeighbors(
                            kdTreeFrames[i],
                            traxel,
                            divisionPreservingNumNearestNeighbors,
                            maxNeighborDist,
                        )
                        # type(neighbors) is list
                        for n in neighbors:
                            edge_start = (frameMin + frame, obj)
                            edge_end = (frameMin + frame + i, n)
                            checkNodeWhileAddingLinks(*edge_start)
                            checkNodeWhileAddingLinks(*edge_end)
                            self._graph.add_edge(edge_start, edge_end)
                            self._graph.edges[edge_start, edge_end][
                                "src"
                            ] = self._graph.nodes[edge_start]["id"]
                            self._graph.edges[edge_start, edge_end][
                                "dest"
                            ] = self._graph.nodes[edge_end]["id"]

        # find backward links
        if forwardBackwardCheck:
            for i in range(1, skipLinks + 1):
                if frame + i < numFrames:
                    if (
                        frameMin + frame + i
                        in probabilityGenerator.TraxelsPerFrame.keys()
                    ):  # empty frame
                        for obj, traxel in probabilityGenerator.TraxelsPerFrame[
                            frameMin + frame + i
                        ].items():
                            if kdTreeFrames[0] is not None:
                                # search neighbors in the *current* frame for
                                # each traxel of the later frame
                                neighbors = self._findNearestNeighbors(
                                    kdTreeFrames[0],
                                    traxel,
                                    numNearestNeighbors,
                                    maxNeighborDist,
                                )
                                for n in neighbors:
                                    edge_start = (frameMin + frame, n)
                                    edge_end = (frameMin + frame + i, obj)
                                    checkNodeWhileAddingLinks(*edge_start)
                                    checkNodeWhileAddingLinks(*edge_end)
                                    self._graph.add_edge(edge_start, edge_end)
                                    self._graph.edges[edge_start, edge_end][
                                        "src"
                                    ] = self._graph.nodes[edge_start]["id"]
                                    self._graph.edges[edge_start, edge_end][
                                        "dest"
                                    ] = self._graph.nodes[edge_end]["id"]
    def generateTrackletGraph(self):
        """Contract linear chains of this traxel graph into "tracklet" nodes.

        Returns a copy of this graph in which every edge whose source has
        out-degree 1 and whose target has in-degree 1 is contracted, so each
        surviving node carries a ``tracklet`` attribute: the ordered list of
        traxels it absorbed. The copy keeps a back-reference to ``self`` in
        ``referenceTraxelGraph`` so solutions can later be mapped back.
        """
        logger.info("generating tracklet graph...")
        # shallow-copy the wrapper but copy the graph itself, so contraction
        # does not mutate the original hypotheses graph
        tracklet_graph = copy.copy(self)
        tracklet_graph._graph = tracklet_graph._graph.copy()
        tracklet_graph.withTracklets = True
        tracklet_graph.referenceTraxelGraph = self
        tracklet_graph.progressVisitor = self.progressVisitor
        self.progressVisitor.showState("Initializing Tracklet Graph")
        # initialize tracklet map to contain a list of only one traxel per node
        countNodes = 0
        numNodes = tracklet_graph.countNodes()
        for node in tracklet_graph._graph.nodes():
            countNodes += 1
            self.progressVisitor.showProgress(countNodes / float(numNodes))
            tracklet_graph._graph.nodes[node]["tracklet"] = [
                tracklet_graph._graph.nodes[node]["traxel"]
            ]
            del tracklet_graph._graph.nodes[node]["traxel"]
        # collect the edges that can be contracted: target's in-degree and
        # source's out-degree are both one
        links_to_be_contracted = []
        node_remapping = {}
        self.progressVisitor.showState("Finding Tracklets in Graph")
        countEdges = 0
        numEdges = tracklet_graph.countArcs()
        for edge in tracklet_graph._graph.edges():
            countEdges += 1
            self.progressVisitor.showProgress(countEdges / float(numEdges))
            if (
                tracklet_graph._graph.out_degree(edge[0]) == 1
                and tracklet_graph._graph.in_degree(edge[1]) == 1
            ):
                links_to_be_contracted.append(edge)
                for i in [0, 1]:
                    node_remapping[edge[i]] = edge[i]
        # apply edge contraction
        self.progressVisitor.showState("Contracting Edges in Tracklet Graph")
        countLinks = 0
        numLinks = len(links_to_be_contracted)
        for edge in links_to_be_contracted:
            countLinks += 1
            self.progressVisitor.showProgress(countLinks / float(numLinks))
            # node_remapping tracks earlier contractions, so `src` is the
            # representative node the source end currently belongs to
            src = node_remapping[edge[0]]
            dest = node_remapping[edge[1]]
            if (
                tracklet_graph._graph.in_degree(src) == 0
                and tracklet_graph._graph.out_degree(dest) == 0
            ):
                # if this tracklet would contract to a single node without incoming or outgoing edges,
                # then do NOT contract, as our tracking cannot handle length-one-tracks
                continue
            tracklet_graph._graph.nodes[src]["tracklet"].extend(
                tracklet_graph._graph.nodes[dest]["tracklet"]
            )
            # duplicate out arcs with new source
            for out_edge in tracklet_graph._graph.out_edges(dest):
                tracklet_graph._graph.add_edge(src, out_edge[1])
            # adjust node remapping to point to new source for all contracted traxels
            for t in tracklet_graph._graph.nodes[dest]["tracklet"]:
                node_remapping[(t.Timestep, t.Id)] = src
            tracklet_graph._graph.remove_node(dest)
        logger.info(
            "tracklet graph has {} nodes and {} edges (before {},{})".format(
                tracklet_graph.countNodes(),
                tracklet_graph.countArcs(),
                self.countNodes(),
                self.countArcs(),
            )
        )
        return tracklet_graph
def getNodeTraxelMap(self):
return NodeMap(self._graph, "traxel")
def getNodeTrackletMap(self):
return NodeMap(self._graph, "tracklet")
    def insertEnergies(
        self,
        maxNumObjects,
        detectionProbabilityFunc,
        transitionProbabilityFunc,
        boundaryCostMultiplierFunc,
        divisionProbabilityFunc,
        skipLinksBias,
    ):
        """Attach tracking energies (negative log probabilities) to the graph.

        For every node this stores ``features`` (detection energies, length
        ``maxNumObjects + 1``), ``appearanceFeatures``/``disappearanceFeatures``,
        an optional ``divisionFeatures`` vector, and a ``timestep`` range.
        For every edge this stores ``src``/``dest`` uuids and transition
        ``features``.

        Parameters
        ----------
        maxNumObjects : int
            Maximum number of objects a detection may contain.
        detectionProbabilityFunc : callable(traxel) -> sequence
            Per-state detection probabilities for one traxel.
        transitionProbabilityFunc : callable(srcTraxel, destTraxel) -> sequence
            Per-state transition probabilities along a link.
        boundaryCostMultiplierFunc : callable(traxel, isAppearance) -> float
            Cost multiplier for appearance (True) / disappearance (False).
        divisionProbabilityFunc : callable(traxel) -> sequence or None
            Division probabilities, or None if the traxel cannot divide.
        skipLinksBias : float
            Extra per-frame penalty added to links spanning more than one frame.
        """
        numElements = self._graph.number_of_nodes() + self._graph.number_of_edges()
        self.progressVisitor.showState("Inserting energies")
        # insert detection probabilities for all detections (and some also get a div probability)
        countElements = 0
        for n in self._graph.nodes():
            countElements += 1
            if not self.withTracklets:
                # only one traxel, but make it a list so everything below works the same
                traxels = [self._graph.nodes[n]["traxel"]]
            else:
                traxels = self._graph.nodes[n]["tracklet"]
            # accumulate features over all contained traxels, including the
            # transitions along the internal tracklet links
            previousTraxel = None
            detectionFeatures = np.zeros(maxNumObjects + 1)
            for t in traxels:
                detectionFeatures += np.array(negLog(detectionProbabilityFunc(t)))
                if previousTraxel is not None:
                    detectionFeatures += np.array(
                        negLog(transitionProbabilityFunc(previousTraxel, t))
                    )
                previousTraxel = t
            detectionFeatures = listify(list(detectionFeatures))
            # division only if probability is big enough
            divisionFeatures = divisionProbabilityFunc(traxels[-1])
            if divisionFeatures is not None:
                divisionFeatures = listify(negLog(divisionFeatures))
            # appearance/disappearance: state 0 is free, every other state
            # pays the boundary cost of the first/last traxel
            appearanceFeatures = listify(
                [0.0] + [boundaryCostMultiplierFunc(traxels[0], True)] * maxNumObjects
            )
            disappearanceFeatures = listify(
                [0.0] + [boundaryCostMultiplierFunc(traxels[-1], False)] * maxNumObjects
            )
            self._graph.nodes[n]["features"] = detectionFeatures
            if divisionFeatures is not None:
                self._graph.nodes[n]["divisionFeatures"] = divisionFeatures
            self._graph.nodes[n]["appearanceFeatures"] = appearanceFeatures
            self._graph.nodes[n]["disappearanceFeatures"] = disappearanceFeatures
            self._graph.nodes[n]["timestep"] = [
                traxels[0].Timestep,
                traxels[-1].Timestep,
            ]
            self.progressVisitor.showProgress(countElements / float(numElements))
        # insert transition probabilities for all links
        for a in self._graph.edges():
            countElements += 1
            self.progressVisitor.showProgress(countElements / float(numElements))
            if not self.withTracklets:
                srcTraxel = self._graph.nodes[self.source(a)]["traxel"]
                destTraxel = self._graph.nodes[self.target(a)]["traxel"]
            else:
                srcTraxel = self._graph.nodes[self.source(a)]["tracklet"][
                    -1
                ]  # src is last of the traxels in source tracklet
                destTraxel = self._graph.nodes[self.target(a)]["tracklet"][
                    0
                ]  # dest is first of traxels in destination tracklet
            features = listify(negLog(transitionProbabilityFunc(srcTraxel, destTraxel)))
            # add feature for additional Frames. Since we do not want these edges to be primarily taken, we add a bias to the edge. Now: hard coded, future: parameter
            frame_gap = destTraxel.Timestep - srcTraxel.Timestep
            # 1. method
            if frame_gap > 1:
                features[1][0] = features[1][0] + skipLinksBias * frame_gap
            # # 2. method
            # # introduce a new energies like: [[6], [15]] -> [[6, 23], [15, 23]] for first links and
            # # [[6], [15]] -> [[23, 6], [23, 15]] for second links, and so on for 3rd order links
            # # !!! this will introduce a new weight in the weight.json file. For the 2nd link, comes in 2nd row and so on.
            # # drawback: did not manage to adjust parameter to get sensible results.
            # for feat in features:
            #     for i in range(frame_gap):
            #         feat.append(23)
            #     if frame_gap > 1:
            #         feat[frame_gap-1], feat[0] = feat[0], feat[frame_gap-1]
            self._graph.edges[a[0], a[1]]["src"] = self._graph.nodes[a[0]]["id"]
            self._graph.edges[a[0], a[1]]["dest"] = self._graph.nodes[a[1]]["id"]
            self._graph.edges[a[0], a[1]]["features"] = features
def getMappingsBetweenUUIDsAndTraxels(self):
uuidToTraxelMap = {}
traxelIdPerTimestepToUniqueIdMap = {}
for n in self._graph.nodes():
uuid = self._graph.nodes[n]["id"]
traxels = []
if self.withTracklets:
traxels = self._graph.nodes[n]["tracklet"]
else:
traxels = [self._graph.nodes[n]["traxel"]]
uuidToTraxelMap[uuid] = [(t.Timestep, t.Id) for t in traxels]
for t in uuidToTraxelMap[uuid]:
traxelIdPerTimestepToUniqueIdMap.setdefault(str(t[0]), {})[
str(t[1])
] = uuid
# sort the list of traxels per UUID by their timesteps
for v in uuidToTraxelMap.values():
v.sort(key=lambda timestepIdTuple: timestepIdTuple[0])
return traxelIdPerTimestepToUniqueIdMap, uuidToTraxelMap
    def toTrackingGraph(self, noFeatures=False):
        """Convert this hypotheses graph to a JSON-serializable tracking graph.

        Parameters
        ----------
        noFeatures : bool
            If True only ids/links are exported; otherwise the energy
            features previously attached by ``insertEnergies()`` are required
            and a ValueError is raised when they are missing.

        Returns
        -------
        hytra.core.jsongraph.JsonTrackingGraph
            Wraps the model dict (segmentation/linking/division hypotheses,
            traxel-to-uuid map, solver settings, pairwise exclusions).
        """
        requiredNodeAttribs = ["id"]
        requiredLinkAttribs = ["src", "dest"]
        if not noFeatures:
            requiredNodeAttribs.append("features")
            requiredLinkAttribs.append("features")
        def translateNodeToDict(n):
            # export the known node attributes; a missing *required* one means
            # insertEnergies() was not run yet
            result = {}
            attrs = self._graph.nodes[n]
            for k in [
                "id",
                "features",
                "appearanceFeatures",
                "disappearanceFeatures",
                "divisionFeatures",
                "timestep",
            ]:
                if k in attrs:
                    result[k] = attrs[k]
                elif k in requiredNodeAttribs:
                    raise ValueError(
                        "Cannot use graph nodes without assigned ID and features, run insertEnergies() first"
                    )
            return result
        def translateLinkToDict(l):
            # same idea as translateNodeToDict, but for edges
            result = {}
            attrs = self._graph.edges[l[0], l[1]]
            for k in ["src", "dest", "features"]:
                if k in attrs:
                    result[k] = attrs[k]
                elif k in requiredLinkAttribs:
                    raise ValueError(
                        "Cannot use graph links without source, target, and features, run insertEnergies() first"
                    )
            return result
        traxelIdPerTimestepToUniqueIdMap, _ = self.getMappingsBetweenUUIDsAndTraxels()
        model = {
            "segmentationHypotheses": [
                translateNodeToDict(n) for n in self._graph.nodes()
            ],
            "linkingHypotheses": [translateLinkToDict(e) for e in self._graph.edges()],
            "divisionHypotheses": [],
            "traxelToUniqueId": traxelIdPerTimestepToUniqueIdMap,
            "settings": {
                "statesShareWeights": True,
                "allowPartialMergerAppearance": False,
                "requireSeparateChildrenOfDivision": True,
                "optimizerEpGap": 0.01,
                "optimizerVerbose": True,
                "optimizerNumThreads": 1,
            },
        }
        # extract exclusion sets: pairs of uuids that must not both be active
        exclusions = set([])
        for n in self._graph.nodes():
            if self.withTracklets:
                traxel = self._graph.nodes[n]["tracklet"][0]
            else:
                traxel = self._graph.nodes[n]["traxel"]
            if traxel.conflictingTraxelIds is not None:
                if self.withTracklets:
                    logger.error(
                        "Exclusion constraints do not work with tracklets yet!"
                    )
                conflictingIds = [
                    traxelIdPerTimestepToUniqueIdMap[str(traxel.Timestep)][str(i)]
                    for i in traxel.conflictingTraxelIds
                ]
                myId = traxelIdPerTimestepToUniqueIdMap[str(traxel.Timestep)][
                    str(traxel.Id)
                ]
                for ci in conflictingIds:
                    # insert pairwise exclusion constraints only, and always put the lower id first
                    if ci < myId:
                        exclusions.add((ci, myId))
                    else:
                        exclusions.add((myId, ci))
        model["exclusions"] = [list(t) for t in exclusions]
        # TODO: this recomputes the uuidToTraxelMap even though we have it already...
        trackingGraph = hytra.core.jsongraph.JsonTrackingGraph(
            model=model, progressVisitor=self.progressVisitor
        )
        return trackingGraph
    def insertSolution(self, resultDictionary):
        """Write a solver result back onto the (traxel) graph.

        Stores per-node ``value`` (number of contained objects), per-edge
        ``value`` and ``gap``, and per-node ``divisionValue`` flags. When this
        graph was built with tracklets, the results are written to the
        underlying reference traxel graph, including the implicit links
        inside each tracklet.

        Parameters
        ----------
        resultDictionary : dict
            With keys ``detectionResults`` and optional ``linkingResults``
            and ``divisionResults`` (same format as getSolutionDictionary()).
        """
        assert isinstance(self._graph, nx.DiGraph), "Expecting the graph to be directed"
        _, uuidToTraxelMap = self.getMappingsBetweenUUIDsAndTraxels()
        if self.withTracklets:
            traxelgraph = self.referenceTraxelGraph
        else:
            traxelgraph = self
        # reset all values
        for n in traxelgraph._graph.nodes():
            traxelgraph._graph.nodes[n]["value"] = 0
            traxelgraph._graph.nodes[n]["divisionValue"] = False
        for e in traxelgraph._graph.edges():
            traxelgraph._graph.edges[e[0], e[1]]["value"] = 0
        # store values from dict
        for detection in resultDictionary["detectionResults"]:
            traxels = uuidToTraxelMap[detection["id"]]
            for traxel in traxels:
                traxelgraph._graph.nodes[traxel]["value"] = detection["value"]
            # propagate the detection value along the internal tracklet links
            for internal_edge in zip(traxels, traxels[1:]):
                traxelgraph._graph.edges[internal_edge[0], internal_edge[1]][
                    "value"
                ] = detection["value"]
        if (
            "linkingResults" in resultDictionary
            and resultDictionary["linkingResults"] is not None
        ):
            for link in resultDictionary["linkingResults"]:
                # a link connects the last traxel of the source tracklet with
                # the first traxel of the destination tracklet
                source, dest = (
                    uuidToTraxelMap[link["src"]][-1],
                    uuidToTraxelMap[link["dest"]][0],
                )
                if (source in traxelgraph._graph.predecessors(dest)) and (
                    dest in traxelgraph._graph.neighbors(source)
                ):
                    traxelgraph._graph.edges[source, dest]["value"] = link["value"]
                    # gap = number of frames the link spans (nodes are (timestep, id))
                    traxelgraph._graph.edges[source, dest]["gap"] = dest[0] - source[0]
        if (
            "divisionResults" in resultDictionary
            and resultDictionary["divisionResults"] is not None
        ):
            for division in resultDictionary["divisionResults"]:
                # the division flag lives on the last traxel of the tracklet
                traxelgraph._graph.nodes[uuidToTraxelMap[division["id"]][-1]][
                    "divisionValue"
                ] = division["value"]
def getSolutionDictionary(self):
resultDictionary = {}
if self.withTracklets:
traxelgraph = self.referenceTraxelGraph
else:
traxelgraph = self
detectionList = []
divisionList = []
linkList = []
def checkAttributeValue(element, attribName, default):
if attribName in element:
return element[attribName]
else:
return default
for n in traxelgraph._graph.nodes():
newDetection = {}
newDetection["id"] = traxelgraph._graph.nodes[n]["id"]
newDetection["value"] = checkAttributeValue(
traxelgraph._graph.nodes[n], "value", 0
)
detectionList.append(newDetection)
if "divisionValue" in traxelgraph._graph.nodes[n]:
newDivsion = {}
newDivsion["id"] = traxelgraph._graph.nodes[n]["id"]
newDivsion["value"] = checkAttributeValue(
traxelgraph._graph.nodes[n], "divisionValue", False
)
divisionList.append(newDivsion)
for a in traxelgraph.arcIterator():
newLink = {}
src = self.source(a)
dest = self.target(a)
newLink["src"] = traxelgraph._graph.nodes[src]["id"]
newLink["dest"] = traxelgraph._graph.nodes[dest]["id"]
newLink["value"] = checkAttributeValue(
traxelgraph._graph.edges[src, dest], "value", 0
)
newLink["gap"] = checkAttributeValue(
traxelgraph._graph.edges[src, dest], "gap", 1
)
linkList.append(newLink)
resultDictionary["detectionResults"] = detectionList
resultDictionary["linkingResults"] = linkList
resultDictionary["divisionResults"] = divisionList
return resultDictionary
def countIncomingObjects(self, node):
numberOfIncomingObject = 0
numberOfIncomingEdges = 0
for in_edge in self._graph.in_edges(node):
if "value" in self._graph.edges[in_edge[0], node]:
numberOfIncomingObject += self._graph.edges[in_edge[0], node]["value"]
numberOfIncomingEdges += 1
return numberOfIncomingObject, numberOfIncomingEdges
def countOutgoingObjects(self, node):
numberOfOutgoingObject = 0
numberOfOutgoingEdges = 0
for out_edge in self._graph.out_edges(node):
if (
"value" in self._graph.edges[node, out_edge[1]]
and self._graph.edges[node, out_edge[1]]["value"] > 0
):
numberOfOutgoingObject += self._graph.edges[node, out_edge[1]]["value"]
numberOfOutgoingEdges += 1
return numberOfOutgoingObject, numberOfOutgoingEdges
    def computeLineage(self, firstTrackId=2, firstLineageId=2, skipLinks=1):
        """Assign ``lineageId``/``trackId`` node attributes from the solution.

        Traverses the solution from every active node without incoming
        objects ("track starts"). Divisions keep the lineage id but start a
        fresh track id for each child; when several tracks merge into one
        node, only the first arriving lineage/track is kept.

        Parameters
        ----------
        firstTrackId, firstLineageId : int
            Smallest ids to hand out (defaults of 2 because, per the comment
            below, 0 means background and 1 means misdetection in ilastik).
        skipLinks : int
            Value stored in the ``gap`` attribute of nodes reached through a
            skip link.
        """
        update_queue = []
        # start lineages / tracks at 2, because 0 means background=black, 1 means misdetection in ilastik
        max_lineage_id = firstLineageId
        max_track_id = firstTrackId
        if self.withTracklets:
            traxelgraph = self.referenceTraxelGraph
        else:
            traxelgraph = self
        self.progressVisitor.showState("Compute lineage")
        # find start of lineages
        numElements = 2 * traxelgraph.countNodes()
        countElements = 0
        for n in traxelgraph.nodeIterator():
            countElements += 1
            self.progressVisitor.showProgress(countElements / float(numElements))
            if (
                traxelgraph.countIncomingObjects(n)[0] == 0
                and "value" in traxelgraph._graph.nodes[n]
                and traxelgraph._graph.nodes[n]["value"] > 0
                and (
                    self.allowLengthOneTracks
                    or traxelgraph.countOutgoingObjects(n)[0] > 0
                )
            ):
                # found start of a track
                update_queue.append((n, max_lineage_id, max_track_id))
                max_lineage_id += 1
                max_track_id += 1
            else:
                traxelgraph._graph.nodes[n]["lineageId"] = None
                traxelgraph._graph.nodes[n]["trackId"] = None
        # propagate ids along the solution (depth-first: pop() takes the tail)
        while len(update_queue) > 0:
            countElements += 1
            current_node, lineage_id, track_id = update_queue.pop()
            self.progressVisitor.showProgress(countElements / float(numElements))
            # if we did not run merger resolving, it can happen that we reach a node several times,
            # and would propagate the new lineage+track IDs to all descendants again! We simply
            # stop propagating in that case and just use the lineageID that reached the node first.
            if (
                traxelgraph._graph.nodes[current_node].get("lineageId", None)
                is not None
                and traxelgraph._graph.nodes[current_node].get("trackId", None)
                is not None
            ):
                logger.debug("Several tracks are merging here, stopping a later one")
                continue
            # set a new trackID
            traxelgraph._graph.nodes[current_node]["lineageId"] = lineage_id
            traxelgraph._graph.nodes[current_node]["trackId"] = track_id
            numberOfOutgoingObject, numberOfOutgoingEdges = traxelgraph.countOutgoingObjects(
                current_node
            )
            if numberOfOutgoingObject != numberOfOutgoingEdges:
                logger.warning(
                    "running lineage computation on unresolved graphs depends on a race condition"
                )
            if (
                "divisionValue" in traxelgraph._graph.nodes[current_node]
                and traxelgraph._graph.nodes[current_node]["divisionValue"]
            ):
                # division: both children stay in the lineage but each starts
                # a new track
                assert traxelgraph.countOutgoingObjects(current_node)[1] == 2
                traxelgraph._graph.nodes[current_node]["children"] = []
                for a in traxelgraph._graph.out_edges(current_node):
                    if (
                        "value" in traxelgraph._graph.edges[current_node, a[1]]
                        and traxelgraph._graph.edges[current_node, a[1]]["value"] > 0
                    ):
                        traxelgraph._graph.nodes[a[1]]["gap"] = skipLinks
                        traxelgraph._graph.nodes[current_node]["children"].append(a[1])
                        traxelgraph._graph.nodes[a[1]]["parent"] = current_node
                        update_queue.append(
                            (traxelgraph.target(a), lineage_id, max_track_id)
                        )
                        max_track_id += 1
            else:
                if traxelgraph.countOutgoingObjects(current_node)[1] > 1:
                    logger.debug(
                        "Found merger splitting into several objects, propagating lineage and track to all descendants!"
                    )
                for a in traxelgraph._graph.out_edges(current_node):
                    if (
                        "value" in traxelgraph._graph.edges[current_node, a[1]]
                        and traxelgraph._graph.edges[current_node, a[1]]["value"] > 0
                    ):
                        # direct links (gap == 1 or no gap stored) continue the
                        # current track
                        if (
                            "gap" in traxelgraph._graph.edges[current_node, a[1]]
                            and traxelgraph._graph.edges[current_node, a[1]]["gap"] == 1
                        ) or "gap" not in traxelgraph._graph.edges[current_node, a[1]]:
                            traxelgraph._graph.nodes[a[1]]["gap"] = 1
                            update_queue.append(
                                (traxelgraph.target(a), lineage_id, track_id)
                            )
                        # skip links start a new track id within the lineage
                        if (
                            "gap" in traxelgraph._graph.edges[current_node, a[1]]
                            and traxelgraph._graph.edges[current_node, a[1]]["gap"] > 1
                        ):
                            traxelgraph._graph.nodes[a[1]]["gap"] = skipLinks
                            traxelgraph._graph.nodes[a[1]]["gap_parent"] = current_node
                            update_queue.append(
                                (traxelgraph.target(a), lineage_id, max_track_id)
                            )
                            max_track_id += 1
def pruneGraphToSolution(self, distanceToSolution=0):
prunedGraph = HypothesesGraph()
for n in self.nodeIterator():
if "value" in self._graph.nodes[n] and self._graph.nodes[n]["value"] > 0:
prunedGraph._graph.add_node(n, **self._graph.nodes[n])
for e in self.arcIterator():
src = self.source(e)
dest = self.target(e)
if distanceToSolution == 0:
if src in prunedGraph._graph and dest in prunedGraph._graph:
prunedGraph._graph.add_edge(
src, dest, **self._graph.edges[src, dest]
)
# TODO: can be optimized by looping over the pruned graph nodes(might sacrifice readability)
for distance in range(1, distanceToSolution + 1):
for e in self.arcIterator():
src = self.source(e)
dest = self.target(e)
if src in prunedGraph._graph or dest in prunedGraph._graph:
prunedGraph._graph.add_node(src, **self._graph.nodes[src])
prunedGraph._graph.add_node(dest, **self._graph.nodes[dest])
prunedGraph._graph.add_edge(
src, dest, **self._graph.edges[src, dest]
)
# in case a node is NOT an appearance and
# has all the incoming edges with value 0, we remove all these incoming edges
#
# in case a node is NOT a disappearance and
# has all the outgoing edges with value 0, we remove all these outgoing edges
withAppearanceFeatures = True
withDisappearanceFeatures = True
withFeatures = True
correctAppearanceFeatureLength = True
correctDisappearanceFeatureLength = True
correctFeatureLength = True
maxNumObjects = None
maxNumObjectsAppearance = None
maxNumObjectsDisappearance = None
for n in self.nodeIterator():
try:
maxNumObjectsApp = len(self._graph.nodes[n]["appearanceFeatures"]) - 1
if maxNumObjectsAppearance is None:
maxNumObjectsAppearance = maxNumObjectsApp
elif not maxNumObjectsApp == maxNumObjectsAppearance:
correctAppearanceFeatureLength = False
logger.info(
"Appearance/disappearance features have different lengths!"
)
except:
withAppearanceFeatures = False
logger.info("There are no appearance features in node properties!")
break
try:
maxNumObjectsDis = (
len(self._graph.nodes[n]["disappearanceFeatures"]) - 1
)
if maxNumObjectsDisappearance is None:
maxNumObjectsDisappearance = maxNumObjectsDis
elif not maxNumObjectsDis == maxNumObjectsDisappearance:
correctDisappearanceFeatureLength = False
logger.info("Disappearance features have different lengths!")
except:
withDisappearanceFeatures = False
logger.info("There are no disappearance features in node properties!")
break
if withAppearanceFeatures and withDisappearanceFeatures:
if (
correctAppearanceFeatureLength
and correctDisappearanceFeatureLength
and maxNumObjectsAppearance == maxNumObjectsDisappearance
):
maxNumObjects = maxNumObjectsAppearance
else:
correctFeatureLength = False
logger.info(
"Appearance and disappearance features have different lengths!"
)
else:
withFeatures = False
if withFeatures and correctFeatureLength:
for n in self.nodeIterator():
if not (
"appearance" in self._graph.nodes[n].keys()
and self._graph.nodes[n]["appearance"]
):
allArcsWithValueZero = True
in_edges = self._graph.in_edges(n)
for edge in list(in_edges):
if (
"value" in self._graph.edges[edge[0]][edge[1]].keys()
and not self._graph.edges[edge[0]][edge[1]]["value"] == 0
):
allArcsWithValueZero = False
break
self._graph.nodes[n]["appearanceFeatures"] = listify(
[0.0] + [0.0] * maxNumObjects
)
if allArcsWithValueZero:
if not in_edges == []:
self._graph.remove_edges_from(in_edges)
if not (
"disappearance" in self._graph.nodes[n].keys()
and self._graph.nodes[n]["disappearance"]
):
allArcsWithValueZero = True
out_edges = self._graph.out_edges(n)
for edge in list(out_edges):
if (
"value" in self._graph.edges[edge[0]][edge[1]].keys()
and not self._graph.edges[edge[0]][edge[1]]["value"] == 0
):
allArcsWithValueZero = False
break
self._graph.nodes[n]["disappearanceFeatures"] = listify(
[0.0] + [0.0] * maxNumObjects
)
if allArcsWithValueZero:
if not out_edges == []:
self._graph.remove_edges_from(out_edges)
return prunedGraph
def _getNodeAttribute(self, timestep, objectId, attribute):
try:
return self._graph.nodes[(int(timestep), int(objectId))][attribute]
except KeyError:
logger.error(
attribute
+ " not found in graph node properties, call computeLineage() first!"
)
raise
def getLineageId(self, timestep, objectId):
if self.withTracklets:
traxelgraph = self.referenceTraxelGraph
else:
traxelgraph = self
return traxelgraph._getNodeAttribute(timestep, objectId, "lineageId")
def getTrackId(self, timestep, objectId):
if self.withTracklets:
traxelgraph = self.referenceTraxelGraph
else:
traxelgraph = self
return traxelgraph._getNodeAttribute(timestep, objectId, "trackId")
| true | true |
1c4567adc8fd9e1e995c1211c7aa015c0a2da7ee | 4,059 | py | Python | blockchain_client.py | JB1984/TheBlock | 0e0b00034424d7453651b5efc2ef71a13f3719c4 | [
"MIT"
] | null | null | null | blockchain_client.py | JB1984/TheBlock | 0e0b00034424d7453651b5efc2ef71a13f3719c4 | [
"MIT"
] | 6 | 2018-09-14T21:00:41.000Z | 2018-09-27T11:05:52.000Z | blockchain_client.py | JB1984/TheBlock | 0e0b00034424d7453651b5efc2ef71a13f3719c4 | [
"MIT"
] | null | null | null | from collections import OrderedDict
import binascii
import base64
import json
import Crypto
import Crypto.Random
from Crypto.Hash import SHA
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
import requests
from flask import Flask, jsonify, request, render_template
class Transaction:
    """A signed value transfer from one wallet address to another."""

    def __init__(self, sender_address, sender_private_key, recipient_address, value, note, picture):
        self.sender_address = sender_address
        self.sender_private_key = sender_private_key
        self.recipient_address = recipient_address
        self.value = value
        self.note = note
        self.picture = picture

    def __getattr__(self, attr):
        # Fall back to an optional ``data`` mapping for unknown attributes.
        # BUGFIX: the previous ``return self.data[attr]`` re-entered
        # __getattr__ when no ``data`` attribute existed, causing infinite
        # recursion; look it up via __dict__ and raise AttributeError instead.
        try:
            return self.__dict__["data"][attr]
        except KeyError:
            raise AttributeError(attr)

    def to_dict(self):
        """Return the signable transaction fields in a fixed key order."""
        return OrderedDict({'sender_address': self.sender_address,
                            'recipient_address': self.recipient_address,
                            'value': self.value,
                            'note': self.note,
                            'picture': self.picture})

    def sign_transaction(self):
        """Sign the transaction dict with the sender's RSA key (PKCS#1 v1.5 / SHA-1)."""
        private_key = self.sender_private_key
        signer = PKCS1_v1_5.new(private_key)
        h = SHA.new(str(self.to_dict()).encode('utf8'))
        return binascii.hexlify(signer.sign(h)).decode('ascii')
# Flask application serving the wallet client UI and signing endpoints.
app = Flask(__name__)
@app.route('/')
def index():
    """Render the wallet client landing page."""
    return render_template('./indexClient.html')
@app.route('/make/transaction')
def make_transaction():
    """Render the form for composing a new transaction."""
    return render_template('./make_transaction.html')
@app.route('/view/transactions')
def view_transaction():
    """Render the transaction browser page."""
    return render_template('./view_transactions.html')
@app.route('/view/wallet_balance')
def view_wallet_balance():
    """Render the wallet balance page."""
    return render_template('./view_wallet_balance.html')
@app.route('/wallet/new', methods=['GET'])
def new_wallet():
    """Generate a fresh RSA key pair and persist the private key.

    Returns both keys hex-encoded (DER) as JSON, and writes the private key
    to ``private.pem`` in the working directory so the signing and address
    endpoints can reuse it.

    NOTE(review): 1024-bit RSA is weak by modern standards, and storing an
    unencrypted private key on disk is only acceptable for a demo.
    """
    random_gen = Crypto.Random.new().read
    private_key = RSA.generate(1024, random_gen)
    public_key = private_key.publickey()
    response = {
        'private_key': binascii.hexlify(private_key.exportKey(format='DER')).decode('ascii'),
        'public_key': binascii.hexlify(public_key.exportKey(format='DER')).decode('ascii')
    }
    # Save the generated key for use later; `with` guarantees the handle is
    # closed even if the write fails (the previous code closed it manually).
    with open("private.pem", "wb") as key_file:
        key_file.write(private_key.exportKey('PEM'))
    return jsonify(response), 200
@app.route('/generate/transaction', methods=['POST'])
def generate_transaction():
    """Build and sign a transaction from the POSTed form data.

    Reads the sender's private key from ``private.pem``, derives the sender
    address from its public key, optionally base64-encodes an attached
    picture file, and returns the transaction dict plus its signature.
    """
    # Load the persisted private key; `with` closes the handle (the previous
    # version opened the file and never closed it).
    with open("private.pem", "r") as key_file:
        priKey = RSA.import_key(key_file.read())
    pubKey = priKey.publickey()
    sender_address = binascii.hexlify(pubKey.exportKey(format='DER')).decode('ascii')
    sender_private_key = priKey
    recipient_address = request.form['recipient_address']
    value = request.form['amount']
    note = request.form['note']
    if request.form['picture'] != "":
        with open(request.form['picture'], "rb") as imageFile:
            pictureString = base64.b64encode(imageFile.read())
        # BUGFIX: decode the base64 bytes to text; str(bytes) embedded the
        # literal "b'...'" wrapper in the transaction payload. Debug prints
        # of the (potentially huge) picture string were removed as well.
        pictureString1 = pictureString.decode('ascii')
    else:
        pictureString1 = ""
    transaction = Transaction(sender_address, sender_private_key, recipient_address, value, note, pictureString1)
    response = {'transaction': transaction.to_dict(), 'signature': transaction.sign_transaction()}
    return jsonify(response), 200
@app.route('/get_pub_key', methods=['GET'])
def get_pub_key():
    """Return the wallet's public key (hex-encoded DER) derived from private.pem."""
    # `with` closes the key file; the previous version leaked the handle.
    with open("private.pem", "r") as key_file:
        priKey = RSA.import_key(key_file.read())
    pubKey = priKey.publickey()
    sender_address = binascii.hexlify(pubKey.exportKey(format='DER')).decode('ascii')
    response = {'pub_key': sender_address}
    return jsonify(response), 200
if __name__ == '__main__':
    # Command-line entry point: start the development server.
    from argparse import ArgumentParser

    cli = ArgumentParser()
    cli.add_argument('-p', '--port', default=8080, type=int, help='port to listen on')
    parsed_args = cli.parse_args()
    app.run(host='0.0.0.0', port=parsed_args.port)
| 29.845588 | 114 | 0.649914 | from collections import OrderedDict
import binascii
import base64
import json
import Crypto
import Crypto.Random
from Crypto.Hash import SHA
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
import requests
from flask import Flask, jsonify, request, render_template
class Transaction:
def __init__(self, sender_address, sender_private_key, recipient_address, value, note, picture):
self.sender_address = sender_address
self.sender_private_key = sender_private_key
self.recipient_address = recipient_address
self.value = value
self.note = note
self.picture = picture
def __getattr__(self, attr):
return self.data[attr]
def to_dict(self):
return OrderedDict({'sender_address': self.sender_address,
'recipient_address': self.recipient_address,
'value': self.value,
'note': self.note,
'picture': self.picture})
def sign_transaction(self):
private_key = self.sender_private_key
signer = PKCS1_v1_5.new(private_key)
h = SHA.new(str(self.to_dict()).encode('utf8'))
return binascii.hexlify(signer.sign(h)).decode('ascii')
app = Flask(__name__)
@app.route('/')
def index():
return render_template('./indexClient.html')
@app.route('/make/transaction')
def make_transaction():
return render_template('./make_transaction.html')
@app.route('/view/transactions')
def view_transaction():
return render_template('./view_transactions.html')
@app.route('/view/wallet_balance')
def view_wallet_balance():
return render_template('./view_wallet_balance.html')
@app.route('/wallet/new', methods=['GET'])
def new_wallet():
random_gen = Crypto.Random.new().read
private_key = RSA.generate(1024, random_gen)
public_key = private_key.publickey()
response = {
'private_key': binascii.hexlify(private_key.exportKey(format='DER')).decode('ascii'),
'public_key': binascii.hexlify(public_key.exportKey(format='DER')).decode('ascii')
}
f = open("private.pem", "wb")
f.write(private_key.exportKey('PEM'))
f.close()
return jsonify(response), 200
@app.route('/generate/transaction', methods=['POST'])
def generate_transaction():
p = open("private.pem", "r")
priKey = RSA.import_key(p.read())
pubKey = priKey.publickey()
sender_address = binascii.hexlify(pubKey.exportKey(format='DER')).decode('ascii')
sender_private_key = priKey
recipient_address = request.form['recipient_address']
value = request.form['amount']
note = request.form['note']
if request.form['picture'] != "":
with open(request.form['picture'], "rb") as imageFile:
pictureString = base64.b64encode(imageFile.read())
pictureString1 = str(pictureString)
print(pictureString)
print(pictureString1)
else:
pictureString1 = ""
transaction = Transaction(sender_address, sender_private_key, recipient_address, value, note, pictureString1)
response = {'transaction': transaction.to_dict(), 'signature': transaction.sign_transaction()}
return jsonify(response), 200
@app.route('/get_pub_key', methods=['GET'])
def get_pub_key():
p = open("private.pem", "r")
priKey = RSA.import_key(p.read())
pubKey = priKey.publickey()
sender_address = binascii.hexlify(pubKey.exportKey(format='DER')).decode('ascii')
response = {'pub_key': sender_address}
return jsonify(response), 200
if __name__ == '__main__':
from argparse import ArgumentParser
parser = ArgumentParser()
parser.add_argument('-p', '--port', default=8080, type=int, help='port to listen on')
args = parser.parse_args()
port = args.port
app.run(host='0.0.0.0', port=port)
| true | true |
1c4567eaf684081be1c5f842fe292f61cc1719a8 | 1,370 | py | Python | dataloader/dataloaders.py | misads/cv_template | 9976ee0ada449a494d26f896c598610f233edc10 | [
"MIT"
] | 69 | 2020-09-01T11:23:48.000Z | 2022-03-26T08:42:16.000Z | dataloader/dataloaders.py | misads/cv_template | 9976ee0ada449a494d26f896c598610f233edc10 | [
"MIT"
] | 3 | 2021-02-16T09:22:09.000Z | 2022-01-02T07:54:39.000Z | dataloader/dataloaders.py | misads/cv_template | 9976ee0ada449a494d26f896c598610f233edc10 | [
"MIT"
] | 15 | 2020-07-07T06:37:33.000Z | 2022-03-21T07:37:33.000Z | # encoding=utf-8
from dataloader.image_list import ListTrainValDataset, ListTestDataset
from dataloader.transforms import get_transform
from torch.utils.data import DataLoader
from options import opt
import pdb
import os
###################
TEST_DATASET_HAS_OPEN = False  # whether a held-out test set has been released
###################
# txt files listing the train/val samples for the selected dataset
train_list = os.path.join('datasets', opt.dataset, 'train.txt')
val_list = os.path.join('datasets', opt.dataset, 'val.txt')
# cap the dataset size in debug mode for fast iteration
max_size = 128 if opt.debug else None
# transforms
transform = get_transform(opt.transform)
train_transform = transform.train_transform
val_transform = transform.val_transform
# datasets and dataloaders
train_dataset = ListTrainValDataset(train_list, transforms=train_transform, max_size=max_size)
train_dataloader = DataLoader(train_dataset, batch_size=opt.batch_size, shuffle=True, num_workers=opt.workers, drop_last=True)
val_dataset = ListTrainValDataset(val_list, transforms=val_transform, max_size=max_size)
val_dataloader = DataLoader(val_dataset, batch_size=1, shuffle=False, num_workers=opt.workers//2)
if TEST_DATASET_HAS_OPEN:
    test_list = os.path.join('datasets', opt.dataset, 'test.txt')  # not available yet
    test_dataset = ListTestDataset(test_list, scale=opt.scale, max_size=max_size, norm=opt.norm_input)
    test_dataloader = DataLoader(test_dataset, batch_size=1, shuffle=False, num_workers=1)
else:
    test_dataloader = None
| 35.128205 | 126 | 0.783212 |
from dataloader.image_list import ListTrainValDataset, ListTestDataset
from dataloader.transforms import get_transform
from torch.utils.data import DataLoader
from options import opt
import pdb
import os
= ListTrainValDataset(train_list, transforms=train_transform, max_size=max_size)
train_dataloader = DataLoader(train_dataset, batch_size=opt.batch_size, shuffle=True, num_workers=opt.workers, drop_last=True)
val_dataset = ListTrainValDataset(val_list, transforms=val_transform, max_size=max_size)
val_dataloader = DataLoader(val_dataset, batch_size=1, shuffle=False, num_workers=opt.workers//2)
if TEST_DATASET_HAS_OPEN:
test_list = os.path.join('datasets', opt.dataset, 'test.txt')
test_dataset = ListTestDataset(test_list, scale=opt.scale, max_size=max_size, norm=opt.norm_input)
test_dataloader = DataLoader(test_dataset, batch_size=1, shuffle=False, num_workers=1)
else:
test_dataloader = None
| true | true |
1c45681e99e7576cdadf4d81110b8dbc5fa1dd25 | 4,730 | py | Python | graph_objs/ohlc/_line.py | wwwidonja/changed_plotly | 1bda35a438539a97c84a3ab3952e95e8848467bd | [
"MIT"
] | null | null | null | graph_objs/ohlc/_line.py | wwwidonja/changed_plotly | 1bda35a438539a97c84a3ab3952e95e8848467bd | [
"MIT"
] | null | null | null | graph_objs/ohlc/_line.py | wwwidonja/changed_plotly | 1bda35a438539a97c84a3ab3952e95e8848467bd | [
"MIT"
] | null | null | null | from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Line(_BaseTraceHierarchyType):
    """Line style (dash pattern and width) for an OHLC trace.

    Generated plotly graph-object node located at ``ohlc.line``.  Both
    properties can also be overridden per direction through
    ``increasing.line`` and ``decreasing.line``.
    """

    # class properties
    # --------------------
    _parent_path_str = "ohlc"
    _path_str = "ohlc.line"
    _valid_props = {"dash", "width"}

    # dash
    # ----
    @property
    def dash(self):
        """
        Sets the dash style of lines. Set to a dash type string
        ("solid", "dot", "dash", "longdash", "dashdot", or
        "longdashdot") or a dash length list in px (eg
        "5px,10px,2px,2px"). Note that this style setting can also be
        set per direction via `increasing.line.dash` and
        `decreasing.line.dash`.

        The 'dash' property is an enumeration that may be specified as:
          - One of the following dash styles:
                ['solid', 'dot', 'dash', 'longdash', 'dashdot', 'longdashdot']
          - A string containing a dash length list in pixels or percentages
                (e.g. '5px 10px 2px 2px', '5, 10, 2, 2', '10% 20% 40%', etc.)

        Returns
        -------
        str
        """
        return self["dash"]

    @dash.setter
    def dash(self, val):
        self["dash"] = val

    # width
    # -----
    @property
    def width(self):
        """
        Sets the width (in px) of the line segments.  (The upstream
        code generator emitted "[object Object]" here; corrected.)
        Note that this style setting can also be set per direction via
        `increasing.line.width` and `decreasing.line.width`.

        The 'width' property is a number and may be specified as:
          - An int or float in the interval [0, inf]

        Returns
        -------
        int|float
        """
        return self["width"]

    @width.setter
    def width(self, val):
        self["width"] = val

    # Self properties description
    # ---------------------------
    # NOTE(review): the "[object Object]" text below is a code-generation
    # artifact.  It is left unchanged here because this string literal is
    # returned at runtime (it is data, not a docstring).
    @property
    def _prop_descriptions(self):
        return """\
        dash
            Sets the dash style of lines. Set to a dash type string
            ("solid", "dot", "dash", "longdash", "dashdot", or
            "longdashdot") or a dash length list in px (eg
            "5px,10px,2px,2px"). Note that this style setting can
            also be set per direction via `increasing.line.dash`
            and `decreasing.line.dash`.
        width
            [object Object] Note that this style setting can also
            be set per direction via `increasing.line.width` and
            `decreasing.line.width`.
        """

    def __init__(self, arg=None, dash=None, width=None, **kwargs):
        """
        Construct a new Line object

        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of :class:`new_plotly.graph_objs.ohlc.Line`
        dash
            Sets the dash style of lines. Set to a dash type string
            ("solid", "dot", "dash", "longdash", "dashdot", or
            "longdashdot") or a dash length list in px (eg
            "5px,10px,2px,2px"). Note that this style setting can
            also be set per direction via `increasing.line.dash`
            and `decreasing.line.dash`.
        width
            Sets the width (in px) of the line segments; can also be
            set per direction via `increasing.line.width` and
            `decreasing.line.width`.

        Returns
        -------
        Line
        """
        super(Line, self).__init__("line")

        # Internal construction path: the parent figure supplies state.
        if "_parent" in kwargs:
            self._parent = kwargs["_parent"]
            return

        # Validate arg
        # ------------
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the new_plotly.graph_objs.ohlc.Line
constructor must be a dict or
an instance of :class:`new_plotly.graph_objs.ohlc.Line`"""
            )

        # Handle skip_invalid
        # -------------------
        self._skip_invalid = kwargs.pop("skip_invalid", False)
        self._validate = kwargs.pop("_validate", True)

        # Populate data dict with properties
        # ----------------------------------
        _v = arg.pop("dash", None)
        _v = dash if dash is not None else _v
        if _v is not None:
            self["dash"] = _v
        _v = arg.pop("width", None)
        _v = width if width is not None else _v
        if _v is not None:
            self["width"] = _v

        # Process unknown kwargs
        # ----------------------
        self._process_kwargs(**dict(arg, **kwargs))

        # Reset skip_invalid
        # ------------------
        self._skip_invalid = False
| 31.324503 | 82 | 0.532981 | from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Line(_BaseTraceHierarchyType):
    """Line style (dash pattern and width) for an OHLC trace
    (``ohlc.line``); both properties can also be set per direction via
    ``increasing.line`` / ``decreasing.line``."""

    # Location of this node inside the figure hierarchy.
    _parent_path_str = "ohlc"
    _path_str = "ohlc.line"
    _valid_props = {"dash", "width"}

    @property
    def dash(self):
        """Dash style: a named style ('solid', 'dot', 'dash', 'longdash',
        'dashdot', 'longdashdot') or a dash-length list string such as
        '5px,10px,2px,2px'."""
        return self["dash"]

    @dash.setter
    def dash(self, val):
        self["dash"] = val

    @property
    def width(self):
        """Line width in px (int or float in [0, inf])."""
        return self["width"]

    @width.setter
    def width(self, val):
        self["width"] = val

    @property
    def _prop_descriptions(self):
        # Human-readable property help consumed by the figure validators.
        return """\
        dash
            Sets the dash style of lines. Set to a dash type string
            ("solid", "dot", "dash", "longdash", "dashdot", or
            "longdashdot") or a dash length list in px (eg
            "5px,10px,2px,2px"). Note that this style setting can
            also be set per direction via `increasing.line.dash`
            and `decreasing.line.dash`.
        width
            [object Object] Note that this style setting can also
            be set per direction via `increasing.line.width` and
            `decreasing.line.width`.
        """

    def __init__(self, arg=None, dash=None, width=None, **kwargs):
        """Construct a new Line from ``arg`` (dict or Line instance)
        and/or the explicit ``dash`` / ``width`` keyword overrides."""
        super(Line, self).__init__("line")
        # Internal construction path: the parent figure supplies state.
        if "_parent" in kwargs:
            self._parent = kwargs["_parent"]
            return
        # Coerce arg into a plain dict we can pop from.
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the new_plotly.graph_objs.ohlc.Line
constructor must be a dict or
an instance of :class:`new_plotly.graph_objs.ohlc.Line`"""
            )
        self._skip_invalid = kwargs.pop("skip_invalid", False)
        self._validate = kwargs.pop("_validate", True)
        # Explicit keyword arguments take precedence over entries in arg.
        _v = arg.pop("dash", None)
        _v = dash if dash is not None else _v
        if _v is not None:
            self["dash"] = _v
        _v = arg.pop("width", None)
        _v = width if width is not None else _v
        if _v is not None:
            self["width"] = _v
        # Remaining keys are validated/rejected by the base class.
        self._process_kwargs(**dict(arg, **kwargs))
        self._skip_invalid = False
| true | true |
1c45687ed1e1d1996e74a33c71010180e5f720d1 | 6,785 | py | Python | Localization/particle_filter/particle_filter.py | MerdanBay/PythonRobotics | 71de5d038f348d347d7b5dc00c914d523cd59f92 | [
"MIT"
] | 1 | 2021-12-02T01:45:01.000Z | 2021-12-02T01:45:01.000Z | Localization/particle_filter/particle_filter.py | MerdanBay/PythonRobotics | 71de5d038f348d347d7b5dc00c914d523cd59f92 | [
"MIT"
] | null | null | null | Localization/particle_filter/particle_filter.py | MerdanBay/PythonRobotics | 71de5d038f348d347d7b5dc00c914d523cd59f92 | [
"MIT"
] | 1 | 2022-01-14T11:11:24.000Z | 2022-01-14T11:11:24.000Z | """
Particle Filter localization sample
author: Atsushi Sakai (@Atsushi_twi)
"""
import sys
import os
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../utils/")
import math
import matplotlib.pyplot as plt
import numpy as np
from utils.angle import rot_mat_2d
# Estimation parameter of PF (noise the *filter* assumes).
Q = np.diag([0.2]) ** 2  # range error
R = np.diag([2.0, np.deg2rad(40.0)]) ** 2  # input error

# Simulation parameter (noise actually injected by the simulator).
Q_sim = np.diag([0.2]) ** 2
R_sim = np.diag([1.0, np.deg2rad(30.0)]) ** 2

DT = 0.1  # time tick [s]
SIM_TIME = 50.0  # simulation time [s]
MAX_RANGE = 20.0  # maximum observation range

# Particle filter parameter
NP = 100  # Number of Particle
NTh = NP / 2.0  # Number of particle for re-sampling (effective-N threshold)

show_animation = True
def calc_input():
    """Return the constant control input u = [v, yaw_rate]^T as a 2x1 array."""
    velocity, yaw_rate = 1.0, 0.1  # [m/s], [rad/s]
    return np.array([[velocity], [yaw_rate]])
def observation(x_true, xd, u, rf_id):
    """Advance the true state, simulate noisy range observations to the
    RFID landmarks within MAX_RANGE, and dead-reckon with noisy input.

    Returns (x_true, z, xd, ud) where each z row is
    [noisy_range, landmark_x, landmark_y].
    """
    x_true = motion_model(x_true, u)

    # Range measurements (with Q_sim noise) to every landmark in range.
    z = np.zeros((0, 3))
    for lm_x, lm_y in rf_id:
        d = math.hypot(x_true[0, 0] - lm_x, x_true[1, 0] - lm_y)
        if d <= MAX_RANGE:
            noisy_range = d + np.random.randn() * Q_sim[0, 0] ** 0.5
            z = np.vstack((z, np.array([[noisy_range, lm_x, lm_y]])))

    # Dead reckoning: propagate xd with input corrupted by R_sim noise.
    ud = np.array([[u[0, 0] + np.random.randn() * R_sim[0, 0] ** 0.5],
                   [u[1, 0] + np.random.randn() * R_sim[1, 1] ** 0.5]])
    xd = motion_model(xd, ud)

    return x_true, z, xd, ud
def motion_model(x, u):
    """One step of the constant velocity / yaw-rate motion model.

    State x = [x, y, yaw, v]^T; input u = [v, yaw_rate]^T; time step DT.
    """
    yaw = x[2, 0]
    # State transition: keep x, y, yaw; the stored velocity is replaced by u.
    F = np.array([[1.0, 0.0, 0.0, 0.0],
                  [0.0, 1.0, 0.0, 0.0],
                  [0.0, 0.0, 1.0, 0.0],
                  [0.0, 0.0, 0.0, 0.0]])
    # Input matrix projects (v, yaw_rate) onto (dx, dy, dyaw, v).
    B = np.array([[DT * math.cos(yaw), 0.0],
                  [DT * math.sin(yaw), 0.0],
                  [0.0, DT],
                  [1.0, 0.0]])
    return F @ x + B @ u
def gauss_likelihood(x, sigma):
    """Density of a zero-mean 1-D Gaussian with std ``sigma`` at ``x``."""
    normalizer = math.sqrt(2.0 * math.pi * sigma ** 2)
    return math.exp(-(x ** 2) / (2 * sigma ** 2)) / normalizer
def calc_covariance(x_est, px, pw):
    """Weighted sample covariance of the particle cloud.

    Only the (x, y, yaw) rows of the deviations are used; the unbiased
    correction factor 1 / (1 - sum(w_i^2)) is applied at the end.
    """
    cov = np.zeros((3, 3))
    for i in range(px.shape[1]):
        dev = (px[:, i:i + 1] - x_est)[0:3]
        cov += pw[0, i] * dev @ dev.T
    return cov / (1.0 - pw @ pw.T)
def pf_localization(px, pw, z, u):
    """
    Localization with Particle filter

    px : (4, NP) particle states (mutated in place).
    pw : (1, NP) particle weights (mutated in place).
    z  : (m, 3) observations [range, landmark_x, landmark_y].
    u  : (2, 1) control input.
    Returns (x_est, p_est, px, pw).
    """
    for ip in range(NP):
        x = np.array([px[:, ip]]).T
        w = pw[0, ip]

        # Predict with random input sampling
        ud1 = u[0, 0] + np.random.randn() * R[0, 0] ** 0.5
        ud2 = u[1, 0] + np.random.randn() * R[1, 1] ** 0.5
        ud = np.array([[ud1, ud2]]).T
        x = motion_model(x, ud)

        # Calc Importance Weight: multiply in the likelihood of every
        # range observation given this particle's predicted position.
        for i in range(len(z[:, 0])):
            dx = x[0, 0] - z[i, 1]
            dy = x[1, 0] - z[i, 2]
            pre_z = math.hypot(dx, dy)
            dz = pre_z - z[i, 0]  # innovation: predicted minus measured range
            w = w * gauss_likelihood(dz, math.sqrt(Q[0, 0]))

        px[:, ip] = x[:, 0]
        pw[0, ip] = w

    pw = pw / pw.sum()  # normalize

    x_est = px.dot(pw.T)  # weighted mean state
    p_est = calc_covariance(x_est, px, pw)

    # Re-sample when the effective particle number collapses below NTh.
    N_eff = 1.0 / (pw.dot(pw.T))[0, 0]  # Effective particle number
    if N_eff < NTh:
        px, pw = re_sampling(px, pw)
    return x_est, p_est, px, pw
def re_sampling(px, pw):
    """Low variance (systematic) re-sampling.

    Draws NP particles along the cumulative weight distribution using NP
    evenly spaced pointers shifted by one shared random offset, then
    resets all weights to 1/NP.
    """
    cum_weights = np.cumsum(pw)
    pointers = np.arange(0.0, 1.0, 1 / NP) + np.random.uniform(0, 1 / NP)

    chosen = []
    idx = 0
    for pointer in pointers[:NP]:
        while pointer > cum_weights[idx]:
            idx += 1
        chosen.append(idx)

    return px[:, chosen], np.zeros((1, NP)) + 1.0 / NP
def plot_covariance_ellipse(x_est, p_est):  # pragma: no cover
    """Draw the covariance ellipse of the (x, y) estimate on the current plot."""
    p_xy = p_est[0:2, 0:2]
    eig_val, eig_vec = np.linalg.eig(p_xy)

    # Order the axes so `a` follows the dominant eigenvector.
    if eig_val[0] >= eig_val[1]:
        big_ind = 0
        small_ind = 1
    else:
        big_ind = 1
        small_ind = 0

    t = np.arange(0, 2 * math.pi + 0.1, 0.1)

    # eig_val[big_ind] or eiq_val[small_ind] were occasionally negative
    # numbers extremely close to 0 (~10^-20), catch these cases and set the
    # respective variable to 0
    try:
        a = math.sqrt(eig_val[big_ind])
    except ValueError:
        a = 0
    try:
        b = math.sqrt(eig_val[small_ind])
    except ValueError:
        b = 0

    # Axis-aligned ellipse, then rotate into the eigenbasis and translate
    # to the estimated position.
    x = [a * math.cos(it) for it in t]
    y = [b * math.sin(it) for it in t]
    angle = math.atan2(eig_vec[1, big_ind], eig_vec[0, big_ind])
    fx = rot_mat_2d(angle) @ np.array([[x, y]])
    px = np.array(fx[:, 0] + x_est[0, 0]).flatten()
    py = np.array(fx[:, 1] + x_est[1, 0]).flatten()
    plt.plot(px, py, "--r")
def main():
    """Run the particle-filter localization simulation (optionally animated)."""
    print(__file__ + " start!!")

    time = 0.0

    # RF_ID positions [x, y]
    rf_id = np.array([[10.0, 0.0],
                      [10.0, 10.0],
                      [0.0, 15.0],
                      [-5.0, 20.0]])

    # State Vector [x y yaw v]'
    x_est = np.zeros((4, 1))
    x_true = np.zeros((4, 1))

    px = np.zeros((4, NP))  # Particle store
    pw = np.zeros((1, NP)) + 1.0 / NP  # Particle weight (uniform at start)
    x_dr = np.zeros((4, 1))  # Dead reckoning

    # history (columns accumulate one state per time step)
    h_x_est = x_est
    h_x_true = x_true
    h_x_dr = x_true

    while SIM_TIME >= time:
        time += DT
        u = calc_input()

        x_true, z, x_dr, ud = observation(x_true, x_dr, u, rf_id)

        x_est, PEst, px, pw = pf_localization(px, pw, z, ud)

        # store data history
        h_x_est = np.hstack((h_x_est, x_est))
        h_x_dr = np.hstack((h_x_dr, x_dr))
        h_x_true = np.hstack((h_x_true, x_true))

        if show_animation:
            plt.cla()
            # for stopping simulation with the esc key.
            plt.gcf().canvas.mpl_connect(
                'key_release_event',
                lambda event: [exit(0) if event.key == 'escape' else None])

            # Observation rays, landmarks, particle cloud and trajectories.
            for i in range(len(z[:, 0])):
                plt.plot([x_true[0, 0], z[i, 1]], [x_true[1, 0], z[i, 2]], "-k")
            plt.plot(rf_id[:, 0], rf_id[:, 1], "*k")
            plt.plot(px[0, :], px[1, :], ".r")
            plt.plot(np.array(h_x_true[0, :]).flatten(),
                     np.array(h_x_true[1, :]).flatten(), "-b")
            plt.plot(np.array(h_x_dr[0, :]).flatten(),
                     np.array(h_x_dr[1, :]).flatten(), "-k")
            plt.plot(np.array(h_x_est[0, :]).flatten(),
                     np.array(h_x_est[1, :]).flatten(), "-r")
            plot_covariance_ellipse(x_est, PEst)
            plt.axis("equal")
            plt.grid(True)
            plt.pause(0.001)


if __name__ == '__main__':
    main()
| 25.700758 | 80 | 0.506853 | import sys
import os
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../utils/")
import math
import matplotlib.pyplot as plt
import numpy as np
from utils.angle import rot_mat_2d
# Filter noise parameters: Q = assumed range-measurement noise,
# R = assumed input (v, yaw_rate) noise used during prediction.
Q = np.diag([0.2]) ** 2
R = np.diag([2.0, np.deg2rad(40.0)]) ** 2

# Simulator (ground-truth) noise parameters.
Q_sim = np.diag([0.2]) ** 2
R_sim = np.diag([1.0, np.deg2rad(30.0)]) ** 2

DT = 0.1  # time tick [s]
SIM_TIME = 50.0  # simulation time [s]
MAX_RANGE = 20.0  # maximum observation range

NP = 100  # number of particles
NTh = NP / 2.0  # effective-particle-number threshold for re-sampling

show_animation = True


def calc_input():
    """Constant control input u = [v, yaw_rate]^T (1.0 m/s, 0.1 rad/s)."""
    v = 1.0
    yaw_rate = 0.1
    u = np.array([[v, yaw_rate]]).T
    return u


def observation(x_true, xd, u, rf_id):
    """Advance truth, build noisy range observations to landmarks within
    MAX_RANGE, and dead-reckon xd with a noise-corrupted input ud."""
    x_true = motion_model(x_true, u)
    z = np.zeros((0, 3))
    for i in range(len(rf_id[:, 0])):
        dx = x_true[0, 0] - rf_id[i, 0]
        dy = x_true[1, 0] - rf_id[i, 1]
        d = math.hypot(dx, dy)
        if d <= MAX_RANGE:
            dn = d + np.random.randn() * Q_sim[0, 0] ** 0.5  # noisy range
            zi = np.array([[dn, rf_id[i, 0], rf_id[i, 1]]])
            z = np.vstack((z, zi))
    # Dead-reckoning input corrupted with R_sim noise.
    ud1 = u[0, 0] + np.random.randn() * R_sim[0, 0] ** 0.5
    ud2 = u[1, 0] + np.random.randn() * R_sim[1, 1] ** 0.5
    ud = np.array([[ud1, ud2]]).T
    xd = motion_model(xd, ud)
    return x_true, z, xd, ud


def motion_model(x, u):
    """Constant velocity/yaw-rate motion model for state [x, y, yaw, v]^T."""
    F = np.array([[1.0, 0, 0, 0],
                  [0, 1.0, 0, 0],
                  [0, 0, 1.0, 0],
                  [0, 0, 0, 0]])
    B = np.array([[DT * math.cos(x[2, 0]), 0],
                  [DT * math.sin(x[2, 0]), 0],
                  [0.0, DT],
                  [1.0, 0.0]])
    x = F.dot(x) + B.dot(u)
    return x


def gauss_likelihood(x, sigma):
    """Zero-mean 1-D Gaussian density at ``x`` with std ``sigma``."""
    p = 1.0 / math.sqrt(2.0 * math.pi * sigma ** 2) * \
        math.exp(-x ** 2 / (2 * sigma ** 2))
    return p


def calc_covariance(x_est, px, pw):
    """Weighted covariance of the (x, y, yaw) particle deviations,
    with the unbiased correction 1 / (1 - sum(w_i^2))."""
    cov = np.zeros((3, 3))
    n_particle = px.shape[1]
    for i in range(n_particle):
        dx = (px[:, i:i + 1] - x_est)[0:3]
        cov += pw[0, i] * dx @ dx.T
    cov *= 1.0 / (1.0 - pw @ pw.T)
    return cov
def pf_localization(px, pw, z, u):
    """Particle-filter update: predict each particle with sampled input,
    re-weight by observation likelihood, estimate mean/covariance, and
    re-sample when the effective particle number drops below NTh.
    Mutates px and pw in place; returns (x_est, p_est, px, pw)."""
    for ip in range(NP):
        x = np.array([px[:, ip]]).T
        w = pw[0, ip]
        # Predict with random input sampling.
        ud1 = u[0, 0] + np.random.randn() * R[0, 0] ** 0.5
        ud2 = u[1, 0] + np.random.randn() * R[1, 1] ** 0.5
        ud = np.array([[ud1, ud2]]).T
        x = motion_model(x, ud)
        # Importance weight: likelihood of each range observation.
        for i in range(len(z[:, 0])):
            dx = x[0, 0] - z[i, 1]
            dy = x[1, 0] - z[i, 2]
            pre_z = math.hypot(dx, dy)
            dz = pre_z - z[i, 0]
            w = w * gauss_likelihood(dz, math.sqrt(Q[0, 0]))
        px[:, ip] = x[:, 0]
        pw[0, ip] = w
    pw = pw / pw.sum()  # normalize
    x_est = px.dot(pw.T)
    p_est = calc_covariance(x_est, px, pw)
    N_eff = 1.0 / (pw.dot(pw.T))[0, 0]  # effective particle number
    if N_eff < NTh:
        px, pw = re_sampling(px, pw)
    return x_est, p_est, px, pw


def re_sampling(px, pw):
    """Low-variance (systematic) re-sampling; resets weights to 1/NP."""
    w_cum = np.cumsum(pw)
    base = np.arange(0.0, 1.0, 1 / NP)
    re_sample_id = base + np.random.uniform(0, 1 / NP)
    indexes = []
    ind = 0
    for ip in range(NP):
        while re_sample_id[ip] > w_cum[ind]:
            ind += 1
        indexes.append(ind)
    px = px[:, indexes]
    pw = np.zeros((1, NP)) + 1.0 / NP  # reset weights to uniform
    return px, pw


def plot_covariance_ellipse(x_est, p_est):
    """Draw the covariance ellipse of the (x, y) estimate."""
    p_xy = p_est[0:2, 0:2]
    eig_val, eig_vec = np.linalg.eig(p_xy)
    # Order the axes so `a` follows the dominant eigenvector.
    if eig_val[0] >= eig_val[1]:
        big_ind = 0
        small_ind = 1
    else:
        big_ind = 1
        small_ind = 0
    t = np.arange(0, 2 * math.pi + 0.1, 0.1)
    # Eigenvalues can be tiny negatives (~1e-20); clamp to 0 on ValueError.
    try:
        a = math.sqrt(eig_val[big_ind])
    except ValueError:
        a = 0
    try:
        b = math.sqrt(eig_val[small_ind])
    except ValueError:
        b = 0
    x = [a * math.cos(it) for it in t]
    y = [b * math.sin(it) for it in t]
    angle = math.atan2(eig_vec[1, big_ind], eig_vec[0, big_ind])
    fx = rot_mat_2d(angle) @ np.array([[x, y]])
    px = np.array(fx[:, 0] + x_est[0, 0]).flatten()
    py = np.array(fx[:, 1] + x_est[1, 0]).flatten()
    plt.plot(px, py, "--r")
def main():
    """Run the particle-filter localization simulation (optionally animated)."""
    print(__file__ + " start!!")
    time = 0.0
    # RFID landmark positions [x, y].
    rf_id = np.array([[10.0, 0.0],
                      [10.0, 10.0],
                      [0.0, 15.0],
                      [-5.0, 20.0]])
    # State vector [x y yaw v]'.
    x_est = np.zeros((4, 1))
    x_true = np.zeros((4, 1))
    px = np.zeros((4, NP))  # Particle store
    pw = np.zeros((1, NP)) + 1.0 / NP  # Particle weight
    x_dr = np.zeros((4, 1))  # Dead reckoning
    # history
    h_x_est = x_est
    h_x_true = x_true
    h_x_dr = x_true
    while SIM_TIME >= time:
        time += DT
        u = calc_input()
        x_true, z, x_dr, ud = observation(x_true, x_dr, u, rf_id)
        x_est, PEst, px, pw = pf_localization(px, pw, z, ud)
        # store data history
        h_x_est = np.hstack((h_x_est, x_est))
        h_x_dr = np.hstack((h_x_dr, x_dr))
        h_x_true = np.hstack((h_x_true, x_true))
        if show_animation:
            plt.cla()
            # for stopping simulation with the esc key.
            plt.gcf().canvas.mpl_connect(
                'key_release_event',
                lambda event: [exit(0) if event.key == 'escape' else None])
            # Observation rays, landmarks, particle cloud and trajectories.
            for i in range(len(z[:, 0])):
                plt.plot([x_true[0, 0], z[i, 1]], [x_true[1, 0], z[i, 2]], "-k")
            plt.plot(rf_id[:, 0], rf_id[:, 1], "*k")
            plt.plot(px[0, :], px[1, :], ".r")
            plt.plot(np.array(h_x_true[0, :]).flatten(),
                     np.array(h_x_true[1, :]).flatten(), "-b")
            plt.plot(np.array(h_x_dr[0, :]).flatten(),
                     np.array(h_x_dr[1, :]).flatten(), "-k")
            plt.plot(np.array(h_x_est[0, :]).flatten(),
                     np.array(h_x_est[1, :]).flatten(), "-r")
            plot_covariance_ellipse(x_est, PEst)
            plt.axis("equal")
            plt.grid(True)
            plt.pause(0.001)


if __name__ == '__main__':
    main()
| true | true |
1c45694042710f3d8d3724815e70347ae2585bff | 1,850 | py | Python | thedoctor/tests/test_integration.py | hhuuggoo/thedoctor | 84c11377dc16ef8208480cd2745ce1ffc5614865 | [
"BSD-2-Clause"
] | 37 | 2015-02-24T21:59:04.000Z | 2021-07-13T19:04:34.000Z | thedoctor/tests/test_integration.py | hhuuggoo/thedoctor | 84c11377dc16ef8208480cd2745ce1ffc5614865 | [
"BSD-2-Clause"
] | 1 | 2015-03-23T20:23:41.000Z | 2015-03-23T21:09:58.000Z | thedoctor/tests/test_integration.py | hhuuggoo/thedoctor | 84c11377dc16ef8208480cd2745ce1ffc5614865 | [
"BSD-2-Clause"
] | 7 | 2015-03-17T17:18:27.000Z | 2020-07-30T13:05:42.000Z | from .utils import raises
from .. import ValidationError
from .. import validate
from ..validators import dict_validator, true
def test_integration():
    """@validate enforces per-argument type checks on decorated functions."""
    @validate(a=int, b=int)
    def func(a, b):
        return (a, b)
    assert func(1, 2) == (1, 2)
    assert raises(ValidationError, func, 1, 'a')

    # Only `a` is constrained; `b` may be anything.
    @validate(a=int)
    def func(a, b):
        return (a, b)
    assert func(1, 'a') == (1, 'a')
    assert raises(ValidationError, func, 'a', 1)


def test_return_validator():
    """_return validators check the result; _all validators see all args."""
    def return_validator(result):
        if result != 3:
            raise ValidationError('not 3')

    @validate(_return=[int, return_validator])
    def func(a, b):
        return a + b
    assert raises(ValidationError, func, 1, 3)
    assert raises(ValidationError, func, 'a', 'b')
    assert func(1, 2) == 3

    def sums_to_3(all_args):
        if all_args['a'] + all_args['b'] != 3:
            raise ValidationError('not sum to 3')

    @validate(_all=sums_to_3)
    def func(a, b):
        return a + b
    assert raises(ValidationError, func, 1, 3)
    assert func(1, 2) == 3


def test_lambda_validator():
    """Validators can be inline lambdas built on the `true` helper."""
    @validate(_all=lambda x: true(x['a'] + x['b'] == 3, "must sum to 3"))
    def func(a, b):
        return a + b
    assert raises(ValidationError, func, 1, 3)
    assert func(1, 2) == 3


# NOTE(review): the two functions below do not start with "test_", so
# pytest will not collect them; confirm whether that is intentional.
def instance_method_test():
    """@validate works on instance methods (self is not validated)."""
    class Test(object):
        @validate(_return=lambda x: true(x % 2 == 0, "return must be even"),
                  a=int, b=int)
        def func(self, a, b):
            return a + b
    t = Test()
    assert raises(ValidationError, t.func, 1, 2)
    t.func(2, 4)


def dict_validator_integration_test():
    """dict_validator applies per-key validators to dict arguments."""
    @validate(x=dict_validator(
        {'name': lambda x: true(x == 'sally', 'must be sally')}))
    def func(x):
        return x
    assert raises(ValidationError, func, {'name': 'bob'})
    func({'name': 'sally'})
| 26.428571 | 76 | 0.584324 | from .utils import raises
from .. import ValidationError
from .. import validate
from ..validators import dict_validator, true
def test_integration():
    """@validate enforces per-argument type checks on decorated functions."""
    @validate(a=int, b=int)
    def func(a, b):
        return (a, b)
    assert func(1, 2) == (1, 2)
    assert raises(ValidationError, func, 1, 'a')

    # Only `a` is constrained; `b` may be anything.
    @validate(a=int)
    def func(a, b):
        return (a, b)
    assert func(1, 'a') == (1, 'a')
    assert raises(ValidationError, func, 'a', 1)


def test_return_validator():
    """_return validators check the result; _all validators see all args."""
    def return_validator(result):
        if result != 3:
            raise ValidationError('not 3')

    @validate(_return=[int, return_validator])
    def func(a, b):
        return a + b
    assert raises(ValidationError, func, 1, 3)
    assert raises(ValidationError, func, 'a', 'b')
    assert func(1, 2) == 3

    def sums_to_3(all_args):
        if all_args['a'] + all_args['b'] != 3:
            raise ValidationError('not sum to 3')

    @validate(_all=sums_to_3)
    def func(a, b):
        return a + b
    assert raises(ValidationError, func, 1, 3)
    assert func(1, 2) == 3


def test_lambda_validator():
    """Validators can be inline lambdas built on the `true` helper."""
    @validate(_all=lambda x: true(x['a'] + x['b'] == 3, "must sum to 3"))
    def func(a, b):
        return a + b
    assert raises(ValidationError, func, 1, 3)
    assert func(1, 2) == 3


# NOTE(review): the two functions below do not start with "test_", so
# pytest will not collect them; confirm whether that is intentional.
def instance_method_test():
    """@validate works on instance methods (self is not validated)."""
    class Test(object):
        @validate(_return=lambda x: true(x % 2 == 0, "return must be even"),
                  a=int, b=int)
        def func(self, a, b):
            return a + b
    t = Test()
    assert raises(ValidationError, t.func, 1, 2)
    t.func(2, 4)


def dict_validator_integration_test():
    """dict_validator applies per-key validators to dict arguments."""
    @validate(x=dict_validator(
        {'name': lambda x: true(x == 'sally', 'must be sally')}))
    def func(x):
        return x
    assert raises(ValidationError, func, {'name': 'bob'})
    func({'name': 'sally'})
| true | true |
1c45697bb34e4558711a296700d158a28ef349c2 | 1,054 | py | Python | onnxruntime/__init__.py | NonStatic2014/onnxruntime | bdfd46082a152e9605199b4f01664fc76f97a346 | [
"MIT"
] | null | null | null | onnxruntime/__init__.py | NonStatic2014/onnxruntime | bdfd46082a152e9605199b4f01664fc76f97a346 | [
"MIT"
] | 10 | 2019-03-25T21:47:46.000Z | 2019-04-30T02:33:05.000Z | onnxruntime/__init__.py | NonStatic2014/onnxruntime | bdfd46082a152e9605199b4f01664fc76f97a346 | [
"MIT"
] | 1 | 2019-04-09T16:15:51.000Z | 2019-04-09T16:15:51.000Z | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# --------------------------------------------------------------------------
"""
ONNX Runtime
enables high-performance evaluation of trained machine learning (ML)
models while keeping resource usage low.
Building on Microsoft's dedication to the
`Open Neural Network Exchange (ONNX) <https://onnx.ai/>`_
community, it supports traditional ML models as well
as Deep Learning algorithms in the
`ONNX-ML format <https://github.com/onnx/onnx/blob/master/docs/IR.md>`_.
"""
# Package metadata.
__version__ = "0.5.0"
__author__ = "Microsoft"

# Re-export the public C-API bindings and the InferenceSession wrapper.
from onnxruntime.capi._pybind_state import get_all_providers, get_available_providers, get_device, RunOptions, SessionOptions, set_default_logger_severity, NodeArg, ModelMetadata, GraphOptimizationLevel, ExecutionMode
from onnxruntime.capi.session import InferenceSession
from onnxruntime.capi import onnxruntime_validation

# Fail fast at import time if the OS/distro is unsupported.
onnxruntime_validation.check_distro_info()
| 47.909091 | 217 | 0.701139 |
__version__ = "0.5.0"
__author__ = "Microsoft"
from onnxruntime.capi._pybind_state import get_all_providers, get_available_providers, get_device, RunOptions, SessionOptions, set_default_logger_severity, NodeArg, ModelMetadata, GraphOptimizationLevel, ExecutionMode
from onnxruntime.capi.session import InferenceSession
from onnxruntime.capi import onnxruntime_validation
onnxruntime_validation.check_distro_info()
| true | true |
1c4569ce9ebd3c7cda1043c0df205b8a956f5f5e | 3,842 | py | Python | image_train.py | to0mi1/image-categorization-based-cifar10 | 96218f2f60faf424b26112559a170a05463113bb | [
"MIT"
] | 1 | 2019-04-16T08:42:32.000Z | 2019-04-16T08:42:32.000Z | image_train.py | to0mi1/image-categorization-based-cifar10 | 96218f2f60faf424b26112559a170a05463113bb | [
"MIT"
] | null | null | null | image_train.py | to0mi1/image-categorization-based-cifar10 | 96218f2f60faf424b26112559a170a05463113bb | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
モデルを作成しトレーニングを行う
"""
import os
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Dense, Dropout, Activation, Flatten
from keras.utils import plot_model
from keras.preprocessing.image import ImageDataGenerator
from keras.utils import plot_model
from keras.preprocessing import image
from keras.callbacks import EarlyStopping
# Hyper-parameter definitions.
activation = 'relu'
optimizer = 'Adam'
nb_epoch = 30
batch_size = 16

# Directories holding the training / validation images.
train_path = './train'
valid_path = './valid'

# Create the directory that will hold training artifacts.
if not os.path.exists('./result'):
    os.mkdir('./result')
result_dir = './result'

# Classes the images are categorized into.
classes = ['buri', 'katsuo', 'kuromaguro', 'maaji', 'NG']
nb_classes = len(classes)
def image_train():
    """Build, compile and train the CNN classifier.

    Saves the architecture to ``result/model.json`` and the trained
    weights to ``result/weight.h5``.
    """
    print('Start model building')

    # Two conv blocks (32 then 64 filters) followed by a dense classifier.
    model = Sequential()
    model.add(Conv2D(32, (3, 3), padding="same", input_shape=(150, 150, 3)))
    model.add(Activation(activation))
    model.add(Conv2D(32, (3, 3), padding="same"))
    model.add(Activation(activation))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.25))

    model.add(Conv2D(64, (3, 3), padding='same'))
    model.add(Activation(activation))
    model.add(Conv2D(64, (3, 3), padding="same"))
    model.add(Activation(activation))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.25))

    model.add(Flatten())
    model.add(Dense(512))
    model.add(Activation(activation))
    model.add(Dropout(0.5))
    model.add(Dense(nb_classes))
    model.add(Activation('softmax'))

    model.summary()

    # Fix: this is a multi-class softmax head trained on one-hot labels
    # (class_mode='categorical'), so the correct loss is categorical
    # cross-entropy; binary_crossentropy reports misleading accuracy here.
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])

    # Persist the model architecture to JSON.
    model_json = model.to_json()
    with open(os.path.join(result_dir, 'model.json'), 'w') as f:
        f.write(model_json)

    print('start training.')

    # Training data augmentation.
    train_datagen = ImageDataGenerator(
        rotation_range=40,        # random rotation range [deg]
        width_shift_range=0.2,    # random horizontal shift fraction
        height_shift_range=0.2,   # random vertical shift fraction
        shear_range=0.2,          # shear intensity [rad, counter-clockwise]
        zoom_range=0.2,           # random zoom range
        horizontal_flip=True,     # random horizontal flips
        rescale=1.0 / 255)        # scale pixel values to [0, 1]

    train_generator = train_datagen.flow_from_directory(
        train_path,
        target_size=(150, 150),
        batch_size=batch_size,
        classes=classes,
        class_mode='categorical')

    # Validation data: rescaling only, no augmentation.
    validation_datagen = ImageDataGenerator(rescale=1.0 / 255)
    validation_generator = validation_datagen.flow_from_directory(
        valid_path,
        target_size=(150, 150),
        batch_size=batch_size,
        classes=classes,
        class_mode='categorical')

    # Fix: one epoch is samples // batch_size generator steps, not
    # `samples` steps (the old value made each "epoch" run batch_size
    # times too many batches).
    steps_per_epoch = max(1, train_generator.samples // batch_size)
    validation_steps = max(1, validation_generator.samples // batch_size)
    print('steps_per_epoch is set to %s' % steps_per_epoch)
    print('validation_steps is set to %s' % validation_steps)

    # Stop early when the validation loss stops improving.
    es_cb = EarlyStopping(monitor='val_loss', patience=2, verbose=1, mode='auto')

    history = model.fit_generator(generator=train_generator,
                                  steps_per_epoch=steps_per_epoch,
                                  verbose=1,
                                  callbacks=[es_cb],
                                  validation_data=validation_generator,
                                  validation_steps=validation_steps,
                                  epochs=nb_epoch)
    print('Training Complete.')
    model.save_weights(os.path.join(result_dir, 'weight.h5'))


if __name__ == '__main__':
    image_train()
| 30.736 | 107 | 0.656689 |
import os
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Dense, Dropout, Activation, Flatten
from keras.utils import plot_model
from keras.preprocessing.image import ImageDataGenerator
from keras.utils import plot_model
from keras.preprocessing import image
from keras.callbacks import EarlyStopping
# Hyper-parameters.
activation = 'relu'
optimizer = 'Adam'
nb_epoch = 30
batch_size = 16
# Directories holding the training / validation images.
train_path = './train'
valid_path = './valid'
# Create the directory that will hold training artifacts.
if not os.path.exists('./result'):
    os.mkdir('./result')
result_dir = './result'
# Classes the images are categorized into.
classes = ['buri', 'katsuo', 'kuromaguro', 'maaji', 'NG']
nb_classes = len(classes)
def image_train():
    """Build, compile and train the CNN classifier; saves model.json and
    weight.h5 under ``result_dir``.
    """
    print('Start model building')
    # Two conv blocks (32 then 64 filters) followed by a dense classifier.
    model = Sequential()
    model.add(Conv2D(32, (3, 3), padding="same", input_shape=(150, 150, 3)))
    model.add(Activation(activation))
    model.add(Conv2D(32, (3, 3), padding="same"))
    model.add(Activation(activation))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.25))
    model.add(Conv2D(64, (3, 3), padding='same'))
    model.add(Activation(activation))
    model.add(Conv2D(64, (3, 3), padding="same"))
    model.add(Activation(activation))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.25))
    model.add(Flatten())
    model.add(Dense(512))
    model.add(Activation(activation))
    model.add(Dropout(0.5))
    model.add(Dense(nb_classes))
    model.add(Activation('softmax'))
    model.summary()
    # Fix: multi-class softmax with one-hot labels needs categorical
    # (not binary) cross-entropy.
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    model_json = model.to_json()
    with open(os.path.join(result_dir, 'model.json'), 'w') as f:
        f.write(model_json)
    print('start training.')
    train_datagen = ImageDataGenerator(
        # Fix: the keyword had been corrupted to `ion_range`, which raises
        # a TypeError in ImageDataGenerator; restored to rotation_range.
        rotation_range=40,
        width_shift_range=0.2,
        height_shift_range=0.2,
        shear_range=0.2,
        zoom_range=0.2,
        horizontal_flip=True,
        rescale=1.0 / 255)
    train_generator = train_datagen.flow_from_directory(
        train_path,
        target_size=(150, 150),
        batch_size=batch_size,
        classes=classes,
        class_mode='categorical')
    # Validation data: rescaling only, no augmentation.
    validation_datagen = ImageDataGenerator(rescale=1.0 / 255)
    validation_generator = validation_datagen.flow_from_directory(
        valid_path,
        target_size=(150, 150),
        batch_size=batch_size,
        classes=classes,
        class_mode='categorical')
    # Fix: one epoch is samples // batch_size generator steps, not
    # `samples` steps.
    steps_per_epoch = max(1, train_generator.samples // batch_size)
    validation_steps = max(1, validation_generator.samples // batch_size)
    print('steps_per_epoch is set to %s' % steps_per_epoch)
    print('validation_steps is set to %s' % validation_steps)
    # Stop early when validation loss stops improving.
    es_cb = EarlyStopping(monitor='val_loss', patience=2, verbose=1, mode='auto')
    history = model.fit_generator(generator=train_generator,
                                  steps_per_epoch=steps_per_epoch,
                                  verbose=1,
                                  callbacks=[es_cb],
                                  validation_data=validation_generator,
                                  validation_steps=validation_steps,
                                  epochs=nb_epoch)
    print('Training Complete.')
    model.save_weights(os.path.join(result_dir, 'weight.h5'))


if __name__ == '__main__':
    image_train()
| true | true |
1c456a5b82ba05659c5a11cb9ea95320d3f81903 | 2,818 | py | Python | test/functional/rpc_getblockfilter.py | CallMeMisterOwl/bitcoin | 9d2895157ec0ff6e356f40c5fe84d3007fc991c1 | [
"MIT"
] | 2 | 2020-08-16T16:27:01.000Z | 2020-08-20T06:19:32.000Z | test/functional/rpc_getblockfilter.py | CallMeMisterOwl/bitcoin | 9d2895157ec0ff6e356f40c5fe84d3007fc991c1 | [
"MIT"
] | 1 | 2022-01-08T14:38:57.000Z | 2022-01-08T14:38:57.000Z | test/functional/rpc_getblockfilter.py | CallMeMisterOwl/bitcoin | 9d2895157ec0ff6e356f40c5fe84d3007fc991c1 | [
"MIT"
] | 1 | 2022-02-19T19:33:46.000Z | 2022-02-19T19:33:46.000Z | #!/usr/bin/env python3
# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the getblockfilter RPC."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal, assert_is_hex_string, assert_raises_rpc_error,
)
FILTER_TYPES = ["basic"]


class GetBlockFilterTest(BitcoinTestFramework):
    """Functional test for the getblockfilter RPC (BIP 157 block filters)."""

    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 2
        # Only node 0 maintains the compact block filter index.
        self.extra_args = [["-blockfilterindex"], []]

    def run_test(self):
        # Create two chains by disconnecting nodes 0 & 1, mining, then reconnecting
        self.disconnect_nodes(0, 1)
        self.generate(self.nodes[0], 3, sync_fun=self.no_op)
        self.generate(self.nodes[1], 4, sync_fun=self.no_op)

        assert_equal(self.nodes[0].getblockcount(), 3)
        chain0_hashes = [self.nodes[0].getblockhash(block_height) for block_height in range(4)]

        # Reorg node 0 to a new chain
        self.connect_nodes(0, 1)
        self.sync_blocks()
        assert_equal(self.nodes[0].getblockcount(), 4)
        chain1_hashes = [self.nodes[0].getblockhash(block_height) for block_height in range(4)]

        # Test getblockfilter returns a filter for all blocks and filter types on active chain
        for block_hash in chain1_hashes:
            for filter_type in FILTER_TYPES:
                result = self.nodes[0].getblockfilter(block_hash, filter_type)
                assert_is_hex_string(result['filter'])

        # Test getblockfilter returns a filter for all blocks and filter types on stale chain
        for block_hash in chain0_hashes:
            for filter_type in FILTER_TYPES:
                result = self.nodes[0].getblockfilter(block_hash, filter_type)
                assert_is_hex_string(result['filter'])

        # Test getblockfilter with unknown block
        bad_block_hash = "0123456789abcdef" * 4
        assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblockfilter, bad_block_hash, "basic")

        # Test getblockfilter with undefined filter type
        genesis_hash = self.nodes[0].getblockhash(0)
        assert_raises_rpc_error(-5, "Unknown filtertype", self.nodes[0].getblockfilter, genesis_hash, "unknown")

        # Test getblockfilter fails on node without compact block filter index
        self.restart_node(0, extra_args=["-blockfilterindex=0"])
        for filter_type in FILTER_TYPES:
            assert_raises_rpc_error(-1, "Index is not enabled for filtertype {}".format(filter_type),
                                    self.nodes[0].getblockfilter, genesis_hash, filter_type)


if __name__ == '__main__':
    GetBlockFilterTest().main()
| 43.353846 | 112 | 0.688077 |
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal, assert_is_hex_string, assert_raises_rpc_error,
)
FILTER_TYPES = ["basic"]
class GetBlockFilterTest(BitcoinTestFramework):
    """Functional test for the getblockfilter RPC."""
    def set_test_params(self):
        # Two nodes on a clean chain: node 0 runs with -blockfilterindex,
        # node 1 runs without it.
        self.setup_clean_chain = True
        self.num_nodes = 2
        self.extra_args = [["-blockfilterindex"], []]
    def run_test(self):
        """Exercise getblockfilter on active and stale chains plus its error paths."""
        # Mine two competing chains while the nodes are disconnected:
        # node 0 mines 3 blocks, node 1 mines 4.
        self.disconnect_nodes(0, 1)
        self.generate(self.nodes[0], 3, sync_fun=self.no_op)
        self.generate(self.nodes[1], 4, sync_fun=self.no_op)
        assert_equal(self.nodes[0].getblockcount(), 3)
        chain0_hashes = [self.nodes[0].getblockhash(block_height) for block_height in range(4)]
        # Reorg node 0 to a new chain
        self.connect_nodes(0, 1)
        self.sync_blocks()
        assert_equal(self.nodes[0].getblockcount(), 4)
        chain1_hashes = [self.nodes[0].getblockhash(block_height) for block_height in range(4)]
        # Test getblockfilter returns a filter for all blocks and filter types on active chain
        for block_hash in chain1_hashes:
            for filter_type in FILTER_TYPES:
                result = self.nodes[0].getblockfilter(block_hash, filter_type)
                assert_is_hex_string(result['filter'])
        # Test getblockfilter returns a filter for all blocks and filter types on stale chain
        for block_hash in chain0_hashes:
            for filter_type in FILTER_TYPES:
                result = self.nodes[0].getblockfilter(block_hash, filter_type)
                assert_is_hex_string(result['filter'])
        # Test getblockfilter with unknown block
        bad_block_hash = "0123456789abcdef" * 4
        assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblockfilter, bad_block_hash, "basic")
        # Test getblockfilter with undefined filter type
        genesis_hash = self.nodes[0].getblockhash(0)
        assert_raises_rpc_error(-5, "Unknown filtertype", self.nodes[0].getblockfilter, genesis_hash, "unknown")
        # Test getblockfilter fails on node without compact block filter index
        self.restart_node(0, extra_args=["-blockfilterindex=0"])
        for filter_type in FILTER_TYPES:
            assert_raises_rpc_error(-1, "Index is not enabled for filtertype {}".format(filter_type),
                                    self.nodes[0].getblockfilter, genesis_hash, filter_type)
# Run the functional test when executed directly.
if __name__ == '__main__':
    GetBlockFilterTest().main()
| true | true |
1c456aa4cb7c1dda13e25217b75d8708106ea6d2 | 14,192 | py | Python | sdk/python/pulumi_azure_nextgen/securityinsights/v20190101preview/get_incident.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 31 | 2020-09-21T09:41:01.000Z | 2021-02-26T13:21:59.000Z | sdk/python/pulumi_azure_nextgen/securityinsights/v20190101preview/get_incident.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 231 | 2020-09-21T09:38:45.000Z | 2021-03-01T11:16:03.000Z | sdk/python/pulumi_azure_nextgen/securityinsights/v20190101preview/get_incident.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 4 | 2020-09-29T14:14:59.000Z | 2021-02-10T20:38:16.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetIncidentResult',
'AwaitableGetIncidentResult',
'get_incident',
]
@pulumi.output_type
class GetIncidentResult:
"""
Represents an incident in Azure Security Insights.
"""
def __init__(__self__, additional_data=None, classification=None, classification_comment=None, classification_reason=None, created_time_utc=None, description=None, etag=None, first_activity_time_utc=None, id=None, incident_number=None, incident_url=None, labels=None, last_activity_time_utc=None, last_modified_time_utc=None, name=None, owner=None, provider_incident_id=None, provider_name=None, related_analytic_rule_ids=None, severity=None, status=None, title=None, type=None):
if additional_data and not isinstance(additional_data, dict):
raise TypeError("Expected argument 'additional_data' to be a dict")
pulumi.set(__self__, "additional_data", additional_data)
if classification and not isinstance(classification, str):
raise TypeError("Expected argument 'classification' to be a str")
pulumi.set(__self__, "classification", classification)
if classification_comment and not isinstance(classification_comment, str):
raise TypeError("Expected argument 'classification_comment' to be a str")
pulumi.set(__self__, "classification_comment", classification_comment)
if classification_reason and not isinstance(classification_reason, str):
raise TypeError("Expected argument 'classification_reason' to be a str")
pulumi.set(__self__, "classification_reason", classification_reason)
if created_time_utc and not isinstance(created_time_utc, str):
raise TypeError("Expected argument 'created_time_utc' to be a str")
pulumi.set(__self__, "created_time_utc", created_time_utc)
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if first_activity_time_utc and not isinstance(first_activity_time_utc, str):
raise TypeError("Expected argument 'first_activity_time_utc' to be a str")
pulumi.set(__self__, "first_activity_time_utc", first_activity_time_utc)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if incident_number and not isinstance(incident_number, int):
raise TypeError("Expected argument 'incident_number' to be a int")
pulumi.set(__self__, "incident_number", incident_number)
if incident_url and not isinstance(incident_url, str):
raise TypeError("Expected argument 'incident_url' to be a str")
pulumi.set(__self__, "incident_url", incident_url)
if labels and not isinstance(labels, list):
raise TypeError("Expected argument 'labels' to be a list")
pulumi.set(__self__, "labels", labels)
if last_activity_time_utc and not isinstance(last_activity_time_utc, str):
raise TypeError("Expected argument 'last_activity_time_utc' to be a str")
pulumi.set(__self__, "last_activity_time_utc", last_activity_time_utc)
if last_modified_time_utc and not isinstance(last_modified_time_utc, str):
raise TypeError("Expected argument 'last_modified_time_utc' to be a str")
pulumi.set(__self__, "last_modified_time_utc", last_modified_time_utc)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if owner and not isinstance(owner, dict):
raise TypeError("Expected argument 'owner' to be a dict")
pulumi.set(__self__, "owner", owner)
if provider_incident_id and not isinstance(provider_incident_id, str):
raise TypeError("Expected argument 'provider_incident_id' to be a str")
pulumi.set(__self__, "provider_incident_id", provider_incident_id)
if provider_name and not isinstance(provider_name, str):
raise TypeError("Expected argument 'provider_name' to be a str")
pulumi.set(__self__, "provider_name", provider_name)
if related_analytic_rule_ids and not isinstance(related_analytic_rule_ids, list):
raise TypeError("Expected argument 'related_analytic_rule_ids' to be a list")
pulumi.set(__self__, "related_analytic_rule_ids", related_analytic_rule_ids)
if severity and not isinstance(severity, str):
raise TypeError("Expected argument 'severity' to be a str")
pulumi.set(__self__, "severity", severity)
if status and not isinstance(status, str):
raise TypeError("Expected argument 'status' to be a str")
pulumi.set(__self__, "status", status)
if title and not isinstance(title, str):
raise TypeError("Expected argument 'title' to be a str")
pulumi.set(__self__, "title", title)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="additionalData")
def additional_data(self) -> 'outputs.IncidentAdditionalDataResponse':
"""
Additional data on the incident
"""
return pulumi.get(self, "additional_data")
@property
@pulumi.getter
def classification(self) -> Optional[str]:
"""
The reason the incident was closed
"""
return pulumi.get(self, "classification")
@property
@pulumi.getter(name="classificationComment")
def classification_comment(self) -> Optional[str]:
"""
Describes the reason the incident was closed
"""
return pulumi.get(self, "classification_comment")
@property
@pulumi.getter(name="classificationReason")
def classification_reason(self) -> Optional[str]:
"""
The classification reason the incident was closed with
"""
return pulumi.get(self, "classification_reason")
@property
@pulumi.getter(name="createdTimeUtc")
def created_time_utc(self) -> str:
"""
The time the incident was created
"""
return pulumi.get(self, "created_time_utc")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
The description of the incident
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
Etag of the azure resource
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="firstActivityTimeUtc")
def first_activity_time_utc(self) -> Optional[str]:
"""
The time of the first activity in the incident
"""
return pulumi.get(self, "first_activity_time_utc")
@property
@pulumi.getter
def id(self) -> str:
"""
Azure resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="incidentNumber")
def incident_number(self) -> int:
"""
A sequential number
"""
return pulumi.get(self, "incident_number")
@property
@pulumi.getter(name="incidentUrl")
def incident_url(self) -> str:
"""
The deep-link url to the incident in Azure portal
"""
return pulumi.get(self, "incident_url")
@property
@pulumi.getter
def labels(self) -> Optional[Sequence['outputs.IncidentLabelResponse']]:
"""
List of labels relevant to this incident
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter(name="lastActivityTimeUtc")
def last_activity_time_utc(self) -> Optional[str]:
"""
The time of the last activity in the incident
"""
return pulumi.get(self, "last_activity_time_utc")
@property
@pulumi.getter(name="lastModifiedTimeUtc")
def last_modified_time_utc(self) -> str:
"""
The last time the incident was updated
"""
return pulumi.get(self, "last_modified_time_utc")
@property
@pulumi.getter
def name(self) -> str:
"""
Azure resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def owner(self) -> Optional['outputs.IncidentOwnerInfoResponse']:
"""
Describes a user that the incident is assigned to
"""
return pulumi.get(self, "owner")
@property
@pulumi.getter(name="providerIncidentId")
def provider_incident_id(self) -> Optional[str]:
"""
The incident ID assigned by the incident provider
"""
return pulumi.get(self, "provider_incident_id")
@property
@pulumi.getter(name="providerName")
def provider_name(self) -> Optional[str]:
"""
The name of the source provider that generated the incident
"""
return pulumi.get(self, "provider_name")
@property
@pulumi.getter(name="relatedAnalyticRuleIds")
def related_analytic_rule_ids(self) -> Sequence[str]:
"""
List of resource ids of Analytic rules related to the incident
"""
return pulumi.get(self, "related_analytic_rule_ids")
@property
@pulumi.getter
def severity(self) -> str:
"""
The severity of the incident
"""
return pulumi.get(self, "severity")
@property
@pulumi.getter
def status(self) -> str:
"""
The status of the incident
"""
return pulumi.get(self, "status")
@property
@pulumi.getter
def title(self) -> str:
"""
The title of the incident
"""
return pulumi.get(self, "title")
@property
@pulumi.getter
def type(self) -> str:
"""
Azure resource type
"""
return pulumi.get(self, "type")
class AwaitableGetIncidentResult(GetIncidentResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetIncidentResult(
additional_data=self.additional_data,
classification=self.classification,
classification_comment=self.classification_comment,
classification_reason=self.classification_reason,
created_time_utc=self.created_time_utc,
description=self.description,
etag=self.etag,
first_activity_time_utc=self.first_activity_time_utc,
id=self.id,
incident_number=self.incident_number,
incident_url=self.incident_url,
labels=self.labels,
last_activity_time_utc=self.last_activity_time_utc,
last_modified_time_utc=self.last_modified_time_utc,
name=self.name,
owner=self.owner,
provider_incident_id=self.provider_incident_id,
provider_name=self.provider_name,
related_analytic_rule_ids=self.related_analytic_rule_ids,
severity=self.severity,
status=self.status,
title=self.title,
type=self.type)
def get_incident(incident_id: Optional[str] = None,
operational_insights_resource_provider: Optional[str] = None,
resource_group_name: Optional[str] = None,
workspace_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetIncidentResult:
"""
Represents an incident in Azure Security Insights.
:param str incident_id: Incident ID
:param str operational_insights_resource_provider: The namespace of workspaces resource provider- Microsoft.OperationalInsights.
:param str resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
:param str workspace_name: The name of the workspace.
"""
__args__ = dict()
__args__['incidentId'] = incident_id
__args__['operationalInsightsResourceProvider'] = operational_insights_resource_provider
__args__['resourceGroupName'] = resource_group_name
__args__['workspaceName'] = workspace_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:securityinsights/v20190101preview:getIncident', __args__, opts=opts, typ=GetIncidentResult).value
return AwaitableGetIncidentResult(
additional_data=__ret__.additional_data,
classification=__ret__.classification,
classification_comment=__ret__.classification_comment,
classification_reason=__ret__.classification_reason,
created_time_utc=__ret__.created_time_utc,
description=__ret__.description,
etag=__ret__.etag,
first_activity_time_utc=__ret__.first_activity_time_utc,
id=__ret__.id,
incident_number=__ret__.incident_number,
incident_url=__ret__.incident_url,
labels=__ret__.labels,
last_activity_time_utc=__ret__.last_activity_time_utc,
last_modified_time_utc=__ret__.last_modified_time_utc,
name=__ret__.name,
owner=__ret__.owner,
provider_incident_id=__ret__.provider_incident_id,
provider_name=__ret__.provider_name,
related_analytic_rule_ids=__ret__.related_analytic_rule_ids,
severity=__ret__.severity,
status=__ret__.status,
title=__ret__.title,
type=__ret__.type)
| 39.532033 | 483 | 0.666995 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetIncidentResult',
'AwaitableGetIncidentResult',
'get_incident',
]
@pulumi.output_type
class GetIncidentResult:
def __init__(__self__, additional_data=None, classification=None, classification_comment=None, classification_reason=None, created_time_utc=None, description=None, etag=None, first_activity_time_utc=None, id=None, incident_number=None, incident_url=None, labels=None, last_activity_time_utc=None, last_modified_time_utc=None, name=None, owner=None, provider_incident_id=None, provider_name=None, related_analytic_rule_ids=None, severity=None, status=None, title=None, type=None):
if additional_data and not isinstance(additional_data, dict):
raise TypeError("Expected argument 'additional_data' to be a dict")
pulumi.set(__self__, "additional_data", additional_data)
if classification and not isinstance(classification, str):
raise TypeError("Expected argument 'classification' to be a str")
pulumi.set(__self__, "classification", classification)
if classification_comment and not isinstance(classification_comment, str):
raise TypeError("Expected argument 'classification_comment' to be a str")
pulumi.set(__self__, "classification_comment", classification_comment)
if classification_reason and not isinstance(classification_reason, str):
raise TypeError("Expected argument 'classification_reason' to be a str")
pulumi.set(__self__, "classification_reason", classification_reason)
if created_time_utc and not isinstance(created_time_utc, str):
raise TypeError("Expected argument 'created_time_utc' to be a str")
pulumi.set(__self__, "created_time_utc", created_time_utc)
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if first_activity_time_utc and not isinstance(first_activity_time_utc, str):
raise TypeError("Expected argument 'first_activity_time_utc' to be a str")
pulumi.set(__self__, "first_activity_time_utc", first_activity_time_utc)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if incident_number and not isinstance(incident_number, int):
raise TypeError("Expected argument 'incident_number' to be a int")
pulumi.set(__self__, "incident_number", incident_number)
if incident_url and not isinstance(incident_url, str):
raise TypeError("Expected argument 'incident_url' to be a str")
pulumi.set(__self__, "incident_url", incident_url)
if labels and not isinstance(labels, list):
raise TypeError("Expected argument 'labels' to be a list")
pulumi.set(__self__, "labels", labels)
if last_activity_time_utc and not isinstance(last_activity_time_utc, str):
raise TypeError("Expected argument 'last_activity_time_utc' to be a str")
pulumi.set(__self__, "last_activity_time_utc", last_activity_time_utc)
if last_modified_time_utc and not isinstance(last_modified_time_utc, str):
raise TypeError("Expected argument 'last_modified_time_utc' to be a str")
pulumi.set(__self__, "last_modified_time_utc", last_modified_time_utc)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if owner and not isinstance(owner, dict):
raise TypeError("Expected argument 'owner' to be a dict")
pulumi.set(__self__, "owner", owner)
if provider_incident_id and not isinstance(provider_incident_id, str):
raise TypeError("Expected argument 'provider_incident_id' to be a str")
pulumi.set(__self__, "provider_incident_id", provider_incident_id)
if provider_name and not isinstance(provider_name, str):
raise TypeError("Expected argument 'provider_name' to be a str")
pulumi.set(__self__, "provider_name", provider_name)
if related_analytic_rule_ids and not isinstance(related_analytic_rule_ids, list):
raise TypeError("Expected argument 'related_analytic_rule_ids' to be a list")
pulumi.set(__self__, "related_analytic_rule_ids", related_analytic_rule_ids)
if severity and not isinstance(severity, str):
raise TypeError("Expected argument 'severity' to be a str")
pulumi.set(__self__, "severity", severity)
if status and not isinstance(status, str):
raise TypeError("Expected argument 'status' to be a str")
pulumi.set(__self__, "status", status)
if title and not isinstance(title, str):
raise TypeError("Expected argument 'title' to be a str")
pulumi.set(__self__, "title", title)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="additionalData")
def additional_data(self) -> 'outputs.IncidentAdditionalDataResponse':
return pulumi.get(self, "additional_data")
@property
@pulumi.getter
def classification(self) -> Optional[str]:
return pulumi.get(self, "classification")
@property
@pulumi.getter(name="classificationComment")
def classification_comment(self) -> Optional[str]:
return pulumi.get(self, "classification_comment")
@property
@pulumi.getter(name="classificationReason")
def classification_reason(self) -> Optional[str]:
return pulumi.get(self, "classification_reason")
@property
@pulumi.getter(name="createdTimeUtc")
def created_time_utc(self) -> str:
return pulumi.get(self, "created_time_utc")
@property
@pulumi.getter
def description(self) -> Optional[str]:
return pulumi.get(self, "description")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="firstActivityTimeUtc")
def first_activity_time_utc(self) -> Optional[str]:
return pulumi.get(self, "first_activity_time_utc")
@property
@pulumi.getter
def id(self) -> str:
return pulumi.get(self, "id")
@property
@pulumi.getter(name="incidentNumber")
def incident_number(self) -> int:
return pulumi.get(self, "incident_number")
@property
@pulumi.getter(name="incidentUrl")
def incident_url(self) -> str:
return pulumi.get(self, "incident_url")
@property
@pulumi.getter
def labels(self) -> Optional[Sequence['outputs.IncidentLabelResponse']]:
return pulumi.get(self, "labels")
@property
@pulumi.getter(name="lastActivityTimeUtc")
def last_activity_time_utc(self) -> Optional[str]:
return pulumi.get(self, "last_activity_time_utc")
@property
@pulumi.getter(name="lastModifiedTimeUtc")
def last_modified_time_utc(self) -> str:
return pulumi.get(self, "last_modified_time_utc")
@property
@pulumi.getter
def name(self) -> str:
return pulumi.get(self, "name")
@property
@pulumi.getter
def owner(self) -> Optional['outputs.IncidentOwnerInfoResponse']:
return pulumi.get(self, "owner")
@property
@pulumi.getter(name="providerIncidentId")
def provider_incident_id(self) -> Optional[str]:
return pulumi.get(self, "provider_incident_id")
@property
@pulumi.getter(name="providerName")
def provider_name(self) -> Optional[str]:
return pulumi.get(self, "provider_name")
@property
@pulumi.getter(name="relatedAnalyticRuleIds")
def related_analytic_rule_ids(self) -> Sequence[str]:
return pulumi.get(self, "related_analytic_rule_ids")
@property
@pulumi.getter
def severity(self) -> str:
return pulumi.get(self, "severity")
@property
@pulumi.getter
def status(self) -> str:
return pulumi.get(self, "status")
@property
@pulumi.getter
def title(self) -> str:
return pulumi.get(self, "title")
@property
@pulumi.getter
def type(self) -> str:
return pulumi.get(self, "type")
class AwaitableGetIncidentResult(GetIncidentResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetIncidentResult(
additional_data=self.additional_data,
classification=self.classification,
classification_comment=self.classification_comment,
classification_reason=self.classification_reason,
created_time_utc=self.created_time_utc,
description=self.description,
etag=self.etag,
first_activity_time_utc=self.first_activity_time_utc,
id=self.id,
incident_number=self.incident_number,
incident_url=self.incident_url,
labels=self.labels,
last_activity_time_utc=self.last_activity_time_utc,
last_modified_time_utc=self.last_modified_time_utc,
name=self.name,
owner=self.owner,
provider_incident_id=self.provider_incident_id,
provider_name=self.provider_name,
related_analytic_rule_ids=self.related_analytic_rule_ids,
severity=self.severity,
status=self.status,
title=self.title,
type=self.type)
def get_incident(incident_id: Optional[str] = None,
operational_insights_resource_provider: Optional[str] = None,
resource_group_name: Optional[str] = None,
workspace_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetIncidentResult:
__args__ = dict()
__args__['incidentId'] = incident_id
__args__['operationalInsightsResourceProvider'] = operational_insights_resource_provider
__args__['resourceGroupName'] = resource_group_name
__args__['workspaceName'] = workspace_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:securityinsights/v20190101preview:getIncident', __args__, opts=opts, typ=GetIncidentResult).value
return AwaitableGetIncidentResult(
additional_data=__ret__.additional_data,
classification=__ret__.classification,
classification_comment=__ret__.classification_comment,
classification_reason=__ret__.classification_reason,
created_time_utc=__ret__.created_time_utc,
description=__ret__.description,
etag=__ret__.etag,
first_activity_time_utc=__ret__.first_activity_time_utc,
id=__ret__.id,
incident_number=__ret__.incident_number,
incident_url=__ret__.incident_url,
labels=__ret__.labels,
last_activity_time_utc=__ret__.last_activity_time_utc,
last_modified_time_utc=__ret__.last_modified_time_utc,
name=__ret__.name,
owner=__ret__.owner,
provider_incident_id=__ret__.provider_incident_id,
provider_name=__ret__.provider_name,
related_analytic_rule_ids=__ret__.related_analytic_rule_ids,
severity=__ret__.severity,
status=__ret__.status,
title=__ret__.title,
type=__ret__.type)
| true | true |
1c456b4651c14bf62f1b981a4373ef3876f9cc4a | 10,375 | py | Python | python/tvm/relay/analysis.py | Checkmate50/tvm | 0293f42232ac2506c9cf8914410282c54ee4c0ed | [
"Apache-2.0"
] | null | null | null | python/tvm/relay/analysis.py | Checkmate50/tvm | 0293f42232ac2506c9cf8914410282c54ee4c0ed | [
"Apache-2.0"
] | null | null | null | python/tvm/relay/analysis.py | Checkmate50/tvm | 0293f42232ac2506c9cf8914410282c54ee4c0ed | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=no-else-return
# pylint: disable=unidiomatic-typecheck
"""
This file contains the set of passes for Relay, which exposes an interface for
configuring the passes and scripting them in Python.
"""
from . import _analysis
from . import _make
from .expr import Expr
from .ty import Type
from .module import Module
from .feature import Feature
def post_order_visit(expr, fvisit):
    """Apply ``fvisit`` to every node of ``expr`` in post-DFS order.

    Each node is guaranteed to be visited exactly once.

    Parameters
    ----------
    expr : tvm.relay.Expr
        The expression to traverse.
    fvisit : function
        Callback invoked on each visited node.
    """
    return _analysis.post_order_visit(expr, fvisit)
def well_formed(expr):
    """Determine whether every Var in ``expr`` is bound exactly once.

    Parameters
    ----------
    expr : tvm.relay.Expr
        The expression to validate.

    Returns
    -------
    well_form : bool
        True when the expression is well formed.
    """
    return _analysis.well_formed(expr)
def check_kind(t, mod=None):
    """Verify that ``t`` is well kinded and return its kind.

    For example, a type cannot contain a tensor of tensors, nor can a
    tuple type of two shapes exist.

    Parameters
    ----------
    t : tvm.relay.Type
        The type to check.
    mod : Optional[tvm.relay.Module]
        The global module, if any.

    Returns
    -------
    kind : Kind
        The kind of ``t``.

    Examples
    --------
    .. code:: python

        assert check_kind(relay.TupleType([relay.TypeParam('tp1', relay.Kind.Shape)])) == Shape
        assert check_kind(relay.TupleType([relay.TypeParam('tp1', relay.Kind.Type)])) == Type
    """
    # Guard clause instead of if/else: fall through to the module-aware call.
    if mod is None:
        return _analysis.check_kind(t)
    return _analysis.check_kind(t, mod)
def check_constant(expr):
    """Report whether ``expr`` is a constant expression.

    Parameters
    ----------
    expr : tvm.relay.Expr
        The expression to inspect.

    Returns
    -------
    result : bool
        True when the expression is constant.
    """
    return _analysis.check_constant(expr)
def free_vars(expr):
    """Get free Vars from expression expr in Post DFS order.

    Parameters
    ----------
    expr : tvm.relay.Expr
        The input expression

    Returns
    -------
    free : List[tvm.relay.Var]
        The list of free variables in post DFS order.

    Note
    ----
    The fact that Vars are post-DFS ordered is useful in
    neural networks: usually this means weights of earlier
    layers are ordered first.
    """
    return _analysis.free_vars(expr)
def bound_vars(expr):
    """Collect the bound Vars of ``expr`` in post-DFS order.

    Parameters
    ----------
    expr : tvm.relay.Expr
        The expression to scan.

    Returns
    -------
    free : List[tvm.relay.Var]
        The bound variables, in post-DFS order.
    """
    return _analysis.bound_vars(expr)
def all_vars(expr):
    """Collect every Var appearing in ``expr``, in post-DFS order.

    Parameters
    ----------
    expr : tvm.relay.Expr
        The expression to scan.

    Returns
    -------
    free : List[tvm.relay.Var]
        All variables, in post-DFS order.
    """
    return _analysis.all_vars(expr)
def free_type_vars(expr, mod=None):
    """Collect the free type variables of an expression or type.

    Parameters
    ----------
    expr : Union[tvm.relay.Expr, tvm.relay.Type]
        The expression or type to scan.
    mod : Optional[tvm.relay.Module]
        The global module; a fresh empty module is used when omitted.

    Returns
    -------
    free : List[tvm.relay.TypeVar]
        The free type variables, in post-DFS order.
    """
    return _analysis.free_type_vars(expr, mod if mod is not None else Module())
def bound_type_vars(expr, mod=None):
    """Collect the bound type variables of an expression or type.

    Parameters
    ----------
    expr : Union[tvm.relay.Expr, tvm.relay.Type]
        The expression or type to scan.
    mod : Optional[tvm.relay.Module]
        The global module; a fresh empty module is used when omitted.

    Returns
    -------
    free : List[tvm.relay.TypeVar]
        The bound type variables, in post-DFS order.
    """
    return _analysis.bound_type_vars(expr, mod if mod is not None else Module())
def all_type_vars(expr, mod=None):
    """Collect every type variable of an expression or type.

    Parameters
    ----------
    expr : Union[tvm.relay.Expr, tvm.relay.Type]
        The expression or type to scan.
    mod : Optional[tvm.relay.Module]
        The global module; a fresh empty module is used when omitted.

    Returns
    -------
    free : List[tvm.relay.TypeVar]
        All type variables, in post-DFS order.
    """
    return _analysis.all_type_vars(expr, mod if mod is not None else Module())
def alpha_equal(lhs, rhs):
    """Compare two Relay expressions for structural (alpha) equivalence.

    Parameters
    ----------
    lhs : tvm.relay.Expr
        One of the input Expressions.
    rhs : tvm.relay.Expr
        One of the input Expressions.

    Returns
    -------
    result : bool
        True iff ``lhs`` is alpha equal to ``rhs``.
    """
    equal = _make._alpha_equal(lhs, rhs)
    return bool(equal)
def assert_alpha_equal(lhs, rhs):
    """Assert that two Relay expressions are structurally (alpha) equivalent.

    Parameters
    ----------
    lhs : tvm.relay.Expr
        One of the input Expressions.
    rhs : tvm.relay.Expr
        One of the input Expressions.
    """
    _make._assert_alpha_equal(lhs, rhs)
def graph_equal(lhs, rhs):
    """Compare two Relay expressions for data-flow equivalence.

    Unlike alpha equality, variables in ``lhs`` and ``rhs`` are not
    expected to match; they are treated as sources and mapped onto one
    another.

    Parameters
    ----------
    lhs : tvm.relay.Expr
        One of the input Expressions.
    rhs : tvm.relay.Expr
        One of the input Expressions.

    Returns
    -------
    result : bool
        True iff ``lhs`` is data-flow equivalent to ``rhs``.
    """
    equal = _make._graph_equal(lhs, rhs)
    return bool(equal)
def assert_graph_equal(lhs, rhs):
    """Assert that two Relay expressions are data-flow equivalent.

    Unlike alpha equality, variables in ``lhs`` and ``rhs`` are not
    expected to match; they are treated as sources and mapped onto one
    another.

    Parameters
    ----------
    lhs : tvm.relay.Expr
        One of the input Expressions.
    rhs : tvm.relay.Expr
        One of the input Expressions.
    """
    _make._assert_graph_equal(lhs, rhs)
def collect_device_info(expr):
    """Collect the device allocation map for ``expr``.

    Device ids are propagated from the ``device_copy`` operators.

    Parameters
    ----------
    expr : tvm.relay.Expr
        The expression to analyze.

    Returns
    -------
    ret : Dict[tvm.relay.expr, int]
        Mapping from tvm.relay.Expr to device type.
    """
    return _analysis.CollectDeviceInfo(expr)
def collect_device_annotation_ops(expr):
    """Collect the device annotation ops found in ``expr``.

    Parameters
    ----------
    expr : tvm.relay.Expr
        The expression to analyze.

    Returns
    -------
    ret : Dict[tvm.relay.expr, int]
        Mapping from tvm.relay.Expr to device type, keyed by the
        annotation expressions.
    """
    return _analysis.CollectDeviceAnnotationOps(expr)
def get_total_mac_number(expr):
    """Count the multiply-accumulate (MAC) operations of a model.

    Parameters
    ----------
    expr : tvm.relay.Expr
        The model expression.

    Returns
    -------
    result : int64
        The number of MACs in the model.
    """
    return _analysis.GetTotalMacNumber(expr)
def missing_gradient_check(expr):
    """
    Check if there is a missing gradient and print it.

    Side-effect only: the underlying C++ pass reports any operator in
    ``expr`` that lacks a registered gradient.  Nothing is returned.

    Parameters
    ----------
    expr : tvm.relay.Expr
        The input expression.
    """
    _analysis.missing_gradient_check(expr)
def unmatched_cases(match, mod=None):
    """Find cases that the match expression does not catch, if any.

    Parameters
    ----------
    match : tvm.relay.Match
        The match expression
    mod : Optional[tvm.relay.Module]
        The module (defaults to an empty module)

    Returns
    -------
    missing_patterns : [tvm.relay.Pattern]
        Patterns that the match expression does not catch.
    """
    missing_patterns = _analysis.unmatched_cases(match, mod)
    return missing_patterns
def detect_feature(a, b=None):
    """Detect the features used in a relay program.

    Parameters
    ----------
    a : Union[tvm.relay.Expr, tvm.relay.Module]
        The input expression or module.
    b : Optional[Union[tvm.relay.Expr, tvm.relay.Module]]
        The input expression or module.
        The two arguments cannot both be expression or module.

    Returns
    -------
    features : Set[Feature]
        Features used in the program.
    """
    # Normalize argument order so that the expression always comes first.
    if isinstance(a, Module):
        a, b = b, a
    return {Feature(int(x)) for x in _analysis.detect_feature(a, b)}
def structural_hash(value):
    """Hash a Relay expression structurally.

    Parameters
    ----------
    value : Union[tvm.relay.Expr, tvm.relay.Type]
        The expression or type to hash.

    Returns
    -------
    result : int
        The hash value.

    Raises
    ------
    TypeError
        If ``value`` is neither a relay Expr nor a relay Type.
    """
    if isinstance(value, Expr):
        return int(_analysis._expr_hash(value))
    elif isinstance(value, Type):
        return int(_analysis._type_hash(value))
    else:
        # Bug fix: the original built this message from two concatenated
        # literals with no separating space, yielding
        # "...expectedrelay.Expr or relay.Type".
        msg = "found value of type {0} expected relay.Expr or relay.Type".format(
            type(value)
        )
        raise TypeError(msg)
| 24.585308 | 95 | 0.64212 |
from . import _analysis
from . import _make
from .expr import Expr
from .ty import Type
from .module import Module
from .feature import Feature
def post_order_visit(expr, fvisit):
    """Visit ``expr`` with ``fvisit`` via the C++ post-order visitor."""
    return _analysis.post_order_visit(expr, fvisit)
def well_formed(expr):
    """Delegate to the C++ ``well_formed`` check for ``expr``."""
    return _analysis.well_formed(expr)
def check_kind(t, mod=None):
    """Kind-check type ``t``, against module ``mod`` when one is given."""
    if mod is not None:
        return _analysis.check_kind(t, mod)
    else:
        return _analysis.check_kind(t)
def check_constant(expr):
    """Delegate to the C++ constant check for ``expr``."""
    return _analysis.check_constant(expr)
def free_vars(expr):
    """Return the free variables of ``expr`` (C++ pass)."""
    return _analysis.free_vars(expr)
def bound_vars(expr):
    """Return the bound variables of ``expr`` (C++ pass)."""
    return _analysis.bound_vars(expr)
def all_vars(expr):
    """Return all variables of ``expr`` (C++ pass)."""
    return _analysis.all_vars(expr)
def free_type_vars(expr, mod=None):
    """Return free type variables of ``expr``; uses an empty Module if none given."""
    use_mod = mod if mod is not None else Module()
    return _analysis.free_type_vars(expr, use_mod)
def bound_type_vars(expr, mod=None):
    """Return bound type variables of ``expr``; uses an empty Module if none given."""
    use_mod = mod if mod is not None else Module()
    return _analysis.bound_type_vars(expr, use_mod)
def all_type_vars(expr, mod=None):
    """Return all type variables of ``expr``; uses an empty Module if none given."""
    use_mod = mod if mod is not None else Module()
    return _analysis.all_type_vars(expr, use_mod)
def alpha_equal(lhs, rhs):
    """Return True iff ``lhs`` and ``rhs`` are alpha-equal (C++ check)."""
    return bool(_make._alpha_equal(lhs, rhs))
def assert_alpha_equal(lhs, rhs):
    """Assert alpha-equality of ``lhs`` and ``rhs``; raises on mismatch."""
    _make._assert_alpha_equal(lhs, rhs)
def graph_equal(lhs, rhs):
    """Return True iff ``lhs`` and ``rhs`` are data-flow equivalent (C++ check)."""
    return bool(_make._graph_equal(lhs, rhs))
def assert_graph_equal(lhs, rhs):
    """Assert data-flow equivalence of ``lhs`` and ``rhs``; raises on mismatch."""
    _make._assert_graph_equal(lhs, rhs)
def collect_device_info(expr):
    """Return the device allocation map for ``expr`` (C++ pass)."""
    return _analysis.CollectDeviceInfo(expr)
def collect_device_annotation_ops(expr):
    """Return the device annotation ops for ``expr`` (C++ pass)."""
    return _analysis.CollectDeviceAnnotationOps(expr)
def get_total_mac_number(expr):
    """Return the multiply-accumulate count of ``expr`` (C++ pass)."""
    return _analysis.GetTotalMacNumber(expr)
def missing_gradient_check(expr):
    """Report (side effect only) operators in ``expr`` missing a gradient."""
    _analysis.missing_gradient_check(expr)
def unmatched_cases(match, mod=None):
    """Return patterns that the ``match`` expression does not catch."""
    return _analysis.unmatched_cases(match, mod)
def detect_feature(a, b=None):
    """Return the set of Features used; accepts (expr, module) in either order."""
    if isinstance(a, Module):
        a, b = b, a
    return set([Feature(int(x)) for x in _analysis.detect_feature(a, b)])
def structural_hash(value):
    """Structurally hash a relay Expr or Type; raises TypeError otherwise."""
    if isinstance(value, Expr):
        return int(_analysis._expr_hash(value))
    elif isinstance(value, Type):
        return int(_analysis._type_hash(value))
    else:
        # NOTE(review): these two literals concatenate with no separating
        # space, producing "...expectedrelay.Expr or relay.Type".
        msg = ("found value of type {0} expected" +
               "relay.Expr or relay.Type").format(type(value))
        raise TypeError(msg)
| true | true |
1c456c74c2cf7a473b376d5f287a9bb1a2b9f3b9 | 5,363 | py | Python | netbox/utilities/forms/widgets.py | letic/netbox | 0930745e16330edf00da081150b079d5ed6ecc02 | [
"Apache-2.0"
] | 2 | 2021-07-08T03:58:12.000Z | 2022-02-11T21:50:46.000Z | netbox/utilities/forms/widgets.py | letic/netbox | 0930745e16330edf00da081150b079d5ed6ecc02 | [
"Apache-2.0"
] | 25 | 2019-09-17T19:40:50.000Z | 2022-03-11T04:01:55.000Z | netbox/utilities/forms/widgets.py | letic/netbox | 0930745e16330edf00da081150b079d5ed6ecc02 | [
"Apache-2.0"
] | 1 | 2022-02-11T21:50:58.000Z | 2022-02-11T21:50:58.000Z | import json
from django import forms
from django.conf import settings
from django.contrib.postgres.forms import SimpleArrayField
from utilities.choices import ColorChoices
from .utils import add_blank_choice, parse_numeric_range
# Public API of this widgets module.
__all__ = (
    'APISelect',
    'APISelectMultiple',
    'BulkEditNullBooleanSelect',
    'ColorSelect',
    'ContentTypeSelect',
    'DatePicker',
    'DateTimePicker',
    'NumericArrayField',
    'SelectWithDisabled',
    'SelectWithPK',
    'SlugWidget',
    'SmallTextarea',
    'StaticSelect2',
    'StaticSelect2Multiple',
    'TimePicker',
)
class SmallTextarea(forms.Textarea):
    """
    Subclass used for rendering a smaller textarea element.
    """
    # Marker subclass only; sizing is applied by templates/CSS keyed on the type.
    pass
class SlugWidget(forms.TextInput):
    """
    Subclass TextInput and add a slug regeneration button next to the form field.
    """
    template_name = 'widgets/sluginput.html'
class ColorSelect(forms.Select):
    """
    Extends the built-in Select widget to colorize each <option>.
    """
    option_template_name = 'widgets/colorselect_option.html'
    def __init__(self, *args, **kwargs):
        # Force the choice list to the color palette, prefixed with a blank choice.
        kwargs['choices'] = add_blank_choice(ColorChoices)
        super().__init__(*args, **kwargs)
        # attrs exists only after super().__init__() has run.
        self.attrs['class'] = 'netbox-select2-color-picker'
class BulkEditNullBooleanSelect(forms.NullBooleanSelect):
    """
    A Select widget for NullBooleanFields
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Override the built-in choice labels ('1' = no change, '2' = Yes, '3' = No).
        self.choices = (
            ('1', '---------'),
            ('2', 'Yes'),
            ('3', 'No'),
        )
        self.attrs['class'] = 'netbox-select2-static'
class SelectWithDisabled(forms.Select):
    """
    Modified the stock Select widget to accept choices using a dict() for a label. The dict for each option must include
    'label' (string) and 'disabled' (boolean).
    """
    option_template_name = 'widgets/selectwithdisabled_option.html'
class StaticSelect2(SelectWithDisabled):
    """
    A static <select> form widget using the Select2 library.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # CSS hook consumed by the Select2 initialization JavaScript.
        self.attrs['class'] = 'netbox-select2-static'
class StaticSelect2Multiple(StaticSelect2, forms.SelectMultiple):
    """
    Multi-select variant of StaticSelect2.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Tells the front-end Select2 initializer to enable multi-selection.
        self.attrs['data-multiple'] = 1
class SelectWithPK(StaticSelect2):
    """
    Include the primary key of each option in the option label (e.g. "Router7 (4721)").
    """
    option_template_name = 'widgets/select_option_with_pk.html'
class ContentTypeSelect(StaticSelect2):
    """
    Appends an `api-value` attribute equal to the slugified model name for each ContentType. For example:
        <option value="37" api-value="console-server-port">console server port</option>
    This attribute can be used to reference the relevant API endpoint for a particular ContentType.
    """
    option_template_name = 'widgets/select_contenttype.html'
class NumericArrayField(SimpleArrayField):
    """
    Array field that also accepts numeric-range strings (e.g. "1-5") by
    expanding them into a comma-separated list before normal parsing.
    """
    def to_python(self, value):
        # Empty input normalizes to an empty list.
        if not value:
            return []
        if isinstance(value, str):
            # Expand range notation, then rebuild a comma-separated string
            # for the parent class to parse.
            expanded = parse_numeric_range(value)
            value = ','.join(str(number) for number in expanded)
        return super().to_python(value)
class APISelect(SelectWithDisabled):
    """
    A select widget populated via an API call

    :param api_url: API endpoint URL. Required if not set automatically by the parent field.
    """
    def __init__(self, api_url=None, full=False, *args, **kwargs):
        # NOTE: `full` is accepted for backward compatibility but is unused here.
        super().__init__(*args, **kwargs)
        self.attrs['class'] = 'netbox-select2-api'
        if api_url:
            self.attrs['data-url'] = '/{}{}'.format(settings.BASE_PATH, api_url.lstrip('/'))  # Inject BASE_PATH
    def add_query_param(self, name, value):
        """
        Add details for an additional query param in the form of a data-* JSON-encoded list attribute.

        :param name: The name of the query param
        :param value: The value of the query param
        """
        key = f'data-query-param-{name}'
        values = json.loads(self.attrs.get(key, '[]'))
        # isinstance() replaces the non-idiomatic `type(value) in (list, tuple)`;
        # it additionally accepts list/tuple subclasses.
        if isinstance(value, (list, tuple)):
            values.extend([str(v) for v in value])
        else:
            values.append(str(value))
        self.attrs[key] = json.dumps(values)
class APISelectMultiple(APISelect, forms.SelectMultiple):
    """
    Multi-select variant of APISelect.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Tells the front-end Select2 initializer to enable multi-selection.
        self.attrs['data-multiple'] = 1
class DatePicker(forms.TextInput):
    """
    Date picker using Flatpickr.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # CSS class is the hook the Flatpickr initialization JS binds to.
        self.attrs['class'] = 'date-picker'
        self.attrs['placeholder'] = 'YYYY-MM-DD'
class DateTimePicker(forms.TextInput):
    """
    DateTime picker using Flatpickr.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.attrs['class'] = 'datetime-picker'
        self.attrs['placeholder'] = 'YYYY-MM-DD hh:mm:ss'
class TimePicker(forms.TextInput):
    """
    Time picker using Flatpickr.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.attrs['class'] = 'time-picker'
        self.attrs['placeholder'] = 'hh:mm:ss'
| 28.078534 | 120 | 0.641618 | import json
from django import forms
from django.conf import settings
from django.contrib.postgres.forms import SimpleArrayField
from utilities.choices import ColorChoices
from .utils import add_blank_choice, parse_numeric_range
# Public API of this widgets module.
__all__ = (
    'APISelect',
    'APISelectMultiple',
    'BulkEditNullBooleanSelect',
    'ColorSelect',
    'ContentTypeSelect',
    'DatePicker',
    'DateTimePicker',
    'NumericArrayField',
    'SelectWithDisabled',
    'SelectWithPK',
    'SlugWidget',
    'SmallTextarea',
    'StaticSelect2',
    'StaticSelect2Multiple',
    'TimePicker',
)
# Marker subclass; sizing is applied by templates/CSS elsewhere.
class SmallTextarea(forms.Textarea):
    pass
# TextInput rendered with a custom template (adds a slug-regeneration button).
class SlugWidget(forms.TextInput):
    template_name = 'widgets/sluginput.html'
# Select whose choices are forced to the color palette (with a blank option).
class ColorSelect(forms.Select):
    option_template_name = 'widgets/colorselect_option.html'
    def __init__(self, *args, **kwargs):
        kwargs['choices'] = add_blank_choice(ColorChoices)
        super().__init__(*args, **kwargs)
        self.attrs['class'] = 'netbox-select2-color-picker'
# NullBooleanSelect with relabeled choices for bulk-edit forms.
class BulkEditNullBooleanSelect(forms.NullBooleanSelect):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.choices = (
            ('1', '---------'),
            ('2', 'Yes'),
            ('3', 'No'),
        )
        self.attrs['class'] = 'netbox-select2-static'
# Select rendered with a template that honors per-option 'disabled' flags.
class SelectWithDisabled(forms.Select):
    option_template_name = 'widgets/selectwithdisabled_option.html'
# Static <select> enhanced by the Select2 front-end library.
class StaticSelect2(SelectWithDisabled):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.attrs['class'] = 'netbox-select2-static'
# Multi-select variant of StaticSelect2.
class StaticSelect2Multiple(StaticSelect2, forms.SelectMultiple):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.attrs['data-multiple'] = 1
# Option labels include each object's primary key.
class SelectWithPK(StaticSelect2):
    option_template_name = 'widgets/select_option_with_pk.html'
# Options carry an api-value attribute (slugified model name) per ContentType.
class ContentTypeSelect(StaticSelect2):
    option_template_name = 'widgets/select_contenttype.html'
# Array field that expands numeric-range strings before parsing.
class NumericArrayField(SimpleArrayField):
    def to_python(self, value):
        if not value:
            return []
        if isinstance(value, str):
            value = ','.join([str(n) for n in parse_numeric_range(value)])
        return super().to_python(value)
# Select populated at the front end from an API endpoint (data-url attribute).
class APISelect(SelectWithDisabled):
    def __init__(self, api_url=None, full=False, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.attrs['class'] = 'netbox-select2-api'
        if api_url:
            # BASE_PATH is injected so the endpoint works under a URL prefix.
            self.attrs['data-url'] = '/{}{}'.format(settings.BASE_PATH, api_url.lstrip('/'))
    def add_query_param(self, name, value):
        # Query params accumulate in a JSON-encoded list stored as a data-* attribute.
        key = f'data-query-param-{name}'
        values = json.loads(self.attrs.get(key, '[]'))
        if type(value) in (list, tuple):
            values.extend([str(v) for v in value])
        else:
            values.append(str(value))
        self.attrs[key] = json.dumps(values)
# Multi-select variant of APISelect.
class APISelectMultiple(APISelect, forms.SelectMultiple):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.attrs['data-multiple'] = 1
# Flatpickr-backed date input.
class DatePicker(forms.TextInput):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.attrs['class'] = 'date-picker'
        self.attrs['placeholder'] = 'YYYY-MM-DD'
# Flatpickr-backed date-time input.
class DateTimePicker(forms.TextInput):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.attrs['class'] = 'datetime-picker'
        self.attrs['placeholder'] = 'YYYY-MM-DD hh:mm:ss'
# Flatpickr-backed time input.
class TimePicker(forms.TextInput):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.attrs['class'] = 'time-picker'
        self.attrs['placeholder'] = 'hh:mm:ss'
| true | true |
1c456cd394ed39fe157a450029b8b5c2dcc40bd1 | 598 | py | Python | src/modax/training/.ipynb_checkpoints/utils-checkpoint.py | GJBoth/modax | c7e1c128d4dd48b776f8ec4fa724c2e4b6e13c82 | [
"MIT"
] | 2 | 2021-12-10T14:36:37.000Z | 2022-02-10T11:47:03.000Z | src/modax/training/.ipynb_checkpoints/utils-checkpoint.py | GJBoth/modax | c7e1c128d4dd48b776f8ec4fa724c2e4b6e13c82 | [
"MIT"
] | null | null | null | src/modax/training/.ipynb_checkpoints/utils-checkpoint.py | GJBoth/modax | c7e1c128d4dd48b776f8ec4fa724c2e4b6e13c82 | [
"MIT"
] | 2 | 2020-12-22T14:49:13.000Z | 2021-04-09T08:52:08.000Z | from jax import jit, value_and_grad
from functools import partial
import jax.profiler
def create_update(loss_fn, loss_fn_args):
    """Build a jitted single optimization step for ``loss_fn``.

    Parameters
    ----------
    loss_fn : callable
        Loss with signature ``loss_fn(params, state, *loss_fn_args)`` returning
        ``(loss, (updated_state, metrics, output))`` (the ``has_aux=True`` layout).
    loss_fn_args : sequence
        Extra positional arguments forwarded to ``loss_fn`` on every step.

    Returns
    -------
    callable
        Jitted ``step(opt, state) -> ((opt, updated_state), metrics, output)``.
    """
    def step(opt, state, loss_fn, loss_fn_args):
        # Differentiate w.r.t. the parameters (argnums=0); the aux payload
        # carries the updated state, metrics, and model output.
        grad_fn = value_and_grad(loss_fn, argnums=0, has_aux=True)
        (loss, (updated_state, metrics, output)), grad = grad_fn(
            opt.target, state, *loss_fn_args
        )
        opt = opt.apply_gradient(grad)
        # Fix: this was an f-string with no placeholders (plain literal suffices).
        # NOTE(review): dumping a device-memory profile on every step is costly;
        # presumably a debugging aid — confirm before using in long runs.
        jax.profiler.save_device_memory_profile("memory.prof")
        return (opt, updated_state), metrics, output
    return jit(partial(step, loss_fn=loss_fn, loss_fn_args=loss_fn_args))
| 35.176471 | 73 | 0.704013 | from jax import jit, value_and_grad
from functools import partial
import jax.profiler
def create_update(loss_fn, loss_fn_args):
    """Build a jitted single optimization step for ``loss_fn``.

    ``loss_fn(params, state, *loss_fn_args)`` must return
    ``(loss, (updated_state, metrics, output))`` (the ``has_aux=True`` layout).
    Returns a jitted ``step(opt, state)`` callable.
    """
    def step(opt, state, loss_fn, loss_fn_args):
        # Differentiate w.r.t. the parameters (argnums=0).
        grad_fn = value_and_grad(loss_fn, argnums=0, has_aux=True)
        (loss, (updated_state, metrics, output)), grad = grad_fn(
            opt.target, state, *loss_fn_args
        )
        opt = opt.apply_gradient(grad)
        # NOTE(review): f-string has no placeholders; also, profiling on every
        # step is expensive — presumably a debugging aid, confirm before keeping.
        jax.profiler.save_device_memory_profile(f"memory.prof")
        return (opt, updated_state), metrics, output
    return jit(partial(step, loss_fn=loss_fn, loss_fn_args=loss_fn_args))
| true | true |
1c456f4aeae0fd4829f9f0818a661eb6433b1c8e | 399 | py | Python | tests/multidl/downloaders/test_local_file_downloader.py | gazay/chiliad | 771b3d0f7004f2a03094bad7bcc0103715a6c73f | [
"MIT"
] | 16 | 2018-02-12T23:47:26.000Z | 2021-07-23T12:43:05.000Z | tests/multidl/downloaders/test_local_file_downloader.py | gazay/chiliad | 771b3d0f7004f2a03094bad7bcc0103715a6c73f | [
"MIT"
] | 6 | 2017-10-14T15:36:52.000Z | 2022-02-13T17:17:17.000Z | tests/multidl/downloaders/test_local_file_downloader.py | gazay/chiliad | 771b3d0f7004f2a03094bad7bcc0103715a6c73f | [
"MIT"
] | 6 | 2018-05-11T00:16:00.000Z | 2021-05-03T02:02:55.000Z | # -*- coding: utf-8 -*-
import pytest
from multidl.downloaders.local_file_downloader import LocalFileDownloader
@pytest.mark.parametrize('url, expected', [
    ('file:///dir/file1.txt', 'file1.txt'),
    ('file:///file2.txt', 'file2.txt'),
])
def test_get_file_name(tmpdir, url, expected):
    """get_file_name() should return the basename of the file:// URL."""
    # tmpdir provides a throwaway output directory for the downloader.
    downloader = LocalFileDownloader(url, str(tmpdir))
    assert downloader.get_file_name() == expected
| 26.6 | 73 | 0.704261 |
import pytest
from multidl.downloaders.local_file_downloader import LocalFileDownloader
@pytest.mark.parametrize('url, expected', [
    ('file:///dir/file1.txt', 'file1.txt'),
    ('file:///file2.txt', 'file2.txt'),
])
def test_get_file_name(tmpdir, url, expected):
    """get_file_name() should return the basename of the file:// URL."""
    downloader = LocalFileDownloader(url, str(tmpdir))
    assert downloader.get_file_name() == expected
| true | true |
1c456f6f57902768c2181ef37ffa76b83cb79aad | 2,283 | py | Python | src/demo/worker_flags.py | Ravi-0809/question-generation | 9065a3b47293b8a69a0548af1f6bedd4a4aa7f9c | [
"MIT"
] | 212 | 2018-08-15T11:06:35.000Z | 2021-11-21T10:21:55.000Z | src/demo/worker_flags.py | Ravi-0809/question-generation | 9065a3b47293b8a69a0548af1f6bedd4a4aa7f9c | [
"MIT"
] | 44 | 2018-10-15T12:50:31.000Z | 2020-11-13T18:02:03.000Z | src/demo/worker_flags.py | Ravi-0809/question-generation | 9065a3b47293b8a69a0548af1f6bedd4a4aa7f9c | [
"MIT"
] | 51 | 2018-08-17T18:17:43.000Z | 2021-03-04T06:14:52.000Z | class FlagsObject(object):
pass
# Singleton namespace object; flag attributes are assigned ad hoc below.
FLAGS = FlagsObject()
# config: run bookkeeping (mode, checkpointing, schedule, paths)
FLAGS.testing = False
FLAGS.model_type = 'RL-S2S'
FLAGS.restore = False
FLAGS.restore_path = None
FLAGS.policy_gradient = False
FLAGS.glove_vocab = False
FLAGS.embedding_loss = False
FLAGS.latent_switch = False
FLAGS.combine_vocab = False
FLAGS.lr_schedule = False
FLAGS.eval_freq = 1000
FLAGS.num_epochs = 25
FLAGS.batch_size = 64
FLAGS.eval_batch_size = 16
FLAGS.data_path = '../data/'
FLAGS.log_dir = '../logs/'
FLAGS.model_dir = '../models/'
# hyperparams: model architecture and training hyperparameters
FLAGS.filter_window_size_before = 1
FLAGS.filter_window_size_after = 1
FLAGS.filter_max_tokens = 100
FLAGS.max_context_len = 203
FLAGS.max_copy_size = 203
FLAGS.embedding_size = 200
FLAGS.context_encoder_units = 768
FLAGS.answer_encoder_units = 768
FLAGS.full_context_encoding = True
FLAGS.decoder_units = 768
FLAGS.switch_units = 128
FLAGS.ctxt_encoder_depth = 1
FLAGS.ans_encoder_depth = 1
FLAGS.vocab_size = 2000
FLAGS.learning_rate = 2e-4
FLAGS.opt_type = "adam"
FLAGS.entropy_weight = 0.01
FLAGS.suppression_weight = 0.01
FLAGS.dropout_rate = 0.3
FLAGS.context_as_set = True
FLAGS.copy_priority = False
FLAGS.smart_copy = True
FLAGS.separate_copy_mech = False
FLAGS.begin_ans_feat = False
FLAGS.maxout_pointer = False
FLAGS.loc_embeddings = False
FLAGS.out_vocab_cpu = False
FLAGS.advanced_condition_encoding = False
FLAGS.disable_copy = False
FLAGS.disable_shortlist = False
FLAGS.length_penalty = 0.05
FLAGS.pg_burnin = 200
FLAGS.pg_dropout = False
FLAGS.lm_weight = 0.25
FLAGS.qa_weight = 0.5
FLAGS.bleu_weight = 0.0
FLAGS.pg_ml_weight = 1
FLAGS.disc_weight = 0.0
FLAGS.disc_train = False
# QA - MPCM hparams
FLAGS.qa_vocab_size = 20000
FLAGS.qa_encoder_units = 100
FLAGS.qa_match_units = 100
FLAGS.qa_num_epochs = 20
FLAGS.qa_batch_size = 32
FLAGS.qa_learning_rate = 1e-4
# LM hparams
FLAGS.lm_vocab_size = 20000
FLAGS.lm_units = 384
FLAGS.lm_num_epochs = 25
FLAGS.lm_dropout = 0.3
# eval params
FLAGS.beam_width = 16
# FLAGS.num_dev_samples = 4691
FLAGS.num_dev_samples = 10570
# FLAGS.num_eval_samples = 5609
FLAGS.num_eval_samples = 11877
FLAGS.eval_on_dev = True
FLAGS.eval_on_test = False
FLAGS.eval_model_id = ""
FLAGS.eval_metrics = True
FLAGS.diverse_bs = False
FLAGS.beam_groups = 1
FLAGS.beam_diversity = 0.5
| 19.852174 | 41 | 0.782742 | class FlagsObject(object):
pass
# Singleton namespace object; flag attributes are assigned ad hoc below.
FLAGS = FlagsObject()
# config: run bookkeeping (mode, checkpointing, schedule, paths)
FLAGS.testing = False
FLAGS.model_type = 'RL-S2S'
FLAGS.restore = False
FLAGS.restore_path = None
FLAGS.policy_gradient = False
FLAGS.glove_vocab = False
FLAGS.embedding_loss = False
FLAGS.latent_switch = False
FLAGS.combine_vocab = False
FLAGS.lr_schedule = False
FLAGS.eval_freq = 1000
FLAGS.num_epochs = 25
FLAGS.batch_size = 64
FLAGS.eval_batch_size = 16
FLAGS.data_path = '../data/'
FLAGS.log_dir = '../logs/'
FLAGS.model_dir = '../models/'
# hyperparams: model architecture and training hyperparameters
FLAGS.filter_window_size_before = 1
FLAGS.filter_window_size_after = 1
FLAGS.filter_max_tokens = 100
FLAGS.max_context_len = 203
FLAGS.max_copy_size = 203
FLAGS.embedding_size = 200
FLAGS.context_encoder_units = 768
FLAGS.answer_encoder_units = 768
FLAGS.full_context_encoding = True
FLAGS.decoder_units = 768
FLAGS.switch_units = 128
FLAGS.ctxt_encoder_depth = 1
FLAGS.ans_encoder_depth = 1
FLAGS.vocab_size = 2000
FLAGS.learning_rate = 2e-4
FLAGS.opt_type = "adam"
FLAGS.entropy_weight = 0.01
FLAGS.suppression_weight = 0.01
FLAGS.dropout_rate = 0.3
FLAGS.context_as_set = True
FLAGS.copy_priority = False
FLAGS.smart_copy = True
FLAGS.separate_copy_mech = False
FLAGS.begin_ans_feat = False
FLAGS.maxout_pointer = False
FLAGS.loc_embeddings = False
FLAGS.out_vocab_cpu = False
FLAGS.advanced_condition_encoding = False
FLAGS.disable_copy = False
FLAGS.disable_shortlist = False
FLAGS.length_penalty = 0.05
FLAGS.pg_burnin = 200
FLAGS.pg_dropout = False
FLAGS.lm_weight = 0.25
FLAGS.qa_weight = 0.5
FLAGS.bleu_weight = 0.0
FLAGS.pg_ml_weight = 1
FLAGS.disc_weight = 0.0
FLAGS.disc_train = False
# QA - MPCM hparams
FLAGS.qa_vocab_size = 20000
FLAGS.qa_encoder_units = 100
FLAGS.qa_match_units = 100
FLAGS.qa_num_epochs = 20
FLAGS.qa_batch_size = 32
FLAGS.qa_learning_rate = 1e-4
# LM hparams
FLAGS.lm_vocab_size = 20000
FLAGS.lm_units = 384
FLAGS.lm_num_epochs = 25
FLAGS.lm_dropout = 0.3
# eval params
FLAGS.beam_width = 16
FLAGS.num_dev_samples = 10570
FLAGS.num_eval_samples = 11877
FLAGS.eval_on_dev = True
FLAGS.eval_on_test = False
FLAGS.eval_model_id = ""
FLAGS.eval_metrics = True
FLAGS.diverse_bs = False
FLAGS.beam_groups = 1
FLAGS.beam_diversity = 0.5
| true | true |
1c4572039e05249fc64e7f7a9e3e39836024d635 | 9,745 | py | Python | theseus/geometry/so2.py | jeffin07/theseus | 3498bbddf9cca740c2703d0c1aa3a78a7264cb15 | [
"MIT"
] | 236 | 2021-12-03T15:59:29.000Z | 2022-03-30T23:18:33.000Z | theseus/geometry/so2.py | jeffin07/theseus | 3498bbddf9cca740c2703d0c1aa3a78a7264cb15 | [
"MIT"
] | 85 | 2021-12-06T07:04:11.000Z | 2022-03-31T20:29:26.000Z | theseus/geometry/so2.py | jeffin07/theseus | 3498bbddf9cca740c2703d0c1aa3a78a7264cb15 | [
"MIT"
] | 12 | 2021-12-03T22:02:44.000Z | 2022-03-20T14:58:27.000Z | # Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import List, Optional, Tuple, Union, cast
import torch
import theseus.constants
from .lie_group import LieGroup
from .point_types import Point2
class SO2(LieGroup):
    """2-D rotation group SO(2), stored per batch item as [cos(theta), sin(theta)]."""
    def __init__(
        self,
        theta: Optional[torch.Tensor] = None,
        data: Optional[torch.Tensor] = None,
        name: Optional[str] = None,
        dtype: Optional[torch.dtype] = None,
    ):
        # Exactly one of `theta` (angles) or `data` (cos/sin pairs) may be given.
        if theta is not None and data is not None:
            raise ValueError("Please provide only one of theta or data.")
        if theta is not None:
            dtype = theta.dtype
        super().__init__(data=data, name=name, dtype=dtype)
        if theta is not None:
            # Accept a 1-D angle tensor; normalize it to shape (batch, 1).
            if theta.ndim == 1:
                theta = theta.unsqueeze(1)
            if theta.ndim != 2 or theta.shape[1] != 1:
                raise ValueError(
                    "Argument theta must be have ndim = 1, or ndim=2 and shape[1] = 1."
                )
            self.update_from_angle(theta)
    # Uniform random rotations with angles in [-pi, pi).
    @staticmethod
    def rand(
        *size: int,
        generator: Optional[torch.Generator] = None,
        dtype: Optional[torch.dtype] = None,
        device: Optional[torch.device] = None,
        requires_grad: bool = False,
    ) -> "SO2":
        if len(size) != 1:
            raise ValueError("The size should be 1D.")
        return SO2.exp_map(
            2
            * theseus.constants.PI
            * torch.rand(
                size[0],
                1,
                generator=generator,
                dtype=dtype,
                device=device,
                requires_grad=requires_grad,
            )
            - theseus.constants.PI
        )
    # Normally distributed random angles (standard normal scaled by pi).
    @staticmethod
    def randn(
        *size: int,
        generator: Optional[torch.Generator] = None,
        dtype: Optional[torch.dtype] = None,
        device: Optional[torch.device] = None,
        requires_grad: bool = False,
    ) -> "SO2":
        if len(size) != 1:
            raise ValueError("The size should be 1D.")
        return SO2.exp_map(
            theseus.constants.PI
            * torch.randn(
                size[0],
                1,
                generator=generator,
                dtype=dtype,
                device=device,
                requires_grad=requires_grad,
            )
        )
    # Identity rotation: cos = 1, sin = 0.
    @staticmethod
    def _init_data() -> torch.Tensor:  # type: ignore
        return torch.tensor([1.0, 0.0]).view(1, 2)
    def update_from_angle(self, theta: torch.Tensor):
        """Set the internal data to [cos(theta), sin(theta)] columns."""
        self.update(torch.cat([theta.cos(), theta.sin()], dim=1))
    def dof(self) -> int:
        """Degrees of freedom of SO(2): a single angle."""
        return 1
    def __repr__(self) -> str:
        return f"SO2(data={self.data}, name={self.name})"
    def __str__(self) -> str:
        with torch.no_grad():
            theta = torch.atan2(self[:, 1:], self[:, 0:1])
            return f"SO2(theta={theta}, name={self.name})"
    def theta(self) -> torch.Tensor:
        """Return the rotation angle(s), shape (batch, 1)."""
        return self.log_map()
    def _adjoint_impl(self) -> torch.Tensor:
        # SO(2) is abelian, so the adjoint is the identity (batch of 1x1 ones).
        return torch.ones(self.shape[0], 1, 1, device=self.device, dtype=self.dtype)
    def _project_impl(
        self, euclidean_grad: torch.Tensor, is_sparse: bool = False
    ) -> torch.Tensor:
        """Project a Euclidean gradient onto the tangent space."""
        self._project_check(euclidean_grad, is_sparse)
        # Tangent direction at each group element: (-sin, cos).
        temp = torch.stack((-self[:, 1], self[:, 0]), dim=1)
        if is_sparse:
            return torch.einsum("i...k,i...k->i...", euclidean_grad, temp).unsqueeze(-1)
        else:
            return torch.einsum("...k,...k", euclidean_grad, temp).unsqueeze(-1)
    # Exponential map: angle -> rotation; d(exp)/d(theta) is identically 1.
    @staticmethod
    def exp_map(
        tangent_vector: torch.Tensor, jacobians: Optional[List[torch.Tensor]] = None
    ) -> "SO2":
        so2 = SO2(dtype=tangent_vector.dtype)
        so2.update_from_angle(tangent_vector)
        if jacobians is not None:
            SO2._check_jacobians_list(jacobians)
            jacobians.append(
                torch.ones(
                    tangent_vector.shape[0],
                    1,
                    1,
                    dtype=tangent_vector.dtype,
                    device=tangent_vector.device,
                )
            )
        return so2
    def _log_map_impl(
        self, jacobians: Optional[List[torch.Tensor]] = None
    ) -> torch.Tensor:
        # Log map: rotation -> angle via atan2; jacobian is identically 1.
        if jacobians is not None:
            SO2._check_jacobians_list(jacobians)
            jacobians.append(
                torch.ones(
                    self.shape[0],
                    1,
                    1,
                    dtype=self.dtype,
                    device=self.device,
                )
            )
        cosine, sine = self.to_cos_sin()
        return torch.atan2(sine, cosine).unsqueeze(1)
    def _compose_impl(self, so2_2: LieGroup) -> "SO2":
        # Angle-sum formulas for cos/sin of the composed rotation.
        so2_2 = cast(SO2, so2_2)
        cos_1, sin_1 = self.to_cos_sin()
        cos_2, sin_2 = so2_2.to_cos_sin()
        new_cos = cos_1 * cos_2 - sin_1 * sin_2
        new_sin = sin_1 * cos_2 + cos_1 * sin_2
        return SO2(data=torch.stack([new_cos, new_sin], dim=1))
    def _inverse_impl(self, get_jacobian: bool = False) -> "SO2":
        # Inverse rotation negates the angle, i.e. negates sin only.
        # NOTE(review): `get_jacobian` is accepted but unused here.
        cosine, sine = self.to_cos_sin()
        return SO2(data=torch.stack([cosine, -sine], dim=1))
    def _rotate_shape_check(self, point: Union[Point2, torch.Tensor]):
        """Validate a point argument's shape and batch broadcastability."""
        err_msg = (
            f"SO2 can only transform vectors of shape [{self.shape[0]}, 2] or [1, 2], "
            f"but the input has shape {point.shape}."
        )
        if isinstance(point, torch.Tensor):
            if not point.ndim == 2 or point.shape[1] != 2:
                raise ValueError(err_msg)
        elif point.dof() != 2:
            raise ValueError(err_msg)
        # Batch sizes must match or one of them must be 1 (broadcast).
        if (
            point.shape[0] != self.shape[0]
            and point.shape[0] != 1
            and self.shape[0] != 1
        ):
            raise ValueError(
                "Input point batch size is not broadcastable with group batch size."
            )
    # Shared kernel for rotate/unrotate: applies the 2x2 rotation given cos/sin.
    @staticmethod
    def _rotate_from_cos_sin(
        point: Union[Point2, torch.Tensor],
        cosine: torch.Tensor,
        sine: torch.Tensor,
    ) -> Point2:
        batch_size = max(point.shape[0], cosine.shape[0])
        if isinstance(point, torch.Tensor):
            if point.ndim != 2 or point.shape[1] != 2:
                raise ValueError(
                    f"Point tensor must have shape batch_size x 2, "
                    f"but received {point.shape}."
                )
            point_data = point
        else:
            point_data = point.data
        px, py = point_data[:, 0], point_data[:, 1]
        new_point_data = torch.empty(
            batch_size, 2, device=cosine.device, dtype=cosine.dtype
        )
        # Standard 2-D rotation: [cos -sin; sin cos] @ [px; py].
        new_point_data[:, 0] = cosine * px - sine * py
        new_point_data[:, 1] = sine * px + cosine * py
        return Point2(data=new_point_data)
    def rotate(
        self,
        point: Union[Point2, torch.Tensor],
        jacobians: Optional[List[torch.Tensor]] = None,
    ) -> Point2:
        """Rotate `point` by this rotation; optionally append jacobians [d/d_rot, d/d_point]."""
        self._rotate_shape_check(point)
        cosine, sine = self.to_cos_sin()
        ret = SO2._rotate_from_cos_sin(point, cosine, sine)
        if jacobians is not None:
            self._check_jacobians_list(jacobians)
            # d(R p)/d(theta) = [-y'; x'] of the rotated point.
            Jrot = torch.stack([-ret.y(), ret.x()], dim=1).view(-1, 2, 1)
            # d(R p)/d(p) = R itself, broadcast over the batch.
            Jpnt = self.to_matrix().expand(ret.shape[0], -1, -1)
            jacobians.extend([Jrot, Jpnt])
        return ret
    def unrotate(
        self,
        point: Union[Point2, torch.Tensor],
        jacobians: Optional[List[torch.Tensor]] = None,
    ) -> Point2:
        """Apply the inverse rotation to `point`; optionally append jacobians."""
        self._rotate_shape_check(point)
        cosine, sine = self.to_cos_sin()
        # Negating sin applies R(-theta).
        ret = SO2._rotate_from_cos_sin(point, cosine, -sine)
        if jacobians is not None:
            self._check_jacobians_list(jacobians)
            Jrot = torch.stack([ret.y(), -ret.x()], dim=1).view(-1, 2, 1)
            # d(R^T p)/d(p) = R^T, broadcast over the batch.
            Jpnt = self.to_matrix().transpose(2, 1).expand(ret.shape[0], -1, -1)
            jacobians.extend([Jrot, Jpnt])
        return ret
    def to_cos_sin(self) -> Tuple[torch.Tensor, torch.Tensor]:
        """Return the (cos, sin) columns of the internal data."""
        return self[:, 0], self[:, 1]
    def to_matrix(self) -> torch.Tensor:
        """Return the batch of 2x2 rotation matrices."""
        matrix = torch.empty(self.shape[0], 2, 2).to(
            device=self.device, dtype=self.dtype
        )
        cosine, sine = self.to_cos_sin()
        matrix[:, 0, 0] = cosine
        matrix[:, 0, 1] = -sine
        matrix[:, 1, 0] = sine
        matrix[:, 1, 1] = cosine
        return matrix
    # hat: angle -> 2x2 skew-symmetric matrix [[0, -t], [t, 0]].
    @staticmethod
    def hat(tangent_vector: torch.Tensor) -> torch.Tensor:
        matrix = torch.zeros(tangent_vector.shape[0], 2, 2).to(
            dtype=tangent_vector.dtype,
            device=tangent_vector.device,
        )
        matrix[:, 0, 1] = -tangent_vector.view(-1)
        matrix[:, 1, 0] = tangent_vector.view(-1)
        return matrix
    # vee: inverse of hat; validates skew-symmetry up to EPS before extracting.
    @staticmethod
    def vee(matrix: torch.Tensor) -> torch.Tensor:
        _check = matrix.ndim == 3 and matrix.shape[1:] == (2, 2)
        _check &= matrix[:, 0, 0].abs().max().item() < theseus.constants.EPS
        _check &= matrix[:, 1, 1].abs().max().item() < theseus.constants.EPS
        _check &= torch.allclose(matrix[:, 0, 1], -matrix[:, 1, 0])
        if not _check:
            raise ValueError("Invalid hat matrix for SO2.")
        return matrix[:, 1, 0].clone().view(-1, 1)
    def _copy_impl(self, new_name: Optional[str] = None) -> "SO2":
        return SO2(data=self.data.clone(), name=new_name)
    # only added to avoid casting downstream
    def copy(self, new_name: Optional[str] = None) -> "SO2":
        return cast(SO2, super().copy(new_name=new_name))
# Module-level convenience aliases for the random constructors.
rand_so2 = SO2.rand
randn_so2 = SO2.randn
| 33.719723 | 88 | 0.548794 |
from typing import List, Optional, Tuple, Union, cast
import torch
import theseus.constants
from .lie_group import LieGroup
from .point_types import Point2
class SO2(LieGroup):
def __init__(
self,
theta: Optional[torch.Tensor] = None,
data: Optional[torch.Tensor] = None,
name: Optional[str] = None,
dtype: Optional[torch.dtype] = None,
):
if theta is not None and data is not None:
raise ValueError("Please provide only one of theta or data.")
if theta is not None:
dtype = theta.dtype
super().__init__(data=data, name=name, dtype=dtype)
if theta is not None:
if theta.ndim == 1:
theta = theta.unsqueeze(1)
if theta.ndim != 2 or theta.shape[1] != 1:
raise ValueError(
"Argument theta must be have ndim = 1, or ndim=2 and shape[1] = 1."
)
self.update_from_angle(theta)
@staticmethod
def rand(
*size: int,
generator: Optional[torch.Generator] = None,
dtype: Optional[torch.dtype] = None,
device: Optional[torch.device] = None,
requires_grad: bool = False,
) -> "SO2":
if len(size) != 1:
raise ValueError("The size should be 1D.")
return SO2.exp_map(
2
* theseus.constants.PI
* torch.rand(
size[0],
1,
generator=generator,
dtype=dtype,
device=device,
requires_grad=requires_grad,
)
- theseus.constants.PI
)
@staticmethod
def randn(
*size: int,
generator: Optional[torch.Generator] = None,
dtype: Optional[torch.dtype] = None,
device: Optional[torch.device] = None,
requires_grad: bool = False,
) -> "SO2":
if len(size) != 1:
raise ValueError("The size should be 1D.")
return SO2.exp_map(
theseus.constants.PI
* torch.randn(
size[0],
1,
generator=generator,
dtype=dtype,
device=device,
requires_grad=requires_grad,
)
)
@staticmethod
def _init_data() -> torch.Tensor:
return torch.tensor([1.0, 0.0]).view(1, 2)
def update_from_angle(self, theta: torch.Tensor):
self.update(torch.cat([theta.cos(), theta.sin()], dim=1))
def dof(self) -> int:
return 1
def __repr__(self) -> str:
return f"SO2(data={self.data}, name={self.name})"
def __str__(self) -> str:
with torch.no_grad():
theta = torch.atan2(self[:, 1:], self[:, 0:1])
return f"SO2(theta={theta}, name={self.name})"
def theta(self) -> torch.Tensor:
return self.log_map()
def _adjoint_impl(self) -> torch.Tensor:
return torch.ones(self.shape[0], 1, 1, device=self.device, dtype=self.dtype)
def _project_impl(
self, euclidean_grad: torch.Tensor, is_sparse: bool = False
) -> torch.Tensor:
self._project_check(euclidean_grad, is_sparse)
temp = torch.stack((-self[:, 1], self[:, 0]), dim=1)
if is_sparse:
return torch.einsum("i...k,i...k->i...", euclidean_grad, temp).unsqueeze(-1)
else:
return torch.einsum("...k,...k", euclidean_grad, temp).unsqueeze(-1)
@staticmethod
def exp_map(
tangent_vector: torch.Tensor, jacobians: Optional[List[torch.Tensor]] = None
) -> "SO2":
so2 = SO2(dtype=tangent_vector.dtype)
so2.update_from_angle(tangent_vector)
if jacobians is not None:
SO2._check_jacobians_list(jacobians)
jacobians.append(
torch.ones(
tangent_vector.shape[0],
1,
1,
dtype=tangent_vector.dtype,
device=tangent_vector.device,
)
)
return so2
def _log_map_impl(
self, jacobians: Optional[List[torch.Tensor]] = None
) -> torch.Tensor:
if jacobians is not None:
SO2._check_jacobians_list(jacobians)
jacobians.append(
torch.ones(
self.shape[0],
1,
1,
dtype=self.dtype,
device=self.device,
)
)
cosine, sine = self.to_cos_sin()
return torch.atan2(sine, cosine).unsqueeze(1)
def _compose_impl(self, so2_2: LieGroup) -> "SO2":
so2_2 = cast(SO2, so2_2)
cos_1, sin_1 = self.to_cos_sin()
cos_2, sin_2 = so2_2.to_cos_sin()
new_cos = cos_1 * cos_2 - sin_1 * sin_2
new_sin = sin_1 * cos_2 + cos_1 * sin_2
return SO2(data=torch.stack([new_cos, new_sin], dim=1))
def _inverse_impl(self, get_jacobian: bool = False) -> "SO2":
cosine, sine = self.to_cos_sin()
return SO2(data=torch.stack([cosine, -sine], dim=1))
def _rotate_shape_check(self, point: Union[Point2, torch.Tensor]):
    # Validate that `point` is a batch of 2-D points whose batch size is
    # broadcastable (either side may be 1) with this batch of rotations.
    err_msg = (
        f"SO2 can only transform vectors of shape [{self.shape[0]}, 2] or [1, 2], "
        f"but the input has shape {point.shape}."
    )
    if isinstance(point, torch.Tensor):
        if not point.ndim == 2 or point.shape[1] != 2:
            raise ValueError(err_msg)
    elif point.dof() != 2:
        # Non-tensor inputs are expected to be Point2 variables (dof == 2).
        raise ValueError(err_msg)
    if (
        point.shape[0] != self.shape[0]
        and point.shape[0] != 1
        and self.shape[0] != 1
    ):
        raise ValueError(
            "Input point batch size is not broadcastable with group batch size."
        )
@staticmethod
def _rotate_from_cos_sin(
    point: Union[Point2, torch.Tensor],
    cosine: torch.Tensor,
    sine: torch.Tensor,
) -> Point2:
    # Apply the standard 2-D rotation to `point` given cos/sin columns;
    # a batch of size 1 broadcasts against the larger batch.
    batch_size = max(point.shape[0], cosine.shape[0])
    if isinstance(point, torch.Tensor):
        if point.ndim != 2 or point.shape[1] != 2:
            raise ValueError(
                f"Point tensor must have shape batch_size x 2, "
                f"but received {point.shape}."
            )
        point_data = point
    else:
        point_data = point.data
    px, py = point_data[:, 0], point_data[:, 1]
    new_point_data = torch.empty(
        batch_size, 2, device=cosine.device, dtype=cosine.dtype
    )
    # (x', y') = (c*x - s*y, s*x + c*y)
    new_point_data[:, 0] = cosine * px - sine * py
    new_point_data[:, 1] = sine * px + cosine * py
    return Point2(data=new_point_data)
def rotate(
    self,
    point: Union[Point2, torch.Tensor],
    jacobians: Optional[List[torch.Tensor]] = None,
) -> Point2:
    """Rotate `point` by this rotation, optionally recording Jacobians."""
    self._rotate_shape_check(point)
    cosine, sine = self.to_cos_sin()
    ret = SO2._rotate_from_cos_sin(point, cosine, sine)
    if jacobians is not None:
        self._check_jacobians_list(jacobians)
        # d(R p)/d(theta) = (-y', x') with (x', y') the rotated point.
        Jrot = torch.stack([-ret.y(), ret.x()], dim=1).view(-1, 2, 1)
        # d(R p)/d(p) = R.
        Jpnt = self.to_matrix().expand(ret.shape[0], -1, -1)
        jacobians.extend([Jrot, Jpnt])
    return ret
def unrotate(
    self,
    point: Union[Point2, torch.Tensor],
    jacobians: Optional[List[torch.Tensor]] = None,
) -> Point2:
    """Apply the inverse rotation to `point` (rotate by -theta)."""
    self._rotate_shape_check(point)
    cosine, sine = self.to_cos_sin()
    # Negating the sine rotates by the negative angle.
    ret = SO2._rotate_from_cos_sin(point, cosine, -sine)
    if jacobians is not None:
        self._check_jacobians_list(jacobians)
        Jrot = torch.stack([ret.y(), -ret.x()], dim=1).view(-1, 2, 1)
        # d(R^T p)/d(p) = R^T.
        Jpnt = self.to_matrix().transpose(2, 1).expand(ret.shape[0], -1, -1)
        jacobians.extend([Jrot, Jpnt])
    return ret
def to_cos_sin(self) -> Tuple[torch.Tensor, torch.Tensor]:
    """Return the stored parameterization as a (cosine, sine) pair."""
    cosine = self[:, 0]
    sine = self[:, 1]
    return cosine, sine
def to_matrix(self) -> torch.Tensor:
    """Return the batch of 2x2 rotation matrices [[c, -s], [s, c]]."""
    cosine, sine = self.to_cos_sin()
    rotation = torch.empty(self.shape[0], 2, 2).to(
        device=self.device, dtype=self.dtype
    )
    # Fill the four entries of each rotation matrix in place.
    rotation[:, 0, 0] = cosine
    rotation[:, 0, 1] = -sine
    rotation[:, 1, 0] = sine
    rotation[:, 1, 1] = cosine
    return rotation
@staticmethod
def hat(tangent_vector: torch.Tensor) -> torch.Tensor:
    """Map angles to 2x2 skew-symmetric matrices [[0, -t], [t, 0]]."""
    matrix = torch.zeros(tangent_vector.shape[0], 2, 2).to(
        dtype=tangent_vector.dtype,
        device=tangent_vector.device,
    )
    matrix[:, 0, 1] = -tangent_vector.view(-1)
    matrix[:, 1, 0] = tangent_vector.view(-1)
    return matrix
@staticmethod
def vee(matrix: torch.Tensor) -> torch.Tensor:
    """Inverse of hat(): extract angles from 2x2 skew-symmetric matrices."""
    # A valid hat matrix has (near-)zero diagonal and anti-symmetric
    # off-diagonal entries.
    _check = matrix.ndim == 3 and matrix.shape[1:] == (2, 2)
    _check &= matrix[:, 0, 0].abs().max().item() < theseus.constants.EPS
    _check &= matrix[:, 1, 1].abs().max().item() < theseus.constants.EPS
    _check &= torch.allclose(matrix[:, 0, 1], -matrix[:, 1, 0])
    if not _check:
        raise ValueError("Invalid hat matrix for SO2.")
    return matrix[:, 1, 0].clone().view(-1, 1)
def _copy_impl(self, new_name: Optional[str] = None) -> "SO2":
    # Deep-copy the underlying data tensor into a new SO2 instance.
    return SO2(data=self.data.clone(), name=new_name)
def copy(self, new_name: Optional[str] = None) -> "SO2":
    # Narrow the base-class return type to SO2 for callers.
    return cast(SO2, super().copy(new_name=new_name))
# Module-level convenience aliases for random SO2 construction.
rand_so2 = SO2.rand
randn_so2 = SO2.randn
| true | true |
1c457240e1e5b43e46789912d1b54ae8e79edea8 | 1,095 | py | Python | clients/python-blueplanet/generated/app/openapi_server/test/test_pal_park_area_controller.py | cliffano/pokeapi-clients | 92af296c68c3e94afac52642ae22057faaf071ee | [
"MIT"
] | null | null | null | clients/python-blueplanet/generated/app/openapi_server/test/test_pal_park_area_controller.py | cliffano/pokeapi-clients | 92af296c68c3e94afac52642ae22057faaf071ee | [
"MIT"
] | null | null | null | clients/python-blueplanet/generated/app/openapi_server/test/test_pal_park_area_controller.py | cliffano/pokeapi-clients | 92af296c68c3e94afac52642ae22057faaf071ee | [
"MIT"
] | null | null | null | # coding: utf-8
from __future__ import absolute_import
from flask import json
from six import BytesIO
from openapi_server.test import BaseTestCase
class TestPalParkAreaController(BaseTestCase):
    """Integration test stubs for the PalParkArea controller."""

    def test_pal_park_area_list(self):
        """GET the pal-park-area list endpoint with paging parameters."""
        params = [('limit', 56),
                  ('offset', 56)]
        response = self.client.open(
            '/api/v2/pal-park-area/',
            method='GET',
            query_string=params)
        self.assert200(response,
                       'Response body is : ' + response.data.decode('utf-8'))

    def test_pal_park_area_read(self):
        """GET a single pal-park-area resource by id."""
        response = self.client.open(
            '/api/v2/pal-park-area/{id}'.format(id=56),
            method='GET')
        self.assert200(response,
                       'Response body is : ' + response.data.decode('utf-8'))
if __name__ == '__main__':
    # Allow executing this test module directly with the stdlib runner.
    import unittest
    unittest.main()
| 25.465116 | 77 | 0.584475 |
from __future__ import absolute_import
from flask import json
from six import BytesIO
from openapi_server.test import BaseTestCase
class TestPalParkAreaController(BaseTestCase):
    """PalParkAreaController integration test stubs."""
    def test_pal_park_area_list(self):
        """Exercise GET /api/v2/pal-park-area/ with limit/offset params."""
        query_string = [('limit', 56),
                        ('offset', 56)]
        response = self.client.open(
            '/api/v2/pal-park-area/',
            method='GET',
            query_string=query_string)
        self.assert200(response,
                       'Response body is : ' + response.data.decode('utf-8'))
    def test_pal_park_area_read(self):
        """Exercise GET /api/v2/pal-park-area/{id} for a single resource."""
        response = self.client.open(
            '/api/v2/pal-park-area/{id}'.format(id=56),
            method='GET')
        self.assert200(response,
                       'Response body is : ' + response.data.decode('utf-8'))
if __name__ == '__main__':
    # Allow executing this test module directly with the stdlib runner.
    import unittest
    unittest.main()
| true | true |
1c4572b3f0877f03e9b3d4dda6b4a194a5df981e | 8,142 | py | Python | src/onelogin/saml2/logout_response.py | lmegviar/python-saml | 1d15da53470414f4c35e97730be6a2836d9244c5 | [
"BSD-3-Clause"
] | null | null | null | src/onelogin/saml2/logout_response.py | lmegviar/python-saml | 1d15da53470414f4c35e97730be6a2836d9244c5 | [
"BSD-3-Clause"
] | null | null | null | src/onelogin/saml2/logout_response.py | lmegviar/python-saml | 1d15da53470414f4c35e97730be6a2836d9244c5 | [
"BSD-3-Clause"
] | 1 | 2019-06-26T18:50:14.000Z | 2019-06-26T18:50:14.000Z | # -*- coding: utf-8 -*-
""" OneLogin_Saml2_Logout_Response class
Copyright (c) 2014, OneLogin, Inc.
All rights reserved.
Logout Response class of OneLogin's Python Toolkit.
"""
from base64 import b64decode
from defusedxml.lxml import fromstring
from urllib import quote_plus
from xml.dom.minidom import Document
from defusedxml.minidom import parseString
from onelogin.saml2.constants import OneLogin_Saml2_Constants
from onelogin.saml2.utils import OneLogin_Saml2_Utils
class OneLogin_Saml2_Logout_Response(object):
"""
This class handles a Logout Response. It Builds or parses a Logout Response object
and validates it.
"""
def __init__(self, settings, response=None):
"""
Constructs a Logout Response object (Initialize params from settings
and if provided load the Logout Response.
Arguments are:
* (OneLogin_Saml2_Settings) settings. Setting data
* (string) response. An UUEncoded SAML Logout
response from the IdP.
"""
self.__settings = settings
self.__error = None
if response is not None:
self.__logout_response = OneLogin_Saml2_Utils.decode_base64_and_inflate(response)
self.document = parseString(self.__logout_response)
def get_issuer(self):
"""
Gets the Issuer of the Logout Response Message
:return: The Issuer
:rtype: string
"""
issuer = None
issuer_nodes = self.__query('/samlp:LogoutResponse/saml:Issuer')
if len(issuer_nodes) == 1:
issuer = issuer_nodes[0].text
return issuer
def get_status(self):
"""
Gets the Status
:return: The Status
:rtype: string
"""
entries = self.__query('/samlp:LogoutResponse/samlp:Status/samlp:StatusCode')
if len(entries) == 0:
return None
status = entries[0].attrib['Value']
return status
def is_valid(self, request_data, request_id=None):
"""
Determines if the SAML LogoutResponse is valid
:param request_id: The ID of the LogoutRequest sent by this SP to the IdP
:type request_id: string
:return: Returns if the SAML LogoutResponse is or not valid
:rtype: boolean
"""
self.__error = None
try:
idp_data = self.__settings.get_idp_data()
idp_entity_id = idp_data['entityId']
get_data = request_data['get_data']
if self.__settings.is_strict():
res = OneLogin_Saml2_Utils.validate_xml(self.document, 'saml-schema-protocol-2.0.xsd', self.__settings.is_debug_active())
if not isinstance(res, Document):
raise Exception('Invalid SAML Logout Request. Not match the saml-schema-protocol-2.0.xsd')
security = self.__settings.get_security_data()
# Check if the InResponseTo of the Logout Response matchs the ID of the Logout Request (requestId) if provided
if request_id is not None and self.document.documentElement.hasAttribute('InResponseTo'):
in_response_to = self.document.documentElement.getAttribute('InResponseTo')
if request_id != in_response_to:
raise Exception('The InResponseTo of the Logout Response: %s, does not match the ID of the Logout request sent by the SP: %s' % (in_response_to, request_id))
# Check issuer
issuer = self.get_issuer()
if issuer is not None and issuer != idp_entity_id:
raise Exception('Invalid issuer in the Logout Request')
current_url = OneLogin_Saml2_Utils.get_self_url_no_query(request_data)
# Check destination
if self.document.documentElement.hasAttribute('Destination'):
destination = self.document.documentElement.getAttribute('Destination')
if destination != '':
if current_url not in destination:
raise Exception('The LogoutRequest was received at $currentURL instead of $destination')
if security['wantMessagesSigned']:
if 'Signature' not in get_data:
raise Exception('The Message of the Logout Response is not signed and the SP require it')
if 'Signature' in get_data:
if 'SigAlg' not in get_data:
sign_alg = OneLogin_Saml2_Constants.RSA_SHA1
else:
sign_alg = get_data['SigAlg']
if sign_alg != OneLogin_Saml2_Constants.RSA_SHA1:
raise Exception('Invalid signAlg in the recieved Logout Response')
signed_query = 'SAMLResponse=%s' % quote_plus(get_data['SAMLResponse'])
if 'RelayState' in get_data:
signed_query = '%s&RelayState=%s' % (signed_query, quote_plus(get_data['RelayState']))
signed_query = '%s&SigAlg=%s' % (signed_query, quote_plus(sign_alg))
if 'x509cert' not in idp_data or idp_data['x509cert'] is None:
raise Exception('In order to validate the sign on the Logout Response, the x509cert of the IdP is required')
cert = idp_data['x509cert']
if not OneLogin_Saml2_Utils.validate_binary_sign(signed_query, b64decode(get_data['Signature']), cert):
raise Exception('Signature validation failed. Logout Response rejected')
return True
# pylint: disable=R0801
except Exception as err:
self.__error = err.__str__()
debug = self.__settings.is_debug_active()
if debug:
print err.__str__()
return False
def __query(self, query):
"""
Extracts a node from the DOMDocument (Logout Response Menssage)
:param query: Xpath Expresion
:type query: string
:return: The queried node
:rtype: DOMNodeList
"""
# Switch to lxml for querying
xml = self.document.toxml()
return OneLogin_Saml2_Utils.query(fromstring(xml), query)
def build(self, in_response_to):
"""
Creates a Logout Response object.
:param in_response_to: InResponseTo value for the Logout Response.
:type in_response_to: string
"""
sp_data = self.__settings.get_sp_data()
idp_data = self.__settings.get_idp_data()
uid = OneLogin_Saml2_Utils.generate_unique_id()
issue_instant = OneLogin_Saml2_Utils.parse_time_to_SAML(OneLogin_Saml2_Utils.now())
logout_response = """<samlp:LogoutResponse xmlns:samlp="urn:oasis:names:tc:SAML:2.0:protocol"
xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion"
ID="%(id)s"
Version="2.0"
IssueInstant="%(issue_instant)s"
Destination="%(destination)s"
InResponseTo="%(in_response_to)s"
>
<saml:Issuer>%(entity_id)s</saml:Issuer>
<samlp:Status>
<samlp:StatusCode Value="urn:oasis:names:tc:SAML:2.0:status:Success" />
</samlp:Status>
</samlp:LogoutResponse>""" % \
{
'id': uid,
'issue_instant': issue_instant,
'destination': idp_data['singleLogoutService']['url'],
'in_response_to': in_response_to,
'entity_id': sp_data['entityId'],
}
self.__logout_response = logout_response
def get_response(self):
"""
Returns a Logout Response object.
:return: Logout Response deflated and base64 encoded
:rtype: string
"""
return OneLogin_Saml2_Utils.deflate_and_base64_encode(self.__logout_response)
def get_error(self):
"""
After execute a validation process, if fails this method returns the cause
"""
return self.__error
| 39.333333 | 181 | 0.607836 |
""" OneLogin_Saml2_Logout_Response class
Copyright (c) 2014, OneLogin, Inc.
All rights reserved.
Logout Response class of OneLogin's Python Toolkit.
"""
from base64 import b64decode
from defusedxml.lxml import fromstring
from urllib import quote_plus
from xml.dom.minidom import Document
from defusedxml.minidom import parseString
from onelogin.saml2.constants import OneLogin_Saml2_Constants
from onelogin.saml2.utils import OneLogin_Saml2_Utils
class OneLogin_Saml2_Logout_Response(object):
"""
This class handles a Logout Response. It Builds or parses a Logout Response object
and validates it.
"""
def __init__(self, settings, response=None):
"""
Constructs a Logout Response object (Initialize params from settings
and if provided load the Logout Response.
Arguments are:
* (OneLogin_Saml2_Settings) settings. Setting data
* (string) response. An UUEncoded SAML Logout
response from the IdP.
"""
self.__settings = settings
self.__error = None
if response is not None:
self.__logout_response = OneLogin_Saml2_Utils.decode_base64_and_inflate(response)
self.document = parseString(self.__logout_response)
def get_issuer(self):
"""
Gets the Issuer of the Logout Response Message
:return: The Issuer
:rtype: string
"""
issuer = None
issuer_nodes = self.__query('/samlp:LogoutResponse/saml:Issuer')
if len(issuer_nodes) == 1:
issuer = issuer_nodes[0].text
return issuer
def get_status(self):
"""
Gets the Status
:return: The Status
:rtype: string
"""
entries = self.__query('/samlp:LogoutResponse/samlp:Status/samlp:StatusCode')
if len(entries) == 0:
return None
status = entries[0].attrib['Value']
return status
def is_valid(self, request_data, request_id=None):
"""
Determines if the SAML LogoutResponse is valid
:param request_id: The ID of the LogoutRequest sent by this SP to the IdP
:type request_id: string
:return: Returns if the SAML LogoutResponse is or not valid
:rtype: boolean
"""
self.__error = None
try:
idp_data = self.__settings.get_idp_data()
idp_entity_id = idp_data['entityId']
get_data = request_data['get_data']
if self.__settings.is_strict():
res = OneLogin_Saml2_Utils.validate_xml(self.document, 'saml-schema-protocol-2.0.xsd', self.__settings.is_debug_active())
if not isinstance(res, Document):
raise Exception('Invalid SAML Logout Request. Not match the saml-schema-protocol-2.0.xsd')
security = self.__settings.get_security_data()
# Check if the InResponseTo of the Logout Response matchs the ID of the Logout Request (requestId) if provided
if request_id is not None and self.document.documentElement.hasAttribute('InResponseTo'):
in_response_to = self.document.documentElement.getAttribute('InResponseTo')
if request_id != in_response_to:
raise Exception('The InResponseTo of the Logout Response: %s, does not match the ID of the Logout request sent by the SP: %s' % (in_response_to, request_id))
# Check issuer
issuer = self.get_issuer()
if issuer is not None and issuer != idp_entity_id:
raise Exception('Invalid issuer in the Logout Request')
current_url = OneLogin_Saml2_Utils.get_self_url_no_query(request_data)
# Check destination
if self.document.documentElement.hasAttribute('Destination'):
destination = self.document.documentElement.getAttribute('Destination')
if destination != '':
if current_url not in destination:
raise Exception('The LogoutRequest was received at $currentURL instead of $destination')
if security['wantMessagesSigned']:
if 'Signature' not in get_data:
raise Exception('The Message of the Logout Response is not signed and the SP require it')
if 'Signature' in get_data:
if 'SigAlg' not in get_data:
sign_alg = OneLogin_Saml2_Constants.RSA_SHA1
else:
sign_alg = get_data['SigAlg']
if sign_alg != OneLogin_Saml2_Constants.RSA_SHA1:
raise Exception('Invalid signAlg in the recieved Logout Response')
signed_query = 'SAMLResponse=%s' % quote_plus(get_data['SAMLResponse'])
if 'RelayState' in get_data:
signed_query = '%s&RelayState=%s' % (signed_query, quote_plus(get_data['RelayState']))
signed_query = '%s&SigAlg=%s' % (signed_query, quote_plus(sign_alg))
if 'x509cert' not in idp_data or idp_data['x509cert'] is None:
raise Exception('In order to validate the sign on the Logout Response, the x509cert of the IdP is required')
cert = idp_data['x509cert']
if not OneLogin_Saml2_Utils.validate_binary_sign(signed_query, b64decode(get_data['Signature']), cert):
raise Exception('Signature validation failed. Logout Response rejected')
return True
# pylint: disable=R0801
except Exception as err:
self.__error = err.__str__()
debug = self.__settings.is_debug_active()
if debug:
print err.__str__()
return False
def __query(self, query):
"""
Extracts a node from the DOMDocument (Logout Response Menssage)
:param query: Xpath Expresion
:type query: string
:return: The queried node
:rtype: DOMNodeList
"""
# Switch to lxml for querying
xml = self.document.toxml()
return OneLogin_Saml2_Utils.query(fromstring(xml), query)
def build(self, in_response_to):
"""
Creates a Logout Response object.
:param in_response_to: InResponseTo value for the Logout Response.
:type in_response_to: string
"""
sp_data = self.__settings.get_sp_data()
idp_data = self.__settings.get_idp_data()
uid = OneLogin_Saml2_Utils.generate_unique_id()
issue_instant = OneLogin_Saml2_Utils.parse_time_to_SAML(OneLogin_Saml2_Utils.now())
logout_response = """<samlp:LogoutResponse xmlns:samlp="urn:oasis:names:tc:SAML:2.0:protocol"
xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion"
ID="%(id)s"
Version="2.0"
IssueInstant="%(issue_instant)s"
Destination="%(destination)s"
InResponseTo="%(in_response_to)s"
>
<saml:Issuer>%(entity_id)s</saml:Issuer>
<samlp:Status>
<samlp:StatusCode Value="urn:oasis:names:tc:SAML:2.0:status:Success" />
</samlp:Status>
</samlp:LogoutResponse>""" % \
{
'id': uid,
'issue_instant': issue_instant,
'destination': idp_data['singleLogoutService']['url'],
'in_response_to': in_response_to,
'entity_id': sp_data['entityId'],
}
self.__logout_response = logout_response
def get_response(self):
"""
Returns a Logout Response object.
:return: Logout Response deflated and base64 encoded
:rtype: string
"""
return OneLogin_Saml2_Utils.deflate_and_base64_encode(self.__logout_response)
def get_error(self):
"""
After execute a validation process, if fails this method returns the cause
"""
return self.__error
| false | true |
1c45730c64130865dfab642c09670520f0db3188 | 5,508 | py | Python | gammagl/layers/conv/hetero_wrapper.py | BUPT-GAMMA/GammaGL | 2b9f32e1ac3533cb75a063243e8a2fa654466d18 | [
"Apache-2.0"
] | null | null | null | gammagl/layers/conv/hetero_wrapper.py | BUPT-GAMMA/GammaGL | 2b9f32e1ac3533cb75a063243e8a2fa654466d18 | [
"Apache-2.0"
] | null | null | null | gammagl/layers/conv/hetero_wrapper.py | BUPT-GAMMA/GammaGL | 2b9f32e1ac3533cb75a063243e8a2fa654466d18 | [
"Apache-2.0"
] | null | null | null | import tensorlayerx as tlx
import gammagl.mpops as mpops
import warnings
from collections import defaultdict
from typing import Dict, Optional
def group(xs, aggr):
    """Merge a list of per-relation outputs into a single tensor via `aggr`."""
    if not xs:
        return None
    if aggr is None:
        # No reduction requested: keep one slot per relation.
        return tlx.stack(xs, axis=1)
    if len(xs) == 1:
        return xs[0]
    stacked = tlx.stack(xs, axis=0)
    # NOTE(review): tlx reduce_* functions usually take `axis=`; confirm
    # that `dim=0` is accepted by the targeted tensorlayerx version.
    reduced = getattr(tlx, 'reduce_'+aggr)(stacked, dim=0)
    return reduced[0] if isinstance(reduced, tuple) else reduced
class HeteroConv(tlx.nn.Module):
    r"""A generic wrapper for computing graph convolution on heterogeneous
    graphs.
    This layer will pass messages from source nodes to target nodes based on
    the bipartite GNN layer given for a specific edge type.
    If multiple relations point to the same destination, their results will be
    aggregated according to :attr:`aggr`.

    .. code:: python

        >>> hetero_conv = HeteroConv({
            ('paper', 'cites', 'paper'): GCNConv(64, 16),
            ('author', 'writes', 'paper'): SAGEConv((128, 64), 64),
            ('paper', 'written_by', 'author'): GATConv((64, 128), 64),
        }, aggr='sum')
        >>> out_dict = hetero_conv(x_dict, edge_index_dict)
        >>> print(list(out_dict.keys()))
        ['paper', 'author']

    Parameters
    ----------
    convs: Dict[Tuple[str, str, str], Module]
        A dictionary holding a bipartite message-passing layer for each
        individual edge type.
    aggr: string, optional
        The aggregation scheme to use for grouping node embeddings
        generated by different relations.
        (:obj:`"sum"`, :obj:`"mean"`, :obj:`"min"`, :obj:`"max"`,
        :obj:`None`). (default: :obj:`"sum"`)

    """
    def __init__(self, convs: dict,
                 aggr: Optional[str] = "sum"):
        super().__init__()

        src_node_types = set([key[0] for key in convs.keys()])
        dst_node_types = set([key[-1] for key in convs.keys()])
        if len(src_node_types - dst_node_types) > 0:
            warnings.warn(
                f"There exist node types ({src_node_types - dst_node_types}) "
                f"whose representations do not get updated during message "
                f"passing as they do not occur as destination type in any "
                f"edge type. This may lead to unexpected behaviour.")

        # FIX: `ModuleDict` was referenced without being imported (NameError
        # at construction time); use the one provided by tensorlayerx.
        self.convs = tlx.nn.ModuleDict({'__'.join(k): v for k, v in convs.items()})
        self.aggr = aggr

    def reset_parameters(self):
        # Delegate to each per-relation convolution.
        for conv in self.convs.values():
            conv.reset_parameters()

    def forward(
        self,
        x_dict,
        edge_index_dict,
        *args_dict,
        **kwargs_dict,
    ):
        r"""
        Parameters
        ----------
        x_dict: Dict[str, Tensor]
            A dictionary holding node feature
            information for each individual node type.
        edge_index_dict: Dict[Tuple[str, str, str], Tensor]
            A dictionary
            holding graph connectivity information for each individual
            edge type.
        *args_dict: optional
            Additional forward arguments of individual message-passing
            layers, keyed by edge type (or node type).
        **kwargs_dict: optional
            Additional forward arguments of individual message-passing
            layers.
            For example, if a specific GNN layer at edge type
            :obj:`edge_type` expects edge attributes :obj:`edge_attr` as a
            forward argument, then you can pass them via
            :obj:`edge_attr_dict = { edge_type: edge_attr }`.
        """
        out_dict = defaultdict(list)
        for edge_type, edge_index in edge_index_dict.items():
            src, rel, dst = edge_type
            str_edge_type = '__'.join(edge_type)
            if str_edge_type not in self.convs:
                continue

            # Collect positional args for this edge type: exact edge-type
            # match, homogeneous node-type match, or a (src, dst) pair.
            args = []
            for value_dict in args_dict:
                if edge_type in value_dict:
                    args.append(value_dict[edge_type])
                elif src == dst and src in value_dict:
                    args.append(value_dict[src])
                elif src in value_dict or dst in value_dict:
                    args.append(
                        (value_dict.get(src, None), value_dict.get(dst, None)))

            kwargs = {}
            for arg, value_dict in kwargs_dict.items():
                arg = arg[:-5]  # strip the `_dict` suffix
                if edge_type in value_dict:
                    kwargs[arg] = value_dict[edge_type]
                elif src == dst and src in value_dict:
                    kwargs[arg] = value_dict[src]
                elif src in value_dict or dst in value_dict:
                    kwargs[arg] = (value_dict.get(src, None),
                                   value_dict.get(dst, None))

            conv = self.convs[str_edge_type]
            if src == dst:
                out = conv(x_dict[src], edge_index, *args, **kwargs)
            else:
                out = conv((x_dict[src], x_dict[dst]), edge_index, *args,
                           **kwargs)
            out_dict[dst].append(out)

        # Aggregate per destination node type.
        for key, value in out_dict.items():
            out_dict[key] = group(value, self.aggr)
        return out_dict

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}(num_relations={len(self.convs)})'
import gammagl.mpops as mpops
import warnings
from collections import defaultdict
from typing import Dict, Optional
def group(xs, aggr):
    """Merge a list of per-relation outputs into a single tensor via `aggr`."""
    if len(xs) == 0:
        return None
    elif aggr is None:
        # No reduction requested: keep one slot per relation.
        return tlx.stack(xs, axis=1)
    elif len(xs) == 1:
        return xs[0]
    else:
        out = tlx.stack(xs, axis=0)
        # NOTE(review): tlx reduce_* functions usually take `axis=`; confirm
        # that `dim=0` is accepted by the targeted tensorlayerx version.
        out = getattr(tlx, 'reduce_'+aggr)(out, dim=0)
        out = out[0] if isinstance(out, tuple) else out
        return out
class HeteroConv(tlx.nn.Module):
    r"""Wrapper computing graph convolution on heterogeneous graphs: one
    bipartite GNN layer per edge type, with per-destination aggregation
    controlled by :obj:`aggr` ("sum", "mean", "min", "max" or None).
    """
    def __init__(self, convs: dict,
                 aggr: Optional[str] = "sum"):
        super().__init__()
        src_node_types = set([key[0] for key in convs.keys()])
        dst_node_types = set([key[-1] for key in convs.keys()])
        if len(src_node_types - dst_node_types) > 0:
            warnings.warn(
                f"There exist node types ({src_node_types - dst_node_types}) "
                f"whose representations do not get updated during message "
                f"passing as they do not occur as destination type in any "
                f"edge type. This may lead to unexpected behaviour.")
        # FIX: `ModuleDict` was an unresolved name (never imported); use
        # the container provided by tensorlayerx.
        self.convs = tlx.nn.ModuleDict({'__'.join(k): v for k, v in convs.items()})
        self.aggr = aggr
    def reset_parameters(self):
        for conv in self.convs.values():
            conv.reset_parameters()
    def forward(
        self,
        x_dict,
        edge_index_dict,
        *args_dict,
        **kwargs_dict,
    ):
        """Run the per-edge-type convolutions, then aggregate per destination."""
        out_dict = defaultdict(list)
        for edge_type, edge_index in edge_index_dict.items():
            src, rel, dst = edge_type
            str_edge_type = '__'.join(edge_type)
            if str_edge_type not in self.convs:
                continue
            args = []
            for value_dict in args_dict:
                if edge_type in value_dict:
                    args.append(value_dict[edge_type])
                elif src == dst and src in value_dict:
                    args.append(value_dict[src])
                elif src in value_dict or dst in value_dict:
                    args.append(
                        (value_dict.get(src, None), value_dict.get(dst, None)))
            kwargs = {}
            for arg, value_dict in kwargs_dict.items():
                arg = arg[:-5]  # strip the `_dict` suffix
                if edge_type in value_dict:
                    kwargs[arg] = value_dict[edge_type]
                elif src == dst and src in value_dict:
                    kwargs[arg] = value_dict[src]
                elif src in value_dict or dst in value_dict:
                    kwargs[arg] = (value_dict.get(src, None),
                                   value_dict.get(dst, None))
            conv = self.convs[str_edge_type]
            if src == dst:
                out = conv(x_dict[src], edge_index, *args, **kwargs)
            else:
                out = conv((x_dict[src], x_dict[dst]), edge_index, *args,
                           **kwargs)
            out_dict[dst].append(out)
        for key, value in out_dict.items():
            out_dict[key] = group(value, self.aggr)
        return out_dict
    def __repr__(self) -> str:
        return f'{self.__class__.__name__}(num_relations={len(self.convs)})'
1c45736df60cd6ec7b8c468f4bdacbfcf5d93fef | 10,202 | py | Python | 9_export_fbx_bitbucket_stable/fbx_deformer.py | makehumancommunity/community-plugins-fbx | 1f78be1723063cee0ae83232587431ab0f2e9894 | [
"MIT"
] | 1 | 2020-04-17T16:32:06.000Z | 2020-04-17T16:32:06.000Z | 9_export_fbx_bitbucket_stable/fbx_deformer.py | makehumancommunity/community-plugins-fbx | 1f78be1723063cee0ae83232587431ab0f2e9894 | [
"MIT"
] | null | null | null | 9_export_fbx_bitbucket_stable/fbx_deformer.py | makehumancommunity/community-plugins-fbx | 1f78be1723063cee0ae83232587431ab0f2e9894 | [
"MIT"
] | 5 | 2019-06-01T07:04:17.000Z | 2022-02-21T14:14:51.000Z | #!/usr/bin/python2.7
# -*- coding: utf-8 -*-
"""
**Project Name:** MakeHuman
**Product Home Page:** http://www.makehuman.org/
**Code Home Page:** https://bitbucket.org/MakeHuman/makehuman/
**Authors:** Thomas Larsson, Jonas Hauquier
**Copyright(c):** MakeHuman Team 2001-2017
**Licensing:** AGPL3
This file is part of MakeHuman (www.makehuman.org).
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Abstract
--------
Fbx mesh
"""
import transformations as tm
from .fbx_utils import *
#--------------------------------------------------------------------
# Object definitions
#--------------------------------------------------------------------
def getObjectCounts(meshes):
    """
    Count the total number of vertex groups and shapes required for all
    specified meshes. Returns (nVertexGroups, nShapes).
    """
    group_count = 0
    for mesh in meshes:
        if mesh.vertexWeights is None:
            continue
        # Only non-empty weight entries contribute a vertex group.
        group_count += sum(1 for weights in mesh.vertexWeights.data if weights)
    shape_count = 0
    for mesh in meshes:
        shapes = getattr(mesh, 'shapes', None)
        if shapes is not None:
            shape_count += sum(1 for _key, shape in shapes if shape)
    return group_count, shape_count
def countObjects(meshes, skel):
    """
    Count the total number of vertex groups and shapes combined, as required
    for all specified meshes. If no skeleton rig is attached to the mesh, no
    vertex groups for bone weights are required.
    """
    nVertexGroups, nShapes = getObjectCounts(meshes)
    # Each shape needs a BlendShape deformer plus a channel (2 per shape);
    # with a skeleton we additionally need one Skin deformer and one
    # cluster per vertex group.
    if skel:
        return nVertexGroups + 1 + 2 * nShapes
    return 2 * nShapes
def writeObjectDefs(fp, meshes, skel, config):
    # Emit Definitions-section template counts for Deformer and Pose
    # objects, in binary or ASCII FBX depending on config.binary.
    nVertexGroups, nShapes = getObjectCounts(meshes)
    count = countObjects(meshes, skel)
    if config.binary:
        from . import fbx_binary
        elem = fbx_binary.get_child_element(fp, 'Definitions')
        if count > 0:
            fbx_binary.fbx_template_generate(elem, "Deformer", count)
        if skel:
            fbx_binary.fbx_template_generate(elem, "Pose", 1)
        return
    if count > 0:
        fp.write(
            '    ObjectType: "Deformer" {\n' +
            '        Count: %d' % count +
"""
    }
""")
    if skel:
        # A single bind pose covers the whole skeleton.
        fp.write("""
    ObjectType: "Pose" {
        Count: 1
    }
""")
#--------------------------------------------------------------------
# Object properties
#--------------------------------------------------------------------
def writeObjectProps(fp, meshes, skel, config):
    # Write the bind pose, skin deformers/clusters (when a skeleton is
    # present) and the shapekey geometry/deformer objects for each mesh.
    if skel:
        writeBindPose(fp, meshes, skel, config)
        for mesh in meshes:
            writeDeformer(fp, mesh.name, config)
            for bone in skel.getBones():
                try:
                    weights = mesh.vertexWeights.data[bone.name]
                except KeyError:
                    # Bone does not influence this mesh; skip it.
                    continue
                writeSubDeformer(fp, mesh.name, bone, weights, config)
    for mesh in meshes:
        # TODO support binary FBX shapekey export
        if hasattr(mesh, 'shapes') and mesh.shapes is not None:
            for sname,shape in mesh.shapes:
                writeShapeGeometry(fp, mesh.name, sname, shape, config)
                writeShapeDeformer(fp, mesh.name, sname)
                writeShapeSubDeformer(fp, mesh.name, sname)
def writeShapeGeometry(fp, name, sname, shape, config):
    # Write one shapekey as an FBX "Shape" geometry: affected vertex
    # indices, their scaled/offset target positions, and zero normals.
    id,key = getId("Geometry::%s_%sShape" % (name, sname))
    nVerts = len(shape.verts)
    fp.write(
        '    Geometry: %d, "%s", "Shape" {\n' % (id, key) +
        '        version: 100\n' +
        '        Indexes: *%d {\n' % nVerts +
        '            a: ')
    # Comma-joined list; string[:-1] drops the trailing comma.
    string = "".join( ['%d,' % vn for vn in shape.verts] )
    fp.write(string[:-1])
    fp.write('\n' +
        '        }\n' +
        '        Vertices: *%d {\n' % (3*nVerts) +
        '            a: ')
    # Apply the exporter's global scale and offset to the shape targets.
    target = config.scale * shape.data + config.offset
    string = "".join( ["%.4f,%.4f,%.4f," % tuple(dr) for dr in target] )
    fp.write(string[:-1])
    # Must use normals for shapekeys
    fp.write('\n' +
        '        }\n' +
        '        Normals: *%d {\n' % (3*nVerts) +
        '            a: ')
    string = nVerts * "0,0,0,"
    fp.write(string[:-1])
    fp.write('\n' +
        '        }\n' +
    '    }\n')
def writeShapeDeformer(fp, name, sname):
    """Write the BlendShape deformer object for one shapekey."""
    deformer_id, deformer_key = getId("Deformer::%s_%sShape" % (name, sname))
    text = (
        '    Deformer: %d, "%s", "BlendShape" {\n' % (deformer_id, deformer_key)
        + '        Version: 100\n'
        + '    }\n')
    fp.write(text)
def writeShapeSubDeformer(fp, name, sname, shape=None):
    """
    Write the BlendShapeChannel sub-deformer for one shapekey.

    FIX: `shape` is now optional. writeObjectProps calls this function with
    only three arguments, so the original required `shape` parameter (which
    the body never used) raised a TypeError at export time.
    """
    sid,skey = getId("SubDeformer::%s_%sShape" % (name, sname))
    fp.write(
        '    Deformer: %d, "%s", "BlendShapeChannel" {' % (sid, skey) +
"""
        version: 100
        deformpercent: 0.0
        FullWeights: *1 {
            a: 100
        }
    }
""")
def writeDeformer(fp, name, config):
    # Write the top-level Skin deformer for a mesh; the custom MHName
    # property preserves the MakeHuman name for round-tripping.
    id,key = getId("Deformer::%s" % name)
    properties = [
        ("MHName", "p_string", "%sSkin" % name, False, True)
    ]
    if config.binary:
        from . import fbx_binary
        elem = fbx_binary.get_child_element(fp, 'Objects')
        fbx_binary.fbx_data_deformer(elem, key, id, properties)
        return
    # NOTE(review): bare `import fbx_utils` relies on Python 2 implicit
    # relative imports (the file header targets python2.7).
    import fbx_utils
    fp.write(
        '    Deformer: %d, "%s", "Skin" {' % (id, key) +
"""
        Version: 101
        Properties70: {
""" + fbx_utils.get_ascii_properties(properties, indent=3) + """
        }
        Link_DeformAcuracy: 50
    }
""")
def writeSubDeformer(fp, name, bone, weights, config):
    # Write one skin cluster: the vertices influenced by `bone`, their
    # weights, and the bone's bind/bind-inverse matrices.
    # `weights` is a (vertex_indices, weight_values) pair.
    id,key = getId("SubDeformer::%s_%s" % (bone.name, name))
    bindmat,bindinv = bone.getBindMatrix(config.offset)
    if config.binary:
        from . import fbx_binary
        elem = fbx_binary.get_child_element(fp, 'Objects')
        fbx_binary.fbx_data_subdeformer(elem, key, id, weights[0], weights[1], bindmat, bindinv)
        return
    nVertexWeights = len(weights[0])
    indexString = ','.join(["%d" % vn for vn in weights[0]])
    weightString = ','.join(["%4f" % w for w in weights[1]])
    fp.write(
        '    Deformer: %d, "%s", "Cluster" {\n' % (id, key) +
        '        Version: 100\n' +
        '        UserData: "", ""\n' +
        '        Indexes: *%d {\n' % nVertexWeights +
        '            a: %s\n' % indexString +
        '        } \n' +
        '        Weights: *%d {\n' % nVertexWeights +
        '            a: %s\n' % weightString +
        '        }\n')
    writeMatrix(fp, 'Transform', bindmat)
    writeMatrix(fp, 'TransformLink', bindinv)
    fp.write('    }\n')
def writeBindPose(fp, meshes, skel, config):
    """Write the FBX "BindPose" node covering skeleton, meshes and bones.

    One pose node is emitted for the skeleton root, one per mesh, and one per
    bone (with that bone's bind matrix).  Output goes through fbx_binary when
    ``config.binary`` is set, otherwise ascii text is written to ``fp``.
    """
    id,key = getId("Pose::" + skel.name)
    nBones = skel.getBoneCount()
    nMeshes = len(meshes)
    # Skeleton bind matrix: 90-degree rotation about the X axis
    # (presumably an axis-convention conversion -- confirm).
    skelbindmat = tm.rotation_matrix(math.pi/2, (1,0,0))
    # Total pose nodes: skeleton root + each mesh + each bone.
    count = 1 + nMeshes + nBones
    if config.binary:
        from . import fbx_binary
        elem = fbx_binary.get_child_element(fp, 'Objects')
        pelem = fbx_binary.fbx_data_bindpose_element(elem, key, id, count)
    else:
        fp.write(
            ' Pose: %d, "%s", "BindPose" {\n' % (id, key)+
            ' Type: "BindPose"\n' +
            ' Version: 100\n' +
            ' NbPoseNodes: %d\n' % count)
    # NOTE(review): startLinking()/stopLinking() come from fbx_utils and
    # presumably bracket id registration for the Connections section -- confirm.
    startLinking()
    key = "Model::%s" % skel.name
    if config.binary:
        id,_ = getId(key)
        fbx_binary.fbx_data_pose_node_element(pelem, key, id, skelbindmat)
    else:
        poseNode(fp, key, skelbindmat)
    for mesh in meshes:
        key = "Model::%sMesh" % mesh.name
        if config.binary:
            id,_ = getId(key)
            fbx_binary.fbx_data_pose_node_element(pelem, key, id, skelbindmat)
        else:
            poseNode(fp, key, skelbindmat)
    for bone in skel.getBones():
        key = "Model::%s" % bone.name
        # Each bone contributes its own bind matrix.
        bindmat,_ = bone.getBindMatrix(config.offset)
        if config.binary:
            id,_ = getId(key)
            fbx_binary.fbx_data_pose_node_element(pelem, key, id, bindmat)
        else:
            poseNode(fp, key, bindmat)
    stopLinking()
    if not config.binary:
        fp.write(' }\n')
def poseNode(fp, key, matrix):
    """Write one ascii PoseNode entry for ``key``.

    NOTE: zeroes the translation column of ``matrix`` in place, so the
    caller's matrix is modified.
    """
    nodeId, _ = getId(key)
    matrix[:3,3] = 0
    header = ' PoseNode: {\n' + ' Node: %d\n' % nodeId
    fp.write(header)
    writeMatrix(fp, 'Matrix', matrix, " ")
    fp.write(' }\n')
#--------------------------------------------------------------------
# Links
#--------------------------------------------------------------------
def writeLinks(fp, meshes, skel, config):
    """Write the object-object (OO) connection links between FBX nodes.

    Links each mesh geometry to its skin deformer, each weighted bone to its
    cluster, and each blend-shape chain (shape geometry -> channel ->
    blend-shape deformer -> mesh geometry).
    """
    if skel:
        for mesh in meshes:
            ooLink(fp, 'Deformer::%s' % mesh.name, 'Geometry::%s' % mesh.name, config)
            for bone in skel.getBones():
                subdef = 'SubDeformer::%s_%s' % (bone.name, mesh.name)
                # Only link clusters that were actually written; getId is
                # expected to raise NameError for ids never registered.
                try:
                    getId(subdef)
                except NameError:
                    continue
                ooLink(fp, subdef, 'Deformer::%s' % mesh.name, config)
                ooLink(fp, 'Model::%s' % bone.name, subdef, config)
    for mesh in meshes:
        if hasattr(mesh, 'shapes') and mesh.shapes is not None:
            for sname, shape in mesh.shapes:
                deform = "Deformer::%s_%sShape" % (mesh.name, sname)
                subdef = "SubDeformer::%s_%sShape" % (mesh.name, sname)
                # BUGFIX: these three calls previously omitted ``config``,
                # unlike every other ooLink call in this module.
                ooLink(fp, "Geometry::%s_%sShape" % (mesh.name, sname), subdef, config)
                ooLink(fp, subdef, deform, config)
                ooLink(fp, deform, "Geometry::%s" % mesh.name, config)
| 29.917889 | 96 | 0.525387 |
import transformations as tm
from .fbx_utils import *
def getObjectCounts(meshes):
    """Count non-empty vertex-weight groups and non-empty shape targets.

    Returns a (nVertexGroups, nShapes) tuple.  Meshes whose
    ``vertexWeights`` is None contribute no groups; meshes without a
    ``shapes`` attribute (or with ``shapes`` set to None) contribute no
    shape targets.
    """
    groupCount = 0
    shapeCount = 0
    for mesh in meshes:
        if mesh.vertexWeights is not None:
            groupCount += sum(1 for weights in mesh.vertexWeights.data if weights)
        if getattr(mesh, 'shapes', None) is not None:
            shapeCount += sum(1 for _key, shape in mesh.shapes if shape)
    return groupCount, shapeCount
def countObjects(meshes, skel):
    """Total number of Deformer-type objects that will be written.

    Two objects per shape target (geometry deformer + channel); with a
    skeleton, additionally one skin deformer slot per mesh plus one per
    non-empty vertex group.
    """
    groups, shapes = getObjectCounts(meshes)
    base = 2 * shapes
    return groups + 1 + base if skel else base
def writeObjectDefs(fp, meshes, skel, config):
    """Write the FBX "Definitions" templates for deformers and the bind pose.

    Declares how many Deformer objects will follow, and one Pose template
    when a skeleton is present.  Uses fbx_binary when ``config.binary`` is
    set, ascii otherwise.
    """
    # nVertexGroups/nShapes are currently unused here; count covers both.
    nVertexGroups, nShapes = getObjectCounts(meshes)
    count = countObjects(meshes, skel)
    if config.binary:
        from . import fbx_binary
        elem = fbx_binary.get_child_element(fp, 'Definitions')
        if count > 0:
            fbx_binary.fbx_template_generate(elem, "Deformer", count)
        if skel:
            fbx_binary.fbx_template_generate(elem, "Pose", 1)
        return
    if count > 0:
        fp.write(
            ' ObjectType: "Deformer" {\n' +
            ' Count: %d' % count +
            """
 }
""")
    if skel:
        fp.write("""
 ObjectType: "Pose" {
 Count: 1
 }
""")
def writeObjectProps(fp, meshes, skel, config):
    """Write per-object deformer data and, when skinned, the bind pose.

    With a skeleton: the bind pose, then per mesh one Skin deformer plus one
    Cluster per bone that has weights on that mesh.  Blend-shape
    geometry/deformer/channel nodes are written for every mesh regardless.
    """
    if skel:
        writeBindPose(fp, meshes, skel, config)
        for mesh in meshes:
            writeDeformer(fp, mesh.name, config)
            for bone in skel.getBones():
                # Bones without weights on this mesh simply get no cluster.
                try:
                    weights = mesh.vertexWeights.data[bone.name]
                except KeyError:
                    continue
                writeSubDeformer(fp, mesh.name, bone, weights, config)
    for mesh in meshes:
        if hasattr(mesh, 'shapes') and mesh.shapes is not None:
            for sname,shape in mesh.shapes:
                writeShapeGeometry(fp, mesh.name, sname, shape, config)
                writeShapeDeformer(fp, mesh.name, sname)
                writeShapeSubDeformer(fp, mesh.name, sname)
def writeShapeGeometry(fp, name, sname, shape, config):
id,key = getId("Geometry::%s_%sShape" % (name, sname))
nVerts = len(shape.verts)
fp.write(
' Geometry: %d, "%s", "Shape" {\n' % (id, key) +
' version: 100\n' +
' Indexes: *%d {\n' % nVerts +
' a: ')
string = "".join( ['%d,' % vn for vn in shape.verts] )
fp.write(string[:-1])
fp.write('\n' +
' }\n' +
' Vertices: *%d {\n' % (3*nVerts) +
' a: ')
target = config.scale * shape.data + config.offset
string = "".join( ["%.4f,%.4f,%.4f," % tuple(dr) for dr in target] )
fp.write(string[:-1])
fp.write('\n' +
' }\n' +
' Normals: *%d {\n' % (3*nVerts) +
' a: ')
string = nVerts * "0,0,0,"
fp.write(string[:-1])
fp.write('\n' +
' }\n' +
' }\n')
def writeShapeDeformer(fp, name, sname):
id,key = getId("Deformer::%s_%sShape" % (name, sname))
fp.write(
' Deformer: %d, "%s", "BlendShape" {\n' % (id, key) +
' Version: 100\n' +
' }\n')
def writeShapeSubDeformer(fp, name, sname, shape):
sid,skey = getId("SubDeformer::%s_%sShape" % (name, sname))
fp.write(
' Deformer: %d, "%s", "BlendShapeChannel" {' % (sid, skey) +
"""
version: 100
deformpercent: 0.0
FullWeights: *1 {
a: 100
}
}
""")
def writeDeformer(fp, name, config):
id,key = getId("Deformer::%s" % name)
properties = [
("MHName", "p_string", "%sSkin" % name, False, True)
]
if config.binary:
from . import fbx_binary
elem = fbx_binary.get_child_element(fp, 'Objects')
fbx_binary.fbx_data_deformer(elem, key, id, properties)
return
import fbx_utils
fp.write(
' Deformer: %d, "%s", "Skin" {' % (id, key) +
"""
Version: 101
Properties70: {
""" + fbx_utils.get_ascii_properties(properties, indent=3) + """
}
Link_DeformAcuracy: 50
}
""")
def writeSubDeformer(fp, name, bone, weights, config):
id,key = getId("SubDeformer::%s_%s" % (bone.name, name))
bindmat,bindinv = bone.getBindMatrix(config.offset)
if config.binary:
from . import fbx_binary
elem = fbx_binary.get_child_element(fp, 'Objects')
fbx_binary.fbx_data_subdeformer(elem, key, id, weights[0], weights[1], bindmat, bindinv)
return
nVertexWeights = len(weights[0])
indexString = ','.join(["%d" % vn for vn in weights[0]])
weightString = ','.join(["%4f" % w for w in weights[1]])
fp.write(
' Deformer: %d, "%s", "Cluster" {\n' % (id, key) +
' Version: 100\n' +
' UserData: "", ""\n' +
' Indexes: *%d {\n' % nVertexWeights +
' a: %s\n' % indexString +
' } \n' +
' Weights: *%d {\n' % nVertexWeights +
' a: %s\n' % weightString +
' }\n')
writeMatrix(fp, 'Transform', bindmat)
writeMatrix(fp, 'TransformLink', bindinv)
fp.write(' }\n')
def writeBindPose(fp, meshes, skel, config):
id,key = getId("Pose::" + skel.name)
nBones = skel.getBoneCount()
nMeshes = len(meshes)
skelbindmat = tm.rotation_matrix(math.pi/2, (1,0,0))
count = 1 + nMeshes + nBones
if config.binary:
from . import fbx_binary
elem = fbx_binary.get_child_element(fp, 'Objects')
pelem = fbx_binary.fbx_data_bindpose_element(elem, key, id, count)
else:
fp.write(
' Pose: %d, "%s", "BindPose" {\n' % (id, key)+
' Type: "BindPose"\n' +
' Version: 100\n' +
' NbPoseNodes: %d\n' % count)
startLinking()
key = "Model::%s" % skel.name
if config.binary:
id,_ = getId(key)
fbx_binary.fbx_data_pose_node_element(pelem, key, id, skelbindmat)
else:
poseNode(fp, key, skelbindmat)
for mesh in meshes:
key = "Model::%sMesh" % mesh.name
if config.binary:
id,_ = getId(key)
fbx_binary.fbx_data_pose_node_element(pelem, key, id, skelbindmat)
else:
poseNode(fp, key, skelbindmat)
for bone in skel.getBones():
key = "Model::%s" % bone.name
bindmat,_ = bone.getBindMatrix(config.offset)
if config.binary:
id,_ = getId(key)
fbx_binary.fbx_data_pose_node_element(pelem, key, id, bindmat)
else:
poseNode(fp, key, bindmat)
stopLinking()
if not config.binary:
fp.write(' }\n')
def poseNode(fp, key, matrix):
pid,_ = getId(key)
matrix[:3,3] = 0
fp.write(
' PoseNode: {\n' +
' Node: %d\n' % pid)
writeMatrix(fp, 'Matrix', matrix, " ")
fp.write(' }\n')
def writeLinks(fp, meshes, skel, config):
if skel:
for mesh in meshes:
ooLink(fp, 'Deformer::%s' % mesh.name, 'Geometry::%s' % mesh.name, config)
for bone in skel.getBones():
subdef = 'SubDeformer::%s_%s' % (bone.name, mesh.name)
try:
getId(subdef)
except NameError:
continue
ooLink(fp, subdef, 'Deformer::%s' % mesh.name, config)
ooLink(fp, 'Model::%s' % bone.name, subdef, config)
for mesh in meshes:
if hasattr(mesh, 'shapes') and mesh.shapes is not None:
for sname, shape in mesh.shapes:
deform = "Deformer::%s_%sShape" % (mesh.name, sname)
subdef = "SubDeformer::%s_%sShape" % (mesh.name, sname)
ooLink(fp, "Geometry::%s_%sShape" % (mesh.name, sname), subdef)
ooLink(fp, subdef, deform)
ooLink(fp, deform, "Geometry::%s" % mesh.name)
| true | true |
1c4573e4d03b690f34aa54ed5a53a2890c2dddaf | 1,030 | py | Python | src/data/425.py | NULLCT/LOMC | 79a16474a8f21310e0fb47e536d527dd5dc6d655 | [
"MIT"
] | null | null | null | src/data/425.py | NULLCT/LOMC | 79a16474a8f21310e0fb47e536d527dd5dc6d655 | [
"MIT"
] | null | null | null | src/data/425.py | NULLCT/LOMC | 79a16474a8f21310e0fb47e536d527dd5dc6d655 | [
"MIT"
] | null | null | null | import sys
def main():
sys.setrecursionlimit(1000000)
N, Q = [int(x) for x in input().split()]
# 隣接リスト形式でグラフをつくる
# 重み付きの場合は、[行き先, weight]をそれぞれの行に持たせれば良い。
graph = [[] for i in range(N)]
for i in range(N - 1):
a, b = [int(x) for x in input().split()]
graph[a - 1].append(b - 1)
graph[b - 1].append(a - 1)
queries = []
for i in range(Q):
c, d = [int(x) for x in input().split()]
queries.append([c - 1, d - 1])
distances = [-1 for x in range(N)]
distances[0] = 0
dfs(graph, distances, 0)
for query in queries:
if (distances[query[0]] - distances[query[1]]) % 2 == 0:
print("Town")
else:
print("Road")
def dfs(graph, distances, current_node):
    """Depth-first fill of ``distances`` with the depth from the start node.

    Mutates ``distances`` in place; entries still holding a negative value
    are treated as unvisited.
    """
    base = distances[current_node]
    for neighbor in graph[current_node]:
        if distances[neighbor] < 0:
            distances[neighbor] = base + 1
            dfs(graph, distances, neighbor)
if __name__ == "__main__":
main()
| 23.953488 | 64 | 0.551456 | import sys
def main():
sys.setrecursionlimit(1000000)
N, Q = [int(x) for x in input().split()]
graph = [[] for i in range(N)]
for i in range(N - 1):
a, b = [int(x) for x in input().split()]
graph[a - 1].append(b - 1)
graph[b - 1].append(a - 1)
queries = []
for i in range(Q):
c, d = [int(x) for x in input().split()]
queries.append([c - 1, d - 1])
distances = [-1 for x in range(N)]
distances[0] = 0
dfs(graph, distances, 0)
for query in queries:
if (distances[query[0]] - distances[query[1]]) % 2 == 0:
print("Town")
else:
print("Road")
def dfs(graph, distances, current_node):
next_nodes = graph[current_node]
for next_node in next_nodes:
if distances[next_node] < 0:
distances[next_node] = distances[current_node] + 1
dfs(graph, distances, next_node)
if __name__ == "__main__":
main()
| true | true |
1c4574061d4be29467fa53f9afe975e345de3bfa | 3,268 | py | Python | pyInstaller/dash/dash/resources.py | rianawillers/dash-lineplot | b72c3e4799d39bdc33bbcae2202fdefd6f4af00e | [
"MIT"
] | 2 | 2021-09-17T14:23:28.000Z | 2021-09-17T22:12:50.000Z | pyInstaller/dash/dash/resources.py | rianawillers/dash-lineplot | b72c3e4799d39bdc33bbcae2202fdefd6f4af00e | [
"MIT"
] | null | null | null | pyInstaller/dash/dash/resources.py | rianawillers/dash-lineplot | b72c3e4799d39bdc33bbcae2202fdefd6f4af00e | [
"MIT"
] | 17 | 2019-11-21T14:11:29.000Z | 2019-11-21T15:26:23.000Z | import json
import warnings
import os
from .development.base_component import ComponentRegistry
from . import exceptions
class Resources:
    """Collects resource definitions (scripts/stylesheets) and filters them
    into the dict form used for serving."""

    def __init__(self, resource_name):
        self._resources = []
        self.resource_name = resource_name

    def append_resource(self, resource):
        """Register one raw resource dict."""
        self._resources.append(resource)

    def _filter_resources(self, all_resources, dev_bundles=False):
        """Reduce each raw resource to the keys relevant for serving.

        Exactly one source key is kept per resource, in priority order:
        external_url (when not serving locally), dev_package_path (when dev
        bundles are requested), relative_package_path, absolute_path,
        asset_path (plus an mtime timestamp).
        """
        filtered = []
        for res in all_resources:
            entry = {}
            if 'dynamic' in res:
                entry['dynamic'] = res['dynamic']
            if 'namespace' in res:
                entry['namespace'] = res['namespace']
            if 'external_url' in res and not self.config.serve_locally:
                entry['external_url'] = res['external_url']
            elif 'dev_package_path' in res and dev_bundles:
                entry['relative_package_path'] = res['dev_package_path']
            elif 'relative_package_path' in res:
                entry['relative_package_path'] = res['relative_package_path']
            elif 'absolute_path' in res:
                entry['absolute_path'] = res['absolute_path']
            elif 'asset_path' in res:
                entry['asset_path'] = res['asset_path']
                # Modification time lets the server bust caches.
                entry['ts'] = os.stat(res['filepath']).st_mtime
            elif self.config.serve_locally:
                warnings.warn(
                    'A local version of {} is not available'.format(
                        res['external_url']
                    )
                )
                continue
            else:
                raise exceptions.ResourceException(
                    '{} does not have a '
                    'relative_package_path, absolute_path, or an '
                    'external_url.'.format(
                        json.dumps(entry)
                    )
                )
            filtered.append(entry)
        return filtered

    def get_all_resources(self, dev_bundles=False):
        """Return component-registry resources plus appended ones, filtered."""
        lib_resources = ComponentRegistry.get_resources(self.resource_name)
        return self._filter_resources(lib_resources + self._resources,
                                      dev_bundles)
# pylint: disable=too-few-public-methods
class _Config:
def __init__(self, serve_locally):
self.serve_locally = serve_locally
class Css:
    """Registry of CSS resources.

    Wraps a ``Resources('_css_dist')`` collection and shares one ``_Config``
    instance between itself and the collection.
    """

    def __init__(self, serve_locally):
        shared_config = _Config(serve_locally)
        resources = Resources('_css_dist')
        resources.config = shared_config
        self._resources = resources
        self.config = shared_config

    def append_css(self, stylesheet):
        """Register an additional stylesheet resource."""
        self._resources.append_resource(stylesheet)

    def get_all_css(self):
        """Return every registered CSS resource after filtering."""
        return self._resources.get_all_resources()
class Scripts:
    """Registry of JavaScript resources.

    Wraps a ``Resources('_js_dist')`` collection and shares one ``_Config``
    instance between itself and the collection.
    """

    def __init__(self, serve_locally):
        shared_config = _Config(serve_locally)
        resources = Resources('_js_dist')
        resources.config = shared_config
        self._resources = resources
        self.config = shared_config

    def append_script(self, script):
        """Register an additional script resource."""
        self._resources.append_resource(script)

    def get_all_scripts(self, dev_bundles=False):
        """Return every registered script resource after filtering."""
        return self._resources.get_all_resources(dev_bundles)
| 34.041667 | 75 | 0.604651 | import json
import warnings
import os
from .development.base_component import ComponentRegistry
from . import exceptions
class Resources:
def __init__(self, resource_name):
self._resources = []
self.resource_name = resource_name
def append_resource(self, resource):
self._resources.append(resource)
def _filter_resources(self, all_resources, dev_bundles=False):
filtered_resources = []
for s in all_resources:
filtered_resource = {}
if 'dynamic' in s:
filtered_resource['dynamic'] = s['dynamic']
if 'namespace' in s:
filtered_resource['namespace'] = s['namespace']
if 'external_url' in s and not self.config.serve_locally:
filtered_resource['external_url'] = s['external_url']
elif 'dev_package_path' in s and dev_bundles:
filtered_resource['relative_package_path'] = (
s['dev_package_path']
)
elif 'relative_package_path' in s:
filtered_resource['relative_package_path'] = (
s['relative_package_path']
)
elif 'absolute_path' in s:
filtered_resource['absolute_path'] = s['absolute_path']
elif 'asset_path' in s:
info = os.stat(s['filepath'])
filtered_resource['asset_path'] = s['asset_path']
filtered_resource['ts'] = info.st_mtime
elif self.config.serve_locally:
warnings.warn(
'A local version of {} is not available'.format(
s['external_url']
)
)
continue
else:
raise exceptions.ResourceException(
'{} does not have a '
'relative_package_path, absolute_path, or an '
'external_url.'.format(
json.dumps(filtered_resource)
)
)
filtered_resources.append(filtered_resource)
return filtered_resources
def get_all_resources(self, dev_bundles=False):
lib_resources = ComponentRegistry.get_resources(self.resource_name)
all_resources = lib_resources + self._resources
return self._filter_resources(all_resources, dev_bundles)
class _Config:
def __init__(self, serve_locally):
self.serve_locally = serve_locally
class Css:
def __init__(self, serve_locally):
self._resources = Resources('_css_dist')
self._resources.config = self.config = _Config(serve_locally)
def append_css(self, stylesheet):
self._resources.append_resource(stylesheet)
def get_all_css(self):
return self._resources.get_all_resources()
class Scripts:
def __init__(self, serve_locally):
self._resources = Resources('_js_dist')
self._resources.config = self.config = _Config(serve_locally)
def append_script(self, script):
self._resources.append_resource(script)
def get_all_scripts(self, dev_bundles=False):
return self._resources.get_all_resources(dev_bundles)
| true | true |
1c45743ca4e65273720ebd7ad9326b42e0788bfd | 1,734 | py | Python | mergesort.py | maurendeviia/pythoncharmers | b5775d0f51a6f2e5dc0365345e0436dea4c72c14 | [
"MIT"
] | 37 | 2020-10-01T15:20:12.000Z | 2021-10-04T14:17:06.000Z | mergesort.py | maurendeviia/pythoncharmers | b5775d0f51a6f2e5dc0365345e0436dea4c72c14 | [
"MIT"
] | 27 | 2020-10-01T12:32:41.000Z | 2021-10-04T11:05:34.000Z | mergesort.py | maurendeviia/pythoncharmers | b5775d0f51a6f2e5dc0365345e0436dea4c72c14 | [
"MIT"
] | 57 | 2020-10-01T11:24:26.000Z | 2022-02-16T05:09:50.000Z | # Python program for implementation of MergeSort
# Merges two subarrays of arr[].
# First subarray is arr[l..m]
# Second subarray is arr[m+1..r]
def merge(arr, l, m, r):
    """Merge the two sorted runs arr[l..m] and arr[m+1..r] in place.

    Stable: on ties the element from the left run is taken first.
    """
    left = arr[l:m + 1]
    right = arr[m + 1:r + 1]
    i = j = 0
    k = l
    # Interleave the two runs until one is exhausted.
    while i < len(left) and j < len(right):
        if left[i] <= right[j]:
            arr[k] = left[i]
            i += 1
        else:
            arr[k] = right[j]
            j += 1
        k += 1
    # Copy whatever remains of either run.
    while i < len(left):
        arr[k] = left[i]
        i += 1
        k += 1
    while j < len(right):
        arr[k] = right[j]
        j += 1
        k += 1
# l is for left index and r is right index of the
# sub-array of arr to be sorted
def mergeSort(arr, l, r):
    """Recursively sort arr[l..r] (inclusive bounds) in place via merge()."""
    if l >= r:
        return
    # Same split point as the classic (l + (r - 1)) // 2.
    mid = (l + r - 1) // 2
    mergeSort(arr, l, mid)
    mergeSort(arr, mid + 1, r)
    merge(arr, l, mid, r)
# Driver: sort a sample array and print it before and after.
arr = [12, 11, 13, 5, 6, 7]
n = len(arr)
print ("Given array is")
for i in range(n):
    # NOTE(review): the trailing comma is the Python 2 "suppress newline"
    # idiom; under Python 3 this statement becomes a tuple expression and
    # prints one element per line instead -- confirm intended interpreter.
    print ("%d" %arr[i]),
mergeSort(arr,0,n-1)
print ("\n\nSorted array is")
for i in range(n):
    print ("%d" %arr[i]),
| 23.12 | 52 | 0.474625 |
def merge(arr, l, m, r):
n1 = m - l + 1
n2 = r- m
L = [0] * (n1)
R = [0] * (n2)
for i in range(0 , n1):
L[i] = arr[l + i]
for j in range(0 , n2):
R[j] = arr[m + 1 + j]
i = 0
j = 0
k = l
while i < n1 and j < n2 :
if L[i] <= R[j]:
arr[k] = L[i]
i += 1
else:
arr[k] = R[j]
j += 1
k += 1
while i < n1:
arr[k] = L[i]
i += 1
k += 1
while j < n2:
arr[k] = R[j]
j += 1
k += 1
def mergeSort(arr,l,r):
if l < r:
m = (l+(r-1))//2
mergeSort(arr, l, m)
mergeSort(arr, m+1, r)
merge(arr, l, m, r)
arr = [12, 11, 13, 5, 6, 7]
n = len(arr)
print ("Given array is")
for i in range(n):
print ("%d" %arr[i]),
mergeSort(arr,0,n-1)
print ("\n\nSorted array is")
for i in range(n):
print ("%d" %arr[i]),
| true | true |
1c4574486b85926786807ba74806c14f69cb0642 | 716 | py | Python | test/ie.py | napoler/Terry-toolkit | 8b1a607fb6d27801b0441b67f7eb0962794a728a | [
"MIT"
] | null | null | null | test/ie.py | napoler/Terry-toolkit | 8b1a607fb6d27801b0441b67f7eb0962794a728a | [
"MIT"
] | 14 | 2019-11-15T14:28:22.000Z | 2022-02-10T00:24:28.000Z | test/ie.py | napoler/Terry-toolkit | 8b1a607fb6d27801b0441b67f7eb0962794a728a | [
"MIT"
] | 1 | 2020-04-15T12:58:00.000Z | 2020-04-15T12:58:00.000Z | #encoding=utf-8
from __future__ import unicode_literals
import sys
sys.path.append("../")
import Terry_toolkit as tkit
t= tkit.Text()
text="""
柯基犬是个十足的小狗子
"""
# li = t.summary(text=text)
# print(li)
# li = t.get_keyphrases(text=text)
# print(li)
# li = t.sentence_segmentation(text=text)
# print(li)
# li = t.participle(text=text,dotype='words_all_filters')
# print(li)
# Load the LTP models and extract triples from the sample text.
ie=tkit.TripleIE(model_path='/mnt/data/dev/model/ltp/ltp_data_v3.4.0')
s=ie.get(text)
# print(s)
#
# Print each extracted triple. IDIOM FIX: "item==None" with an empty
# pass-branch replaced by the identity test "is not None".
for item in s:
    if item is not None:
        print(item[0],item[1],item[2])
# extractor = tkit.TripleExtractor()
# svos = extractor.triples_main(text)
# # print(svos)
# for item in svos:
# print("".join(item)) | 16.272727 | 70 | 0.666201 |
from __future__ import unicode_literals
import sys
sys.path.append("../")
import Terry_toolkit as tkit
t= tkit.Text()
text="""
柯基犬是个十足的小狗子
"""
ie=tkit.TripleIE(model_path='/mnt/data/dev/model/ltp/ltp_data_v3.4.0')
s=ie.get(text)
for item in s:
if item==None:
pass
else:
print(item[0],item[1],item[2])
| true | true |
1c45754bd696bde5b9f6046fcb305c9e2b18fb6e | 8,617 | py | Python | make_knockoffs.py | wfbradley/snpko | abc77349d702915519518eacdf919f06579413d0 | [
"MIT"
] | null | null | null | make_knockoffs.py | wfbradley/snpko | abc77349d702915519518eacdf919f06579413d0 | [
"MIT"
] | null | null | null | make_knockoffs.py | wfbradley/snpko | abc77349d702915519518eacdf919f06579413d0 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import pandas as pd
import os
import numpy as np
import SNPknock.fastphase as fp
from SNPknock import knockoffHMM
from joblib import Parallel, delayed
import utils_snpko as utils
logger = utils.logger
def make_knockoff(chromosome=None, grouped_by_chromosome=None, df_SNP=None,
                  df_geno_experiment=None, df_geno_ensembl=None,
                  SNP_to_wild_type=None, cache_dir=None, path_to_fp=None,
                  em_iterations=25, random_seed=123):
    """Fit (or load from cache) an HMM for one chromosome and sample knockoffs.

    Runs fastPhase EM on the ENSEMBL genotypes for this chromosome (results
    cached on disk under ``cache_dir``), then samples knockoff copies of the
    experimental genotypes from the fitted HMM.

    Returns (X_knockoffs, X_experiment, SNPs_on_chromosome).
    """
    # The first keyword arguments are effectively required; the defaults only
    # exist so the function is convenient to call via joblib's delayed().
    assert chromosome is not None
    assert grouped_by_chromosome is not None
    assert df_SNP is not None
    logger.debug("################")
    logger.debug("Chromosome %2d #" % chromosome)
    logger.debug("################")
    num_experiment_people = len(df_geno_experiment)
    num_ensembl_people = len(df_geno_ensembl)
    # SNPs for this chromosome, ordered by genomic position.
    indices = grouped_by_chromosome.groups[chromosome]
    df_SNP_chromo = df_SNP.iloc[indices].sort_values('chromosome_position')
    SNPs_on_chromosome = df_SNP_chromo['SNP'].values
    # Genotype matrices: one row per person, one column per SNP; entries are
    # filled via utils.genotype_to_nonwild_type_count.
    X_experiment = np.empty((num_experiment_people, len(SNPs_on_chromosome)))
    X_ensembl = np.empty((num_ensembl_people, len(SNPs_on_chromosome)))
    for X, df in [
            (X_experiment, df_geno_experiment),
            (X_ensembl, df_geno_ensembl)]:
        for j, SNP in enumerate(SNPs_on_chromosome):
            X[:, j] = utils.genotype_to_nonwild_type_count(
                df[SNP].values, SNP_to_wild_type[SNP])
    out_path = '%s/chrom_%d' % (cache_dir, chromosome)
    # If all relevant files are found in cache, skip EM recomputation; otherwise,
    # redo the whole thing.
    target_file_suffix_list = [
        'alphahat.txt', 'finallikelihoods', 'origchars', 'rhat.txt', 'thetahat.txt']
    already_in_cache = True
    for suffix in target_file_suffix_list:
        target_path = os.path.join(
            cache_dir, 'chrom_%d_%s' % (chromosome, suffix))
        if not os.path.exists(target_path):
            already_in_cache = False
            break
    if already_in_cache:
        logger.debug("Found chrom %d HMM in cache" % chromosome)
    else:
        # Write array to file
        Xfp_file = '%s/X_%d.inp' % (cache_dir, chromosome)
        fp.writeX(X_ensembl, Xfp_file)
        # Run fastPhase on data (which runs EM)
        fp.runFastPhase(path_to_fp, Xfp_file, out_path,
                        K=12, numit=em_iterations)
    # Read in fastPhase results (i.e., HMM parameters) from file:
    r_file = out_path + "_rhat.txt"
    alpha_file = out_path + "_alphahat.txt"
    theta_file = out_path + "_thetahat.txt"
    # Why is X_ensembl[0, :] in the function arguments below?
    hmm = fp.loadFit(r_file, theta_file, alpha_file, X_ensembl[0, :])
    # Actually produce the knockoffs
    knockoffs = knockoffHMM(hmm["pInit"], hmm["Q"], hmm[
        "pEmit"], seed=random_seed)
    X_knockoffs = knockoffs.sample(X_experiment)
    return(X_knockoffs, X_experiment, SNPs_on_chromosome)
def make_all_knockoffs(args):
    '''
    For each chromosome, independently:
    Sort SNPs according to position on genome.
    Train HMM parameters with EM on ENSEMBL data.
    Generate knockoffs of the experimental SNP data.
    Repeats knockoff sampling args.num_knockoff_trials times, writing one
    knockoffs_NNN.csv per trial under working_dir/knockoffs.
    For now, we ignore sex of persons, although that is
    available in ENSEMBL
    '''
    logger.info("####################################")
    logger.info("Fitting HMM and generating knockoffs")
    path_to_fp = os.path.join(args.fastPHASE_path, 'fastPHASE')
    if not(os.path.exists(path_to_fp)):
        logger.info("Cannot find fastPHASE at %s" % path_to_fp)
        raise Exception
    cache_dir = os.path.join(args.working_dir, 'fastphase_cache')
    utils.safe_mkdir(cache_dir)
    df_geno_ensembl = pd.read_csv(os.path.join(
        (args.working_dir), 'pruned_ensembl.csv'))
    # Columns: SNP,wild_type,chromosome,chromosome_position
    df_SNP = pd.read_csv(os.path.join(
        (args.working_dir), 'pruned_SNP_facts.csv'))
    df_wild = pd.read_csv(os.path.join(args.working_dir, 'wild_types.csv'))
    SNP_to_wild_type = dict(
        zip(df_wild['SNP'].values, df_wild['wild_type'].values))
    # Only human autosomes/X (1..23) are expected.
    chromosome_list = np.sort(np.unique(df_SNP['chromosome']))
    for chromosome in chromosome_list:
        assert chromosome in np.arange(1, 24)
    df_geno_experiment = pd.read_csv(os.path.join(
        (args.working_dir), 'pruned_experiment.csv'))
    # Make sure we have the same SNPs everywhere.
    assert (set([c for c in df_geno_ensembl.columns if c.startswith('rs')]) ==
            set([c for c in df_geno_experiment.columns if c.startswith('rs')]))
    for SNP in df_SNP.SNP.values:
        assert SNP in df_geno_ensembl.columns
    grouped_by_chromosome = df_SNP.groupby('chromosome')
    num_experiment_people = len(df_geno_experiment)
    knockoff_SNP_list = []
    utils.safe_mkdir(os.path.join(args.working_dir, 'knockoffs'))
    em_iterations = 500
    logger.info('Number of EM iterations: %d' % em_iterations)
    for knockoff_trial_count in xrange(args.num_knockoff_trials):
        # Distinct seed per trial so each trial samples fresh knockoffs.
        random_seed = knockoff_trial_count + args.random_seed
        if ((args.num_knockoff_trials <= 20) or
                knockoff_trial_count % ((args.num_knockoff_trials) // 20) == 0):
            logger.info("Knockoff sampling %d of %d" % (
                knockoff_trial_count, args.num_knockoff_trials))
        if False:
            # Serial version; code preserved for debugging purposes
            # NOTE(review): unlike the parallel branch below, this appends
            # instead of reassigning knockoff_SNP_list, so results would
            # accumulate across trials if re-enabled -- confirm before use.
            for chromosome in chromosome_list:
                knockoff_SNP_list.append(
                    make_knockoff(
                        chromosome=chromosome,
                        grouped_by_chromosome=grouped_by_chromosome, df_SNP=df_SNP,
                        df_geno_experiment=df_geno_experiment, df_geno_ensembl=df_geno_ensembl,
                        SNP_to_wild_type=SNP_to_wild_type, cache_dir=cache_dir,
                        path_to_fp=path_to_fp, em_iterations=em_iterations, random_seed=random_seed))
        else:
            # One make_knockoff job per chromosome, fanned out with joblib.
            knockoff_SNP_list = Parallel(n_jobs=args.num_workers)(
                delayed(make_knockoff)(
                    chromosome=i,
                    grouped_by_chromosome=grouped_by_chromosome, df_SNP=df_SNP,
                    df_geno_experiment=df_geno_experiment, df_geno_ensembl=df_geno_ensembl,
                    SNP_to_wild_type=SNP_to_wild_type, cache_dir=cache_dir, path_to_fp=path_to_fp,
                    em_iterations=em_iterations, random_seed=random_seed)
                for i in chromosome_list)
        # Stitch results for each chromosome back together into a single dataframe
        # Knockoff results (used below only for the fill-in sanity check).
        SNP_columns = [
            x for x in df_geno_ensembl.columns if x.startswith('rs')]
        df_knockoffs = pd.DataFrame(
            columns=SNP_columns, index=np.arange(num_experiment_people))
        # Matched experimental observations + knockoffs in one dataframe
        matched_columns = []
        data_labels = []
        for field in df_geno_experiment.columns:
            if field.startswith('rs'):
                matched_columns.append(field)
                matched_columns.append(field + '_knockoff')
            elif field.startswith(args.data_prefix):
                data_labels.append(field)
            else:
                continue
        df_matched = pd.DataFrame(columns=matched_columns + data_labels,
                                  index=np.arange(num_experiment_people))
        for (X_knockoffs, X_experiment, SNPs_on_chromosome) in knockoff_SNP_list:
            for i in xrange(num_experiment_people):
                for j, SNP in enumerate(SNPs_on_chromosome):
                    df_knockoffs[SNP].values[i] = X_knockoffs[i, j]
                    df_matched[SNP].values[i] = int(X_experiment[i, j])
                    df_matched[
                        SNP + '_knockoff'].values[i] = int(X_knockoffs[i, j])
        for data_label in data_labels:
            df_matched[data_label] = df_geno_experiment[data_label]
        # Sanity check that all fields are filled in.
        for field in df_knockoffs:
            for i in xrange(num_experiment_people):
                assert pd.notnull(df_knockoffs[field].values[i])
        df_matched.to_csv(os.path.join((args.working_dir), 'knockoffs',
                                       'knockoffs_%03d.csv' % knockoff_trial_count),
                          index=False)
    logger.info("Done making knockoffs!!!")
if __name__ == '__main__':
args = utils.parse_arguments()
utils.initialize_logger(args)
make_all_knockoffs(args)
| 40.455399 | 101 | 0.646049 |
import pandas as pd
import os
import numpy as np
import SNPknock.fastphase as fp
from SNPknock import knockoffHMM
from joblib import Parallel, delayed
import utils_snpko as utils
logger = utils.logger
def make_knockoff(chromosome=None, grouped_by_chromosome=None, df_SNP=None,
df_geno_experiment=None, df_geno_ensembl=None,
SNP_to_wild_type=None, cache_dir=None, path_to_fp=None,
em_iterations=25, random_seed=123):
assert chromosome is not None
assert grouped_by_chromosome is not None
assert df_SNP is not None
logger.debug("################")
logger.debug("Chromosome %2d #" % chromosome)
logger.debug("################")
num_experiment_people = len(df_geno_experiment)
num_ensembl_people = len(df_geno_ensembl)
indices = grouped_by_chromosome.groups[chromosome]
df_SNP_chromo = df_SNP.iloc[indices].sort_values('chromosome_position')
SNPs_on_chromosome = df_SNP_chromo['SNP'].values
X_experiment = np.empty((num_experiment_people, len(SNPs_on_chromosome)))
X_ensembl = np.empty((num_ensembl_people, len(SNPs_on_chromosome)))
for X, df in [
(X_experiment, df_geno_experiment),
(X_ensembl, df_geno_ensembl)]:
for j, SNP in enumerate(SNPs_on_chromosome):
X[:, j] = utils.genotype_to_nonwild_type_count(
df[SNP].values, SNP_to_wild_type[SNP])
out_path = '%s/chrom_%d' % (cache_dir, chromosome)
target_file_suffix_list = [
'alphahat.txt', 'finallikelihoods', 'origchars', 'rhat.txt', 'thetahat.txt']
already_in_cache = True
for suffix in target_file_suffix_list:
target_path = os.path.join(
cache_dir, 'chrom_%d_%s' % (chromosome, suffix))
if not os.path.exists(target_path):
already_in_cache = False
break
if already_in_cache:
logger.debug("Found chrom %d HMM in cache" % chromosome)
else:
Xfp_file = '%s/X_%d.inp' % (cache_dir, chromosome)
fp.writeX(X_ensembl, Xfp_file)
fp.runFastPhase(path_to_fp, Xfp_file, out_path,
K=12, numit=em_iterations)
r_file = out_path + "_rhat.txt"
alpha_file = out_path + "_alphahat.txt"
theta_file = out_path + "_thetahat.txt"
hmm = fp.loadFit(r_file, theta_file, alpha_file, X_ensembl[0, :])
knockoffs = knockoffHMM(hmm["pInit"], hmm["Q"], hmm[
"pEmit"], seed=random_seed)
X_knockoffs = knockoffs.sample(X_experiment)
return(X_knockoffs, X_experiment, SNPs_on_chromosome)
def make_all_knockoffs(args):
    """Fit per-chromosome HMMs and write one knockoff CSV per sampling trial.

    Reads the pruned genotype CSVs from args.working_dir, generates
    args.num_knockoff_trials knockoff samples, and writes each trial to
    working_dir/knockoffs/knockoffs_%03d.csv.

    NOTE(review): this module is Python 2 only (uses xrange).
    """
    logger.info("####################################")
    logger.info("Fitting HMM and generating knockoffs")
    path_to_fp = os.path.join(args.fastPHASE_path, 'fastPHASE')
    if not(os.path.exists(path_to_fp)):
        logger.info("Cannot find fastPHASE at %s" % path_to_fp)
        raise Exception
    cache_dir = os.path.join(args.working_dir, 'fastphase_cache')
    utils.safe_mkdir(cache_dir)
    df_geno_ensembl = pd.read_csv(os.path.join(
        (args.working_dir), 'pruned_ensembl.csv'))
    df_SNP = pd.read_csv(os.path.join(
        (args.working_dir), 'pruned_SNP_facts.csv'))
    df_wild = pd.read_csv(os.path.join(args.working_dir, 'wild_types.csv'))
    # Map SNP id -> wild-type allele, used to encode genotypes as counts.
    SNP_to_wild_type = dict(
        zip(df_wild['SNP'].values, df_wild['wild_type'].values))
    chromosome_list = np.sort(np.unique(df_SNP['chromosome']))
    # Only autosomes/X expected: chromosome labels must fall in 1..23.
    for chromosome in chromosome_list:
        assert chromosome in np.arange(1, 24)
    df_geno_experiment = pd.read_csv(os.path.join(
        (args.working_dir), 'pruned_experiment.csv'))
    # The rs* SNP columns must agree between the reference and experiment data.
    assert (set([c for c in df_geno_ensembl.columns if c.startswith('rs')]) ==
            set([c for c in df_geno_experiment.columns if c.startswith('rs')]))
    for SNP in df_SNP.SNP.values:
        assert SNP in df_geno_ensembl.columns
    grouped_by_chromosome = df_SNP.groupby('chromosome')
    num_experiment_people = len(df_geno_experiment)
    knockoff_SNP_list = []
    utils.safe_mkdir(os.path.join(args.working_dir, 'knockoffs'))
    em_iterations = 500
    logger.info('Number of EM iterations: %d' % em_iterations)
    for knockoff_trial_count in xrange(args.num_knockoff_trials):
        # Distinct seed per trial, offset by the user-supplied base seed.
        random_seed = knockoff_trial_count + args.random_seed
        # Log progress roughly 20 times over the whole run.
        if ((args.num_knockoff_trials <= 20) or
                knockoff_trial_count % ((args.num_knockoff_trials) // 20) == 0):
            logger.info("Knockoff sampling %d of %d" % (
                knockoff_trial_count, args.num_knockoff_trials))
        # NOTE(review): dead debug branch -- the serial path below is never
        # taken (and, unlike the parallel path, it would append across trials
        # instead of reassigning knockoff_SNP_list).
        if False:
            for chromosome in chromosome_list:
                knockoff_SNP_list.append(
                    make_knockoff(
                        chromosome=chromosome,
                        grouped_by_chromosome=grouped_by_chromosome, df_SNP=df_SNP,
                        df_geno_experiment=df_geno_experiment, df_geno_ensembl=df_geno_ensembl,
                        SNP_to_wild_type=SNP_to_wild_type, cache_dir=cache_dir,
                        path_to_fp=path_to_fp, em_iterations=em_iterations, random_seed=random_seed))
        else:
            # One make_knockoff job per chromosome, fanned out with joblib.
            knockoff_SNP_list = Parallel(n_jobs=args.num_workers)(
                delayed(make_knockoff)(
                    chromosome=i,
                    grouped_by_chromosome=grouped_by_chromosome, df_SNP=df_SNP,
                    df_geno_experiment=df_geno_experiment, df_geno_ensembl=df_geno_ensembl,
                    SNP_to_wild_type=SNP_to_wild_type, cache_dir=cache_dir, path_to_fp=path_to_fp,
                    em_iterations=em_iterations, random_seed=random_seed)
                for i in chromosome_list)
        SNP_columns = [
            x for x in df_geno_ensembl.columns if x.startswith('rs')]
        df_knockoffs = pd.DataFrame(
            columns=SNP_columns, index=np.arange(num_experiment_people))
        # Output columns: each SNP followed by its knockoff twin, then any
        # phenotype/data columns matching args.data_prefix.
        matched_columns = []
        data_labels = []
        for field in df_geno_experiment.columns:
            if field.startswith('rs'):
                matched_columns.append(field)
                matched_columns.append(field + '_knockoff')
            elif field.startswith(args.data_prefix):
                data_labels.append(field)
            else:
                continue
        df_matched = pd.DataFrame(columns=matched_columns + data_labels,
                                  index=np.arange(num_experiment_people))
        # Copy per-chromosome results into the combined output frames.
        for (X_knockoffs, X_experiment, SNPs_on_chromosome) in knockoff_SNP_list:
            for i in xrange(num_experiment_people):
                for j, SNP in enumerate(SNPs_on_chromosome):
                    df_knockoffs[SNP].values[i] = X_knockoffs[i, j]
                    df_matched[SNP].values[i] = int(X_experiment[i, j])
                    df_matched[
                        SNP + '_knockoff'].values[i] = int(X_knockoffs[i, j])
        for data_label in data_labels:
            df_matched[data_label] = df_geno_experiment[data_label]
        # Sanity check: every knockoff cell must have been filled in.
        for field in df_knockoffs:
            for i in xrange(num_experiment_people):
                assert pd.notnull(df_knockoffs[field].values[i])
        df_matched.to_csv(os.path.join((args.working_dir), 'knockoffs',
                                       'knockoffs_%03d.csv' % knockoff_trial_count),
                          index=False)
    logger.info("Done making knockoffs!!!")
# Script entry point: parse CLI arguments, set up logging, run the pipeline.
if __name__ == '__main__':
    args = utils.parse_arguments()
    utils.initialize_logger(args)
    make_all_knockoffs(args)
| true | true |
1c45754f10176c7ca4335379fbf3a06ceffc08ee | 13,895 | py | Python | robots/login.py | sicekit/sicekit | 30d6b665ed083893792579f6640c897e932b4ff0 | [
"MIT"
] | 11 | 2015-04-13T15:40:00.000Z | 2021-11-09T14:55:25.000Z | robots/login.py | sicekit/sicekit | 30d6b665ed083893792579f6640c897e932b4ff0 | [
"MIT"
] | null | null | null | robots/login.py | sicekit/sicekit | 30d6b665ed083893792579f6640c897e932b4ff0 | [
"MIT"
] | 4 | 2016-06-07T06:39:39.000Z | 2019-05-11T09:34:20.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Script to log the robot in to a wiki account.
Suggestion is to make a special account to use for robot use only. Make
sure this robot account is well known on your home wikipedia before using.
Parameters:
-all Try to log in on all sites where a username is defined in
user-config.py.
-pass Useful in combination with -all when you have accounts for
several sites and use the same password for all of them.
Asks you for the password, then logs in on all given sites.
-pass:XXXX Uses XXXX as password. Be careful if you use this
parameter because your password will be shown on your
screen, and will probably be saved in your command line
history. This is NOT RECOMMENDED for use on computers
where others have either physical or remote access.
Use -pass instead.
-sysop Log in with your sysop account.
-force Ignores if the user is already logged in, and tries to log in.
   -v -v      (doubly verbose) Shows http requests made when logging in.
              This might leak private data (password, session id), so make
              sure to check the output. Using -log is recommended: this will
              output a lot of data
If not given as parameter, the script will ask for your username and password
(password entry will be hidden), log in to your home wiki using this
combination, and store the resulting cookies (containing your password hash,
so keep it secured!) in a file in the login-data subdirectory.
All scripts in this library will be looking for this cookie file and will use the
login information if it is present.
To log out, throw away the XX-login.data file that is created in the login-data
subdirectory.
"""
#
# (C) Rob W.W. Hooft, 2003
#
# Distributed under the terms of the MIT license.
#
__version__='$Id: login.py 7034 2009-07-09 10:11:29Z alexsh $'
import re
import urllib2
import wikipedia, config
# On some wikis you are only allowed to run a bot if there is a link to
# the bot's user page in a specific list.
# Maps family name -> language code -> title of the wiki page listing
# approved bots; LoginManager.botAllowed() checks membership against it.
botList = {
    'wikipedia': {
        'en': u'Wikipedia:Registered bots',
        # Disabled because they are now using a template system which
        # we can't check with our current code.
        #'simple': u'Wikipedia:Bots',
    },
    'gentoo': {
        'en': u'Help:Bots',
    }
}
class LoginManager:
    """Logs one bot account in to one wiki site and stores its cookies.

    NOTE(review): Python 2 code (u'' literals, urllib2, iterkeys elsewhere).
    """
    def __init__(self, password = None, sysop = False, site = None, username=None, verbose=False):
        # Default to the user's configured home wiki when no site is given.
        self.site = site or wikipedia.getSite()
        if username:
            self.username=username
            # perform writeback.
            if site.family.name not in config.usernames:
                config.usernames[site.family.name]={}
            config.usernames[site.family.name][self.site.lang]=username
        else:
            # No explicit username: look it up in user-config.py (the sysop
            # table when a sysop login was requested), raising NoUsername
            # with setup instructions when it is missing.
            if sysop:
                try:
                    self.username = config.sysopnames[self.site.family.name][self.site.lang]
                except:
                    raise wikipedia.NoUsername(u'ERROR: Sysop username for %s:%s is undefined.\nIf you have a sysop account for that site, please add such a line to user-config.py:\n\nsysopnames[\'%s\'][\'%s\'] = \'myUsername\'' % (self.site.family.name, self.site.lang, self.site.family.name, self.site.lang))
            else:
                try:
                    self.username = config.usernames[self.site.family.name][self.site.lang]
                except:
                    raise wikipedia.NoUsername(u'ERROR: Username for %s:%s is undefined.\nIf you have an account for that site, please add such a line to user-config.py:\n\nusernames[\'%s\'][\'%s\'] = \'myUsername\'' % (self.site.family.name, self.site.lang, self.site.family.name, self.site.lang))
        self.password = password
        self.verbose = verbose
        # Optionally pre-load the password from config.password_file.
        if getattr(config, 'password_file', ''):
            self.readPassword()
    def botAllowed(self):
        """
        Checks whether the bot is listed on a specific page to comply with
        the policy on the respective wiki.

        Returns True when the wiki has no bot-list policy, or when a link to
        this username appears on the wiki's bot-list page.
        """
        if self.site.family.name in botList and self.site.language() in botList[self.site.family.name]:
            botListPageTitle = wikipedia.translate(self.site.language(), botList)
            botListPage = wikipedia.Page(self.site, botListPageTitle)
            for linkedPage in botListPage.linkedPages():
                if linkedPage.titleWithoutNamespace() == self.username:
                    return True
            return False
        else:
            # No bot policies on other
            return True
    def getCookie(self, api = config.use_api_login, remember=True, captcha = None):
        """
        Login to the site.

        api         Use the API login path instead of Special:Userlogin
                    (NOTE(review): the default is bound at import time).
        remember    Remember login (default: True)
        captcha     A dictionary containing the captcha id and answer, if any

        Returns cookie data if successful, None otherwise.
        """
        if api:
            predata = {
                'action': 'login',
                'lgname': self.username.encode(self.site.encoding()),
                'lgpassword': self.password,
                'lgdomain': self.site.family.ldapDomain,
            }
            address = self.site.api_address()
        else:
            predata = {
                "wpName": self.username.encode(self.site.encoding()),
                "wpPassword": self.password,
                "wpDomain": self.site.family.ldapDomain,     # VistaPrint fix
                "wpLoginattempt": "Aanmelden & Inschrijven", # dutch button label seems to work for all wikis
                "wpRemember": str(int(bool(remember))),
                "wpSkipCookieCheck": '1'
            }
            if captcha:
                predata["wpCaptchaId"] = captcha['id']
                predata["wpCaptchaWord"] = captcha['answer']
            login_address = self.site.login_address()
            address = login_address + '&action=submit'
        if self.site.hostname() in config.authenticate.keys():
            # HTTP-authenticated hosts: go through urllib2 so the installed
            # auth handlers and the global cookie jar are used.
            headers = {
                "Content-type": "application/x-www-form-urlencoded",
                "User-agent": wikipedia.useragent
            }
            data = self.site.urlEncode(predata)
            if self.verbose:
                # Mask the password before logging the request.
                fakepredata = predata
                fakepredata['wpPassword'] = u'XXXX'
                wikipedia.output(u"urllib2.urlopen(urllib2.Request('%s', %s, %s)):" % (self.site.protocol() + '://' + self.site.hostname() + address, self.site.urlEncode(fakepredata), headers))
            response = urllib2.urlopen(urllib2.Request(self.site.protocol() + '://' + self.site.hostname() + address, data, headers))
            data = response.read()
            if self.verbose:
                fakedata = re.sub(r"(session|Token)=..........", r"session=XXXXXXXXXX", data)
                trans = config.transliterate
                config.transliterate = False #transliteration breaks for some reason
                wikipedia.output(fakedata.decode(self.site.encoding()))
                config.transliterate = trans
            # Cookies already live in the global jar; just persist them.
            wikipedia.cj.save(wikipedia.COOKIEFILE)
            return "Ok"
        else:
            response, data = self.site.postData(address, self.site.urlEncode(predata))
            if self.verbose:
                # Mask password and session tokens before logging.
                fakepredata = predata
                fakepredata['wpPassword'] = fakepredata['lgpassword'] = u'XXXXX'
                wikipedia.output(u"self.site.postData(%s, %s)" % (address, self.site.urlEncode(fakepredata)))
                fakeresponsemsg = re.sub(r"(session|Token)=..........", r"session=XXXXXXXXXX", response.msg.__str__())
                wikipedia.output(u"%s/%s\n%s" % (response.status, response.reason, fakeresponsemsg))
                wikipedia.output(u"%s" % data)
            # Collect the cookie values from the Set-Cookie response headers.
            Reat=re.compile(': (.*?);')
            L = []
            for eat in response.msg.getallmatchingheaders('set-cookie'):
                m = Reat.search(eat)
                if m:
                    L.append(m.group(1))
            # A successful login sets both a session token and a user cookie.
            got_token = got_user = False
            for Ldata in L:
                if 'Token=' in Ldata:
                    got_token = True
                if 'User=' in Ldata or 'UserName=' in Ldata:
                    got_user = True
            if got_token and got_user:
                return "\n".join(L)
            elif not captcha:
                # Maybe the login failed on a captcha; solve it and retry once.
                solve = self.site.solveCaptcha(data)
                if solve:
                    return self.getCookie(api = api, remember = remember, captcha = solve)
            return None
    def storecookiedata(self, data):
        """
        Stores cookie data.

        The argument data is the raw data, as returned by getCookie().

        Returns nothing."""
        # NOTE(review): the file handle is not closed in a finally block; an
        # exception in write() would leak it.
        filename = wikipedia.config.datafilepath('login-data',
                       '%s-%s-%s-login.data'
                       % (self.site.family.name, self.site.lang, self.username))
        f = open(filename, 'w')
        f.write(data)
        f.close()
    def readPassword(self):
        """
        Reads passwords from a file.

        DO NOT FORGET TO REMOVE READ
        ACCESS FOR OTHER USERS!!! Use chmod 600 password-file.

        All lines below should be valid Python tuples in the form
        (code, family, username, password) or (username, password)
        to set a default password for an username. Default usernames
        should occur above specific usernames.

        Example:

        ("my_username", "my_default_password")
        ("my_sysop_user", "my_sysop_password")
        ("en", "wikipedia", "my_en_user", "my_en_pass")
        """
        # NOTE(review): eval() on each line executes arbitrary code from the
        # password file -- only use a file you fully control.
        file = open(wikipedia.config.datafilepath(config.password_file))
        for line in file:
            if not line.strip(): continue
            entry = eval(line)
            if len(entry) == 2:   #for default userinfo
                if entry[0] == self.username: self.password = entry[1]
            elif len(entry) == 4: #for userinfo included code and family
                if entry[0] == self.site.lang and \
                   entry[1] == self.site.family.name and \
                   entry[2] == self.username:
                    self.password = entry[3]
        file.close()
    def login(self, api = config.use_api_login, retry = False):
        """Prompt for the password if needed, log in, and store the cookies.

        Falls back from the API path to the web form on failure; when retry
        is set, re-prompts for the password and tries again.
        """
        if not self.password:
            # As we don't want the password to appear on the screen, we set
            # password = True
            self.password = wikipedia.input(u'Password for user %s on %s:' % (self.username, self.site), password = True)
        self.password = self.password.encode(self.site.encoding())
        wikipedia.output(u"Logging in to %s as %s" % (self.site, self.username))
        try:
            cookiedata = self.getCookie(api = api)
        except NotImplementedError:
            wikipedia.output('API disabled because this site does not support.')
            config.use_api_login = api = False
            cookiedata = self.getCookie(api = api)
        if cookiedata:
            self.storecookiedata(cookiedata)
            wikipedia.output(u"Should be logged in now")
            # Show a warning according to the local bot policy
            if not self.botAllowed():
                wikipedia.output(u'*** Your username is not listed on [[%s]].\n*** Please make sure you are allowed to use the robot before actually using it!' % botList[self.site.family.name][self.site.lang])
            return True
        else:
            wikipedia.output(u"Login failed. Wrong password or CAPTCHA answer?")
            if api:
                wikipedia.output(u"API login failed, retrying using standard webpage.")
                return self.login(api = False, retry = retry)
            if retry:
                self.password = None
                return self.login(api = api, retry = True)
            else:
                return False
    def showCaptchaWindow(self, url):
        # Placeholder: captchas are handled by site.solveCaptcha() instead.
        pass
def main():
    """Parse command-line flags and log in to one wiki or to all configured wikis."""
    username = password = None
    sysop = False
    logall = False
    forceLogin = False
    verbose = False
    for arg in wikipedia.handleArgs():
        if arg.startswith("-pass"):
            if len(arg) == 5:
                # Bare "-pass": prompt with hidden input.
                password = wikipedia.input(u'Password for all accounts:', password = True)
            else:
                # "-pass:XXXX": take everything after the colon.
                password = arg[6:]
        elif arg == "-sysop":
            sysop = True
        elif arg == "-all":
            logall = True
        elif arg == "-force":
            forceLogin = True
        else:
            wikipedia.showHelp('login')
            return
    if wikipedia.verbose > 1:
        wikipedia.output(u"WARNING: Using -v -v on login.py might leak private data. When sharing, please double check your password is not readable and log out your bots session.")
        verbose = True # only use this verbose when running from login.py
    if logall:
        # Log in on every site that has a username configured.
        if sysop:
            namedict = config.sysopnames
        else:
            namedict = config.usernames
        for familyName in namedict.iterkeys():
            for lang in namedict[familyName].iterkeys():
                try:
                    site = wikipedia.getSite( code=lang, fam=familyName )
                    if not forceLogin and site.loggedInAs(sysop = sysop) is not None:
                        wikipedia.output(u'Already logged in on %s' % site)
                    else:
                        loginMan = LoginManager(password, sysop = sysop, site = site, verbose=verbose)
                        loginMan.login()
                except wikipedia.NoSuchSite:
                    wikipedia.output(lang+ u'.' + familyName + u' is not a valid site, please remove it from your config')
    else:
        loginMan = LoginManager(password, sysop = sysop, verbose=verbose)
        loginMan.login()
# Entry point: always release the framework's resources, even on error.
if __name__ == "__main__":
    try:
        main()
    finally:
        wikipedia.stopme()
| 42.362805 | 310 | 0.580784 |
__version__='$Id: login.py 7034 2009-07-09 10:11:29Z alexsh $'
import re
import urllib2
import wikipedia, config
# Maps family name -> language code -> title of the wiki page listing
# approved bots; LoginManager.botAllowed() checks membership against it.
botList = {
    'wikipedia': {
        'en': u'Wikipedia:Registered bots',
        # Disabled because they are now using a template system which
        # we can't check with our current code.
    },
    'gentoo': {
        'en': u'Help:Bots',
    }
}
class LoginManager:
    """Logs one bot account in to one wiki site and stores its cookies.

    NOTE(review): Python 2 code (u'' literals, urllib2, iterkeys elsewhere).
    """
    def __init__(self, password = None, sysop = False, site = None, username=None, verbose=False):
        # Default to the user's configured home wiki when no site is given.
        self.site = site or wikipedia.getSite()
        if username:
            self.username=username
            # Write the explicit username back into the in-memory config.
            if site.family.name not in config.usernames:
                config.usernames[site.family.name]={}
            config.usernames[site.family.name][self.site.lang]=username
        else:
            # No explicit username: look it up in user-config.py (the sysop
            # table when a sysop login was requested), raising NoUsername
            # with setup instructions when it is missing.
            if sysop:
                try:
                    self.username = config.sysopnames[self.site.family.name][self.site.lang]
                except:
                    raise wikipedia.NoUsername(u'ERROR: Sysop username for %s:%s is undefined.\nIf you have a sysop account for that site, please add such a line to user-config.py:\n\nsysopnames[\'%s\'][\'%s\'] = \'myUsername\'' % (self.site.family.name, self.site.lang, self.site.family.name, self.site.lang))
            else:
                try:
                    self.username = config.usernames[self.site.family.name][self.site.lang]
                except:
                    raise wikipedia.NoUsername(u'ERROR: Username for %s:%s is undefined.\nIf you have an account for that site, please add such a line to user-config.py:\n\nusernames[\'%s\'][\'%s\'] = \'myUsername\'' % (self.site.family.name, self.site.lang, self.site.family.name, self.site.lang))
        self.password = password
        self.verbose = verbose
        # Optionally pre-load the password from config.password_file.
        if getattr(config, 'password_file', ''):
            self.readPassword()
    def botAllowed(self):
        """Return True when this wiki has no bot-list policy or lists this bot."""
        if self.site.family.name in botList and self.site.language() in botList[self.site.family.name]:
            botListPageTitle = wikipedia.translate(self.site.language(), botList)
            botListPage = wikipedia.Page(self.site, botListPageTitle)
            for linkedPage in botListPage.linkedPages():
                if linkedPage.titleWithoutNamespace() == self.username:
                    return True
            return False
        else:
            # No bot-list page configured for this wiki.
            return True
    def getCookie(self, api = config.use_api_login, remember=True, captcha = None):
        """Log in via the API or the login form; return cookie data or None.

        NOTE(review): the `api` default is bound at import time.
        """
        if api:
            predata = {
                'action': 'login',
                'lgname': self.username.encode(self.site.encoding()),
                'lgpassword': self.password,
                'lgdomain': self.site.family.ldapDomain,
            }
            address = self.site.api_address()
        else:
            predata = {
                "wpName": self.username.encode(self.site.encoding()),
                "wpPassword": self.password,
                "wpDomain": self.site.family.ldapDomain,
                "wpLoginattempt": "Aanmelden & Inschrijven",
                "wpRemember": str(int(bool(remember))),
                "wpSkipCookieCheck": '1'
            }
            if captcha:
                predata["wpCaptchaId"] = captcha['id']
                predata["wpCaptchaWord"] = captcha['answer']
            login_address = self.site.login_address()
            address = login_address + '&action=submit'
        if self.site.hostname() in config.authenticate.keys():
            # HTTP-authenticated hosts: go through urllib2 so the installed
            # auth handlers and the global cookie jar are used.
            headers = {
                "Content-type": "application/x-www-form-urlencoded",
                "User-agent": wikipedia.useragent
            }
            data = self.site.urlEncode(predata)
            if self.verbose:
                # Mask the password before logging the request.
                fakepredata = predata
                fakepredata['wpPassword'] = u'XXXX'
                wikipedia.output(u"urllib2.urlopen(urllib2.Request('%s', %s, %s)):" % (self.site.protocol() + '://' + self.site.hostname() + address, self.site.urlEncode(fakepredata), headers))
            response = urllib2.urlopen(urllib2.Request(self.site.protocol() + '://' + self.site.hostname() + address, data, headers))
            data = response.read()
            if self.verbose:
                fakedata = re.sub(r"(session|Token)=..........", r"session=XXXXXXXXXX", data)
                trans = config.transliterate
                config.transliterate = False
                wikipedia.output(fakedata.decode(self.site.encoding()))
                config.transliterate = trans
            # Cookies already live in the global jar; just persist them.
            wikipedia.cj.save(wikipedia.COOKIEFILE)
            return "Ok"
        else:
            response, data = self.site.postData(address, self.site.urlEncode(predata))
            if self.verbose:
                # Mask password and session tokens before logging.
                fakepredata = predata
                fakepredata['wpPassword'] = fakepredata['lgpassword'] = u'XXXXX'
                wikipedia.output(u"self.site.postData(%s, %s)" % (address, self.site.urlEncode(fakepredata)))
                fakeresponsemsg = re.sub(r"(session|Token)=..........", r"session=XXXXXXXXXX", response.msg.__str__())
                wikipedia.output(u"%s/%s\n%s" % (response.status, response.reason, fakeresponsemsg))
                wikipedia.output(u"%s" % data)
            # Collect the cookie values from the Set-Cookie response headers.
            Reat=re.compile(': (.*?);')
            L = []
            for eat in response.msg.getallmatchingheaders('set-cookie'):
                m = Reat.search(eat)
                if m:
                    L.append(m.group(1))
            # A successful login sets both a session token and a user cookie.
            got_token = got_user = False
            for Ldata in L:
                if 'Token=' in Ldata:
                    got_token = True
                if 'User=' in Ldata or 'UserName=' in Ldata:
                    got_user = True
            if got_token and got_user:
                return "\n".join(L)
            elif not captcha:
                # Maybe the login failed on a captcha; solve it and retry once.
                solve = self.site.solveCaptcha(data)
                if solve:
                    return self.getCookie(api = api, remember = remember, captcha = solve)
            return None
    def storecookiedata(self, data):
        """Write raw cookie data (from getCookie()) to the login-data file."""
        filename = wikipedia.config.datafilepath('login-data',
                       '%s-%s-%s-login.data'
                       % (self.site.family.name, self.site.lang, self.username))
        f = open(filename, 'w')
        f.write(data)
        f.close()
    def readPassword(self):
        """Load this user's password from config.password_file.

        NOTE(review): eval() on each line executes arbitrary code from the
        password file -- only use a file you fully control.
        """
        file = open(wikipedia.config.datafilepath(config.password_file))
        for line in file:
            if not line.strip(): continue
            entry = eval(line)
            if len(entry) == 2:
                # (username, password): default password for that user.
                if entry[0] == self.username: self.password = entry[1]
            elif len(entry) == 4:
                # (code, family, username, password): site-specific password.
                if entry[0] == self.site.lang and \
                   entry[1] == self.site.family.name and \
                   entry[2] == self.username:
                    self.password = entry[3]
        file.close()
    def login(self, api = config.use_api_login, retry = False):
        """Prompt for the password if needed, log in, and store the cookies."""
        if not self.password:
            # Hidden input:
            # password = True
            self.password = wikipedia.input(u'Password for user %s on %s:' % (self.username, self.site), password = True)
        self.password = self.password.encode(self.site.encoding())
        wikipedia.output(u"Logging in to %s as %s" % (self.site, self.username))
        try:
            cookiedata = self.getCookie(api = api)
        except NotImplementedError:
            wikipedia.output('API disabled because this site does not support.')
            config.use_api_login = api = False
            cookiedata = self.getCookie(api = api)
        if cookiedata:
            self.storecookiedata(cookiedata)
            wikipedia.output(u"Should be logged in now")
            # Show a warning according to the local bot policy
            if not self.botAllowed():
                wikipedia.output(u'*** Your username is not listed on [[%s]].\n*** Please make sure you are allowed to use the robot before actually using it!' % botList[self.site.family.name][self.site.lang])
            return True
        else:
            wikipedia.output(u"Login failed. Wrong password or CAPTCHA answer?")
            if api:
                wikipedia.output(u"API login failed, retrying using standard webpage.")
                return self.login(api = False, retry = retry)
            if retry:
                self.password = None
                return self.login(api = api, retry = True)
            else:
                return False
    def showCaptchaWindow(self, url):
        # Placeholder: captchas are handled by site.solveCaptcha() instead.
        pass
def main():
    """Parse command-line flags and log in to one wiki or to all configured wikis."""
    username = password = None
    sysop = False
    logall = False
    forceLogin = False
    verbose = False
    for arg in wikipedia.handleArgs():
        if arg.startswith("-pass"):
            if len(arg) == 5:
                # Bare "-pass": prompt with hidden input.
                password = wikipedia.input(u'Password for all accounts:', password = True)
            else:
                # "-pass:XXXX": take everything after the colon.
                password = arg[6:]
        elif arg == "-sysop":
            sysop = True
        elif arg == "-all":
            logall = True
        elif arg == "-force":
            forceLogin = True
        else:
            wikipedia.showHelp('login')
            return
    if wikipedia.verbose > 1:
        wikipedia.output(u"WARNING: Using -v -v on login.py might leak private data. When sharing, please double check your password is not readable and log out your bots session.")
        verbose = True # only use this verbose when running from login.py
    if logall:
        # Log in on every site that has a username configured.
        if sysop:
            namedict = config.sysopnames
        else:
            namedict = config.usernames
        for familyName in namedict.iterkeys():
            for lang in namedict[familyName].iterkeys():
                try:
                    site = wikipedia.getSite( code=lang, fam=familyName )
                    if not forceLogin and site.loggedInAs(sysop = sysop) is not None:
                        wikipedia.output(u'Already logged in on %s' % site)
                    else:
                        loginMan = LoginManager(password, sysop = sysop, site = site, verbose=verbose)
                        loginMan.login()
                except wikipedia.NoSuchSite:
                    wikipedia.output(lang+ u'.' + familyName + u' is not a valid site, please remove it from your config')
    else:
        loginMan = LoginManager(password, sysop = sysop, verbose=verbose)
        loginMan.login()
# Entry point: always release the framework's resources, even on error.
if __name__ == "__main__":
    try:
        main()
    finally:
        wikipedia.stopme()
| true | true |
1c4576baa0e49856245c1d52cad14c426975599c | 15,984 | py | Python | tfx/orchestration/metadata_test.py | romiosarkar6991/tfx-romio | 0703c1dd037c676e1d438c2e5ce831decfc9eed9 | [
"Apache-2.0"
] | 1 | 2019-10-10T06:06:12.000Z | 2019-10-10T06:06:12.000Z | tfx/orchestration/metadata_test.py | romiosarkar6991/tfx-romio | 0703c1dd037c676e1d438c2e5ce831decfc9eed9 | [
"Apache-2.0"
] | null | null | null | tfx/orchestration/metadata_test.py | romiosarkar6991/tfx-romio | 0703c1dd037c676e1d438c2e5ce831decfc9eed9 | [
"Apache-2.0"
] | 1 | 2019-10-06T03:39:58.000Z | 2019-10-06T03:39:58.000Z | # Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx.orchestration.metadata."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Standard Imports
import mock
import tensorflow as tf
from ml_metadata.proto import metadata_store_pb2
from tfx import types
from tfx.orchestration import data_types
from tfx.orchestration import metadata
from tfx.types import standard_artifacts
from tfx.types.artifact import ArtifactState
class MetadataTest(tf.test.TestCase):
  def setUp(self):
    """Create an ephemeral MLMD connection config and shared test fixtures."""
    super(MetadataTest, self).setUp()
    # SQLite backend with no path configured -> in-memory database per test.
    self._connection_config = metadata_store_pb2.ConnectionConfig()
    self._connection_config.sqlite.SetInParent()
    self._component_info = data_types.ComponentInfo(
        component_type='a.b.c', component_id='my_component')
    self._component_info2 = data_types.ComponentInfo(
        component_type='a.b.d', component_id='my_component_2')
    # Two pipelines x two run ids, used by the run/context tests.
    self._pipeline_info = data_types.PipelineInfo(
        pipeline_name='my_pipeline', pipeline_root='/tmp', run_id='my_run_id')
    self._pipeline_info2 = data_types.PipelineInfo(
        pipeline_name='my_pipeline', pipeline_root='/tmp', run_id='my_run_id2')
    self._pipeline_info3 = data_types.PipelineInfo(
        pipeline_name='my_pipeline2', pipeline_root='/tmp', run_id='my_run_id')
    self._pipeline_info4 = data_types.PipelineInfo(
        pipeline_name='my_pipeline2', pipeline_root='/tmp', run_id='my_run_id2')
  def testEmptyArtifact(self):
    """An execution with no inputs/outputs still records pipeline properties."""
    with metadata.Metadata(connection_config=self._connection_config) as m:
      m.publish_artifacts([])
      eid = m.register_execution(
          exec_properties={},
          pipeline_info=self._pipeline_info,
          component_info=self._component_info)
      m.publish_execution(eid, {}, {})
      [execution] = m.store.get_executions_by_id([eid])
      # Publishing flips the execution state to "complete"; pipeline/run/
      # component identifiers are stored as custom execution properties.
      self.assertProtoEquals(
          """
        id: 1
        type_id: 1
        properties {
          key: "state"
          value {
            string_value: "complete"
          }
        }
        properties {
          key: "pipeline_name"
          value {
            string_value: "my_pipeline"
          }
        }
        properties {
          key: "pipeline_root"
          value {
            string_value: "/tmp"
          }
        }
        properties {
          key: "run_id"
          value {
            string_value: "my_run_id"
          }
        }
        properties {
          key: "component_id"
          value {
            string_value: "my_component"
          }
        }""", execution)
  def testArtifact(self):
    """Publish, query, and state-transition a single artifact."""
    with metadata.Metadata(connection_config=self._connection_config) as m:
      self.assertListEqual([], m.get_all_artifacts())
      # Test publish artifact.
      artifact = standard_artifacts.Examples()
      artifact.uri = 'uri'
      m.publish_artifacts([artifact])
      [artifact] = m.store.get_artifacts()
      self.assertProtoEquals(
          """id: 1
        type_id: 1
        uri: "uri"
        properties {
          key: "split"
          value {
            string_value: ""
          }
        }
        properties {
          key: "state"
          value {
            string_value: "published"
          }
        }
        properties {
          key: "type_name"
          value {
            string_value: "ExamplesPath"
          }
        }""", artifact)
      # Test get artifact.
      self.assertListEqual([artifact], m.get_all_artifacts())
      self.assertListEqual([artifact], m.get_artifacts_by_uri('uri'))
      self.assertListEqual([artifact], m.get_artifacts_by_type('ExamplesPath'))
      # Test artifact state: published -> deleted, and a mismatched check raises.
      m.check_artifact_state(artifact, ArtifactState.PUBLISHED)
      m.update_artifact_state(artifact, ArtifactState.DELETED)
      m.check_artifact_state(artifact, ArtifactState.DELETED)
      self.assertRaises(RuntimeError, m.check_artifact_state, artifact,
                        ArtifactState.PUBLISHED)
  def testExecution(self):
    """Register an execution in a run context, publish it, and check events."""
    with metadata.Metadata(connection_config=self._connection_config) as m:
      context_id = m.register_run_context_if_not_exists(self._pipeline_info)
      # Test prepare_execution.
      exec_properties = {'arg_one': 1}
      eid = m.register_execution(
          exec_properties=exec_properties,
          pipeline_info=self._pipeline_info,
          component_info=self._component_info,
          run_context_id=context_id)
      [execution] = m.store.get_executions_by_context(context_id)
      # A freshly registered execution is in state "new"; exec_properties are
      # stored stringified alongside the pipeline/run/component identifiers.
      self.assertProtoEquals(
          """
        id: 1
        type_id: 2
        properties {
          key: "state"
          value {
            string_value: "new"
          }
        }
        properties {
          key: "pipeline_name"
          value {
            string_value: "my_pipeline"
          }
        }
        properties {
          key: "pipeline_root"
          value {
            string_value: "/tmp"
          }
        }
        properties {
          key: "run_id"
          value {
            string_value: "my_run_id"
          }
        }
        properties {
          key: "component_id"
          value {
            string_value: "my_component"
          }
        }
        properties {
          key: "arg_one"
          value {
            string_value: "1"
          }
        }""", execution)
      # Test publish_execution.
      input_artifact = standard_artifacts.Examples()
      m.publish_artifacts([input_artifact])
      output_artifact = standard_artifacts.Examples()
      input_dict = {'input': [input_artifact]}
      output_dict = {'output': [output_artifact]}
      m.publish_execution(eid, input_dict, output_dict)
      # Make sure artifacts in output_dict are published.
      self.assertEqual(ArtifactState.PUBLISHED, output_artifact.state)
      # Make sure execution state are changed.
      [execution] = m.store.get_executions_by_id([eid])
      self.assertEqual(metadata.EXECUTION_STATE_COMPLETE,
                       execution.properties['state'].string_value)
      # Make sure events are published: one INPUT then one OUTPUT event, each
      # with a (key, index) path into the corresponding artifact dict.
      events = m.store.get_events_by_execution_ids([eid])
      self.assertEqual(2, len(events))
      self.assertEqual(input_artifact.id, events[0].artifact_id)
      self.assertEqual(metadata_store_pb2.Event.INPUT, events[0].type)
      self.assertProtoEquals(
          """
          steps {
            key: "input"
          }
          steps {
            index: 0
          }""", events[0].path)
      self.assertEqual(output_artifact.id, events[1].artifact_id)
      self.assertEqual(metadata_store_pb2.Event.OUTPUT, events[1].type)
      self.assertProtoEquals(
          """
          steps {
            key: "output"
          }
          steps {
            index: 0
          }""", events[1].path)
  def testFetchPreviousResult(self):
    """previous_execution matches only on identical inputs/properties/component."""
    with metadata.Metadata(connection_config=self._connection_config) as m:
      # Create an 'previous' execution.
      exec_properties = {'log_root': 'path'}
      eid = m.register_execution(
          exec_properties=exec_properties,
          pipeline_info=self._pipeline_info,
          component_info=self._component_info)
      input_artifact = standard_artifacts.Examples()
      m.publish_artifacts([input_artifact])
      output_artifact = standard_artifacts.Examples()
      input_artifacts = {'input': [input_artifact]}
      output_artifacts = {'output': [output_artifact]}
      m.publish_execution(eid, input_artifacts, output_artifacts)
      # Test previous_run: any mismatch (properties, inputs, component) must
      # yield None; a full match returns the prior execution id.
      self.assertEqual(
          None,
          m.previous_execution(
              input_artifacts=input_artifacts,
              exec_properties={},
              pipeline_info=self._pipeline_info,
              component_info=self._component_info))
      self.assertEqual(
          None,
          m.previous_execution(
              input_artifacts={},
              exec_properties=exec_properties,
              pipeline_info=self._pipeline_info,
              component_info=self._component_info))
      self.assertEqual(
          None,
          m.previous_execution(
              input_artifacts=input_artifacts,
              exec_properties=exec_properties,
              pipeline_info=self._pipeline_info,
              component_info=data_types.ComponentInfo(
                  component_id='unique', component_type='a.b.c')))
      self.assertEqual(
          eid,
          m.previous_execution(
              input_artifacts=input_artifacts,
              exec_properties=exec_properties,
              pipeline_info=self._pipeline_info,
              component_info=self._component_info))
      # Test fetch_previous_result_artifacts: an unpublished placeholder is
      # backfilled with the previously published artifact's id/type/state.
      new_output_artifact = standard_artifacts.Examples()
      self.assertNotEqual(ArtifactState.PUBLISHED,
                          new_output_artifact.state)
      new_output_dict = {'output': [new_output_artifact]}
      updated_output_dict = m.fetch_previous_result_artifacts(
          new_output_dict, eid)
      previous_artifact = output_artifacts['output'][-1].artifact
      current_artifact = updated_output_dict['output'][-1].artifact
      self.assertEqual(ArtifactState.PUBLISHED,
                       current_artifact.properties['state'].string_value)
      self.assertEqual(previous_artifact.id, current_artifact.id)
      self.assertEqual(previous_artifact.type_id, current_artifact.type_id)
  def testGetCachedExecutionIds(self):
    """_get_cached_execution_id picks the execution whose events match inputs."""
    with metadata.Metadata(connection_config=self._connection_config) as m:
      mock_store = mock.Mock()
      # Candidate executions [3, 2, 1] have input-artifact sets {1}, {1,2,3},
      # {1,2}; only the last matches the query's input ids exactly.
      mock_store.get_events_by_execution_ids.side_effect = [
          [
              metadata_store_pb2.Event(
                  artifact_id=1, type=metadata_store_pb2.Event.INPUT)
          ],
          [
              metadata_store_pb2.Event(
                  artifact_id=1, type=metadata_store_pb2.Event.INPUT),
              metadata_store_pb2.Event(
                  artifact_id=2, type=metadata_store_pb2.Event.INPUT),
              metadata_store_pb2.Event(
                  artifact_id=3, type=metadata_store_pb2.Event.INPUT)
          ],
          [
              metadata_store_pb2.Event(
                  artifact_id=1, type=metadata_store_pb2.Event.INPUT),
              metadata_store_pb2.Event(
                  artifact_id=2, type=metadata_store_pb2.Event.INPUT),
          ],
      ]
      m._store = mock_store
      input_one = standard_artifacts.Examples()
      input_one.id = 1
      input_two = standard_artifacts.Examples()
      input_two.id = 2
      input_dict = {
          'input_one': [input_one],
          'input_two': [input_two],
      }
      self.assertEqual(1, m._get_cached_execution_id(input_dict, [3, 2, 1]))
  def testSearchArtifacts(self):
    """Artifacts published by an execution are discoverable via search_artifacts."""
    with metadata.Metadata(connection_config=self._connection_config) as m:
      exec_properties = {'log_root': 'path'}
      eid = m.register_execution(
          exec_properties=exec_properties,
          pipeline_info=self._pipeline_info,
          component_info=self._component_info)
      input_artifact = standard_artifacts.Examples()
      m.publish_artifacts([input_artifact])
      output_artifact = types.Artifact(type_name='MyOutputArtifact')
      output_artifact.uri = 'my/uri'
      input_dict = {'input': [input_artifact]}
      output_dict = {'output': [output_artifact]}
      m.publish_execution(eid, input_dict, output_dict)
      # Searching by output name, pipeline run, and producer component should
      # yield exactly the single artifact published above.
      [artifact] = m.search_artifacts(
          artifact_name='output',
          pipeline_name=self._pipeline_info.pipeline_name,
          run_id=self._pipeline_info.run_id,
          producer_component_id=self._component_info.component_id)
      self.assertEqual(artifact.uri, output_artifact.uri)
  def testPublishSkippedExecution(self):
    """Publishing an execution with state EXECUTION_STATE_CACHED succeeds."""
    with metadata.Metadata(connection_config=self._connection_config) as m:
      exec_properties = {'log_root': 'path'}
      eid = m.register_execution(
          exec_properties=exec_properties,
          pipeline_info=self._pipeline_info,
          component_info=self._component_info)
      input_artifact = standard_artifacts.Examples()
      m.publish_artifacts([input_artifact])
      output_artifact = types.Artifact(type_name='MyOutputArtifact')
      output_artifact.uri = 'my/uri'
      # Pre-publish the output and attach the published proto to mimic a
      # cached (skipped) execution reusing an earlier result.
      [published_artifact] = m.publish_artifacts([output_artifact])
      output_artifact.artifact = published_artifact
      input_dict = {'input': [input_artifact]}
      output_dict = {'output': [output_artifact]}
      m.publish_execution(
          eid, input_dict, output_dict, state=metadata.EXECUTION_STATE_CACHED)
  def testGetExecutionStates(self):
    """get_execution_states reports per-component states for one pipeline run."""
    with metadata.Metadata(connection_config=self._connection_config) as m:
      context_id = m.register_run_context_if_not_exists(self._pipeline_info)
      context_id2 = m.register_run_context_if_not_exists(self._pipeline_info2)
      # Both registered runs of 'my_pipeline' are listed.
      self.assertListEqual(
          [self._pipeline_info.run_id, self._pipeline_info2.run_id],
          m.get_all_runs('my_pipeline'))
      # Component 1 in run 1: registered and published -> COMPLETE.
      eid = m.register_execution(
          exec_properties={},
          pipeline_info=self._pipeline_info,
          component_info=self._component_info,
          run_context_id=context_id)
      m.publish_execution(eid, {}, {})
      # Component 2 in run 1: registered but never published -> NEW.
      m.register_execution(
          exec_properties={},
          pipeline_info=self._pipeline_info,
          component_info=self._component_info2,
          run_context_id=context_id)
      # An execution in run 2 must not appear in run 1's states.
      m.register_execution(
          exec_properties={},
          pipeline_info=self._pipeline_info2,
          component_info=self._component_info,
          run_context_id=context_id2)
      states = m.get_execution_states(self._pipeline_info)
      self.assertDictEqual(
          {
              self._component_info.component_id:
                  metadata.EXECUTION_STATE_COMPLETE,
              self._component_info2.component_id:
                  metadata.EXECUTION_STATE_NEW,
          }, states)
  def testContext(self):
    """Run contexts are created once per (pipeline, run) pair and queryable."""
    with metadata.Metadata(connection_config=self._connection_config) as m:
      cid1 = m.register_run_context_if_not_exists(self._pipeline_info)
      cid2 = m.register_run_context_if_not_exists(self._pipeline_info2)
      cid3 = m.register_run_context_if_not_exists(self._pipeline_info3)
      # The 'run' context type declares pipeline_name and run_id properties.
      context_type = m.store.get_context_type('run')
      self.assertProtoEquals(
          """
          id: 1
          name: 'run'
          properties {
            key: "pipeline_name"
            value: STRING
          }
          properties {
            key: "run_id"
            value: STRING
          }
          """, context_type)
      # The first registered context carries its pipeline name and run id.
      [context] = m.store.get_contexts_by_id([cid1])
      self.assertProtoEquals(
          """
          id: 1
          type_id: 1
          name: 'my_pipeline.my_run_id'
          properties {
            key: "pipeline_name"
            value {
              string_value: "my_pipeline"
            }
          }
          properties {
            key: "run_id"
            value {
              string_value: "my_run_id"
            }
          }
          """, context)
      # Re-registering the same run returns the existing context id.
      self.assertEqual(
          cid1, m.register_run_context_if_not_exists(self._pipeline_info))
      self.assertEqual(cid1, m._get_run_context_id(self._pipeline_info))
      self.assertEqual(cid2, m._get_run_context_id(self._pipeline_info2))
      self.assertEqual(cid3, m._get_run_context_id(self._pipeline_info3))
      # A run that was never registered has no context id.
      self.assertEqual(None, m._get_run_context_id(self._pipeline_info4))
if __name__ == '__main__':
  # Run all test cases in this module via the TensorFlow test runner.
  tf.test.main()
| 35.52 | 80 | 0.635385 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import mock
import tensorflow as tf
from ml_metadata.proto import metadata_store_pb2
from tfx import types
from tfx.orchestration import data_types
from tfx.orchestration import metadata
from tfx.types import standard_artifacts
from tfx.types.artifact import ArtifactState
class MetadataTest(tf.test.TestCase):
def setUp(self):
super(MetadataTest, self).setUp()
self._connection_config = metadata_store_pb2.ConnectionConfig()
self._connection_config.sqlite.SetInParent()
self._component_info = data_types.ComponentInfo(
component_type='a.b.c', component_id='my_component')
self._component_info2 = data_types.ComponentInfo(
component_type='a.b.d', component_id='my_component_2')
self._pipeline_info = data_types.PipelineInfo(
pipeline_name='my_pipeline', pipeline_root='/tmp', run_id='my_run_id')
self._pipeline_info2 = data_types.PipelineInfo(
pipeline_name='my_pipeline', pipeline_root='/tmp', run_id='my_run_id2')
self._pipeline_info3 = data_types.PipelineInfo(
pipeline_name='my_pipeline2', pipeline_root='/tmp', run_id='my_run_id')
self._pipeline_info4 = data_types.PipelineInfo(
pipeline_name='my_pipeline2', pipeline_root='/tmp', run_id='my_run_id2')
def testEmptyArtifact(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
m.publish_artifacts([])
eid = m.register_execution(
exec_properties={},
pipeline_info=self._pipeline_info,
component_info=self._component_info)
m.publish_execution(eid, {}, {})
[execution] = m.store.get_executions_by_id([eid])
self.assertProtoEquals(
"""
id: 1
type_id: 1
properties {
key: "state"
value {
string_value: "complete"
}
}
properties {
key: "pipeline_name"
value {
string_value: "my_pipeline"
}
}
properties {
key: "pipeline_root"
value {
string_value: "/tmp"
}
}
properties {
key: "run_id"
value {
string_value: "my_run_id"
}
}
properties {
key: "component_id"
value {
string_value: "my_component"
}
}""", execution)
def testArtifact(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
self.assertListEqual([], m.get_all_artifacts())
artifact = standard_artifacts.Examples()
artifact.uri = 'uri'
m.publish_artifacts([artifact])
[artifact] = m.store.get_artifacts()
self.assertProtoEquals(
"""id: 1
type_id: 1
uri: "uri"
properties {
key: "split"
value {
string_value: ""
}
}
properties {
key: "state"
value {
string_value: "published"
}
}
properties {
key: "type_name"
value {
string_value: "ExamplesPath"
}
}""", artifact)
self.assertListEqual([artifact], m.get_all_artifacts())
self.assertListEqual([artifact], m.get_artifacts_by_uri('uri'))
self.assertListEqual([artifact], m.get_artifacts_by_type('ExamplesPath'))
m.check_artifact_state(artifact, ArtifactState.PUBLISHED)
m.update_artifact_state(artifact, ArtifactState.DELETED)
m.check_artifact_state(artifact, ArtifactState.DELETED)
self.assertRaises(RuntimeError, m.check_artifact_state, artifact,
ArtifactState.PUBLISHED)
def testExecution(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
context_id = m.register_run_context_if_not_exists(self._pipeline_info)
exec_properties = {'arg_one': 1}
eid = m.register_execution(
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=self._component_info,
run_context_id=context_id)
[execution] = m.store.get_executions_by_context(context_id)
self.assertProtoEquals(
"""
id: 1
type_id: 2
properties {
key: "state"
value {
string_value: "new"
}
}
properties {
key: "pipeline_name"
value {
string_value: "my_pipeline"
}
}
properties {
key: "pipeline_root"
value {
string_value: "/tmp"
}
}
properties {
key: "run_id"
value {
string_value: "my_run_id"
}
}
properties {
key: "component_id"
value {
string_value: "my_component"
}
}
properties {
key: "arg_one"
value {
string_value: "1"
}
}""", execution)
input_artifact = standard_artifacts.Examples()
m.publish_artifacts([input_artifact])
output_artifact = standard_artifacts.Examples()
input_dict = {'input': [input_artifact]}
output_dict = {'output': [output_artifact]}
m.publish_execution(eid, input_dict, output_dict)
self.assertEqual(ArtifactState.PUBLISHED, output_artifact.state)
[execution] = m.store.get_executions_by_id([eid])
self.assertEqual(metadata.EXECUTION_STATE_COMPLETE,
execution.properties['state'].string_value)
events = m.store.get_events_by_execution_ids([eid])
self.assertEqual(2, len(events))
self.assertEqual(input_artifact.id, events[0].artifact_id)
self.assertEqual(metadata_store_pb2.Event.INPUT, events[0].type)
self.assertProtoEquals(
"""
steps {
key: "input"
}
steps {
index: 0
}""", events[0].path)
self.assertEqual(output_artifact.id, events[1].artifact_id)
self.assertEqual(metadata_store_pb2.Event.OUTPUT, events[1].type)
self.assertProtoEquals(
"""
steps {
key: "output"
}
steps {
index: 0
}""", events[1].path)
def testFetchPreviousResult(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
exec_properties = {'log_root': 'path'}
eid = m.register_execution(
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=self._component_info)
input_artifact = standard_artifacts.Examples()
m.publish_artifacts([input_artifact])
output_artifact = standard_artifacts.Examples()
input_artifacts = {'input': [input_artifact]}
output_artifacts = {'output': [output_artifact]}
m.publish_execution(eid, input_artifacts, output_artifacts)
self.assertEqual(
None,
m.previous_execution(
input_artifacts=input_artifacts,
exec_properties={},
pipeline_info=self._pipeline_info,
component_info=self._component_info))
self.assertEqual(
None,
m.previous_execution(
input_artifacts={},
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=self._component_info))
self.assertEqual(
None,
m.previous_execution(
input_artifacts=input_artifacts,
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=data_types.ComponentInfo(
component_id='unique', component_type='a.b.c')))
self.assertEqual(
eid,
m.previous_execution(
input_artifacts=input_artifacts,
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=self._component_info))
new_output_artifact = standard_artifacts.Examples()
self.assertNotEqual(ArtifactState.PUBLISHED,
new_output_artifact.state)
new_output_dict = {'output': [new_output_artifact]}
updated_output_dict = m.fetch_previous_result_artifacts(
new_output_dict, eid)
previous_artifact = output_artifacts['output'][-1].artifact
current_artifact = updated_output_dict['output'][-1].artifact
self.assertEqual(ArtifactState.PUBLISHED,
current_artifact.properties['state'].string_value)
self.assertEqual(previous_artifact.id, current_artifact.id)
self.assertEqual(previous_artifact.type_id, current_artifact.type_id)
def testGetCachedExecutionIds(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
mock_store = mock.Mock()
mock_store.get_events_by_execution_ids.side_effect = [
[
metadata_store_pb2.Event(
artifact_id=1, type=metadata_store_pb2.Event.INPUT)
],
[
metadata_store_pb2.Event(
artifact_id=1, type=metadata_store_pb2.Event.INPUT),
metadata_store_pb2.Event(
artifact_id=2, type=metadata_store_pb2.Event.INPUT),
metadata_store_pb2.Event(
artifact_id=3, type=metadata_store_pb2.Event.INPUT)
],
[
metadata_store_pb2.Event(
artifact_id=1, type=metadata_store_pb2.Event.INPUT),
metadata_store_pb2.Event(
artifact_id=2, type=metadata_store_pb2.Event.INPUT),
],
]
m._store = mock_store
input_one = standard_artifacts.Examples()
input_one.id = 1
input_two = standard_artifacts.Examples()
input_two.id = 2
input_dict = {
'input_one': [input_one],
'input_two': [input_two],
}
self.assertEqual(1, m._get_cached_execution_id(input_dict, [3, 2, 1]))
def testSearchArtifacts(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
exec_properties = {'log_root': 'path'}
eid = m.register_execution(
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=self._component_info)
input_artifact = standard_artifacts.Examples()
m.publish_artifacts([input_artifact])
output_artifact = types.Artifact(type_name='MyOutputArtifact')
output_artifact.uri = 'my/uri'
input_dict = {'input': [input_artifact]}
output_dict = {'output': [output_artifact]}
m.publish_execution(eid, input_dict, output_dict)
[artifact] = m.search_artifacts(
artifact_name='output',
pipeline_name=self._pipeline_info.pipeline_name,
run_id=self._pipeline_info.run_id,
producer_component_id=self._component_info.component_id)
self.assertEqual(artifact.uri, output_artifact.uri)
def testPublishSkippedExecution(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
exec_properties = {'log_root': 'path'}
eid = m.register_execution(
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=self._component_info)
input_artifact = standard_artifacts.Examples()
m.publish_artifacts([input_artifact])
output_artifact = types.Artifact(type_name='MyOutputArtifact')
output_artifact.uri = 'my/uri'
[published_artifact] = m.publish_artifacts([output_artifact])
output_artifact.artifact = published_artifact
input_dict = {'input': [input_artifact]}
output_dict = {'output': [output_artifact]}
m.publish_execution(
eid, input_dict, output_dict, state=metadata.EXECUTION_STATE_CACHED)
def testGetExecutionStates(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
context_id = m.register_run_context_if_not_exists(self._pipeline_info)
context_id2 = m.register_run_context_if_not_exists(self._pipeline_info2)
self.assertListEqual(
[self._pipeline_info.run_id, self._pipeline_info2.run_id],
m.get_all_runs('my_pipeline'))
eid = m.register_execution(
exec_properties={},
pipeline_info=self._pipeline_info,
component_info=self._component_info,
run_context_id=context_id)
m.publish_execution(eid, {}, {})
m.register_execution(
exec_properties={},
pipeline_info=self._pipeline_info,
component_info=self._component_info2,
run_context_id=context_id)
m.register_execution(
exec_properties={},
pipeline_info=self._pipeline_info2,
component_info=self._component_info,
run_context_id=context_id2)
states = m.get_execution_states(self._pipeline_info)
self.assertDictEqual(
{
self._component_info.component_id:
metadata.EXECUTION_STATE_COMPLETE,
self._component_info2.component_id:
metadata.EXECUTION_STATE_NEW,
}, states)
def testContext(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
cid1 = m.register_run_context_if_not_exists(self._pipeline_info)
cid2 = m.register_run_context_if_not_exists(self._pipeline_info2)
cid3 = m.register_run_context_if_not_exists(self._pipeline_info3)
context_type = m.store.get_context_type('run')
self.assertProtoEquals(
"""
id: 1
name: 'run'
properties {
key: "pipeline_name"
value: STRING
}
properties {
key: "run_id"
value: STRING
}
""", context_type)
[context] = m.store.get_contexts_by_id([cid1])
self.assertProtoEquals(
"""
id: 1
type_id: 1
name: 'my_pipeline.my_run_id'
properties {
key: "pipeline_name"
value {
string_value: "my_pipeline"
}
}
properties {
key: "run_id"
value {
string_value: "my_run_id"
}
}
""", context)
self.assertEqual(
cid1, m.register_run_context_if_not_exists(self._pipeline_info))
self.assertEqual(cid1, m._get_run_context_id(self._pipeline_info))
self.assertEqual(cid2, m._get_run_context_id(self._pipeline_info2))
self.assertEqual(cid3, m._get_run_context_id(self._pipeline_info3))
self.assertEqual(None, m._get_run_context_id(self._pipeline_info4))
if __name__ == '__main__':
tf.test.main()
| true | true |
1c4576f26d0bd1dafc5d7d1a3c7a8a3a5b06fec8 | 2,734 | py | Python | unsupNFP/train.py | pfnet-research/hierarchical-molecular-learning | 2c88a4737c9268e691e97d92bf2e9e2c7e2c1790 | [
"MIT"
] | 13 | 2018-06-20T11:16:27.000Z | 2020-06-23T18:56:20.000Z | unsupNFP/train.py | pfnet-research/hierarchical-molecular-learning | 2c88a4737c9268e691e97d92bf2e9e2c7e2c1790 | [
"MIT"
] | null | null | null | unsupNFP/train.py | pfnet-research/hierarchical-molecular-learning | 2c88a4737c9268e691e97d92bf2e9e2c7e2c1790 | [
"MIT"
] | 4 | 2019-09-14T23:53:44.000Z | 2021-12-09T23:36:27.000Z | import argparse
# Train an unsupervised Mol2Vec model (hierarchical NFP embeddings) on the
# MUTAG or PTC molecular graph dataset, saving a checkpoint every epoch and
# periodically evaluating the embeddings with a downstream MLP classifier.
#
# Usage: python train.py {mutag,ptc}
from chainer import optimizers
from chainer import serializers
import numpy as np
import model
import load_mutag
import load_nci1
import classification
# Training configuration.
n_epoch = 200
n_parts = 5  # NOTE(review): unused here -- presumably a cross-validation split count; confirm before removing
parser = argparse.ArgumentParser()
parser.add_argument('dataset', type=str, choices=('mutag', 'ptc'))
args = parser.parse_args()
# Load the requested dataset; each loader module also exposes the edge-type
# count and the maximum atom count used to size the model's embedding tables.
if args.dataset == 'mutag':
    mutag_file_name = "MUTAG.mat"
    graphs = load_mutag.load_whole_data(mutag_file_name)
    MAX_EDGE_TYPE = load_mutag.MAX_EDGE_TYPE
    MAX_NUMBER_ATOM = load_mutag.MAX_NUMBER_ATOM
elif args.dataset == 'ptc':
    smile_filename = 'corrected_smiles.txt'
    result_filename = 'corrected_results.txt'
    graphs = load_nci1.load_ptc(smile_filename, result_filename)
    MAX_EDGE_TYPE = load_nci1.MAX_EDGE_TYPE
    MAX_NUMBER_ATOM = load_nci1.MAX_NUMBER_ATOM
else:  # unreachable in practice: argparse `choices` already rejects other values
    raise ValueError('Invalid dataset type: {}'.format(args.dataset))
model.MAX_EDGE_TYPE = MAX_EDGE_TYPE
model.MAX_NUMBER_ATOM = MAX_NUMBER_ATOM
# Random permutation of graph indices handed to the downstream classifier.
indexs_test = np.random.permutation(len(graphs))
n_graphs = len(graphs)
print("num of graphs:", n_graphs)
# Model hyperparameters.
rep_dim = 101    # dimensionality of the learned representations
max_degree = 5   # maximum node degree handled by Atom2vec
num_levels = 6   # number of hierarchy levels in Mol2Vec
neg_size = 10    # negative-sampling size
batchsize = 100
hid_dim = 100    # NOTE(review): unused here -- presumably the classifier's hidden size
out_dim = 2      # NOTE(review): unused here -- presumably the classifier's output size
softmax = model.SoftmaxCrossEntropy(rep_dim, MAX_NUMBER_ATOM)
print("[CONFIG: representation dim =", rep_dim, "]")
atom2vec = model.Atom2vec(MAX_NUMBER_ATOM, rep_dim, max_degree, softmax)
# Bind the network to its own name rather than rebinding `model`, which
# would shadow the `model` module for any later use.
mol2vec = model.Mol2Vec(len(graphs), rep_dim, max_degree,
                        num_levels, neg_size, atom2vec)
optimizer = optimizers.Adam()
optimizer.setup(mol2vec)
print("start training")
for epoch in range(1, n_epoch + 1):
    print("epoch:", epoch)
    indexes = np.random.permutation(len(graphs))
    sum_loss = 0
    for i in range(0, n_graphs, batchsize):
        maxid = min(i + batchsize, n_graphs)
        batch_ids = indexes[i:maxid]
        graphids = []
        adjs = []
        atom_arrays = []
        # `graph_idx` (not `id`) avoids shadowing the builtin.
        for graph_idx in batch_ids:
            graphids.append(graphs[graph_idx][0])
            # index 1 and 2 need to be changed for MUTAG or NCI1 datasets
            atom_arrays.append(graphs[graph_idx][1])
            adjs.append(graphs[graph_idx][2])
        graphids = np.asarray(graphids)
        adjs = np.asarray(adjs, dtype=np.float32)
        atom_arrays = np.asarray(atom_arrays, dtype=np.int32)
        optimizer.update(mol2vec, graphids, adjs, atom_arrays)
        sum_loss += float(mol2vec.loss.data) * len(graphids)
        print("-----", float(mol2vec.loss.data) * len(graphids))
    print("loss: ", sum_loss / n_graphs)
    serializers.save_npz(str(rep_dim) + "_model_ptc.npz", mol2vec)
    # After each epoch, periodically evaluate the learned embeddings.
    if epoch % 10 == 0:
        classification.MLPClassifier(mol2vec, graphs, indexs_test,
                                     rep_dim, batchsize)
| 29.717391 | 73 | 0.685077 | import argparse
from chainer import optimizers
from chainer import serializers
import numpy as np
import model
import load_mutag
import load_nci1
import classification
n_epoch = 200
n_parts = 5
parser = argparse.ArgumentParser()
parser.add_argument('dataset', type=str, choices=('mutag', 'ptc'))
args = parser.parse_args()
if args.dataset == 'mutag':
mutag_file_name = "MUTAG.mat"
graphs = load_mutag.load_whole_data('MUTAG.mat')
MAX_EDGE_TYPE = load_mutag.MAX_EDGE_TYPE
MAX_NUMBER_ATOM = load_mutag.MAX_NUMBER_ATOM
elif args.dataset == 'ptc':
smile_filename = 'corrected_smiles.txt'
result_filename = 'corrected_results.txt'
graphs = load_nci1.load_ptc(smile_filename, result_filename)
MAX_EDGE_TYPE = load_nci1.MAX_EDGE_TYPE
MAX_NUMBER_ATOM = load_nci1.MAX_NUMBER_ATOM
else:
raise ValueError('Invalid dataset type: {}'.format(args.dataset))
model.MAX_EDGE_TYPE = MAX_EDGE_TYPE
model.MAX_NUMBER_ATOM = MAX_NUMBER_ATOM
indexs_test = np.random.permutation(len(graphs))
n_graphs = len(graphs)
print("num of graphs:", n_graphs)
rep_dim = 101
max_degree = 5
num_levels = 6
neg_size = 10
batchsize = 100
hid_dim = 100
out_dim = 2
softmax = model.SoftmaxCrossEntropy(rep_dim, MAX_NUMBER_ATOM)
print("[CONFIG: representation dim =", rep_dim, "]")
atom2vec = model.Atom2vec(MAX_NUMBER_ATOM, rep_dim, max_degree, softmax)
model = model.Mol2Vec(len(graphs), rep_dim, max_degree,
num_levels, neg_size, atom2vec)
optimizer = optimizers.Adam()
optimizer.setup(model)
print("start training")
for epoch in range(1, n_epoch + 1):
print("epoch:", epoch)
indexes = np.random.permutation(len(graphs))
sum_loss = 0
for i in range(0, n_graphs, batchsize):
maxid = min(i + batchsize, n_graphs)
ids = indexes[i:maxid]
graphids = []
adjs = []
atom_arrays = []
for id in indexes[i:maxid]:
graphids.append(graphs[id][0])
atom_arrays.append(graphs[id][1])
adjs.append(graphs[id][2])
graphids = np.asarray(graphids)
adjs = np.asarray(adjs, dtype=np.float32)
atom_arrays = np.asarray(atom_arrays, dtype=np.int32)
optimizer.update(model, graphids, adjs, atom_arrays)
sum_loss += float(model.loss.data) * len(graphids)
print("-----", float(model.loss.data) * len(graphids))
print("loss: ", sum_loss / n_graphs)
serializers.save_npz(str(rep_dim) + "_model_ptc.npz", model)
if epoch % 10 == 0:
classification.MLPClassifier(model, graphs, indexs_test,
rep_dim, batchsize)
| true | true |
1c457730cf5448e958549f79c322f8bde85c2542 | 13,955 | py | Python | stumpy/aamp_ostinato.py | alvii147/stumpy | 6dacfcf35ce03255951d70e5dd2f8b3f4e20a27f | [
"BSD-3-Clause"
] | 2 | 2022-01-25T22:38:56.000Z | 2022-01-31T10:59:02.000Z | stumpy/aamp_ostinato.py | vishalbelsare/stumpy | 5f192a0a41fbb44f144cc4b676d525f19aaeaa98 | [
"BSD-3-Clause"
] | null | null | null | stumpy/aamp_ostinato.py | vishalbelsare/stumpy | 5f192a0a41fbb44f144cc4b676d525f19aaeaa98 | [
"BSD-3-Clause"
] | null | null | null | # STUMPY
# Copyright 2019 TD Ameritrade. Released under the terms of the 3-Clause BSD license.
# STUMPY is a trademark of TD Ameritrade IP Company, Inc. All rights reserved.
import numpy as np
from . import core, aamp, aamped
def _aamp_across_series_nearest_neighbors(
    Ts, Ts_idx, subseq_idx, m, Ts_squared, Ts_subseq_isfinite
):
    """
    For each time series in `Ts`, locate the subsequence nearest to the query
    `Ts[Ts_idx][subseq_idx : subseq_idx + m]` (non-normalized distances).

    Parameters
    ----------
    Ts : list
        Time series to search

    Ts_idx : int
        Index of the time series in `Ts` containing the query subsequence

    subseq_idx : int
        Start index of the query subsequence within `Ts[Ts_idx]`

    m : int
        Window size

    Ts_squared : list
        Per-series rolling-window sums of squares

    Ts_subseq_isfinite : list
        Per-series boolean masks marking subsequences with only finite values

    Returns
    -------
    nns_radii : numpy.ndarray
        Distance from the query to its nearest neighbor in each series

    nns_subseq_idx : numpy.ndarray
        Start index of that nearest neighbor in each series
    """
    query = Ts[Ts_idx][subseq_idx : subseq_idx + m]
    query_squared = np.sum(query * query)
    # Invariant across the loop: a query containing NaN/inf matches nothing.
    query_is_bad = np.any(~np.isfinite(query))

    n_series = len(Ts)
    nns_radii = np.zeros(n_series, dtype=np.float64)
    nns_subseq_idx = np.zeros(n_series, dtype=np.int64)

    for i, T in enumerate(Ts):
        if query_is_bad:  # pragma: no cover
            D = np.full(T.shape[0] - m + 1, np.inf, dtype=np.float64)
        else:
            QT = core.sliding_dot_product(query, T)
            D = core._mass_absolute(query_squared, Ts_squared[i], QT)
            # Subsequences containing non-finite values cannot be neighbors.
            D[~Ts_subseq_isfinite[i]] = np.inf
        nn = np.argmin(D)
        nns_subseq_idx[i] = nn
        nns_radii[i] = D[nn]

    return nns_radii, nns_subseq_idx
def _get_aamp_central_motif(
    Ts, bsf_radius, bsf_Ts_idx, bsf_subseq_idx, m, Ts_squared, Ts_subseq_isfinite
):
    """
    Among subsequences tied at the best-so-far radius, pick the most central
    consensus motif (smallest mean nearest-neighbor radius).

    Parameters
    ----------
    Ts : list
        Time series to search

    bsf_radius : float
        Best-so-far radius found by the consensus search

    bsf_Ts_idx : int
        Index of the time series in `Ts` where `bsf_radius` was first seen

    bsf_subseq_idx : int
        Subsequence index in `Ts[bsf_Ts_idx]` with radius `bsf_radius`

    m : int
        Window size

    Ts_squared : list
        Per-series rolling-window sums of squares

    Ts_subseq_isfinite : list
        Per-series boolean masks marking subsequences with only finite values

    Returns
    -------
    bsf_radius : float
        The (unchanged) best-so-far radius

    bsf_Ts_idx : int
        Index of the time series containing the most central consensus motif

    bsf_subseq_idx : int
        Subsequence index of the most central consensus motif
    """
    nns_radii, nns_subseq_idx = _aamp_across_series_nearest_neighbors(
        Ts, bsf_Ts_idx, bsf_subseq_idx, m, Ts_squared, Ts_subseq_isfinite
    )
    best_mean_radii = nns_radii.mean()

    # Candidates: nearest neighbors whose radius ties the best-so-far radius.
    tied_Ts_idx = np.flatnonzero(np.isclose(nns_radii, bsf_radius))
    tied_subseq_idx = nns_subseq_idx[tied_Ts_idx]

    for ts_i, ss_i in zip(tied_Ts_idx, tied_subseq_idx):
        cand_radii, _ = _aamp_across_series_nearest_neighbors(
            Ts, ts_i, ss_i, m, Ts_squared, Ts_subseq_isfinite
        )
        # A candidate must still realize the same radius, and it wins only if
        # its neighbors are, on average, closer than the current best.
        if (
            np.isclose(cand_radii.max(), bsf_radius)
            and cand_radii.mean() < best_mean_radii
        ):
            bsf_Ts_idx = ts_i
            bsf_subseq_idx = ss_i
            best_mean_radii = cand_radii.mean()

    return bsf_radius, bsf_Ts_idx, bsf_subseq_idx
def _aamp_ostinato(
    Ts,
    m,
    Ts_squared,
    Ts_subseq_isfinite,
    dask_client=None,
    device_id=None,
    mp_func=aamp,
):
    """
    Find the consensus motif amongst a list of time series

    Parameters
    ----------
    Ts : list
        A list of time series for which to find the consensus motif

    m : int
        Window size

    Ts_squared : list
        A list of rolling window `T_squared` for each time series in `Ts`

    Ts_subseq_isfinite : list
        A list of rolling window `T_subseq_isfinite` for each time series in
        `Ts`

    dask_client : client, default None
        A Dask Distributed client that is connected to a Dask scheduler and
        Dask workers. Setting up a Dask distributed cluster is beyond the
        scope of this library. Please refer to the Dask Distributed
        documentation.

    device_id : int or list, default None
        The (GPU) device number to use. The default value is `0`. A list of
        valid device ids (int) may also be provided for parallel GPU-STUMP
        computation. A list of all valid device ids can be obtained by
        executing `[device.id for device in numba.cuda.list_devices()]`.

    mp_func : object, default stump
        Specify a custom matrix profile function to use for computing matrix
        profiles

    Returns
    -------
    bsf_radius : float
        The (best-so-far) radius of the consensus motif

    bsf_Ts_idx : int
        The time series index in `Ts` which contains the consensus motif

    bsf_subseq_idx : int
        The subsequence index within time series `Ts[bsf_Ts_idx]` that
        contains the consensus motif

    Notes
    -----
    `DOI: 10.1109/ICDM.2019.00140 \
    <https://www.cs.ucr.edu/~eamonn/consensus_Motif_ICDM_Long_version.pdf>`__

    See Table 2

    The ostinato algorithm proposed in the paper finds the best radius
    in `Ts`. Intuitively, the radius is the minimum distance of a
    subsequence to encompass at least one nearest neighbor subsequence
    from all other time series. The best radius in `Ts` is the minimum
    radius amongst all radii. Some data sets might contain multiple
    subsequences which have the same optimal radius.
    The greedy Ostinato algorithm only finds one of them, which might
    not be the most central motif. The most central motif amongst the
    subsequences with the best radius is the one with the smallest mean
    distance to nearest neighbors in all other time series. To find this
    central motif it is necessary to search the subsequences with the
    best radius via `stumpy.ostinato._get_central_motif`
    """
    bsf_radius = np.inf
    bsf_Ts_idx = 0
    bsf_subseq_idx = 0

    # Wrap `mp_func` so it can be called uniformly whether or not a Dask
    # client or GPU device id was supplied.
    partial_mp_func = core._get_partial_mp_func(
        mp_func, dask_client=dask_client, device_id=device_id
    )

    k = len(Ts)
    for j in range(k):
        # Pair series j with its successor h (wrapping k-1 back to 0) and
        # compute their AB-join matrix profile.
        if j < (k - 1):
            h = j + 1
        else:
            h = 0

        mp = partial_mp_func(Ts[j], m, Ts[h], ignore_trivial=False)
        # Visit candidate subsequences in ascending matrix-profile order so
        # the inner search can stop as soon as a candidate cannot improve on
        # the best-so-far radius.
        si = np.argsort(mp[:, 0])
        for q in si:
            Q = Ts[j][q : q + m]
            Q_squared = np.sum(Q * Q)
            radius = mp[q, 0]
            if radius >= bsf_radius:
                # All remaining candidates have an even larger lower bound.
                break
            # Grow `radius` to also cover the nearest neighbor in every other
            # series (series h is already accounted for by `mp`).
            for i in range(k):
                if i != j and i != h:
                    if np.any(~np.isfinite(Q)):  # pragma: no cover
                        distance_profile = np.empty(Ts[i].shape[0] - m + 1)
                        distance_profile[:] = np.inf
                    else:
                        QT = core.sliding_dot_product(Ts[j][q : q + m], Ts[i])
                        distance_profile = core._mass_absolute(
                            Q_squared, Ts_squared[i], QT
                        )
                        # Non-finite subsequences cannot be neighbors.
                        distance_profile[~Ts_subseq_isfinite[i]] = np.inf
                    radius = np.max((radius, np.min(distance_profile)))
                    if radius >= bsf_radius:
                        # Early abandon: this candidate can no longer win.
                        break
            if radius < bsf_radius:
                bsf_radius, bsf_Ts_idx, bsf_subseq_idx = radius, j, q

    return bsf_radius, bsf_Ts_idx, bsf_subseq_idx
def aamp_ostinato(Ts, m):
    """
    Find the non-normalized (i.e., without z-normalization) consensus motif
    of multiple time series

    This wraps the greedy ostinato search, which finds the best radius, and a
    helper that then selects the most central conserved motif among ties.

    Parameters
    ----------
    Ts : list
        A list of time series for which to find the most central consensus
        motif. Each entry is replaced in-place by its preprocessed copy.

    m : int
        Window size

    Returns
    -------
    central_radius : float
        Radius of the most central consensus motif

    central_Ts_idx : int
        The time series index in `Ts` which contains the most central
        consensus motif

    central_subseq_idx : int
        The subsequence index within time series `Ts[central_Ts_idx]` that
        contains the most central consensus motif

    Notes
    -----
    `DOI: 10.1109/ICDM.2019.00140 \
    <https://www.cs.ucr.edu/~eamonn/consensus_Motif_ICDM_Long_version.pdf>`__

    See Table 2. The greedy ostinato algorithm finds the smallest radius that
    encompasses at least one nearest neighbor from every other time series,
    but possibly not the most central motif with that radius; ties are
    resolved by `_get_aamp_central_motif` using the smallest mean
    nearest-neighbor distance.
    """
    k = len(Ts)
    Ts_squared = [None] * k
    Ts_subseq_isfinite = [None] * k
    for i in range(k):
        # NOTE: `Ts[i]` is overwritten with the preprocessed series.
        Ts[i], Ts_subseq_isfinite[i] = core.preprocess_non_normalized(Ts[i], m)
        Ts_squared[i] = np.sum(core.rolling_window(Ts[i] * Ts[i], m), axis=1)

    bsf_radius, bsf_Ts_idx, bsf_subseq_idx = _aamp_ostinato(
        Ts, m, Ts_squared, Ts_subseq_isfinite
    )

    # Returns (central_radius, central_Ts_idx, central_subseq_idx).
    return _get_aamp_central_motif(
        Ts, bsf_radius, bsf_Ts_idx, bsf_subseq_idx, m, Ts_squared, Ts_subseq_isfinite
    )
def aamp_ostinatoed(dask_client, Ts, m):
    """
    Find the non-normalized (i.e., without z-normalization) consensus motif
    of multiple time series with a distributed dask cluster

    This wraps the greedy ostinato search, which finds the best radius, and a
    helper that then selects the most central conserved motif among ties.

    Parameters
    ----------
    dask_client : client
        A Dask Distributed client that is connected to a Dask scheduler and
        Dask workers. Setting up a Dask distributed cluster is beyond the
        scope of this library. Please refer to the Dask Distributed
        documentation.

    Ts : list
        A list of time series for which to find the most central consensus
        motif. Each entry is replaced in-place by its preprocessed copy.

    m : int
        Window size

    Returns
    -------
    central_radius : float
        Radius of the most central consensus motif

    central_Ts_idx : int
        The time series index in `Ts` which contains the most central
        consensus motif

    central_subseq_idx : int
        The subsequence index within time series `Ts[central_Ts_idx]` that
        contains the most central consensus motif

    Notes
    -----
    `DOI: 10.1109/ICDM.2019.00140 \
    <https://www.cs.ucr.edu/~eamonn/consensus_Motif_ICDM_Long_version.pdf>`__

    See Table 2. The greedy ostinato algorithm finds the smallest radius that
    encompasses at least one nearest neighbor from every other time series,
    but possibly not the most central motif with that radius; ties are
    resolved by `_get_aamp_central_motif` using the smallest mean
    nearest-neighbor distance.
    """
    k = len(Ts)
    Ts_squared = [None] * k
    Ts_subseq_isfinite = [None] * k
    for i in range(k):
        # NOTE: `Ts[i]` is overwritten with the preprocessed series.
        Ts[i], Ts_subseq_isfinite[i] = core.preprocess_non_normalized(Ts[i], m)
        Ts_squared[i] = np.sum(core.rolling_window(Ts[i] * Ts[i], m), axis=1)

    bsf_radius, bsf_Ts_idx, bsf_subseq_idx = _aamp_ostinato(
        Ts,
        m,
        Ts_squared,
        Ts_subseq_isfinite,
        dask_client=dask_client,
        mp_func=aamped,
    )

    # Returns (central_radius, central_Ts_idx, central_subseq_idx).
    return _get_aamp_central_motif(
        Ts, bsf_radius, bsf_Ts_idx, bsf_subseq_idx, m, Ts_squared, Ts_subseq_isfinite
    )
| 35.874036 | 88 | 0.668649 |
import numpy as np
from . import core, aamp, aamped
def _aamp_across_series_nearest_neighbors(
    Ts, Ts_idx, subseq_idx, m, Ts_squared, Ts_subseq_isfinite
):
    """
    For the query subsequence ``Ts[Ts_idx][subseq_idx : subseq_idx + m]``,
    find its non-normalized nearest-neighbor distance and location in every
    time series of ``Ts``.

    Parameters
    ----------
    Ts : list of 1d arrays
        the collection of time series
    Ts_idx : int
        index of the time series containing the query subsequence
    subseq_idx : int
        start index of the query subsequence
    m : int
        window (subsequence) size
    Ts_squared : list of 1d arrays
        precomputed rolling sums of squares for each time series
    Ts_subseq_isfinite : list of 1d boolean arrays
        True where a subsequence contains only finite values

    Returns
    -------
    nns_radii : 1d array
        nearest-neighbor distance of the query in each time series
    nns_subseq_idx : 1d array
        start index of that nearest neighbor in each time series
    """
    k = len(Ts)
    Q = Ts[Ts_idx][subseq_idx : subseq_idx + m]
    Q_squared = np.sum(Q * Q)
    nns_radii = np.zeros(k, dtype=np.float64)
    nns_subseq_idx = np.zeros(k, dtype=np.int64)
    for i in range(k):
        if np.any(~np.isfinite(Q)):
            # A non-finite query cannot match anything: all-inf profile.
            distance_profile = np.empty(Ts[i].shape[0] - m + 1, dtype=np.float64)
            distance_profile[:] = np.inf
        else:
            QT = core.sliding_dot_product(
                Ts[Ts_idx][subseq_idx : subseq_idx + m], Ts[i]
            )
            distance_profile = core._mass_absolute(Q_squared, Ts_squared[i], QT)
            # Exclude candidate subsequences containing non-finite values.
            distance_profile[~Ts_subseq_isfinite[i]] = np.inf
        nns_subseq_idx[i] = np.argmin(distance_profile)
        nns_radii[i] = distance_profile[nns_subseq_idx[i]]
    return nns_radii, nns_subseq_idx
def _get_aamp_central_motif(
    Ts, bsf_radius, bsf_Ts_idx, bsf_subseq_idx, m, Ts_squared, Ts_subseq_isfinite
):
    """
    Among all subsequences that share the best-so-far radius, pick the most
    central consensus motif: the one with the smallest mean distance to its
    nearest neighbors in all other time series.

    Parameters
    ----------
    Ts : list of 1d arrays
        the collection of time series
    bsf_radius : float
        best-so-far radius found by the ostinato search
    bsf_Ts_idx : int
        time series index of the best-so-far motif
    bsf_subseq_idx : int
        subsequence start index of the best-so-far motif
    m : int
        window size
    Ts_squared, Ts_subseq_isfinite : lists of 1d arrays
        precomputed helpers (see ``_aamp_across_series_nearest_neighbors``)

    Returns
    -------
    (bsf_radius, bsf_Ts_idx, bsf_subseq_idx) : tuple
        radius and location of the most central consensus motif
    """
    bsf_nns_radii, bsf_nns_subseq_idx = _aamp_across_series_nearest_neighbors(
        Ts, bsf_Ts_idx, bsf_subseq_idx, m, Ts_squared, Ts_subseq_isfinite
    )
    bsf_nns_mean_radii = bsf_nns_radii.mean()
    # Subsequences attaining (numerically) the same radius are alternative
    # candidates for the most central motif.
    candidate_nns_Ts_idx = np.flatnonzero(np.isclose(bsf_nns_radii, bsf_radius))
    candidate_nns_subseq_idx = bsf_nns_subseq_idx[candidate_nns_Ts_idx]
    for Ts_idx, subseq_idx in zip(candidate_nns_Ts_idx, candidate_nns_subseq_idx):
        candidate_nns_radii, _ = _aamp_across_series_nearest_neighbors(
            Ts, Ts_idx, subseq_idx, m, Ts_squared, Ts_subseq_isfinite
        )
        # Accept a candidate only if it also attains the best radius and is
        # more central (smaller mean nearest-neighbor distance).
        if (
            np.isclose(candidate_nns_radii.max(), bsf_radius)
            and candidate_nns_radii.mean() < bsf_nns_mean_radii
        ):
            bsf_Ts_idx = Ts_idx
            bsf_subseq_idx = subseq_idx
            bsf_nns_mean_radii = candidate_nns_radii.mean()
    return bsf_radius, bsf_Ts_idx, bsf_subseq_idx
def _aamp_ostinato(
    Ts,
    m,
    Ts_squared,
    Ts_subseq_isfinite,
    dask_client=None,
    device_id=None,
    mp_func=aamp,
):
    """
    Greedy (vanilla) ostinato search for the best radius over multiple
    time series, without z-normalization.

    Parameters
    ----------
    Ts : list of 1d arrays
        the collection of time series
    m : int
        window size
    Ts_squared : list of 1d arrays
        precomputed rolling sums of squares for each time series
    Ts_subseq_isfinite : list of 1d boolean arrays
        True where a subsequence contains only finite values
    dask_client : client, optional
        Dask distributed client for the distributed variant
    device_id : int or list, optional
        GPU device id(s) for the GPU variant
    mp_func : callable, optional
        matrix profile function to use (default ``aamp``)

    Returns
    -------
    (bsf_radius, bsf_Ts_idx, bsf_subseq_idx) : tuple
        best-so-far radius and the location of the motif achieving it
    """
    bsf_radius = np.inf
    bsf_Ts_idx = 0
    bsf_subseq_idx = 0
    partial_mp_func = core._get_partial_mp_func(
        mp_func, dask_client=dask_client, device_id=device_id
    )
    k = len(Ts)
    for j in range(k):
        # Pair each series with the next one (wrapping around).
        if j < (k - 1):
            h = j + 1
        else:
            h = 0
        mp = partial_mp_func(Ts[j], m, Ts[h], ignore_trivial=False)
        # Visit candidates in order of increasing matrix-profile distance so
        # the search can be pruned as soon as the radius can no longer improve.
        si = np.argsort(mp[:, 0])
        for q in si:
            Q = Ts[j][q : q + m]
            Q_squared = np.sum(Q * Q)
            radius = mp[q, 0]
            if radius >= bsf_radius:
                break
            for i in range(k):
                if i != j and i != h:
                    if np.any(~np.isfinite(Q)):
                        distance_profile = np.empty(Ts[i].shape[0] - m + 1)
                        distance_profile[:] = np.inf
                    else:
                        QT = core.sliding_dot_product(Ts[j][q : q + m], Ts[i])
                        distance_profile = core._mass_absolute(
                            Q_squared, Ts_squared[i], QT
                        )
                        distance_profile[~Ts_subseq_isfinite[i]] = np.inf
                    # The radius is the max over all series of the min distance.
                    radius = np.max((radius, np.min(distance_profile)))
                    if radius >= bsf_radius:
                        break
            if radius < bsf_radius:
                bsf_radius, bsf_Ts_idx, bsf_subseq_idx = radius, j, q
    return bsf_radius, bsf_Ts_idx, bsf_subseq_idx
def aamp_ostinato(Ts, m):
    """
    Find the non-normalized (no z-normalization) consensus motif of
    multiple time series (single-machine version).

    Parameters
    ----------
    Ts : list of 1d arrays
        time series in which to search for the consensus motif
    m : int
        window size

    Returns
    -------
    central_radius : float
        radius of the most central consensus motif
    central_Ts_idx : int
        index of the time series containing the motif
    central_subseq_idx : int
        start index of the motif within that time series
    """
    Ts_squared = [None] * len(Ts)
    Ts_subseq_isfinite = [None] * len(Ts)
    for i, T in enumerate(Ts):
        Ts[i], Ts_subseq_isfinite[i] = core.preprocess_non_normalized(T, m)
        Ts_squared[i] = np.sum(core.rolling_window(Ts[i] * Ts[i], m), axis=1)
    bsf_radius, bsf_Ts_idx, bsf_subseq_idx = _aamp_ostinato(
        Ts, m, Ts_squared, Ts_subseq_isfinite
    )
    # Refine: the greedy search finds *a* best-radius motif; pick the most
    # central one among all motifs sharing that radius.
    (central_radius, central_Ts_idx, central_subseq_idx,) = _get_aamp_central_motif(
        Ts, bsf_radius, bsf_Ts_idx, bsf_subseq_idx, m, Ts_squared, Ts_subseq_isfinite
    )
    return central_radius, central_Ts_idx, central_subseq_idx
def aamp_ostinatoed(dask_client, Ts, m):
    """
    Find the non-normalized consensus motif of multiple time series with a
    distributed Dask cluster.

    Parameters
    ----------
    dask_client : client
        a Dask Distributed client connected to a scheduler and workers
    Ts : list of 1d arrays
        time series in which to search for the consensus motif
    m : int
        window size

    Returns
    -------
    central_radius : float
        radius of the most central consensus motif
    central_Ts_idx : int
        index of the time series containing the motif
    central_subseq_idx : int
        start index of the motif within that time series
    """
    Ts_squared = [None] * len(Ts)
    Ts_subseq_isfinite = [None] * len(Ts)
    for i, T in enumerate(Ts):
        Ts[i], Ts_subseq_isfinite[i] = core.preprocess_non_normalized(T, m)
        Ts_squared[i] = np.sum(core.rolling_window(Ts[i] * Ts[i], m), axis=1)
    bsf_radius, bsf_Ts_idx, bsf_subseq_idx = _aamp_ostinato(
        Ts, m, Ts_squared, Ts_subseq_isfinite, dask_client=dask_client, mp_func=aamped
    )
    # Refine the greedy result to the most central best-radius motif.
    (central_radius, central_Ts_idx, central_subseq_idx,) = _get_aamp_central_motif(
        Ts, bsf_radius, bsf_Ts_idx, bsf_subseq_idx, m, Ts_squared, Ts_subseq_isfinite
    )
    return central_radius, central_Ts_idx, central_subseq_idx
| true | true |
1c4577dd8ddebd2c787183b01c730deb2a42ac5d | 1,252 | py | Python | tests/unit_tests/data/inline_service_integration_test/requester_service.py | ZacharyATanenbaum/service_framework | b5dde4407998350d1b7ad09284110b986fd4e12a | [
"MIT"
] | 1 | 2020-03-20T21:33:56.000Z | 2020-03-20T21:33:56.000Z | tests/unit_tests/data/inline_service_integration_test/requester_service.py | ZacharyATanenbaum/service_framework | b5dde4407998350d1b7ad09284110b986fd4e12a | [
"MIT"
] | 1 | 2020-03-22T03:48:45.000Z | 2020-03-22T03:48:45.000Z | tests/unit_tests/data/inline_service_integration_test/requester_service.py | ZacharyATanenbaum/service_framework | b5dde4407998350d1b7ad09284110b986fd4e12a | [
"MIT"
] | null | null | null | """ File to house a requester service """
from service_framework.utils.logging_utils import get_logger
LOG = get_logger()
def setup_config(config):
    """
    Coerce configuration values to their proper types and add the
    runtime fields used by ``main``.
    """
    LOG.info('Setting up config!')
    raw_count = config.get('num_req_to_send', 2)
    config['num_req_to_send'] = int(raw_count)
    # Collected replies are appended here by main().
    config['responses_recieved'] = []
    return config
def main(to_send, config):
    """
    Entry point of the Requester Service: send the configured number of
    echo requests and record every reply in the config.
    """
    total = config['num_req_to_send']
    replies = config['responses_recieved']
    for num in range(total):
        payload = {'to_echo': 'Hello World - ' + str(num)}
        LOG.info('Sending payload: %s', payload)
        returned = to_send('request', payload)
        LOG.info('Got Response: %s', returned)
        replies.append(returned)
    LOG.info('GOT ALL RESPONSES')
# Configuration keys this service expects from the framework:
# ``num_req_to_send`` must be supplied; ``responses_recieved`` is created at
# runtime by setup_config (the key keeps its historical misspelling because
# it is part of the runtime contract).
config_model = {
    'required': {
        'num_req_to_send': int,
    },
    'optional': {
        'responses_recieved': str,
    }
}
# Outbound connections: a single requester named 'request' that sends a
# ``to_echo`` string and expects an ``echoed`` string in the reply.
connection_models = {
    'out': {
        'request': {
            'connection_type': 'requester',
            'required_arguments': {
                'to_echo': str,
            },
            'required_return_arguments': {
                'echoed': str,
            }
        }
    }
}
| 22.357143 | 69 | 0.572684 |
from service_framework.utils.logging_utils import get_logger
LOG = get_logger()
def setup_config(config):
LOG.info('Setting up config!')
config['num_req_to_send'] = int(config.get('num_req_to_send', 2))
config['responses_recieved'] = []
return config
def main(to_send, config):
for num in range(config['num_req_to_send']):
payload = {'to_echo': 'Hello World - ' + str(num)}
LOG.info('Sending payload: %s', payload)
returned = to_send('request', payload)
LOG.info('Got Response: %s', returned)
config['responses_recieved'].append(returned)
LOG.info('GOT ALL RESPONSES')
config_model = {
'required': {
'num_req_to_send': int,
},
'optional': {
'responses_recieved': str,
}
}
connection_models = {
'out': {
'request': {
'connection_type': 'requester',
'required_arguments': {
'to_echo': str,
},
'required_return_arguments': {
'echoed': str,
}
}
}
}
| true | true |
1c4577df150e0de677cd366f5bf958d6cbeb0911 | 2,234 | py | Python | src/the_tale/the_tale/game/heroes/conf.py | Alacrate/the-tale | 43b211f3a99e93964e95abc20a8ed649a205ffcf | [
"BSD-3-Clause"
] | 85 | 2017-11-21T12:22:02.000Z | 2022-03-27T23:07:17.000Z | src/the_tale/the_tale/game/heroes/conf.py | Alacrate/the-tale | 43b211f3a99e93964e95abc20a8ed649a205ffcf | [
"BSD-3-Clause"
] | 545 | 2017-11-04T14:15:04.000Z | 2022-03-27T14:19:27.000Z | src/the_tale/the_tale/game/heroes/conf.py | Alacrate/the-tale | 43b211f3a99e93964e95abc20a8ed649a205ffcf | [
"BSD-3-Clause"
] | 45 | 2017-11-11T12:36:30.000Z | 2022-02-25T06:10:44.000Z |
import smart_imports
smart_imports.all()
# Hero names: Cyrillic letters plus a small set of punctuation; the test
# build additionally allows commas (used by test fixtures).
NAME_REGEX = r'^[\-\ а-яА-Я«»\'ёЁ]+$' if not django_settings.TESTS_RUNNING else r'^[\-\ а-яА-Я«»\'\,ёЁ]+$'
# Application settings for the HEROES game component.
settings = utils_app_settings.app_settings('HEROES',
                                           USE_ABILITY_CHANCE=0.1,
                                           MESSAGES_LOG_LENGTH=10,
                                           DIARY_LOG_LENGTH=50,
                                           MIN_PVP_BATTLES=25,
                                           UI_CACHING_KEY='hero_ui_%d',
                                           # not cache livetime, but time period after setupped ui_caching_started_at
                                           # in which ui_caching is turned on
                                           UI_CACHING_TIME=10 * 60,
                                           # time before caching end, when we send next cache command
                                           UI_CACHING_CONTINUE_TIME=60,
                                           # cache livetime
                                           UI_CACHING_TIMEOUT=60,
                                           # should we dump cached heroes to database
                                           DUMP_CACHED_HEROES=False,
                                           START_ENERGY_BONUS=10,
                                           MAX_HELPS_IN_TURN=10,
                                           NAME_REGEX=NAME_REGEX,
                                           NAME_SYMBOLS_DESCRITION='пробел, -, а-я, А-Я, «», \' ',
                                           NAME_MIN_LENGHT=3,
                                           ABILITIES_RESET_TIMEOUT=datetime.timedelta(days=30),
                                           UNLOAD_TIMEOUT=c.TURN_DELTA * 3,
                                           RARE_OPERATIONS_INTERVAL=1000,
                                           INACTIVE_HERO_DELAY=int(10),  # for inactive heroes, slow game time down by a factor of N
                                           TT_DIARY_ENTRY_POINT='http://localhost:10001/',
                                           MAX_HERO_DESCRIPTION_LENGTH=10000,
                                           REMOVE_HERO_DELAY=10*60)
| 44.68 | 120 | 0.389884 |
import smart_imports
smart_imports.all()
NAME_REGEX = r'^[\-\ а-яА-Я«»\'ёЁ]+$' if not django_settings.TESTS_RUNNING else r'^[\-\ а-яА-Я«»\'\,ёЁ]+$'
settings = utils_app_settings.app_settings('HEROES',
USE_ABILITY_CHANCE=0.1,
MESSAGES_LOG_LENGTH=10,
DIARY_LOG_LENGTH=50,
MIN_PVP_BATTLES=25,
UI_CACHING_KEY='hero_ui_%d',
UI_CACHING_TIME=10 * 60,
UI_CACHING_CONTINUE_TIME=60,
UI_CACHING_TIMEOUT=60,
DUMP_CACHED_HEROES=False,
START_ENERGY_BONUS=10,
MAX_HELPS_IN_TURN=10,
NAME_REGEX=NAME_REGEX,
NAME_SYMBOLS_DESCRITION='пробел, -, а-я, А-Я, «», \' ',
NAME_MIN_LENGHT=3,
ABILITIES_RESET_TIMEOUT=datetime.timedelta(days=30),
UNLOAD_TIMEOUT=c.TURN_DELTA * 3,
RARE_OPERATIONS_INTERVAL=1000,
INACTIVE_HERO_DELAY=int(10), # для неактивных героев замедлять время в N раз
TT_DIARY_ENTRY_POINT='http://localhost:10001/',
MAX_HERO_DESCRIPTION_LENGTH=10000,
REMOVE_HERO_DELAY=10*60)
| true | true |
1c4579efb456751f3e85a187b14430807fac4cfc | 1,051 | py | Python | solutions/093.restore-ip-addresses/restore-ip-addresses.py | wangsongiam/leetcode | 96ff21bca1871816ae51fccb1fa13587b378dc50 | [
"MIT"
] | 3 | 2018-11-25T15:19:57.000Z | 2019-09-28T03:01:11.000Z | solutions/093.restore-ip-addresses/restore-ip-addresses.py | casprwang/leetcode | 96ff21bca1871816ae51fccb1fa13587b378dc50 | [
"MIT"
] | null | null | null | solutions/093.restore-ip-addresses/restore-ip-addresses.py | casprwang/leetcode | 96ff21bca1871816ae51fccb1fa13587b378dc50 | [
"MIT"
] | 3 | 2018-02-11T20:23:44.000Z | 2020-06-05T15:39:56.000Z | class Solution:
def restoreIpAddresses(self, s):
"""
:type s: str
:rtype: List[str]
"""
ret = []
def traverse(pos, cnt, tmp):
print(tmp)
nonlocal ret, s
if cnt == 0:
if not s[pos:]:
return
if len(s[pos:]) > 1 and s[pos:][0] == '0':
return
if int(s[pos:]) < 256 and int(s[pos:]) > -1:
ret.append(tmp + s[pos:])
return
if (cnt + 1) * 3 < len(s) - pos:
return
for i in range(1, 4): # 1 2 3
if pos + i >= len(s):
return
if len(s[pos:pos+i]) > 1 and s[pos:pos+i][0] == '0':
continue
if int(s[pos:pos+i]) < 0 or int(s[pos:pos+i]) > 255 and s[pos:pos+i][0] != '0':
continue
traverse(pos + i, cnt - 1, tmp + s[pos:pos+i] + '.')
traverse(0, 3, '')
return ret
| 26.948718 | 95 | 0.358706 | class Solution:
def restoreIpAddresses(self, s):
ret = []
def traverse(pos, cnt, tmp):
print(tmp)
nonlocal ret, s
if cnt == 0:
if not s[pos:]:
return
if len(s[pos:]) > 1 and s[pos:][0] == '0':
return
if int(s[pos:]) < 256 and int(s[pos:]) > -1:
ret.append(tmp + s[pos:])
return
if (cnt + 1) * 3 < len(s) - pos:
return
for i in range(1, 4):
if pos + i >= len(s):
return
if len(s[pos:pos+i]) > 1 and s[pos:pos+i][0] == '0':
continue
if int(s[pos:pos+i]) < 0 or int(s[pos:pos+i]) > 255 and s[pos:pos+i][0] != '0':
continue
traverse(pos + i, cnt - 1, tmp + s[pos:pos+i] + '.')
traverse(0, 3, '')
return ret
| true | true |
1c457a9117673b494d492a9f4ab781bd3957996b | 1,632 | py | Python | Data Scientist Career Path/12. Foundations of Machine Learning Unsupervised Learning/2. KMeans++/1. intro.py | myarist/Codecademy | 2ba0f104bc67ab6ef0f8fb869aa12aa02f5f1efb | [
"MIT"
] | 23 | 2021-06-06T15:35:55.000Z | 2022-03-21T06:53:42.000Z | Data Scientist Career Path/12. Foundations of Machine Learning Unsupervised Learning/2. KMeans++/1. intro.py | shivaniverma1/Data-Scientist | f82939a411484311171465591455880c8e354750 | [
"MIT"
] | null | null | null | Data Scientist Career Path/12. Foundations of Machine Learning Unsupervised Learning/2. KMeans++/1. intro.py | shivaniverma1/Data-Scientist | f82939a411484311171465591455880c8e354750 | [
"MIT"
] | 9 | 2021-06-08T01:32:04.000Z | 2022-03-18T15:38:09.000Z | import codecademylib3_seaborn
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from sklearn.cluster import KMeans
import random
import timeit
# Synthetic data: four Gaussian blobs centered on the corners of a square
# (0.25/0.75 in each coordinate), 100 points each, with a fixed seed so the
# comparison below is reproducible.  NOTE(review): ``mu`` is unused.
mu = 1
std = 0.5
np.random.seed(100)
xs = np.append(np.append(np.append(np.random.normal(0.25,std,100), np.random.normal(0.75,std,100)), np.random.normal(0.25,std,100)), np.random.normal(0.75,std,100))
ys = np.append(np.append(np.append(np.random.normal(0.25,std,100), np.random.normal(0.25,std,100)), np.random.normal(0.75,std,100)), np.random.normal(0.75,std,100))
values = list(zip(xs, ys))
# First model: centroids initialized uniformly at random.
model = KMeans(init='random', n_clusters=2)
results = model.fit_predict(values)
print("The inertia of model that randomly initialized centroids is " + str(model.inertia_))
colors = ['#6400e4', '#ffc740']
plt.subplot(211)
# Scatter each cluster in its own color.
for i in range(2):
  points = np.array([values[j] for j in range(len(values)) if results[j] == i])
  plt.scatter(points[:, 0], points[:, 1], c=colors[i], alpha=0.6)
plt.title('Codecademy Mobile Feedback - Centroids Initialized Randomly')
plt.xlabel('Learn Python')
plt.ylabel('Learn SQL')
plt.subplot(212)
# Second model: default init is k-means++ (smarter centroid seeding).
model = KMeans( n_clusters=2)
results = model.fit_predict(values)
print("The inertia of model that initialized the centroids using KMeans++ is " + str(model.inertia_))
colors = ['#6400e4', '#ffc740']
for i in range(2):
  points = np.array([values[j] for j in range(len(values)) if results[j] == i])
  plt.scatter(points[:, 0], points[:, 1], c=colors[i], alpha=0.6)
plt.title('Codecademy Mobile Feedback - Centroids Initialized Using KMeans++')
plt.xlabel('Learn Python')
plt.ylabel('Learn SQL')
plt.tight_layout()
plt.show()
| 27.2 | 164 | 0.712623 | import codecademylib3_seaborn
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from sklearn.cluster import KMeans
import random
import timeit
mu = 1
std = 0.5
np.random.seed(100)
xs = np.append(np.append(np.append(np.random.normal(0.25,std,100), np.random.normal(0.75,std,100)), np.random.normal(0.25,std,100)), np.random.normal(0.75,std,100))
ys = np.append(np.append(np.append(np.random.normal(0.25,std,100), np.random.normal(0.25,std,100)), np.random.normal(0.75,std,100)), np.random.normal(0.75,std,100))
values = list(zip(xs, ys))
model = KMeans(init='random', n_clusters=2)
results = model.fit_predict(values)
print("The inertia of model that randomly initialized centroids is " + str(model.inertia_))
colors = ['#6400e4', '#ffc740']
plt.subplot(211)
for i in range(2):
points = np.array([values[j] for j in range(len(values)) if results[j] == i])
plt.scatter(points[:, 0], points[:, 1], c=colors[i], alpha=0.6)
plt.title('Codecademy Mobile Feedback - Centroids Initialized Randomly')
plt.xlabel('Learn Python')
plt.ylabel('Learn SQL')
plt.subplot(212)
model = KMeans( n_clusters=2)
results = model.fit_predict(values)
print("The inertia of model that initialized the centroids using KMeans++ is " + str(model.inertia_))
colors = ['#6400e4', '#ffc740']
for i in range(2):
points = np.array([values[j] for j in range(len(values)) if results[j] == i])
plt.scatter(points[:, 0], points[:, 1], c=colors[i], alpha=0.6)
plt.title('Codecademy Mobile Feedback - Centroids Initialized Using KMeans++')
plt.xlabel('Learn Python')
plt.ylabel('Learn SQL')
plt.tight_layout()
plt.show()
| true | true |
1c457b258f46e8b97aa913da1acea83fba03eaed | 944 | py | Python | rrpython/tests/types/test_str.py | afoolsbag/rrPython | cb4d376b7c02e39d4e88163f272456ebb9eeafc9 | [
"Unlicense"
] | null | null | null | rrpython/tests/types/test_str.py | afoolsbag/rrPython | cb4d376b7c02e39d4e88163f272456ebb9eeafc9 | [
"Unlicense"
] | null | null | null | rrpython/tests/types/test_str.py | afoolsbag/rrPython | cb4d376b7c02e39d4e88163f272456ebb9eeafc9 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python3
# coding: utf-8
r"""
字符串类型。
::
+-> Container: obj.__contains__(self, item) # item in obj
|
+-> Sized: obj.__len__(self) # len(obj)
|
+-> Iterable: obj.__iter__(self) # iter(obj)
|
+-> Collection
|
| +-> Iterable: obj.__iter__(self) # iter(obj)
| |
+-> Reversible: obj.__reversed__(self) # reversed(obj)
|
+-> Sequence: obj.__getitem__(self, index) # obj[index]
| obj.count(self, value)
| obj.index(self, value, start=0, stop=None)
|
str
Notes
-----
- `字符串类型 <https://docs.python.org/zh-cn/3/library/stdtypes.html#text-sequence-type-str>`_
"""
__version__ = '2020.09.27'
__since__ = '2020.09.24'
__author__ = 'zhengrr'
__license__ = 'UNLICENSE'
from typing import Sequence
def test_issubclass() -> None:
    """``str`` is registered as a ``typing.Sequence`` (ABC subclass check)."""
    str_is_sequence = issubclass(str, Sequence)
    assert str_is_sequence
| 23.02439 | 89 | 0.54661 |
__version__ = '2020.09.27'
__since__ = '2020.09.24'
__author__ = 'zhengrr'
__license__ = 'UNLICENSE'
from typing import Sequence
def test_issubclass() -> None:
assert issubclass(str, Sequence)
| true | true |
1c457bc8969abfb76d85c1df6226dd8f0956c564 | 13,447 | py | Python | lime/optics.py | binggu56/lime | 07f60c5105f0bedb11ac389fd671f4f1737a71fe | [
"MIT"
] | 4 | 2020-01-15T11:52:23.000Z | 2021-01-05T19:40:36.000Z | lime/optics.py | binggu56/lime | 07f60c5105f0bedb11ac389fd671f4f1737a71fe | [
"MIT"
] | null | null | null | lime/optics.py | binggu56/lime | 07f60c5105f0bedb11ac389fd671f4f1737a71fe | [
"MIT"
] | 3 | 2020-02-14T07:10:44.000Z | 2021-04-14T17:49:45.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 26 17:26:02 2019
@author: binggu
"""
import numpy as np
from scipy.sparse import lil_matrix, csr_matrix, kron, identity, linalg
from numpy import sqrt, exp, pi
import matplotlib.pyplot as plt
from lime.units import au2k, au2ev
from lime.fft import fft2
from lime.phys import rect, sinc, dag, interval
from lime.style import set_style, imshow
from numba import jit
class Pulse:
    """
    Linearly chirped Gaussian laser pulse.

    The positive-frequency component of the field reads

        E(t) = A/2 * exp(-(t - t0)^2 / (2 T^2))
                   * exp[-i w (t - t0) (1 + beta (t - t0) / T)]

    with amplitude A, delay t0, duration T and carrier frequency w.
    """

    def __init__(self, tau, omegac, delay=0., amplitude=0.001, cep=0., beta=0):
        """
        Parameters
        ----------
        tau : float
            Gaussian duration of the pulse (atomic units)
        omegac : float
            carrier (central) frequency
        delay : float
            time at which the pulse is centered
        amplitude : float
            peak electric-field amplitude
        cep : float
            carrier-envelope phase
        beta : float
            dimensionless linear chirp rate
        """
        self.delay = delay
        self.tau = tau
        self.sigma = tau  # legacy alias kept for backward compatibility
        self.omegac = omegac  # central frequency
        self.unit = 'au'
        self.amplitude = amplitude
        self.cep = cep
        self.bandwidth = 1. / tau
        self.duration = 2. * tau
        self.beta = beta  # linear chirping rate, dimensionless
        self.ndim = 1

    def envelop(self, t):
        """Gaussian envelope of the pulse at time ``t``."""
        shifted = t - self.delay
        return np.exp(-shifted ** 2 / 2. / self.tau ** 2)

    def spectrum(self, omega):
        """
        Analytic Fourier transform of the chirped Gaussian pulse.
        """
        center = self.omegac
        width = self.tau
        # complex Gaussian parameter; the imaginary part carries the chirp
        a = 0.5 / width ** 2 + 1j * self.beta * center / width
        detuning = omega - center
        return self.amplitude * np.sqrt(np.pi / a) * np.exp(-detuning ** 2 / 4. / a)

    def field(self, t):
        """Alias for :meth:`efield`."""
        return self.efield(t)

    def efield(self, t):
        """
        Real electric field at time ``t``.

        Parameters
        ----------
        t : float or array
            time(s) at which to evaluate the field

        Returns
        -------
        float or array
            real part of the complex analytic field
        """
        shifted = t - self.delay
        gauss = np.exp(-shifted ** 2 / 2. / self.sigma ** 2)
        carrier = np.exp(-1j * self.omegac * shifted)
        chirp = np.exp(-1j * self.beta * self.omegac * shifted ** 2 / self.sigma)
        E = self.amplitude * gauss * carrier * chirp
        return E.real

    def spectrogram(self, efield):
        # Placeholder: time-frequency analysis is not implemented yet.
        return
# def heaviside(x):
# """
# Heaviside function defined in a grid.
# returns 0 if x<=0, and 1 if x>0
# """
# x = np.asarray(x)
# y = np.zeros(x.shape)
# y[x > 0] = 1.0
# return y
class Biphoton:
    """
    Entangled photon pair (biphoton) produced by degenerate parametric
    down-conversion of a Gaussian pump.
    """

    def __init__(self, omegap, bw, Te, p=None, q=None, phase_matching='sinc'):
        """
        Parameters
        ----------
        omegap : float
            pump carrier frequency
        bw : float
            pump bandwidth
        Te : float
            entanglement time
        p : 1d array, optional
            signal frequency grid (detuning from the center frequency)
        q : 1d array, optional
            idler frequency grid
        phase_matching : str
            phase-matching model, 'sinc' (default, narrowband
            approximation) or 'Gaussian'
        """
        self.omegap = omegap
        self.pump_bandwidth = bw
        self.phase_matching = phase_matching
        # degenerate SPDC: each daughter photon carries half the pump energy
        self.signal_center_frequency = omegap / 2.
        self.idler_center_frequency = omegap / 2.
        self.entanglement_time = Te
        self.jsa = None
        self.jta = None  # NOTE: this attribute shadows the jta() method
        self.p = p
        self.q = q
        if p is not None:
            self.dp = interval(p)
            self.dq = interval(q)
            self.grid = [p, q]

    def pump(self, bandwidth):
        """
        Gaussian pump envelope evaluated on the (p, q) frequency meshgrid.

        Parameters
        ----------
        bandwidth : float
            pump bandwidth

        Returns
        -------
        alpha : 2d array
            normalized pump envelope as a function of the sum frequency
        """
        if self.p is None or self.q is None:
            raise ValueError('Frequency grids are not set. Call set_grid() first.')
        # Bug fix: the original referenced the undefined module-level names
        # ``p`` and ``q`` (a NameError outside the demo script); use the
        # instance grids on a meshgrid, consistent with _jsa().
        P, Q = np.meshgrid(self.p, self.q)
        alpha = np.sqrt(1. / (np.sqrt(2. * np.pi) * bandwidth)) * \
            np.exp(-(P + Q) ** 2 / 4. / bandwidth ** 2)
        return alpha

    def set_grid(self, p, q):
        """Set the signal (p) and idler (q) frequency grids."""
        self.p = p
        self.q = q
        return

    def get_jsa(self):
        """
        Compute and cache the joint spectral amplitude.

        Returns
        -------
        jsa : 2d array
            joint spectral amplitude on the (p, q) meshgrid
        """
        self.jsa = _jsa(self.p, self.q, self.pump_bandwidth,
                        model=self.phase_matching,
                        Te=self.entanglement_time)
        return self.jsa

    def get_jta(self):
        """
        Joint temporal amplitude J(ts, ti): the 2D Fourier transform of
        the joint spectral amplitude.

        Returns
        -------
        ts : 1d array
            signal time grid
        ti : 1d array
            idler time grid
        jta : 2d array
            joint temporal amplitude

        Raises
        ------
        ValueError
            if the JSA has not been computed yet.
        """
        if self.jsa is None:
            raise ValueError('jsa is None. Call get_jsa() first.')
        dp = self.p[1] - self.p[0]
        dq = self.q[1] - self.q[0]
        ts, ti, jta = fft2(self.jsa, dp, dq)
        self.jta = jta
        return ts, ti, jta

    def jta(self, ts, ti):
        # Placeholder; unreachable on instances because __init__ sets the
        # ``jta`` attribute, which shadows this method.
        return

    def detect(self):
        """
        Two-photon detection amplitude on the temporal grid conjugate to
        the spectral grid.

        Returns
        -------
        t1 : 1d array
        t2 : 1d array
        d : 2d array
            detection amplitude on the (t1, t2) grid

        Raises
        ------
        ValueError
            if the JSA has not been computed yet.
        """
        if self.jsa is None:
            raise ValueError('Please call get_jsa() to compute the jsa first.')
        omega_s = self.signal_center_frequency
        omega_i = self.idler_center_frequency
        dp = self.p[1] - self.p[0]
        dq = self.q[1] - self.q[0]
        return _detection_amplitude(self.jsa, omega_s, omega_i, dp, dq)

    def detect_si(self):
        # Not implemented: signal-before-idler ordered detection.
        pass

    def detect_is(self):
        # Not implemented: idler-before-signal ordered detection.
        pass

    def g2(self):
        # Not implemented: second-order correlation function.
        pass

    def bandwidth(self, which='signal'):
        """
        RMS bandwidth of the signal or idler marginal distribution.

        Parameters
        ----------
        which : str, optional
            'signal' (default) or 'idler'

        Returns
        -------
        sigma : float
            RMS width of the requested marginal

        Raises
        ------
        ValueError
            if ``which`` is neither 'signal' nor 'idler'.
        """
        p, q = self.p, self.q
        dp = interval(p)
        dq = interval(q)
        f = self.jsa
        if which == 'signal':
            # Bug fix: ``dq`` was previously passed positionally into the
            # ``dx`` slot of rdm(), but the 'x' reduction is weighted by dy.
            rho = rdm(f, dy=dq, which='x')
            sigma = sqrt(rho.diagonal().dot(p**2) * dp)
        elif which == 'idler':
            rho = rdm(f, dx=dp, which='y')
            sigma = sqrt(rho.diagonal().dot(q**2) * dq)
        else:
            # Bug fix: previously fell through with ``sigma`` unbound.
            raise ValueError("which can only be 'signal' or 'idler'.")
        return sigma

    def plt_jsa(self, xlabel=None, ylabel=None, fname=None):
        """
        Plot the modulus of the JSA (axes in eV); optionally save to
        ``fname``.  Returns the matplotlib axes.
        """
        if self.jsa is None:
            self.get_jsa()
        plt, ax = imshow(self.p * au2ev, self.q * au2ev, np.abs(self.jsa))
        if xlabel is not None:
            ax.set_xlabel(xlabel)
        if ylabel is not None:
            # Bug fix: the original called set_xlabel for the y label.
            ax.set_ylabel(ylabel)
        if fname is not None:
            plt.savefig(fname)
        plt.show()
        return ax

    def rdm(self, which='signal'):
        """
        Reduced density matrix of the signal (or idler) mode.
        """
        if which == 'signal':
            return rdm(self.jsa, dy=self.dq, which='x')
        elif which == 'idler':
            return rdm(self.jsa, dx=self.dp, which='y')
def jta(t2, t1, omegap, sigmap, Te):
    """
    Analytical joint temporal amplitude for an SPDC type-II
    two-photon state.

    Note that the two single-photon electric-field prefactors are
    neglected.

    Parameters
    ----------
    t2 : float or array
        idler detection time
    t1 : float or array
        signal detection time
    omegap : float
        pump carrier frequency (split equally between signal and idler)
    sigmap : float
        pump bandwidth
    Te : float
        entanglement time (the rect window limits |t2 - t1| to Te)

    Returns
    -------
    amp : complex float or array
        joint temporal amplitude at (t1, t2)
    """
    omegas = omegap/2.
    omegai = omegap/2.
    tau = t2 - t1
    amp = sqrt(sigmap/Te) * (2.*pi)**(3./4) * \
        rect(tau/2./Te) * exp(-sigmap**2*(t1+t2)**2/4.) *\
        exp(-1j * omegas * t1 - 1j*omegai * t2)
    return amp
def rdm(f, dx=1, dy=1, which='x'):
    """
    Reduced density matrix of a 2D wavefunction, obtained by tracing out
    the other degree of freedom.

    Parameters
    ----------
    f : 2d array
        the bipartite wavefunction
    dx : float, optional
        grid spacing of the first coordinate (default 1)
    dy : float, optional
        grid spacing of the second coordinate (default 1)
    which : str
        'x' keeps the first coordinate (traces out y);
        'y' keeps the second coordinate (traces out x)

    Returns
    -------
    2d array
        the reduced density matrix

    Raises
    ------
    ValueError
        if ``which`` is neither 'x' nor 'y'.
    """
    if which == 'x':
        return f.dot(dag(f)) * dy
    if which == 'y':
        return f.T.dot(np.conj(f)) * dx
    raise ValueError('The argument which can only be x or y.')
def _jsa(p, q, pump_bw, model='sinc', Te=None):
'''
Construct the joint spectral amplitude
Parameters
----------
p : 1d array
signal frequency (detuning from the center frequency)
q : 1d array
idler frequency
pump_bw : float
pump bandwidth
sm : float
1/entanglement time
Te : float
Entanglement time.
Returns
-------
jsa : TYPE
DESCRIPTION.
'''
P, Q = np.meshgrid(p, q)
sigma_plus = pump_bw
sigma_minus = 1. / Te
# pump envelope
alpha = np.sqrt(1. / (np.sqrt(2. * np.pi) * sigma_plus)) * \
np.exp(-(P + Q) ** 2 / 4. / sigma_plus ** 2)
# phase-matching function
if model == 'Gaussian':
beta = np.sqrt(1. / np.sqrt(2. * np.pi) / sigma_minus) * \
np.exp(-(P - Q) ** 2 / 4. / sigma_minus ** 2)
jsa = sqrt(2) * alpha * beta
elif model == 'sinc':
beta = sqrt(0.5 * Te / np.pi) * sinc(Te * (P - Q) / 4.)
# const = np.trace(dag(f).dot(f))*dq*dp
jsa = alpha * beta
return jsa
def hom(p, q, f, tau):
    """
    Hong-Ou-Mandel coincidence probability versus delay, obtained by
    direct integration of the joint spectral amplitude over the grid.

    Parameters
    ----------
    p, q : 1d arrays
        signal and idler frequency grids
    f : 2d array
        joint spectral amplitude
    tau : 1d array
        delay times

    Returns
    -------
    prob : 1d array
        coincidence probability at each delay
    """
    dp = interval(p)
    dq = interval(q)
    P, Q = np.meshgrid(p, q)
    prob = np.zeros(len(tau))
    for j, t in enumerate(tau):
        # Overlap of the JSA with its transpose under the delay phase.
        overlap = np.sum(f.conj() * f.T * np.exp(1j * (P - Q) * t))
        prob[j] = 0.5 - 0.5 * overlap.real * dq * dp
    return prob
def hom_schmidt(p, q, f, tau, method='rdm', nmodes=5):
    """
    HOM coincidence signal evaluated from the Schmidt modes of the
    entangled light.

    Parameters
    ----------
    p, q : 1d arrays
        signal and idler frequency grids
    f : 2d array
        joint spectral amplitude
    tau : 1d array
        delay times.  (Bug fix: the original read ``tau`` from an
        undefined global name, raising a NameError outside the demo
        script; it is now an explicit parameter.)
    method : str
        Schmidt decomposition method passed to ``schmidt_decompose``
    nmodes : int
        number of Schmidt modes retained

    Returns
    -------
    prob : 1d array
        coincidence probability at each delay
    """
    dp = interval(p)
    dq = interval(q)
    # Schmidt-decompose the joint spectral amplitude.
    s, phi, chi = schmidt_decompose(f, dp, dq, method=method,
                                    nmodes=nmodes)
    prob = np.zeros(len(tau))
    for j, t in enumerate(tau):
        for a in range(nmodes):
            for b in range(nmodes):
                tmp1 = (phi[:, a].conj() * chi[:, b] * np.exp(1j * p * t)).sum() * dp
                tmp2 = (phi[:, b] * chi[:, a].conj() * np.exp(-1j * q * t)).sum() * dq
                prob[j] += -2. * np.real(s[a] * s[b] * tmp1 * tmp2)
    prob = 0.5 + prob / 4.
    return prob
def schmidt_decompose(f, dp, dq, nmodes=5, method='rdm'):
    """
    Schmidt decomposition of a two-dimensional (bipartite) amplitude,

        f(x, y) ~ sum_k  s_k  phi_k(x)  psi_k(y)

    Parameters
    ----------
    f : 2d array
        bipartite amplitude (e.g. a joint spectral amplitude)
    dp, dq : float
        grid spacings of the first and second coordinate
    nmodes : int
        number of Schmidt modes to keep
    method : str
        'rdm' diagonalizes the two reduced density kernels;
        'svd' is not implemented yet.

    Returns
    -------
    s : 1d array
        the leading ``nmodes`` Schmidt coefficients (largest first)
    phi : 2d array
        corresponding modes of the first coordinate (columns)
    psi : 2d array
        corresponding modes of the second coordinate (columns)
    """
    if method != 'rdm':
        raise NotImplementedError("Only method='rdm' is implemented.")
    # reduced density kernels for each subsystem (dag(f) == f.conj().T)
    kernel1 = f.dot(f.conj().T) * dq * dp
    kernel2 = f.T.dot(f.conj()) * dp * dq
    print('c: Schmidt coefficients')
    s, phi = np.linalg.eig(kernel1)
    s1, psi = np.linalg.eig(kernel2)
    # Bug fix: np.linalg.eig returns eigenpairs in no particular order, so
    # the [:nmodes] truncation did not keep the dominant Schmidt modes and
    # phi/psi columns were not paired.  Sort both kernels' eigenpairs by
    # decreasing eigenvalue (pairing is unique up to degeneracy).
    order1 = np.argsort(-s.real)
    s = s[order1]
    phi = phi[:, order1]
    order2 = np.argsort(-s1.real)
    psi = psi[:, order2]
    # normalize the modes with respect to the grid measure
    phi /= np.sqrt(dp)
    psi /= np.sqrt(dq)
    return np.sqrt(s[:nmodes]), phi[:, :nmodes], psi[:, :nmodes]
def _detection_amplitude(jsa, omega1, omega2, dp, dq):
    '''
    Two-photon detection amplitude <0|E(t)E(t')|Phi>, where t, t' are
    defined on the 2D temporal grid produced by the FFT and
    E(t) = Es(t) + Ei(t) is the total electric-field operator.

    The result contains two terms corresponding to the two orderings of
    the photon interactions,

        <0|T Ei(t)Es(t')|Phi> + <0|T Es(t)Ei(t')|Phi>

    The times t, t' are durations measured relative to t0.

    Parameters
    ----------
    jsa : 2d array
        joint spectral amplitude
    omega1 : float
        central frequency of the signal beam
    omega2 : float
        central frequency of the idler beam
    dp : float
        signal frequency grid spacing
    dq : float
        idler frequency grid spacing

    Returns
    -------
    t1 : 1d array
    t2 : 1d array
    d : 2d array
        detection amplitude on the (t1, t2) grid
    '''
    t1, t2, jta = fft2(jsa, dp, dq)
    dt2 = t2[1] - t2[0]
    T1, T2 = np.meshgrid(t1, t2)
    # detection amplitude d(t1, t2) ~ JTA(t2, t1); the two terms swap which
    # beam carries which time argument
    d = np.exp(-1j * omega2 * T1 - 1j * omega1 * T2) * \
        np.sqrt(omega1 * omega2) * jta.T + \
        np.exp(-1j * omega1 * T1 - 1j * omega2 * T2) * \
        np.sqrt(omega1 * omega2) * jta
    # amp = np.einsum('ij, ij -> i', d, heaviside(T1 - T2) * \
    #     np.exp(-1j * gap20 * (T1-T2))) * dt2
    return t1, t2, d
if __name__ == '__main__':
    from lime.units import au2ev, au2fs
    # frequency grids (eV converted to atomic units)
    p = np.linspace(-2, 2, 128) / au2ev
    q = p
    # entangled photon pair from a 3 eV pump with 0.2 eV bandwidth
    epp = Biphoton(omegap=3 / au2ev, bw=0.2 / au2ev, Te=10/au2fs,
                   p=p, q=q)
    JSA = epp.get_jsa()
    # epp.plt_jsa()
    # t1, t2, d = epp.detect()
    # HOM dip: coincidence probability as a function of the delay
    tau = np.linspace(-10, 10)/au2fs
    prob = hom(p, q, JSA, tau)
    fig, ax = plt.subplots()
    ax.plot(tau, prob)
    plt.show()
import numpy as np
from scipy.sparse import lil_matrix, csr_matrix, kron, identity, linalg
from numpy import sqrt, exp, pi
import matplotlib.pyplot as plt
from lime.units import au2k, au2ev
from lime.fft import fft2
from lime.phys import rect, sinc, dag, interval
from lime.style import set_style, imshow
from numba import jit
class Pulse:
def __init__(self, tau, omegac, delay=0., amplitude=0.001, cep=0., beta=0):
self.delay = delay
self.tau = tau
self.sigma = tau
self.omegac = omegac
self.unit = 'au'
self.amplitude = amplitude
self.cep = cep
self.bandwidth = 1./tau
self.duration = 2. * tau
self.beta = beta
self.ndim = 1
def envelop(self, t):
return np.exp(-(t-self.delay)**2/2./self.tau**2)
def spectrum(self, omega):
omega0 = self.omegac
T = self.tau
A0 = self.amplitude
beta = self.beta
a = 0.5/T**2 + 1j * beta * omega0/T
return A0 * np.sqrt(np.pi/a) * np.exp(-(omega - omega0)**2/4./a)
def field(self, t):
return self.efield(t)
def efield(self, t):
omegac = self.omegac
t0 = self.delay
a = self.amplitude
tau = self.sigma
beta = self.beta
E = a * np.exp(-(t-t0)**2/2./tau**2)*np.exp(-1j * omegac * (t-t0))\
* np.exp(-1j * beta * omegac * (t-t0)**2/tau)
return E.real
def spectrogram(self, efield):
return
# Heaviside function defined in a grid.
# returns 0 if x<=0, and 1 if x>0
# """
class Biphoton:
def __init__(self, omegap, bw, Te, p=None, q=None, phase_matching='sinc'):
self.omegap = omegap
self.pump_bandwidth = bw
self.phase_matching = phase_matching
self.signal_center_frequency = omegap / 2.
self.idler_center_frequency = omegap / 2.
self.entanglement_time = Te
self.jsa = None
self.jta = None
self.p = p
self.q = q
if p is not None:
self.dp = interval(p)
self.dq = interval(q)
self.grid = [p, q]
def pump(self, bandwidth):
alpha = np.sqrt(1. / (np.sqrt(2. * np.pi) * bandwidth)) * \
np.exp(-(p + q) ** 2 / 4. / bandwidth ** 2)
return alpha
def set_grid(self, p, q):
self.p = p
self.q = q
return
def get_jsa(self):
p = self.p
q = self.q
bw = self.pump_bandwidth
self.jsa = _jsa(p, q, bw, model=self.phase_matching,
Te=self.entanglement_time)
return self.jsa
def get_jta(self):
p = self.p
q = self.q
dp = p[1] - p[0]
dq = q[1] - q[0]
if self.jsa is not None:
ts, ti, jta = fft2(self.jsa, dp, dq)
self.jta = jta
return ts, ti, jta
else:
raise ValueError('jsa is None. Call get_jsa() first.')
def jta(self, ts, ti):
return
def detect(self):
if self.jsa is None:
raise ValueError('Please call get_jsa() to compute the jsa first.')
bw = self.pump_bandwidth
omega_s = self.signal_center_frequency
omega_i = self.idler_center_frequency
p = self.p
q = self.q
dp = p[1] - p[0]
dq = q[1] - q[0]
return _detection_amplitude(self.jsa, omega_s, omega_i, dp, dq)
def detect_si(self):
pass
def detect_is(self):
pass
def g2(self):
pass
def bandwidth(self, which='signal'):
p, q = self.p, self.q
dp = interval(p)
dq = interval(q)
f = self.jsa
if which == 'signal':
rho = rdm(f, dq, which='x')
sigma = sqrt(rho.diagonal().dot(p**2) * dp)
elif which == 'idler':
rho = rdm(f, dp, which='y')
sigma = sqrt(rho.diagonal().dot(q**2) * dq)
return sigma
def plt_jsa(self, xlabel=None, ylabel=None, fname=None):
if self.jsa is None:
self.get_jsa()
plt, ax = imshow(self.p * au2ev, self.q * au2ev, np.abs(self.jsa))
if xlabel is not None: ax.set_xlabel(xlabel)
if ylabel is not None: ax.set_xlabel(ylabel)
if fname is not None:
plt.savefig(fname)
plt.show()
return ax
def rdm(self, which='signal'):
    """Reduced density matrix of one mode, tracing out the other.

    Generalized: the original silently returned None for 'idler'; the
    idler branch now mirrors the signal one (and bandwidth()), using the
    module-level rdm() helper with the stored grid spacings.
    """
    if which == 'signal':
        return rdm(self.jsa, dy=self.dq, which='x')
    elif which == 'idler':
        return rdm(self.jsa, dx=self.dp, which='y')
    else:
        raise ValueError("which must be 'signal' or 'idler'")
def jta(t2, t1, omegap, sigmap, Te):
    """Joint temporal amplitude at times (t1, t2).

    NOTE(review): depends on rect/sqrt/pi/exp coming from a star import
    that is outside this chunk -- confirm.  rect is presumably a unit
    rectangle function limiting |t2 - t1| to the entanglement window.
    """
    omegas = omegap/2.
    omegai = omegap/2.
    tau = t2 - t1
    amp = sqrt(sigmap/Te) * (2.*pi)**(3./4) * \
        rect(tau/2./Te) * exp(-sigmap**2*(t1+t2)**2/4.) *\
        exp(-1j * omegas * t1 - 1j*omegai * t2)
    return amp
def rdm(f, dx=1, dy=1, which='x'):
    """Reduced density matrix obtained by tracing out one variable of f.

    which='x' traces over the second axis (scaled by dy); which='y'
    traces over the first (scaled by dx).  Anything else raises.
    """
    if which == 'x':
        return f.dot(dag(f)) * dy
    if which == 'y':
        return f.T.dot(np.conj(f)) * dx
    raise ValueError('The argument which can only be x or y.')
def _jsa(p, q, pump_bw, model='sinc', Te=None):
    """Joint spectral amplitude on the meshgrid of (p, q).

    alpha is the Gaussian pump envelope in p + q; beta is the
    phase-matching function in p - q ('Gaussian' or 'sinc').

    Note: Te is required by both models (sigma_minus = 1 / Te), despite
    its default of None -- passing Te=None raises a TypeError here.

    Raises
    ------
    ValueError
        For an unknown *model* (the original fell through and hit an
        undefined local variable instead).
    """
    P, Q = np.meshgrid(p, q)
    sigma_plus = pump_bw
    sigma_minus = 1. / Te
    alpha = np.sqrt(1. / (np.sqrt(2. * np.pi) * sigma_plus)) * \
        np.exp(-(P + Q) ** 2 / 4. / sigma_plus ** 2)
    if model == 'Gaussian':
        beta = np.sqrt(1. / np.sqrt(2. * np.pi) / sigma_minus) * \
            np.exp(-(P - Q) ** 2 / 4. / sigma_minus ** 2)
        jsa = sqrt(2) * alpha * beta
    elif model == 'sinc':
        beta = sqrt(0.5 * Te / np.pi) * sinc(Te * (P - Q) / 4.)
        jsa = alpha * beta
    else:
        raise ValueError("model must be 'Gaussian' or 'sinc'")
    return jsa
def hom(p, q, f, tau):
    """Hong-Ou-Mandel coincidence probability versus delay.

    For each delay t in *tau*, computes
    0.5 - 0.5 * Re[ sum f*(p,q) f(q,p) exp(i (P - Q) t) ] dp dq.
    """
    dp = interval(p)
    dq = interval(q)
    P, Q = np.meshgrid(p, q)
    prob = np.zeros(len(tau))
    for j, t in enumerate(tau):
        overlap = np.sum(f.conj() * f.T * np.exp(1j * (P - Q) * t)).real
        prob[j] = 0.5 - 0.5 * overlap * dq * dp
    return prob
def hom_schmidt(p, q, f, method='rdm', nmodes=5, tau=None):
    """HOM coincidence probability computed via Schmidt decomposition.

    Fix: *tau* (array of delays) was an undefined global in the original
    (only the __main__ demo defined it); it is now an explicit, optional
    keyword argument, kept last so existing positional callers still work.

    Raises
    ------
    ValueError
        If tau is not provided.
    """
    if tau is None:
        raise ValueError('tau (array of delays) must be provided')
    dp = interval(p)
    dq = interval(q)
    # Schmidt modes phi (signal) and chi (idler) with coefficients s
    s, phi, chi = schmidt_decompose(f, dp, dq, method=method,
                                    nmodes=nmodes)
    prob = np.zeros(len(tau))
    for j in range(len(tau)):
        t = tau[j]
        for a in range(nmodes):
            for b in range(nmodes):
                tmp1 = (phi[:, a].conj() * chi[:, b] * np.exp(1j * p * t)).sum() * dp
                tmp2 = (phi[:, b] * chi[:, a].conj() * np.exp(-1j * q * t)).sum() * dq
                prob[j] += -2. * np.real(s[a] * s[b] * tmp1 * tmp2)
    prob = 0.5 + prob / 4.
    return prob
def schmidt_decompose(f, dp, dq, nmodes=5, method='rdm'):
    """Schmidt decomposition of a two-mode amplitude f(p, q).

    Returns
    -------
    s : array
        sqrt of the first nmodes eigenvalues (Schmidt coefficients).
    phi, psi : 2d arrays
        First nmodes eigenvectors of the two reduced kernels,
        renormalized by the grid spacings.

    NOTE(review): np.linalg.eig does not sort its eigenvalues, so the
    leading nmodes columns are not guaranteed to be the dominant Schmidt
    modes, and the phi/psi orderings may not correspond -- confirm.
    NOTE(review): for method != 'rdm'/'svd', s/phi/psi are undefined and
    the return line raises NameError.
    """
    if method == 'rdm':
        kernel1 = f.dot(dag(f)) * dq * dp
        kernel2 = f.T.dot(f.conj()) * dp * dq
        print('c: Schmidt coefficients')
        s, phi = np.linalg.eig(kernel1)
        s1, psi = np.linalg.eig(kernel2)
        # grid-normalize the modes so they are delta-orthonormal
        phi /= np.sqrt(dp)
        psi /= np.sqrt(dq)
    elif method == 'svd':
        raise NotImplementedError
    return np.sqrt(s[:nmodes]), phi[:, :nmodes], psi[:, :nmodes]
def _detection_amplitude(jsa, omega1, omega2, dp, dq):
    """Detection amplitude d(t1, t2) built from the joint temporal amplitude.

    Transforms the JSA to the time domain via fft2, then symmetrizes over
    the two possible frequency assignments (omega1/omega2 swapped, with
    the transposed jta for the swapped term).

    Returns (t1, t2, d).
    """
    t1, t2, jta = fft2(jsa, dp, dq)
    dt2 = t2[1] - t2[0]  # NOTE(review): unused -- confirm it can be dropped
    T1, T2 = np.meshgrid(t1, t2)
    d = np.exp(-1j * omega2 * T1 - 1j * omega1 * T2) * \
        np.sqrt(omega1 * omega2) * jta.T + \
        np.exp(-1j * omega1 * T1 - 1j * omega2 * T2) * \
        np.sqrt(omega1 * omega2) * jta
    return t1, t2, d
if __name__ == '__main__':
    # Demo: build a biphoton source on a 128-point grid (atomic units via
    # lime's conversion factors) and plot the HOM dip versus delay.
    from lime.units import au2ev, au2fs

    p = np.linspace(-2, 2, 128) / au2ev
    q = p
    epp = Biphoton(omegap=3 / au2ev, bw=0.2 / au2ev, Te=10/au2fs,
                   p=p, q=q)
    JSA = epp.get_jsa()
    tau = np.linspace(-10, 10)/au2fs
    prob = hom(p, q, JSA, tau)
    fig, ax = plt.subplots()
    ax.plot(tau, prob)
    plt.show()
1c457ce30654b4e60fe6ac59186a1c9d26859b54 | 10,225 | py | Python | glance_docker/glance/common/auth.py | tobegit3hub/dockerized-software | 3781bc1145b6fbb8d5fa2e2eaeaa3aa138a69632 | [
"Apache-2.0"
] | null | null | null | glance_docker/glance/common/auth.py | tobegit3hub/dockerized-software | 3781bc1145b6fbb8d5fa2e2eaeaa3aa138a69632 | [
"Apache-2.0"
] | null | null | null | glance_docker/glance/common/auth.py | tobegit3hub/dockerized-software | 3781bc1145b6fbb8d5fa2e2eaeaa3aa138a69632 | [
"Apache-2.0"
] | null | null | null | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
This auth module is intended to allow OpenStack client-tools to select from a
variety of authentication strategies, including NoAuth (the default), and
Keystone (an identity management system).
> auth_plugin = AuthPlugin(creds)
> auth_plugin.authenticate()
> auth_plugin.auth_token
abcdefg
> auth_plugin.management_url
http://service_endpoint/
"""
import httplib2
from oslo_log import log as logging
from oslo_serialization import jsonutils
# NOTE(jokke): simplified transition to py3, behaves like py2 xrange
from six.moves import range
import six.moves.urllib.parse as urlparse
from glance.common import exception
from glance import i18n
LOG = logging.getLogger(__name__)
_ = i18n._
class BaseStrategy(object):
    """Abstract base for authentication strategies.

    Subclasses must implement authenticate(), is_authenticated and
    strategy; after a successful authenticate() they populate
    auth_token and (optionally) management_url.
    """

    def __init__(self):
        self.auth_token = None
        # TODO(sirp): Should expose selecting public/internal/admin URL.
        self.management_url = None

    def authenticate(self):
        raise NotImplementedError

    @property
    def is_authenticated(self):
        raise NotImplementedError

    @property
    def strategy(self):
        # short string identifying the strategy, e.g. 'noauth' / 'keystone'
        raise NotImplementedError
class NoAuthStrategy(BaseStrategy):
    """Strategy that performs no authentication at all."""

    def authenticate(self):
        # Nothing to do -- there is no auth service to talk to.
        pass

    @property
    def is_authenticated(self):
        return True

    @property
    def strategy(self):
        return 'noauth'
class KeystoneStrategy(BaseStrategy):
    """Keystone-based authentication, supporting both v1 and v2.0 APIs."""

    # hard cap on 305/v2.0-rewrite retries in authenticate()
    MAX_REDIRECTS = 10

    def __init__(self, creds, insecure=False, configure_via_auth=True):
        # creds: dict with username/password/auth_url/strategy (+ tenant
        # for v2.0, optional region); insecure disables SSL verification.
        self.creds = creds
        self.insecure = insecure
        self.configure_via_auth = configure_via_auth
        super(KeystoneStrategy, self).__init__()

    def check_auth_params(self):
        """Validate the supplied credential dict, raising on any gap."""
        # Ensure that supplied credential parameters are as required
        for required in ('username', 'password', 'auth_url',
                         'strategy'):
            if self.creds.get(required) is None:
                raise exception.MissingCredentialError(required=required)
        if self.creds['strategy'] != 'keystone':
            raise exception.BadAuthStrategy(expected='keystone',
                                            received=self.creds['strategy'])
        # For v2.0 also check tenant is present
        if self.creds['auth_url'].rstrip('/').endswith('v2.0'):
            if self.creds.get("tenant") is None:
                raise exception.MissingCredentialError(required='tenant')

    def authenticate(self):
        """Authenticate with the Keystone service.

        There are a few scenarios to consider here:

        1. Which version of Keystone are we using? v1 which uses headers to
           pass the credentials, or v2 which uses a JSON encoded request body?

        2. Keystone may respond back with a redirection using a 305 status
           code.

        3. We may attempt a v1 auth when v2 is what's called for. In this
           case, we rewrite the url to contain /v2.0/ and retry using the v2
           protocol.
        """
        def _authenticate(auth_url):
            # If OS_AUTH_URL is missing a trailing slash add one
            if not auth_url.endswith('/'):
                auth_url += '/'
            token_url = urlparse.urljoin(auth_url, "tokens")
            # 1. Check Keystone version
            is_v2 = auth_url.rstrip('/').endswith('v2.0')
            if is_v2:
                self._v2_auth(token_url)
            else:
                self._v1_auth(token_url)

        self.check_auth_params()
        auth_url = self.creds['auth_url']
        for _ in range(self.MAX_REDIRECTS):
            try:
                _authenticate(auth_url)
            except exception.AuthorizationRedirect as e:
                # 2. Keystone may redirect us
                auth_url = e.url
            except exception.AuthorizationFailure:
                # 3. In some configurations nova makes redirection to
                # v2.0 keystone endpoint. Also, new location does not
                # contain real endpoint, only hostname and port.
                if 'v2.0' not in auth_url:
                    auth_url = urlparse.urljoin(auth_url, 'v2.0/')
            else:
                # If we successfully auth'd, then memorize the correct auth_url
                # for future use.
                self.creds['auth_url'] = auth_url
                break
        else:
            # Guard against a redirection loop
            raise exception.MaxRedirectsExceeded(redirects=self.MAX_REDIRECTS)

    def _v1_auth(self, token_url):
        """Authenticate via the v1 API: credentials travel in headers."""
        creds = self.creds
        headers = {
            'X-Auth-User': creds['username'],
            'X-Auth-Key': creds['password']
        }
        tenant = creds.get('tenant')
        if tenant:
            headers['X-Auth-Tenant'] = tenant
        resp, resp_body = self._do_request(token_url, 'GET', headers=headers)

        def _management_url(self, resp):
            # Return the first management-URL header present in the
            # response; if none is found, re-raise the last KeyError.
            for url_header in ('x-image-management-url',
                               'x-server-management-url',
                               'x-glance'):
                try:
                    return resp[url_header]
                except KeyError as e:
                    not_found = e
            raise not_found

        if resp.status in (200, 204):
            try:
                if self.configure_via_auth:
                    self.management_url = _management_url(self, resp)
                self.auth_token = resp['x-auth-token']
            except KeyError:
                raise exception.AuthorizationFailure()
        elif resp.status == 305:
            raise exception.AuthorizationRedirect(uri=resp['location'])
        elif resp.status == 400:
            raise exception.AuthBadRequest(url=token_url)
        elif resp.status == 401:
            raise exception.NotAuthenticated()
        elif resp.status == 404:
            raise exception.AuthUrlNotFound(url=token_url)
        else:
            raise Exception(_('Unexpected response: %s') % resp.status)

    def _v2_auth(self, token_url):
        """Authenticate via the v2.0 API: JSON-encoded password credentials."""
        creds = self.creds
        creds = {
            "auth": {
                "tenantName": creds['tenant'],
                "passwordCredentials": {
                    "username": creds['username'],
                    "password": creds['password']
                }
            }
        }
        headers = {'Content-Type': 'application/json'}
        req_body = jsonutils.dumps(creds)
        resp, resp_body = self._do_request(
            token_url, 'POST', headers=headers, body=req_body)
        if resp.status == 200:
            resp_auth = jsonutils.loads(resp_body)['access']
            creds_region = self.creds.get('region')
            if self.configure_via_auth:
                # pick the image endpoint out of the returned catalog
                endpoint = get_endpoint(resp_auth['serviceCatalog'],
                                        endpoint_region=creds_region)
                self.management_url = endpoint
            self.auth_token = resp_auth['token']['id']
        elif resp.status == 305:
            raise exception.RedirectException(resp['location'])
        elif resp.status == 400:
            raise exception.AuthBadRequest(url=token_url)
        elif resp.status == 401:
            raise exception.NotAuthenticated()
        elif resp.status == 404:
            raise exception.AuthUrlNotFound(url=token_url)
        else:
            raise Exception(_('Unexpected response: %s') % resp.status)

    @property
    def is_authenticated(self):
        return self.auth_token is not None

    @property
    def strategy(self):
        return 'keystone'

    def _do_request(self, url, method, headers=None, body=None):
        """Issue one HTTP request; HTTP errors come back as status codes
        (force_exception_to_status_code), not exceptions."""
        headers = headers or {}
        conn = httplib2.Http()
        conn.force_exception_to_status_code = True
        conn.disable_ssl_certificate_validation = self.insecure
        headers['User-Agent'] = 'glance-client'
        resp, resp_body = conn.request(url, method, headers=headers, body=body)
        return resp, resp_body
def get_plugin_from_strategy(strategy, creds=None, insecure=False,
                             configure_via_auth=True):
    """Factory: build the auth plugin for the named strategy."""
    if strategy == 'noauth':
        return NoAuthStrategy()
    if strategy == 'keystone':
        return KeystoneStrategy(creds, insecure,
                                configure_via_auth=configure_via_auth)
    raise Exception(_("Unknown auth strategy '%s'") % strategy)
def get_endpoint(service_catalog, service_type='image', endpoint_region=None,
                 endpoint_type='publicURL'):
    """
    Select an endpoint from the service catalog

    We search the full service catalog for services
    matching both type and region. If the client
    supplied no region then any 'image' endpoint
    is considered a match. There must be one -- and
    only one -- successful match in the catalog,
    otherwise we will raise an exception.
    """
    endpoint = None
    for service in service_catalog:
        try:
            s_type = service['type']
        except KeyError:
            # Fix: the original interpolated s_type, which is always None on
            # this path; show the offending catalog entry instead.  Also use
            # warning() -- warn() is a deprecated alias.
            LOG.warning(_('Encountered service with no "type": %s') % service)
            continue

        if s_type == service_type:
            for ep in service['endpoints']:
                if endpoint_region is None or endpoint_region == ep['region']:
                    if endpoint is not None:
                        # This is a second match, abort
                        raise exception.RegionAmbiguity(region=endpoint_region)
                    endpoint = ep
    if endpoint and endpoint.get(endpoint_type):
        return endpoint[endpoint_type]
    else:
        raise exception.NoServiceEndpoint()
| 34.897611 | 79 | 0.603619 |
import httplib2
from oslo_log import log as logging
from oslo_serialization import jsonutils
from six.moves import range
import six.moves.urllib.parse as urlparse
from glance.common import exception
from glance import i18n
LOG = logging.getLogger(__name__)
_ = i18n._
class BaseStrategy(object):
def __init__(self):
self.auth_token = None
self.management_url = None
def authenticate(self):
raise NotImplementedError
@property
def is_authenticated(self):
raise NotImplementedError
@property
def strategy(self):
raise NotImplementedError
class NoAuthStrategy(BaseStrategy):
def authenticate(self):
pass
@property
def is_authenticated(self):
return True
@property
def strategy(self):
return 'noauth'
class KeystoneStrategy(BaseStrategy):
MAX_REDIRECTS = 10
def __init__(self, creds, insecure=False, configure_via_auth=True):
self.creds = creds
self.insecure = insecure
self.configure_via_auth = configure_via_auth
super(KeystoneStrategy, self).__init__()
def check_auth_params(self):
for required in ('username', 'password', 'auth_url',
'strategy'):
if self.creds.get(required) is None:
raise exception.MissingCredentialError(required=required)
if self.creds['strategy'] != 'keystone':
raise exception.BadAuthStrategy(expected='keystone',
received=self.creds['strategy'])
if self.creds['auth_url'].rstrip('/').endswith('v2.0'):
if self.creds.get("tenant") is None:
raise exception.MissingCredentialError(required='tenant')
def authenticate(self):
def _authenticate(auth_url):
if not auth_url.endswith('/'):
auth_url += '/'
token_url = urlparse.urljoin(auth_url, "tokens")
is_v2 = auth_url.rstrip('/').endswith('v2.0')
if is_v2:
self._v2_auth(token_url)
else:
self._v1_auth(token_url)
self.check_auth_params()
auth_url = self.creds['auth_url']
for _ in range(self.MAX_REDIRECTS):
try:
_authenticate(auth_url)
except exception.AuthorizationRedirect as e:
auth_url = e.url
except exception.AuthorizationFailure:
if 'v2.0' not in auth_url:
auth_url = urlparse.urljoin(auth_url, 'v2.0/')
else:
# for future use.
self.creds['auth_url'] = auth_url
break
else:
# Guard against a redirection loop
raise exception.MaxRedirectsExceeded(redirects=self.MAX_REDIRECTS)
def _v1_auth(self, token_url):
creds = self.creds
headers = {
'X-Auth-User': creds['username'],
'X-Auth-Key': creds['password']
}
tenant = creds.get('tenant')
if tenant:
headers['X-Auth-Tenant'] = tenant
resp, resp_body = self._do_request(token_url, 'GET', headers=headers)
def _management_url(self, resp):
for url_header in ('x-image-management-url',
'x-server-management-url',
'x-glance'):
try:
return resp[url_header]
except KeyError as e:
not_found = e
raise not_found
if resp.status in (200, 204):
try:
if self.configure_via_auth:
self.management_url = _management_url(self, resp)
self.auth_token = resp['x-auth-token']
except KeyError:
raise exception.AuthorizationFailure()
elif resp.status == 305:
raise exception.AuthorizationRedirect(uri=resp['location'])
elif resp.status == 400:
raise exception.AuthBadRequest(url=token_url)
elif resp.status == 401:
raise exception.NotAuthenticated()
elif resp.status == 404:
raise exception.AuthUrlNotFound(url=token_url)
else:
raise Exception(_('Unexpected response: %s') % resp.status)
def _v2_auth(self, token_url):
creds = self.creds
creds = {
"auth": {
"tenantName": creds['tenant'],
"passwordCredentials": {
"username": creds['username'],
"password": creds['password']
}
}
}
headers = {'Content-Type': 'application/json'}
req_body = jsonutils.dumps(creds)
resp, resp_body = self._do_request(
token_url, 'POST', headers=headers, body=req_body)
if resp.status == 200:
resp_auth = jsonutils.loads(resp_body)['access']
creds_region = self.creds.get('region')
if self.configure_via_auth:
endpoint = get_endpoint(resp_auth['serviceCatalog'],
endpoint_region=creds_region)
self.management_url = endpoint
self.auth_token = resp_auth['token']['id']
elif resp.status == 305:
raise exception.RedirectException(resp['location'])
elif resp.status == 400:
raise exception.AuthBadRequest(url=token_url)
elif resp.status == 401:
raise exception.NotAuthenticated()
elif resp.status == 404:
raise exception.AuthUrlNotFound(url=token_url)
else:
raise Exception(_('Unexpected response: %s') % resp.status)
@property
def is_authenticated(self):
return self.auth_token is not None
@property
def strategy(self):
return 'keystone'
def _do_request(self, url, method, headers=None, body=None):
headers = headers or {}
conn = httplib2.Http()
conn.force_exception_to_status_code = True
conn.disable_ssl_certificate_validation = self.insecure
headers['User-Agent'] = 'glance-client'
resp, resp_body = conn.request(url, method, headers=headers, body=body)
return resp, resp_body
def get_plugin_from_strategy(strategy, creds=None, insecure=False,
configure_via_auth=True):
if strategy == 'noauth':
return NoAuthStrategy()
elif strategy == 'keystone':
return KeystoneStrategy(creds, insecure,
configure_via_auth=configure_via_auth)
else:
raise Exception(_("Unknown auth strategy '%s'") % strategy)
def get_endpoint(service_catalog, service_type='image', endpoint_region=None,
endpoint_type='publicURL'):
endpoint = None
for service in service_catalog:
s_type = None
try:
s_type = service['type']
except KeyError:
msg = _('Encountered service with no "type": %s') % s_type
LOG.warn(msg)
continue
if s_type == service_type:
for ep in service['endpoints']:
if endpoint_region is None or endpoint_region == ep['region']:
if endpoint is not None:
# This is a second match, abort
raise exception.RegionAmbiguity(region=endpoint_region)
endpoint = ep
if endpoint and endpoint.get(endpoint_type):
return endpoint[endpoint_type]
else:
raise exception.NoServiceEndpoint()
| true | true |
1c457cf430666778cca067fee9e66d2b156178b1 | 2,193 | py | Python | ropgenerator/exploit/syscall/SyscallLinuxX86.py | avltree9798/ropgenerator | c63c81f03e8653dc3911e21300c00003a4224f6a | [
"MIT"
] | 1 | 2021-01-07T13:16:19.000Z | 2021-01-07T13:16:19.000Z | ropgenerator/exploit/syscall/SyscallLinuxX86.py | avltree9798/ropgenerator | c63c81f03e8653dc3911e21300c00003a4224f6a | [
"MIT"
] | null | null | null | ropgenerator/exploit/syscall/SyscallLinuxX86.py | avltree9798/ropgenerator | c63c81f03e8653dc3911e21300c00003a4224f6a | [
"MIT"
] | null | null | null | # -*- coding:utf-8 -*-
# SycallLinuxX86 module: build syscalls for linux on X64
from ropgenerator.exploit.syscall.SyscallGeneric import Syscall, ArgType
from ropgenerator.core.Architecture import *
# Each Syscall bundles: C return type, name, C-level argument signature,
# per-argument ArgType, the i386 registers that carry the arguments
# (ebx, ecx, edx, ... per the Linux i386 syscall ABI), and register
# preconditions such as the syscall number in eax.
mprotect = Syscall('int', 'mprotect', \
    [('void*', 'addr'),('size_t','len'),('int','prot')], [ArgType.INT, ArgType.INT, ArgType.INT],\
    [RegX86.EBX, RegX86.ECX, RegX86.EDX], [(RegX86.EAX, 0x7d)])

execve = Syscall('int', 'execve', \
    [('char*', 'cmd'),('char**','argv'),('char**', 'envp')], [ArgType.STRING, ArgType.INT,ArgType.INT],\
    [RegX86.EBX,RegX86.ECX,RegX86.EDX], [(RegX86.EAX, 11)] )

read = Syscall('int', 'read', \
    [('unsigned int','fd'),('char*','buf'),('size_t','count')], [ArgType.INT, ArgType.INT_OR_STRING, ArgType.INT], \
    [RegX86.EBX,RegX86.ECX,RegX86.EDX], [(RegX86.EAX, 3)])

write = Syscall('int', 'write', \
    [('unsigned int','fd'),('const char*','buf'),('size_t','count')], [ArgType.INT, ArgType.INT_OR_STRING, ArgType.INT], \
    [RegX86.EBX,RegX86.ECX,RegX86.EDX], [(RegX86.EAX, 4)])

chmod = Syscall('int', 'chmod', \
    [('const char*','filename'),('mode_t','mode')], [ArgType.INT_OR_STRING, ArgType.INT], \
    [RegX86.EBX, RegX86.ECX], [(RegX86.EAX,15)])

setuid = Syscall('int', 'setuid', \
    [('uid_t', 'uid')], [ArgType.INT], \
    [RegX86.EBX], [(RegX86.EAX,23)])

# MMAP with flags = MAP_ANONYMOUS (no fd and offset)
# NOTE(review): eax=9 is the x86-64 mmap number; on i386, old_mmap is 90
# (and takes a struct argument) and mmap2 is 192 -- confirm this value.
# Also note the mixed register spec ('esi' as a string vs RegX86 enums),
# and that mmap_anon is deliberately(?) absent from syscalls_list below.
mmap_anon = Syscall('void*', 'mmap_anon', \
    [('unsigned long','addr'),('unsigned long','len'),('unsigned long','prot')],\
    [ArgType.INT, ArgType.INT, ArgType.INT], \
    [RegX86.EBX,RegX86.ECX,RegX86.EDX], [(RegX86.EAX,9), ('esi', 0x20)], function="mmap")

syscalls_list = [mprotect, execve, read, write, chmod, setuid]

## All available syscalls
# name -> Syscall lookup table used by the helpers below
available = dict()
for syscall_object in syscalls_list:
    available[syscall_object.name()] = syscall_object
####################
# Useful functions #
####################
def is_supported(syscall_name):
    """Return True when a Syscall definition exists for this name."""
    return syscall_name in available
def get_syscall(syscall_name):
    """Return the Syscall object for *syscall_name*, or None if unsupported."""
    return available.get(syscall_name)
def available_syscalls():
    """Return the name -> Syscall mapping of all supported syscalls."""
    # read-only access; the 'global' declaration in the original was unneeded
    return available
| 37.810345 | 122 | 0.632011 |
from ropgenerator.exploit.syscall.SyscallGeneric import Syscall, ArgType
from ropgenerator.core.Architecture import *
mprotect = Syscall('int', 'mprotect', \
[('void*', 'addr'),('size_t','len'),('int','prot')], [ArgType.INT, ArgType.INT, ArgType.INT],\
[RegX86.EBX, RegX86.ECX, RegX86.EDX], [(RegX86.EAX, 0x7d)])
execve = Syscall('int', 'execve', \
[('char*', 'cmd'),('char**','argv'),('char**', 'envp')], [ArgType.STRING, ArgType.INT,ArgType.INT],\
[RegX86.EBX,RegX86.ECX,RegX86.EDX], [(RegX86.EAX, 11)] )
read = Syscall('int', 'read', \
[('unsigned int','fd'),('char*','buf'),('size_t','count')], [ArgType.INT, ArgType.INT_OR_STRING, ArgType.INT], \
[RegX86.EBX,RegX86.ECX,RegX86.EDX], [(RegX86.EAX, 3)])
write = Syscall('int', 'write', \
[('unsigned int','fd'),('const char*','buf'),('size_t','count')], [ArgType.INT, ArgType.INT_OR_STRING, ArgType.INT], \
[RegX86.EBX,RegX86.ECX,RegX86.EDX], [(RegX86.EAX, 4)])
chmod = Syscall('int', 'chmod', \
[('const char*','filename'),('mode_t','mode')], [ArgType.INT_OR_STRING, ArgType.INT], \
[RegX86.EBX, RegX86.ECX], [(RegX86.EAX,15)])
setuid = Syscall('int', 'setuid', \
[('uid_t', 'uid')], [ArgType.INT], \
[RegX86.EBX], [(RegX86.EAX,23)])
mmap_anon = Syscall('void*', 'mmap_anon', \
[('unsigned long','addr'),('unsigned long','len'),('unsigned long','prot')],\
[ArgType.INT, ArgType.INT, ArgType.INT], \
[RegX86.EBX,RegX86.ECX,RegX86.EDX], [(RegX86.EAX,9), ('esi', 0x20)], function="mmap")
syscalls_list = [mprotect, execve, read, write, chmod, setuid]
yscall_object in syscalls_list:
available[syscall_object.name()] = syscall_object
| true | true |
1c457d19c80113b1224bc3ece869c3003a166dee | 690 | py | Python | molecule/default/tests/test_default.py | dhs-ncats/ansible-role-htop | a7848a00693e9e841e3546d879968704228b47a4 | [
"CC0-1.0"
] | null | null | null | molecule/default/tests/test_default.py | dhs-ncats/ansible-role-htop | a7848a00693e9e841e3546d879968704228b47a4 | [
"CC0-1.0"
] | null | null | null | molecule/default/tests/test_default.py | dhs-ncats/ansible-role-htop | a7848a00693e9e841e3546d879968704228b47a4 | [
"CC0-1.0"
] | null | null | null | """Module containing the tests for the default scenario."""
# Standard Python Libraries
import os
# Third-Party Libraries
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ["MOLECULE_INVENTORY_FILE"]
).get_hosts("all")
@pytest.mark.parametrize("pkg", ["htop"])
def test_packages(host, pkg):
    """Test that the appropriate packages were installed."""
    assert host.package(pkg).is_installed
@pytest.mark.parametrize("file", ["/etc/htoprc"])
def test_files(host, file):
    """Test that config files were copied over as expected."""
    assert host.file(file).exists
| 23.793103 | 63 | 0.731884 |
import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ["MOLECULE_INVENTORY_FILE"]
).get_hosts("all")
@pytest.mark.parametrize("pkg", ["htop"])
def test_packages(host, pkg):
package = host.package(pkg)
assert package.is_installed
@pytest.mark.parametrize("file", ["/etc/htoprc"])
def test_files(host, file):
f = host.file(file)
assert f.exists
| true | true |
1c457edb6c9ecbc1d978023d080823ab44d6d1d2 | 560 | py | Python | integration/emulator/test.py | cvlabmiet/master-programming-example | 8a4a231ba2b72a93ae14da2c04e17b2ae3fc6651 | [
"MIT"
] | null | null | null | integration/emulator/test.py | cvlabmiet/master-programming-example | 8a4a231ba2b72a93ae14da2c04e17b2ae3fc6651 | [
"MIT"
] | null | null | null | integration/emulator/test.py | cvlabmiet/master-programming-example | 8a4a231ba2b72a93ae14da2c04e17b2ae3fc6651 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import sys, random, array
sys.dont_write_bytecode = True
from device import Pram, Lram
# Build a shuffled byte pattern 0..199 as the device's input memory.
test_vector = list(range(0, 200))
random.shuffle(test_vector)
pram = Pram()
lram = Lram()
lram[:] = bytes(test_vector)
# Program string for the emulator -- presumably: add the u8 values at
# offsets 0..99 to those at 100..199 and store u16 results at 200..399
# (this matches the check below); confirm against the device docs.
pram[:] = b'[u16:200:400]add(u8:0, u8:100)'
pram.run(lram)
# Read back the u16 results and verify each pairwise sum.
view = memoryview(lram)[200:400].cast('H')
error_count = 0
for x in range(len(view)):
    if view[x] != test_vector[x] + test_vector[x + 100]:
        print("Error:", x, view[x], test_vector[x], test_vector[x + 100])
        error_count += 1
# Exit status is the number of mismatches; 0 means success.
sys.exit(error_count)
import sys, random, array
sys.dont_write_bytecode = True
from device import Pram, Lram
test_vector = list(range(0, 200))
random.shuffle(test_vector)
pram = Pram()
lram = Lram()
lram[:] = bytes(test_vector)
pram[:] = b'[u16:200:400]add(u8:0, u8:100)'
pram.run(lram)
view = memoryview(lram)[200:400].cast('H')
error_count = 0
for x in range(len(view)):
if view[x] != test_vector[x] + test_vector[x + 100]:
print("Error:", x, view[x], test_vector[x], test_vector[x + 100])
error_count += 1
sys.exit(error_count)
| true | true |
1c457f19469eb820eb88da2d97435a799d4d316b | 1,354 | py | Python | crslab/model/__init__.py | Xiaolong-Qi/CRSLab | d507378c86f4996727bf062482e1f224486d4533 | [
"MIT"
] | 1 | 2021-01-06T10:39:10.000Z | 2021-01-06T10:39:10.000Z | crslab/model/__init__.py | Xiaolong-Qi/CRSLab | d507378c86f4996727bf062482e1f224486d4533 | [
"MIT"
] | null | null | null | crslab/model/__init__.py | Xiaolong-Qi/CRSLab | d507378c86f4996727bf062482e1f224486d4533 | [
"MIT"
] | null | null | null | # @Time : 2020/11/22
# @Author : Kun Zhou
# @Email : francis_kun_zhou@163.com
# UPDATE:
# @Time : 2020/11/24, 2020/12/24
# @Author : Kun Zhou, Xiaolei Wang
# @Email : francis_kun_zhou@163.com, wxl1999@foxmail.com
from loguru import logger
from .conversation import *
from .kbrd import *
from .kgsf import *
from .policy import *
from .recommendation import *
from .redial import *
from .tgredial import *
# Maps the model name (as referenced in configuration) to its class.
Model_register_table = {
    'KGSF': KGSFModel,
    'KBRD': KBRDModel,
    'TGRec': TGRecModel,
    'TGConv': TGConvModel,
    'TGPolicy': TGPolicyModel,
    'ReDialRec': ReDialRecModel,
    'ReDialConv': ReDialConvModel,
    'GPT2': GPT2Model,
    'Transformer': TransformerModel,
    'ConvBERT': ConvBERTModel,
    'ProfileBERT': ProfileBERTModel,
    'TopicBERT': TopicBERTModel,
    'PMI': PMIModel,
    'MGCG': MGCGModel,
    'BERT': BERTModel,
    'SASREC': SASRECModel,
    'GRU4REC': GRU4RECModel,
    'Popularity': PopularityModel,
    'TextCNN': TextCNNModel
}
def get_model(config, model_name, device, vocab, side_data=None):
    """Instantiate and return the model registered under *model_name*.

    Raises NotImplementedError for an unregistered name.
    """
    if model_name not in Model_register_table:
        raise NotImplementedError('Model [{}] has not been implemented'.format(model_name))
    model = Model_register_table[model_name](config, device, vocab, side_data)
    logger.info(f'[Build model {model_name}]')
    return model
| 27.08 | 91 | 0.6839 |
from loguru import logger
from .conversation import *
from .kbrd import *
from .kgsf import *
from .policy import *
from .recommendation import *
from .redial import *
from .tgredial import *
Model_register_table = {
'KGSF': KGSFModel,
'KBRD': KBRDModel,
'TGRec': TGRecModel,
'TGConv': TGConvModel,
'TGPolicy': TGPolicyModel,
'ReDialRec': ReDialRecModel,
'ReDialConv': ReDialConvModel,
'GPT2': GPT2Model,
'Transformer': TransformerModel,
'ConvBERT': ConvBERTModel,
'ProfileBERT': ProfileBERTModel,
'TopicBERT': TopicBERTModel,
'PMI': PMIModel,
'MGCG': MGCGModel,
'BERT': BERTModel,
'SASREC': SASRECModel,
'GRU4REC': GRU4RECModel,
'Popularity': PopularityModel,
'TextCNN': TextCNNModel
}
def get_model(config, model_name, device, vocab, side_data=None):
if model_name in Model_register_table:
model = Model_register_table[model_name](config, device, vocab, side_data)
logger.info(f'[Build model {model_name}]')
return model
else:
raise NotImplementedError('Model [{}] has not been implemented'.format(model_name))
| true | true |
1c457faa9ac5bd092b0c88919dffda9a035f0f60 | 8,729 | py | Python | ckan_cloud_operator/providers/storage/minio/manager.py | mickeyrouash/ckan-cloud-operator | 10e38f13964af30fe57b07e8d8a3b7521ed69cc2 | [
"MIT"
] | null | null | null | ckan_cloud_operator/providers/storage/minio/manager.py | mickeyrouash/ckan-cloud-operator | 10e38f13964af30fe57b07e8d8a3b7521ed69cc2 | [
"MIT"
] | null | null | null | ckan_cloud_operator/providers/storage/minio/manager.py | mickeyrouash/ckan-cloud-operator | 10e38f13964af30fe57b07e8d8a3b7521ed69cc2 | [
"MIT"
] | null | null | null | #### standard provider code ####
# import the correct PROVIDER_SUBMODULE and PROVIDER_ID constants for your provider
from .constants import PROVIDER_ID
from ..constants import PROVIDER_SUBMODULE
# define common provider functions based on the constants
from ckan_cloud_operator.providers import manager as providers_manager
# Standard provider boilerplate: thin wrappers binding the generic
# providers_manager helpers to this provider's (submodule, id) pair for
# resource naming/labels/annotations and (secret) config get/set.
def _get_resource_name(suffix=None): return providers_manager.get_resource_name(PROVIDER_SUBMODULE, PROVIDER_ID, suffix=suffix)
def _get_resource_labels(for_deployment=False, suffix=None): return providers_manager.get_resource_labels(PROVIDER_SUBMODULE, PROVIDER_ID, for_deployment=for_deployment, suffix=suffix)
def _get_resource_annotations(suffix=None): return providers_manager.get_resource_annotations(PROVIDER_SUBMODULE, PROVIDER_ID, suffix=suffix)
def _set_provider(): providers_manager.set_provider(PROVIDER_SUBMODULE, PROVIDER_ID)
def _config_set(key=None, value=None, values=None, namespace=None, is_secret=False, suffix=None): providers_manager.config_set(PROVIDER_SUBMODULE, PROVIDER_ID, key=key, value=value, values=values, namespace=namespace, is_secret=is_secret, suffix=suffix)
def _config_get(key=None, default=None, required=False, namespace=None, is_secret=False, suffix=None): return providers_manager.config_get(PROVIDER_SUBMODULE, PROVIDER_ID, key=key, default=default, required=required, namespace=namespace, is_secret=is_secret, suffix=suffix)
def _config_interactive_set(default_values, namespace=None, is_secret=False, suffix=None, from_file=False, interactive=False): providers_manager.config_interactive_set(PROVIDER_SUBMODULE, PROVIDER_ID, default_values, namespace, is_secret, suffix, from_file, interactive)
################################
# custom provider code starts here
#
import os
import binascii
import yaml
import json
from ckan_cloud_operator import kubectl
from ckan_cloud_operator import logs
from ckan_cloud_operator.routers import manager as routers_manager
def initialize(interactive=False, storage_suffix=None, use_existing_disk_name=None, dry_run=False):
    """Provision a Minio storage instance on the cluster.

    Persists (optionally prompting for) the disk size and -- for the
    default instance only -- the infra router name, ensures admin
    credentials exist, then applies the Deployment, Service and (default
    instance only) the route, and finally records this module as the
    active storage provider.
    """
    _config_interactive_set({
        'disk-size-gb': None,
        **({} if storage_suffix else {'router-name': routers_manager.get_default_infra_router_name()})
    }, interactive=interactive, suffix=storage_suffix)
    _apply_secret(storage_suffix=storage_suffix)
    _apply_deployment(
        _get_or_create_volume(
            storage_suffix=storage_suffix,
            use_existing_disk_name=use_existing_disk_name
        ),
        storage_suffix=storage_suffix,
        dry_run=dry_run
    )
    _apply_service(storage_suffix=storage_suffix, dry_run=dry_run)
    if not storage_suffix:
        _update_route(storage_suffix=storage_suffix, dry_run=dry_run)
    _set_provider()
def print_credentials(raw=False, storage_suffix=None):
    """Print the Minio admin endpoint and keys.

    raw=True emits a single machine-readable line; otherwise a
    human-friendly summary plus a ready-to-run minio-client command.
    """
    hostname, access_key, secret_key = get_credentials(storage_suffix=storage_suffix)
    if raw:
        print(f'https://{hostname} {access_key} {secret_key}')
        return
    print('Minio admin credentials:')
    print('External Domain: ' + hostname)
    print('Access Key: ' + access_key)
    print('Secret Key: ' + secret_key)
    print('\nto use with minio-client, run the following command:')
    print(f'mc config host add my-storage https://{hostname} {access_key} {secret_key}')
def get_credentials(storage_suffix=None):
    """Return [hostname, access_key, secret_key] for the Minio instance."""
    hostname = _get_frontend_hostname(storage_suffix=storage_suffix)
    access_key = _config_get('MINIO_ACCESS_KEY', required=True, is_secret=True, suffix=storage_suffix)
    secret_key = _config_get('MINIO_SECRET_KEY', required=True, is_secret=True, suffix=storage_suffix)
    return [hostname, access_key, secret_key]
def _generate_password(l):
    """Return a random lowercase-hex string of 2*l characters.

    Uses os.urandom, so the output is cryptographically random;
    bytes.hex() is equivalent to binascii.hexlify(...).decode().
    """
    return os.urandom(l).hex()
def _apply_secret(storage_suffix=None):
    """Ensure Minio admin credentials exist in the provider secret.

    Idempotent: existing keys are kept; missing ones are generated
    (8-byte access key, 12-byte secret key, hex-encoded).
    """
    creds = {
        'MINIO_ACCESS_KEY': _config_get('MINIO_ACCESS_KEY', required=False, is_secret=True, suffix=storage_suffix) or _generate_password(8),
        'MINIO_SECRET_KEY': _config_get('MINIO_SECRET_KEY', required=False, is_secret=True, suffix=storage_suffix) or _generate_password(12),
    }
    _config_set(values=creds, is_secret=True, suffix=storage_suffix)
def _apply_deployment(volume_spec, storage_suffix=None, dry_run=False):
    """Apply the Minio Deployment (single replica, Recreate strategy).

    :param volume_spec: k8s volume spec dict for the data disk. If it carries
        a ``nodeSelector`` key, that key is popped off (mutating the caller's
        dict) and used for pod scheduling instead of being part of the volume.
    """
    node_selector = volume_spec.pop('nodeSelector', None)
    if node_selector:
        pod_scheduling = {'nodeSelector': node_selector}
    else:
        pod_scheduling = {}
    # Optional JSON blob merged over the minio container spec below.
    container_spec_overrides = _config_get('container-spec-overrides', required=False, default=None, suffix=storage_suffix)
    kubectl.apply(kubectl.get_deployment(
        _get_resource_name(suffix=storage_suffix),
        _get_resource_labels(for_deployment=True, suffix=storage_suffix),
        {
            'replicas': 1,
            'revisionHistoryLimit': 10,
            # Recreate (not RollingUpdate): the data volume can only be
            # mounted by one pod at a time.
            'strategy': {'type': 'Recreate', },
            'template': {
                'metadata': {
                    'labels': _get_resource_labels(for_deployment=True, suffix=storage_suffix),
                    'annotations': _get_resource_annotations(suffix=storage_suffix)
                },
                'spec': {
                    **pod_scheduling,
                    'containers': [
                        {
                            'name': 'minio',
                            'image': 'minio/minio',
                            'args': ['server', '/export'],
                            # Credentials come from the secret written by _apply_secret().
                            'envFrom': [{'secretRef': {'name': _get_resource_name(suffix=storage_suffix)}}],
                            'ports': [{'containerPort': 9000}],
                            'volumeMounts': [
                                {
                                    'name': 'minio-data',
                                    'mountPath': '/export',
                                }
                            ],
                            **(json.loads(container_spec_overrides) if container_spec_overrides else {})
                        }
                    ],
                    'volumes': [
                        dict(volume_spec, name='minio-data')
                    ]
                }
            }
        }
    ), dry_run=dry_run)
def _apply_service(storage_suffix=None, dry_run=False):
    """Apply a Service exposing the Minio pod on port 9000."""
    kubectl.apply(kubectl.get_resource(
        'v1', 'Service',
        _get_resource_name(suffix=storage_suffix),
        _get_resource_labels(suffix=storage_suffix),
        spec={
            'ports': [
                {'name': '9000', 'port': 9000}
            ],
            # Select the pods created by _apply_deployment() via the shared 'app' label.
            'selector': {
                'app': _get_resource_labels(for_deployment=True, suffix=storage_suffix)['app']
            }
        }
    ), dry_run=dry_run)
def _get_or_create_volume(storage_suffix=None, use_existing_disk_name=None):
    """Return the volume spec for the Minio data disk, creating it on first use.

    The spec is cached in provider config under 'volume-spec'; later calls
    return the cached value instead of provisioning a new disk.
    """
    disk_size_gb = _config_get('disk-size-gb', required=True, suffix=storage_suffix)
    volume_spec = _config_get('volume-spec', required=False, suffix=storage_suffix)
    if volume_spec:
        # safe_load: the cached spec is a plain mapping we serialized ourselves
        # below; yaml.load without an explicit Loader is deprecated and unsafe.
        volume_spec = yaml.safe_load(volume_spec)
    else:
        from ckan_cloud_operator.providers.cluster import manager as cluster_manager
        volume_spec = cluster_manager.create_volume(
            disk_size_gb,
            _get_resource_labels(suffix=storage_suffix),
            use_existing_disk_name=use_existing_disk_name
        )
        _config_set('volume-spec', yaml.dump(volume_spec, default_flow_style=False), suffix=storage_suffix)
    return volume_spec
def _update_route(storage_suffix=None, dry_run=False):
    """Create (if missing) the router subdomain route pointing at the Minio
    Service, then trigger a router update."""
    backend_url_target_id = _get_backend_url_target_id(storage_suffix=storage_suffix)
    router_name = _config_get('router-name', required=True, suffix=storage_suffix)
    if not routers_manager.get_backend_url_routes(backend_url_target_id):
        deployment_name = _get_resource_name(suffix=storage_suffix)
        namespace = _get_namespace()
        # Route traffic to the in-cluster Service DNS name on port 9000.
        subdomain_route = {
            'target-type': 'backend-url',
            'target-resource-id': backend_url_target_id,
            'backend-url': f'http://{deployment_name}.{namespace}:9000',
        }
        if dry_run:
            logs.info('create_subdomain_route', router_name, subdomain_route)
        else:
            routers_manager.create_subdomain_route(router_name, subdomain_route)
    if not dry_run:
        routers_manager.update(router_name, wait_ready=True)
def _get_namespace():
return 'ckan-cloud'
def _get_frontend_hostname(storage_suffix=None):
    """Return the external hostname routed to this Minio instance.

    Falls back to 'localhost:9000' when no route exists; for the default
    (un-suffixed) instance exactly one route is asserted to exist.
    """
    backend_url_target_id = _get_backend_url_target_id(storage_suffix=storage_suffix)
    routes = routers_manager.get_backend_url_routes(backend_url_target_id)
    assert storage_suffix or len(routes) == 1
    if len(routes) < 1:
        # Only reachable for suffixed instances (see assert above).
        return 'localhost:9000'
    else:
        return routers_manager.get_route_frontend_hostname(routes[0])
def _get_backend_url_target_id(storage_suffix=None):
return f'minio-{storage_suffix}' if storage_suffix else 'minio'
| 44.764103 | 273 | 0.680719 | perator.providers import manager as providers_manager
def _get_resource_name(suffix=None): return providers_manager.get_resource_name(PROVIDER_SUBMODULE, PROVIDER_ID, suffix=suffix)
def _get_resource_labels(for_deployment=False, suffix=None): return providers_manager.get_resource_labels(PROVIDER_SUBMODULE, PROVIDER_ID, for_deployment=for_deployment, suffix=suffix)
def _get_resource_annotations(suffix=None): return providers_manager.get_resource_annotations(PROVIDER_SUBMODULE, PROVIDER_ID, suffix=suffix)
def _set_provider(): providers_manager.set_provider(PROVIDER_SUBMODULE, PROVIDER_ID)
def _config_set(key=None, value=None, values=None, namespace=None, is_secret=False, suffix=None): providers_manager.config_set(PROVIDER_SUBMODULE, PROVIDER_ID, key=key, value=value, values=values, namespace=namespace, is_secret=is_secret, suffix=suffix)
def _config_get(key=None, default=None, required=False, namespace=None, is_secret=False, suffix=None): return providers_manager.config_get(PROVIDER_SUBMODULE, PROVIDER_ID, key=key, default=default, required=required, namespace=namespace, is_secret=is_secret, suffix=suffix)
def _config_interactive_set(default_values, namespace=None, is_secret=False, suffix=None, from_file=False, interactive=False): providers_manager.config_interactive_set(PROVIDER_SUBMODULE, PROVIDER_ID, default_values, namespace, is_secret, suffix, from_file, interactive)
suffix=storage_suffix)
_apply_secret(storage_suffix=storage_suffix)
_apply_deployment(
_get_or_create_volume(
storage_suffix=storage_suffix,
use_existing_disk_name=use_existing_disk_name
),
storage_suffix=storage_suffix,
dry_run=dry_run
)
_apply_service(storage_suffix=storage_suffix, dry_run=dry_run)
if not storage_suffix:
_update_route(storage_suffix=storage_suffix, dry_run=dry_run)
_set_provider()
def print_credentials(raw=False, storage_suffix=None):
hostname, access_key, secret_key = get_credentials(storage_suffix=storage_suffix)
if raw:
print(f'https://{hostname} {access_key} {secret_key}')
else:
print('Minio admin credentials:')
print('External Domain: ' + hostname)
print('Access Key: ' + access_key)
print('Secret Key: ' + secret_key)
print('\nto use with minio-client, run the following command:')
print(f'mc config host add my-storage https://{hostname} {access_key} {secret_key}')
def get_credentials(storage_suffix=None):
return [_get_frontend_hostname(storage_suffix=storage_suffix)] + [
_config_get(key, required=True, is_secret=True, suffix=storage_suffix)
for key in ['MINIO_ACCESS_KEY', 'MINIO_SECRET_KEY']
]
def _generate_password(l):
return binascii.hexlify(os.urandom(l)).decode()
def _apply_secret(storage_suffix=None):
access_key = _config_get('MINIO_ACCESS_KEY', required=False, is_secret=True, suffix=storage_suffix) or _generate_password(8)
secret_key = _config_get('MINIO_SECRET_KEY', required=False, is_secret=True, suffix=storage_suffix) or _generate_password(12)
_config_set(values={'MINIO_ACCESS_KEY': access_key, 'MINIO_SECRET_KEY': secret_key}, is_secret=True, suffix=storage_suffix)
def _apply_deployment(volume_spec, storage_suffix=None, dry_run=False):
node_selector = volume_spec.pop('nodeSelector', None)
if node_selector:
pod_scheduling = {'nodeSelector': node_selector}
else:
pod_scheduling = {}
container_spec_overrides = _config_get('container-spec-overrides', required=False, default=None, suffix=storage_suffix)
kubectl.apply(kubectl.get_deployment(
_get_resource_name(suffix=storage_suffix),
_get_resource_labels(for_deployment=True, suffix=storage_suffix),
{
'replicas': 1,
'revisionHistoryLimit': 10,
'strategy': {'type': 'Recreate', },
'template': {
'metadata': {
'labels': _get_resource_labels(for_deployment=True, suffix=storage_suffix),
'annotations': _get_resource_annotations(suffix=storage_suffix)
},
'spec': {
**pod_scheduling,
'containers': [
{
'name': 'minio',
'image': 'minio/minio',
'args': ['server', '/export'],
'envFrom': [{'secretRef': {'name': _get_resource_name(suffix=storage_suffix)}}],
'ports': [{'containerPort': 9000}],
'volumeMounts': [
{
'name': 'minio-data',
'mountPath': '/export',
}
],
**(json.loads(container_spec_overrides) if container_spec_overrides else {})
}
],
'volumes': [
dict(volume_spec, name='minio-data')
]
}
}
}
), dry_run=dry_run)
def _apply_service(storage_suffix=None, dry_run=False):
kubectl.apply(kubectl.get_resource(
'v1', 'Service',
_get_resource_name(suffix=storage_suffix),
_get_resource_labels(suffix=storage_suffix),
spec={
'ports': [
{'name': '9000', 'port': 9000}
],
'selector': {
'app': _get_resource_labels(for_deployment=True, suffix=storage_suffix)['app']
}
}
), dry_run=dry_run)
def _get_or_create_volume(storage_suffix=None, use_existing_disk_name=None):
disk_size_gb = _config_get('disk-size-gb', required=True, suffix=storage_suffix)
volume_spec = _config_get('volume-spec', required=False, suffix=storage_suffix)
if volume_spec:
volume_spec = yaml.load(volume_spec)
else:
from ckan_cloud_operator.providers.cluster import manager as cluster_manager
volume_spec = cluster_manager.create_volume(
disk_size_gb,
_get_resource_labels(suffix=storage_suffix),
use_existing_disk_name=use_existing_disk_name
)
_config_set('volume-spec', yaml.dump(volume_spec, default_flow_style=False), suffix=storage_suffix)
return volume_spec
def _update_route(storage_suffix=None, dry_run=False):
backend_url_target_id = _get_backend_url_target_id(storage_suffix=storage_suffix)
router_name = _config_get('router-name', required=True, suffix=storage_suffix)
if not routers_manager.get_backend_url_routes(backend_url_target_id):
deployment_name = _get_resource_name(suffix=storage_suffix)
namespace = _get_namespace()
subdomain_route = {
'target-type': 'backend-url',
'target-resource-id': backend_url_target_id,
'backend-url': f'http://{deployment_name}.{namespace}:9000',
}
if dry_run:
logs.info('create_subdomain_route', router_name, subdomain_route)
else:
routers_manager.create_subdomain_route(router_name, subdomain_route)
if not dry_run:
routers_manager.update(router_name, wait_ready=True)
def _get_namespace():
return 'ckan-cloud'
def _get_frontend_hostname(storage_suffix=None):
backend_url_target_id = _get_backend_url_target_id(storage_suffix=storage_suffix)
routes = routers_manager.get_backend_url_routes(backend_url_target_id)
assert storage_suffix or len(routes) == 1
if len(routes) < 1:
return 'localhost:9000'
else:
return routers_manager.get_route_frontend_hostname(routes[0])
def _get_backend_url_target_id(storage_suffix=None):
return f'minio-{storage_suffix}' if storage_suffix else 'minio'
| true | true |
1c4580a46e7319d59ea9439c79f77deb41aaa8c2 | 5,708 | py | Python | luigi/rpc.py | miku/luigi | 889ef2af64e2aa7d0cc65caef69a241ac91e5ff9 | [
"Apache-2.0"
] | 4 | 2017-03-21T20:01:19.000Z | 2022-03-29T16:31:41.000Z | luigi/rpc.py | miku/luigi | 889ef2af64e2aa7d0cc65caef69a241ac91e5ff9 | [
"Apache-2.0"
] | 9 | 2017-03-22T23:38:48.000Z | 2019-01-28T21:13:06.000Z | luigi/rpc.py | miku/luigi | 889ef2af64e2aa7d0cc65caef69a241ac91e5ff9 | [
"Apache-2.0"
] | 2 | 2015-05-04T22:46:20.000Z | 2016-07-14T17:58:57.000Z | # -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Implementation of the REST interface between the workers and the server.
rpc.py implements the client side of it, server.py implements the server side.
See :doc:`/central_scheduler` for more info.
"""
import os
import json
import logging
import socket
import time
from luigi.six.moves.urllib.parse import urljoin, urlencode, urlparse
from luigi.six.moves.urllib.request import urlopen
from luigi.six.moves.urllib.error import URLError
from luigi import configuration
from luigi.scheduler import RPC_METHODS
HAS_UNIX_SOCKET = True
HAS_REQUESTS = True
try:
import requests_unixsocket as requests
except ImportError:
HAS_UNIX_SOCKET = False
try:
import requests
except ImportError:
HAS_REQUESTS = False
logger = logging.getLogger('luigi-interface') # TODO: 'interface'?
def _urljoin(base, url):
"""
Join relative URLs to base URLs like urllib.parse.urljoin but support
arbitrary URIs (esp. 'http+unix://').
"""
parsed = urlparse(base)
scheme = parsed.scheme
return urlparse(
urljoin(parsed._replace(scheme='http').geturl(), url)
)._replace(scheme=scheme).geturl()
class RPCError(Exception):
    """Raised when talking to the remote scheduler fails.

    ``sub_exception`` carries the underlying transport error, if any.
    """

    def __init__(self, message, sub_exception=None):
        self.sub_exception = sub_exception
        super(RPCError, self).__init__(message)
class URLLibFetcher(object):
    """Fallback HTTP fetcher based on urllib, used when `requests` is absent."""
    # Exception types the caller should treat as retryable connection errors.
    raises = (URLError, socket.timeout)
    def fetch(self, full_url, body, timeout):
        """POST the form-encoded `body` dict to `full_url`, return response text."""
        # urlopen requires bytes for a POST body.
        body = urlencode(body).encode('utf-8')
        return urlopen(full_url, body, timeout).read().decode('utf-8')
class RequestsFetcher(object):
    """HTTP fetcher backed by a persistent `requests` session.

    The session is recreated after a fork because requests sessions are not
    safe to share across processes.
    """
    def __init__(self, session):
        from requests import exceptions as requests_exceptions
        # Exception type the caller should treat as a retryable connection error.
        self.raises = requests_exceptions.RequestException
        self.session = session
        self.process_id = os.getpid()
    def check_pid(self):
        # if the process id change changed from when the session was created
        # a new session needs to be setup since requests isn't multiprocessing safe.
        if os.getpid() != self.process_id:
            self.session = requests.Session()
            self.process_id = os.getpid()
    def fetch(self, full_url, body, timeout):
        """GET `full_url` with `body` as request data; raise on HTTP error status."""
        self.check_pid()
        resp = self.session.get(full_url, data=body, timeout=timeout)
        resp.raise_for_status()
        return resp.text
class RemoteScheduler(object):
    """
    Scheduler proxy object. Talks to a RemoteSchedulerResponder.

    Retry behaviour is driven by the [core] config keys
    rpc-connect-timeout, rpc-retry-attempts, rpc-retry-wait and
    rpc-log-retries.
    """
    def __init__(self, url='http://localhost:8082/', connect_timeout=None):
        assert not url.startswith('http+unix://') or HAS_UNIX_SOCKET, (
            'You need to install requests-unixsocket for Unix socket support.'
        )
        self._url = url.rstrip('/')
        config = configuration.get_config()
        if connect_timeout is None:
            connect_timeout = config.getfloat('core', 'rpc-connect-timeout', 10.0)
        self._connect_timeout = connect_timeout
        # Retry policy for failed RPC calls (used by _fetch()).
        self._rpc_retry_attempts = config.getint('core', 'rpc-retry-attempts', 3)
        self._rpc_retry_wait = config.getint('core', 'rpc-retry-wait', 30)
        self._rpc_log_retries = config.getboolean('core', 'rpc-log-retries', True)
        # Prefer the requests-based fetcher when available; fall back to urllib.
        if HAS_REQUESTS:
            self._fetcher = RequestsFetcher(requests.Session())
        else:
            self._fetcher = URLLibFetcher()
    def _wait(self):
        # Sleep between retry attempts.
        if self._rpc_log_retries:
            logger.info("Wait for %d seconds" % self._rpc_retry_wait)
        time.sleep(self._rpc_retry_wait)
    def _fetch(self, url_suffix, body):
        """Send `body` to the scheduler endpoint `url_suffix`, retrying
        connection errors up to rpc-retry-attempts times.

        Raises RPCError (wrapping the last transport error) when all
        attempts fail.
        """
        full_url = _urljoin(self._url, url_suffix)
        last_exception = None
        attempt = 0
        while attempt < self._rpc_retry_attempts:
            attempt += 1
            if last_exception:
                if self._rpc_log_retries:
                    logger.info("Retrying attempt %r of %r (max)" % (attempt, self._rpc_retry_attempts))
                self._wait()  # wait for a bit and retry
            try:
                response = self._fetcher.fetch(full_url, body, self._connect_timeout)
                break
            except self._fetcher.raises as e:
                last_exception = e
                if self._rpc_log_retries:
                    logger.warning("Failed connecting to remote scheduler %r", self._url,
                                   exc_info=True)
                continue
        else:
            # Loop exhausted without a successful fetch.
            raise RPCError(
                "Errors (%d attempts) when connecting to remote scheduler %r" %
                (self._rpc_retry_attempts, self._url),
                last_exception
            )
        return response
    def _request(self, url, data, attempts=3, allow_null=True):
        """Issue an RPC and return the decoded 'response' field.

        When `allow_null` is False a null response is retried up to
        `attempts` times before raising RPCError.
        """
        body = {'data': json.dumps(data)}
        for _ in range(attempts):
            page = self._fetch(url, body)
            response = json.loads(page)["response"]
            if allow_null or response is not None:
                return response
        raise RPCError("Received null response from remote scheduler %r" % self._url)
# Attach every scheduler RPC method (declared once in luigi.scheduler.RPC_METHODS)
# to the RemoteScheduler proxy; each delegates to _request().
for method_name, method in RPC_METHODS.items():
    setattr(RemoteScheduler, method_name, method)
| 33.380117 | 104 | 0.653644 |
import os
import json
import logging
import socket
import time
from luigi.six.moves.urllib.parse import urljoin, urlencode, urlparse
from luigi.six.moves.urllib.request import urlopen
from luigi.six.moves.urllib.error import URLError
from luigi import configuration
from luigi.scheduler import RPC_METHODS
HAS_UNIX_SOCKET = True
HAS_REQUESTS = True
try:
import requests_unixsocket as requests
except ImportError:
HAS_UNIX_SOCKET = False
try:
import requests
except ImportError:
HAS_REQUESTS = False
logger = logging.getLogger('luigi-interface')
def _urljoin(base, url):
parsed = urlparse(base)
scheme = parsed.scheme
return urlparse(
urljoin(parsed._replace(scheme='http').geturl(), url)
)._replace(scheme=scheme).geturl()
class RPCError(Exception):
def __init__(self, message, sub_exception=None):
super(RPCError, self).__init__(message)
self.sub_exception = sub_exception
class URLLibFetcher(object):
raises = (URLError, socket.timeout)
def fetch(self, full_url, body, timeout):
body = urlencode(body).encode('utf-8')
return urlopen(full_url, body, timeout).read().decode('utf-8')
class RequestsFetcher(object):
def __init__(self, session):
from requests import exceptions as requests_exceptions
self.raises = requests_exceptions.RequestException
self.session = session
self.process_id = os.getpid()
def check_pid(self):
if os.getpid() != self.process_id:
self.session = requests.Session()
self.process_id = os.getpid()
def fetch(self, full_url, body, timeout):
self.check_pid()
resp = self.session.get(full_url, data=body, timeout=timeout)
resp.raise_for_status()
return resp.text
class RemoteScheduler(object):
def __init__(self, url='http://localhost:8082/', connect_timeout=None):
assert not url.startswith('http+unix://') or HAS_UNIX_SOCKET, (
'You need to install requests-unixsocket for Unix socket support.'
)
self._url = url.rstrip('/')
config = configuration.get_config()
if connect_timeout is None:
connect_timeout = config.getfloat('core', 'rpc-connect-timeout', 10.0)
self._connect_timeout = connect_timeout
self._rpc_retry_attempts = config.getint('core', 'rpc-retry-attempts', 3)
self._rpc_retry_wait = config.getint('core', 'rpc-retry-wait', 30)
self._rpc_log_retries = config.getboolean('core', 'rpc-log-retries', True)
if HAS_REQUESTS:
self._fetcher = RequestsFetcher(requests.Session())
else:
self._fetcher = URLLibFetcher()
def _wait(self):
if self._rpc_log_retries:
logger.info("Wait for %d seconds" % self._rpc_retry_wait)
time.sleep(self._rpc_retry_wait)
def _fetch(self, url_suffix, body):
full_url = _urljoin(self._url, url_suffix)
last_exception = None
attempt = 0
while attempt < self._rpc_retry_attempts:
attempt += 1
if last_exception:
if self._rpc_log_retries:
logger.info("Retrying attempt %r of %r (max)" % (attempt, self._rpc_retry_attempts))
self._wait() # wait for a bit and retry
try:
response = self._fetcher.fetch(full_url, body, self._connect_timeout)
break
except self._fetcher.raises as e:
last_exception = e
if self._rpc_log_retries:
logger.warning("Failed connecting to remote scheduler %r", self._url,
exc_info=True)
continue
else:
raise RPCError(
"Errors (%d attempts) when connecting to remote scheduler %r" %
(self._rpc_retry_attempts, self._url),
last_exception
)
return response
def _request(self, url, data, attempts=3, allow_null=True):
body = {'data': json.dumps(data)}
for _ in range(attempts):
page = self._fetch(url, body)
response = json.loads(page)["response"]
if allow_null or response is not None:
return response
raise RPCError("Received null response from remote scheduler %r" % self._url)
for method_name, method in RPC_METHODS.items():
setattr(RemoteScheduler, method_name, method)
| true | true |
1c4581505fbb614f1ce2848ca80ed21dafdc2751 | 1,094 | py | Python | quick_start.py | willin007/kucoin_sdk | a4967c9f684aa4917a4b9e668d43520307eb9d30 | [
"MIT"
] | null | null | null | quick_start.py | willin007/kucoin_sdk | a4967c9f684aa4917a4b9e668d43520307eb9d30 | [
"MIT"
] | null | null | null | quick_start.py | willin007/kucoin_sdk | a4967c9f684aa4917a4b9e668d43520307eb9d30 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2021/11/4 10:50 上午
# @Author : william.li
# @File : quick_start.py
# @Software: PyCharm
# MarketData
import asyncio
from kucoin.client import WsToken
from kucoin.ws_client import KucoinWsClient
async def main():
    """Subscribe to the KuCoin KCS-USDT level2 depth feed and print updates."""
    async def deal_msg(msg):
        # Callback invoked by the ws client for every pushed message.
        if msg['topic'] == '/spotMarket/level2Depth5:BTC-USDT':
            print(msg["data"])
        elif msg['topic'] == '/spotMarket/level2Depth5:KCS-USDT':
            print(f'Get KCS level3:{msg["data"]}')
    # is public
    client = WsToken()
    #is private
    # client = WsToken(key='', secret='', passphrase='', is_sandbox=False, url='')
    # is sandbox
    # client = WsToken(is_sandbox=True)
    ws_client = await KucoinWsClient.create(None, client, deal_msg, private=False)
    # await ws_client.subscribe('/market/ticker:BTC-USDT,ETH-USDT')
    await ws_client.subscribe('/spotMarket/level2Depth5:KCS-USDT')
    while True:
        # Keep the coroutine alive. The `loop=` keyword was deprecated in
        # Python 3.8 and removed in 3.10 (it raised TypeError), and it also
        # depended on a module-level `loop` global — so don't pass it.
        await asyncio.sleep(60)
if __name__ == "__main__":
    # Create an event loop and run the subscriber until interrupted.
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())
| 29.567568 | 82 | 0.652651 |
import asyncio
from kucoin.client import WsToken
from kucoin.ws_client import KucoinWsClient
async def main():
async def deal_msg(msg):
if msg['topic'] == '/spotMarket/level2Depth5:BTC-USDT':
print(msg["data"])
elif msg['topic'] == '/spotMarket/level2Depth5:KCS-USDT':
print(f'Get KCS level3:{msg["data"]}')
client = WsToken()
ws_client = await KucoinWsClient.create(None, client, deal_msg, private=False)
await ws_client.subscribe('/spotMarket/level2Depth5:KCS-USDT')
while True:
await asyncio.sleep(60, loop=loop)
if __name__ == "__main__":
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
| true | true |
1c458274e5efd6aadb13edda9285a4822068f1f7 | 13,123 | py | Python | translate_mustache.py | yahoo/Gordito | 031c87e82a06002fd41b601668a9ef6c0fc2d85c | [
"MIT"
] | null | null | null | translate_mustache.py | yahoo/Gordito | 031c87e82a06002fd41b601668a9ef6c0fc2d85c | [
"MIT"
] | null | null | null | translate_mustache.py | yahoo/Gordito | 031c87e82a06002fd41b601668a9ef6c0fc2d85c | [
"MIT"
] | null | null | null | import sys
import os
import re
#Copyright (c) 2012 Yahoo! Inc. All rights reserved.
#Copyrights licensed under the MIT License. See the accompanying LICENSE file
#for terms.
# define command line options
# NOTE: `options` is parsed at import time and read as a module-level global
# by the compiler functions below.
from optparse import OptionParser
parser = OptionParser()
parser.add_option("-f", "--f", dest="filename", help="The mustache template to process")
parser.add_option("-r", "--rollup", dest="rollup", action="store_true", help="Rollup this template and it's dependencies into a single file.")
parser.add_option("-o", "--output", dest="output", help="The output file of this generated template")
parser.add_option("-b", "--basepath", dest="basepath", help="The base path of the mustache directory")
parser.add_option("-d", "--baseoutpath", dest="baseoutpath", help="Directory to place output in. Overridden by -o, but not for dependencies.")
parser.add_option("-t", "--type", dest="type", help="Whether to output Javascript or PHP")
(options, args) = parser.parse_args()
def js_stomp_filename(fname):
    """Sanitize a path into a JS-safe name: anything outside [a-zA-Z0-9] becomes '_'."""
    unsafe = re.compile(r'[^a-zA-Z0-9]')
    return unsafe.sub('_', fname)
def jsrepl(matchobj):
    """re.sub callback: whitespace matches collapse to one space, anything
    else gets backslash-escaped."""
    matched = matchobj.group(0)
    return ' ' if matched.isspace() else '\\' + matched
def js_escape(str):
    """Return *str* as a double-quoted JS string literal: runs of whitespace
    collapse to a single space and embedded quotes are backslash-escaped."""
    collapsed = re.sub(r'\s+', ' ', str)
    quoted = re.sub(r'([\'"])', lambda m: '\\' + m.group(0), collapsed)
    return '"' + quoted + '"'
# This is used to concat all simple tokens and strings into one .push() statement
def js_buffer_append_multi(arr):
    """Emit one JS `ob.push(a, b, ...);` statement covering every item in *arr*."""
    joined = ", ".join(arr)
    return "ob.push(" + joined + ");\n"
#token types
CONTENT = 1     # literal template text
DEREF = 2       # {{#name}} section start
END = 3         # {{/name}} section end (also terminates the token stream)
NEGATE = 4      # {{^name}} inverted section start
INCLUDE = 5     # {{>name}} partial include
CFGSTRING = 7   # {{str_*}} config-string lookup (names starting with 'str_')
VARIABLE = 8    # {{name}} plain variable substitution
#Dependency list for template being compiled
deps = {}
def token(command, arg):
    """Build one tokenizer token as a {'command': ..., 'arg': ...} dict."""
    return dict(command=command, arg=arg)
def genTokens(template):
    """Tokenize a mustache template string into a flat list of token dicts.

    Emits CONTENT tokens for literal text and directive tokens for each
    {{...}} construct; a trailing empty CONTENT plus an END token terminate
    the stream. Delimiter changes ({{=...}}) are unsupported and abort.
    """
    negation = None
    tokens = []
    start = template.find('{{', 0)
    lastEnd = 0
    ###################
    #Tokenization pass#
    ###################
    #PYTHON SUCKS
    #TODO: some cases of a missing } can cause this to hang.
    #If your build fails because this script got wedged here,
    #look for errors in the template it wass processing when it died.
    while (start) >= 0:
        # Literal text between the previous directive and this one.
        tokens.append(token(CONTENT, template[lastEnd:start]))
        start+=2
        end = template.find('}}', start)
        directive = template[start:end]
        command = directive[0:1]
        name = directive[1:].strip()
        #print "DIR "+directive
        #print "COM "+command
        #print "NAM "+name
        #print "NEG "+str(negation)
        if command=='#':
            tokens.append(token(DEREF, name))
        elif command=='/':
            tokens.append(token(END, None))
        elif command=='>':
            tokens.append(token(INCLUDE, name))
        elif command=='^':
            #print "entering negation"
            #push stack just to maintin consistency with block end code...
            tokens.append(token(NEGATE, name))
        elif command=='=':
            print "Unsupported delimiter change directive encountered. Terminating..."
            exit()
        elif command=='!':
            #print "COMMENT: " + directive
            a=1  # no-op: mustache comments are dropped
        else:
            if command != '{':
                #triple brace means unescape, but we don't handle that right now per ymail behavior
                name = directive
            else:
                end += 1 #we will have an extra } closing this directive, so consume it
            if name.find('str_') == 0:
                tokens.append(token(CFGSTRING, name))
            else:
                tokens.append(token(VARIABLE, name))
        lastEnd = end+2
        start = template.find('{{', end+2)
    # Trailing literal text plus the stream terminator.
    tokens.append(token(CONTENT, template[lastEnd:]))
    tokens.append(token(END, None))
    return tokens
def compile_template(filename):
    """Read a .mu template file and return compiled PHP or JS source
    (selected by the global options.type).

    Side effects: resets and refills the module-global `deps` with the
    partials referenced by this template. Returns '' when the file cannot
    be opened.
    """
    global deps
    templateStripped = []
    try:
        template = open(filename, 'r')
        #mustache delimiters will run together with code if we don't have at least a space between lines.
        templateStripped = [line.strip()+"\n" for line in template]
        #templateStripped = [line for line in template]
    except:
        print "Could not open "+filename
        return ''
    tokens = genTokens("".join(templateStripped))
    ##TODO: make it return local output buffer instead of modifying global one?
    deps = {}
    global options
    if options.type and options.type == 'php':
        return compileTokensPHP(tokens)[0]
    else:
        compiled = compileTokensJS(tokens)[0] #this fills in deps
        MODULE_PREFIX = 'mu_'
        # Build the YUI requires list from the partials recorded in deps.
        depStr = "',\n'".join([MODULE_PREFIX+js_stomp_filename(key) for key in deps.keys()])
        if len(depStr) != 0: depStr = "'"+depStr+"'"
        try:
            idx = filename.index("mustache/")
            idx += 9 # length of 'mustache/'
        except:
            idx = 0
        fname = js_stomp_filename(filename[idx:][:-3]) #trim off mustache/ dir and .mu extension
        if options.rollup:
            fname = fname + "_rollup"
        # Wrap the compiled body in a YUI module definition.
        res = ("/* AUTO-GENERATED FILE. DO NOT EDIT. */\n" +
            "YUI.add('"+MODULE_PREFIX+fname+"',function(Y){\n" +
            "Y.namespace('ui.MuTemplates');\n" +
            "Y.ui.MuTemplates."+fname+" = function(__ctx, __world) {\n" +
            "var ob = __world.outbuffer,"+
            "str=__world.strings,"+
            "handleHash=__world.handleHashsign,\n"+
            "templates=__world.templates;\n"+
            compiled + "\n}\n" +
            "}, '1.0.0', {requires:["+depStr+"]});")
        return res
#returns compiled (string, number of tokens consumed)
def compileTokensJS(tokens):
    """Compile a token stream (up to its matching END token) into JS source.

    Returns (compiled_js, tokens_consumed). Adjacent simple tokens are
    batched in `tempbuffer` and flushed as a single ob.push(...) call.
    Side effects: records partials in the module-global `deps` when not in
    rollup mode; in rollup mode partials are read from disk and inlined.
    """
    global deps
    compiled = ''
    i = 0
    tempbuffer = []
    while i < len(tokens) and tokens[i]['command'] != END:
        #print tokens[i]
        command = tokens[i]['command']
        arg = tokens[i]['arg']
        res = ('', 1)
        if command==DEREF:
            # Flush out the tempbuffer
            if (len(tempbuffer) > 0) :
                compiled += js_buffer_append_multi(tempbuffer)
                tempbuffer = []
            # Recurse to compile the section body; res[1] counts the tokens it ate.
            res = compileTokensJS(tokens[i+1:])
            res = ("handleHash(function(__ctx, __world) {\n" + res[0] + "\n}, '" + arg + "', __ctx, __world);", res[1]+1 )
        elif command==INCLUDE:
            # Flush out the tempbuffer
            if (len(tempbuffer) > 0) :
                compiled += js_buffer_append_multi(tempbuffer)
                tempbuffer = []
            if options.rollup:
                basePath = options.basepath
                templateStripped = []
                try:
                    print "Processing partial: " + basePath + arg + ".mu"
                    template = open(basePath + arg + ".mu", 'rU')
                    templateStripped = [line.strip()+"\n" for line in template]
                except:
                    print "Could not open "+ basePath + arg + ".mu"
                    res = ('', 1)
                subtokens = genTokens("".join(templateStripped))
                res = (compileTokensJS(subtokens )[0], 1)
            else:
                deps[arg] = arg
                res = ("templates."+js_stomp_filename(arg)+"(__ctx, __world);\n", 1)
        elif command==NEGATE:
            if (len(tempbuffer) > 0) :
                compiled += js_buffer_append_multi(tempbuffer)
                tempbuffer = []
            res = compileTokensJS(tokens[i+1:])
            res = ("if(!__ctx['"+arg+"']) {\n" + res[0] + "}\n", res[1]+1)
        elif command==CFGSTRING:
            tempbuffer.append("str('"+arg+"', __ctx, __world)")
        elif command==VARIABLE:
            tempbuffer.append("__ctx['"+arg+"']")
        elif command==CONTENT:
            if arg != "":
                tempbuffer.append(js_escape(arg))
        #print res
        compiled += res[0]
        i+= res[1]
    # Flush out the tempbuffer
    if (len(tempbuffer) > 0) :
        compiled += js_buffer_append_multi(tempbuffer)
        tempbuffer = []
    return (compiled, i+1)
#returns compiled (string, number of tokens consumed)
def compileTokensPHP(tokens):
    """Compile a token stream (up to its matching END token) into PHP source.

    Returns (compiled_php, tokens_consumed). Side effects: records partials
    in the module-global `deps` when not in rollup mode; in rollup mode
    partials are read from disk and inlined.
    """
    global deps
    compiled = ''
    i = 0
    while i < len(tokens) and tokens[i]['command'] != END:
        #print tokens[i]
        command = tokens[i]['command']
        arg = tokens[i]['arg']
        res = ('', 0)
        if command==DEREF:
            # Recurse to compile the section body, then wrap it in PHP that
            # iterates the dereferenced value (scalar, object or array).
            res = compileTokensPHP(tokens[i+1:])
            res = ("<?php $_varname = '"+arg+"'; " +
            """
            $_items = array();
            $_var = $ctx->$_varname;
            $_should_descend_context = !is_scalar($_var);
            if($_var) {
                if(!is_array($_var)) {
                    $_items[] = $_var;
                }
                else {
                    $_items = $_var;
                }
            }
            $stk[] = $ctx;
            foreach($_items as $_ctx_item) {
                if($_should_descend_context) {
                    $ctx = $_ctx_item;
                }
            ?>"""
            + res[0] + "<?php } $ctx = array_pop($stk); ?>", res[1]+1 )
        elif command==INCLUDE:
            if options.rollup:
                basePath = options.basepath
                try:
                    print "Processing partial: " + basePath + arg + ".mu"
                    template = open(basePath + arg + ".mu", 'rU')
                    templateStripped = [line.strip()+"\n" for line in template]
                    subtokens = genTokens("".join(templateStripped))
                    res = (compileTokensPHP(subtokens )[0], 1)
                except:
                    print "Could not open "+ basePath + arg + ".mu"
                    res = ('', 1)
            else:
                #fname = js_stomp_filename(arg)
                deps[arg] = arg
                res = ("<?php include($_TEMPLATE_BASE.'"+arg+".inc'); ?>", 1)
        elif command==NEGATE:
            res = compileTokensPHP(tokens[i+1:])
            res = ("<?php $_var = '"+arg+"'; if(!isset($ctx->$_var) || empty($ctx->$_var) ) { ?>" + res[0] + "<?php } ?>", res[1]+1)
        elif command==CFGSTRING:
            res = ("<?php echo $this->getIString('"+arg+"', $ctx); ?>", 1)
        elif command==VARIABLE:
            res = ("<?php $_var = '"+arg+"'; echo $ctx->$_var; ?>", 1)
        elif command==CONTENT:
            res = (arg, 1)
        #print res
        compiled += res[0]
        i+= res[1]
    return (compiled, i+1)
# setup php path
# Derive the output path: explicit -o wins; otherwise mirror the source tree
# into <baseoutpath>/php_translated/ or <baseoutpath>/js_translated/.
basename, extension = os.path.splitext(options.filename)
sourcedir = "mustache/"
if options.basepath:
    sourcedir = options.basepath
destdir = ""
if options.baseoutpath:
    destdir = options.baseoutpath
if options.output:
    newPath = options.output
else:
    if options.type and options.type == 'php':
        newPath = basename.replace(sourcedir, destdir +"php_translated/") + ".inc"
    else:
        newPath = basename.replace(sourcedir, destdir + "js_translated/") + ".js"
newPathDir = os.path.dirname(newPath)
if not os.path.exists(newPathDir) :
    os.makedirs(newPathDir)
if options.rollup:
    basename, extension = os.path.splitext(newPath)
    newPath = basename + "_rollup" + extension
print "Processing "+options.filename+" into "+newPath
#print(compile_template(sys.argv[1]))
f = open(newPath, 'w')
f.write(compile_template(options.filename))
f.close()
#if a basepath has been specified, build dependent templates:
if options.basepath and options.baseoutpath:
    print deps
    # Breadth-first compile of partials: compile_template() refills the global
    # `deps` on each call, and newly discovered partials feed the next round.
    # NOTE(review): a dependency cycle between partials would loop forever here.
    depstack = deps
    while len(depstack) > 0:
        deps = depstack
        depstack = {}
        for key in deps.keys():
            basename = options.basepath + key
            if options.type and options.type == 'php':
                newPath = basename.replace(options.basepath, options.baseoutpath + "php_translated/") + ".inc"
            else:
                newPath = basename.replace(options.basepath, options.baseoutpath + "js_translated/") + ".js"
            newPathDir = os.path.dirname(newPath)
            if not os.path.exists(newPathDir) :
                os.makedirs(newPathDir)
            print "+ Processing dependency "+key+" into "+newPath
            #print(compile_template(sys.argv[1]))
            f = open(newPath, 'w')
            f.write(compile_template(options.basepath + key + ".mu"))
            f.close()
            depstack.update(deps)
elif not options.rollup:
print "WARNING: not rollup, and no dependencies generated (basepath and baseoutpath must both be specified to generate dependencies)" | 34.44357 | 166 | 0.522518 | import sys
import os
import re
from optparse import OptionParser
parser = OptionParser()
parser.add_option("-f", "--f", dest="filename", help="The mustache template to process")
parser.add_option("-r", "--rollup", dest="rollup", action="store_true", help="Rollup this template and it's dependencies into a single file.")
parser.add_option("-o", "--output", dest="output", help="The output file of this generated template")
parser.add_option("-b", "--basepath", dest="basepath", help="The base path of the mustache directory")
parser.add_option("-d", "--baseoutpath", dest="baseoutpath", help="Directory to place output in. Overridden by -o, but not for dependencies.")
parser.add_option("-t", "--type", dest="type", help="Whether to output Javascript or PHP")
(options, args) = parser.parse_args()
def js_stomp_filename(fname):
return re.sub(r'[^a-zA-Z0-9]', '_', fname)
def jsrepl(matchobj):
res = matchobj.group(0)
if res.isspace():
return ' '
else:
return '\\' + res
def js_escape(str):
smashspaced = re.sub(r'\s+', ' ', str)
return '"'+re.sub(r'([\'"])', jsrepl, smashspaced)+'"'
# This is used to concat all simple tokens and strings into one .push() statement
def js_buffer_append_multi(arr):
return "ob.push(" + ", ".join(arr) + ");\n"
#token types
CONTENT = 1
DEREF = 2
END = 3
NEGATE = 4
INCLUDE = 5
CFGSTRING = 7
VARIABLE = 8
#Dependency list for template being compiled
deps = {}
def token(command, arg):
return {
'command': command,
'arg': arg
}
def genTokens(template):
negation = None
tokens = []
start = template.find('{{', 0)
lastEnd = 0
###################
#Tokenization pass#
###################
#PYTHON SUCKS
#TODO: some cases of a missing } can cause this to hang.
#If your build fails because this script got wedged here,
#look for errors in the template it wass processing when it died.
while (start) >= 0:
tokens.append(token(CONTENT, template[lastEnd:start]))
start+=2
end = template.find('}}', start)
directive = template[start:end]
command = directive[0:1]
name = directive[1:].strip()
#print "DIR "+directive
#print "COM "+command
#print "NAM "+name
#print "NEG "+str(negation)
if command=='#':
tokens.append(token(DEREF, name))
elif command=='/':
tokens.append(token(END, None))
elif command=='>':
tokens.append(token(INCLUDE, name))
elif command=='^':
#print "entering negation"
#push stack just to maintin consistency with block end code...
tokens.append(token(NEGATE, name))
elif command=='=':
print "Unsupported delimiter change directive encountered. Terminating..."
exit()
elif command=='!':
#print "COMMENT: " + directive
a=1
else:
if command != '{':
#triple brace means unescape, but we don't handle that right now per ymail behavior
name = directive
else:
end += 1 #we will have an extra } closing this directive, so consume it
if name.find('str_') == 0:
tokens.append(token(CFGSTRING, name))
else:
tokens.append(token(VARIABLE, name))
lastEnd = end+2
start = template.find('{{', end+2)
tokens.append(token(CONTENT, template[lastEnd:]))
tokens.append(token(END, None))
return tokens
def compile_template(filename):
global deps
templateStripped = []
try:
template = open(filename, 'r')
#mustache delimiters will run together with code if we don't have at least a space between lines.
templateStripped = [line.strip()+"\n" for line in template]
#templateStripped = [line for line in template]
except:
print "Could not open "+filename
return ''
tokens = genTokens("".join(templateStripped))
##TODO: make it return local output buffer instead of modifying global one?
deps = {}
global options
if options.type and options.type == 'php':
return compileTokensPHP(tokens)[0]
else:
compiled = compileTokensJS(tokens)[0] #this fills in deps
MODULE_PREFIX = 'mu_'
depStr = "',\n'".join([MODULE_PREFIX+js_stomp_filename(key) for key in deps.keys()])
if len(depStr) != 0: depStr = "'"+depStr+"'"
try:
idx = filename.index("mustache/")
idx += 9 # length of 'mustache/'
except:
idx = 0
fname = js_stomp_filename(filename[idx:][:-3]) #trim off mustache/ dir and .mu extension
if options.rollup:
fname = fname + "_rollup"
res = ("/* AUTO-GENERATED FILE. DO NOT EDIT. */\n" +
"YUI.add('"+MODULE_PREFIX+fname+"',function(Y){\n" +
"Y.namespace('ui.MuTemplates');\n" +
"Y.ui.MuTemplates."+fname+" = function(__ctx, __world) {\n" +
"var ob = __world.outbuffer,"+
"str=__world.strings,"+
"handleHash=__world.handleHashsign,\n"+
"templates=__world.templates;\n"+
compiled + "\n}\n" +
"}, '1.0.0', {requires:["+depStr+"]});")
return res
#returns compiled (string, number of tokens consumed)
def compileTokensJS(tokens):
global deps
compiled = ''
i = 0
tempbuffer = []
while i < len(tokens) and tokens[i]['command'] != END:
#print tokens[i]
command = tokens[i]['command']
arg = tokens[i]['arg']
res = ('', 1)
if command==DEREF:
# Flush out the tempbuffer
if (len(tempbuffer) > 0) :
compiled += js_buffer_append_multi(tempbuffer)
tempbuffer = []
res = compileTokensJS(tokens[i+1:])
res = ("handleHash(function(__ctx, __world) {\n" + res[0] + "\n}, '" + arg + "', __ctx, __world);", res[1]+1 )
elif command==INCLUDE:
# Flush out the tempbuffer
if (len(tempbuffer) > 0) :
compiled += js_buffer_append_multi(tempbuffer)
tempbuffer = []
if options.rollup:
basePath = options.basepath
templateStripped = []
try:
print "Processing partial: " + basePath + arg + ".mu"
template = open(basePath + arg + ".mu", 'rU')
templateStripped = [line.strip()+"\n" for line in template]
except:
print "Could not open "+ basePath + arg + ".mu"
res = ('', 1)
subtokens = genTokens("".join(templateStripped))
res = (compileTokensJS(subtokens )[0], 1)
else:
deps[arg] = arg
res = ("templates."+js_stomp_filename(arg)+"(__ctx, __world);\n", 1)
elif command==NEGATE:
if (len(tempbuffer) > 0) :
compiled += js_buffer_append_multi(tempbuffer)
tempbuffer = []
res = compileTokensJS(tokens[i+1:])
res = ("if(!__ctx['"+arg+"']) {\n" + res[0] + "}\n", res[1]+1)
elif command==CFGSTRING:
tempbuffer.append("str('"+arg+"', __ctx, __world)")
elif command==VARIABLE:
tempbuffer.append("__ctx['"+arg+"']")
elif command==CONTENT:
if arg != "":
tempbuffer.append(js_escape(arg))
#print res
compiled += res[0]
i+= res[1]
# Flush out the tempbuffer
if (len(tempbuffer) > 0) :
compiled += js_buffer_append_multi(tempbuffer)
tempbuffer = []
return (compiled, i+1)
#returns compiled (string, number of tokens consumed)
def compileTokensPHP(tokens):
global deps
compiled = ''
i = 0
while i < len(tokens) and tokens[i]['command'] != END:
#print tokens[i]
command = tokens[i]['command']
arg = tokens[i]['arg']
res = ('', 0)
if command==DEREF:
res = compileTokensPHP(tokens[i+1:])
res = ("<?php $_varname = '"+arg+"'; " +
"""
$_items = array();
$_var = $ctx->$_varname;
$_should_descend_context = !is_scalar($_var);
if($_var) {
if(!is_array($_var)) {
$_items[] = $_var;
}
else {
$_items = $_var;
}
}
$stk[] = $ctx;
foreach($_items as $_ctx_item) {
if($_should_descend_context) {
$ctx = $_ctx_item;
}
?>"""
+ res[0] + "<?php } $ctx = array_pop($stk); ?>", res[1]+1 )
elif command==INCLUDE:
if options.rollup:
basePath = options.basepath
try:
print "Processing partial: " + basePath + arg + ".mu"
template = open(basePath + arg + ".mu", 'rU')
templateStripped = [line.strip()+"\n" for line in template]
subtokens = genTokens("".join(templateStripped))
res = (compileTokensPHP(subtokens )[0], 1)
except:
print "Could not open "+ basePath + arg + ".mu"
res = ('', 1)
else:
#fname = js_stomp_filename(arg)
deps[arg] = arg
res = ("<?php include($_TEMPLATE_BASE.'"+arg+".inc'); ?>", 1)
elif command==NEGATE:
res = compileTokensPHP(tokens[i+1:])
res = ("<?php $_var = '"+arg+"'; if(!isset($ctx->$_var) || empty($ctx->$_var) ) { ?>" + res[0] + "<?php } ?>", res[1]+1)
elif command==CFGSTRING:
res = ("<?php echo $this->getIString('"+arg+"', $ctx); ?>", 1)
elif command==VARIABLE:
res = ("<?php $_var = '"+arg+"'; echo $ctx->$_var; ?>", 1)
elif command==CONTENT:
res = (arg, 1)
#print res
compiled += res[0]
i+= res[1]
return (compiled, i+1)
# setup php path
basename, extension = os.path.splitext(options.filename)
sourcedir = "mustache/"
if options.basepath:
sourcedir = options.basepath
destdir = ""
if options.baseoutpath:
destdir = options.baseoutpath
if options.output:
newPath = options.output
else:
if options.type and options.type == 'php':
newPath = basename.replace(sourcedir, destdir +"php_translated/") + ".inc"
else:
newPath = basename.replace(sourcedir, destdir + "js_translated/") + ".js"
newPathDir = os.path.dirname(newPath)
if not os.path.exists(newPathDir) :
os.makedirs(newPathDir)
if options.rollup:
basename, extension = os.path.splitext(newPath)
newPath = basename + "_rollup" + extension
print "Processing "+options.filename+" into "+newPath
#print(compile_template(sys.argv[1]))
f = open(newPath, 'w')
f.write(compile_template(options.filename))
f.close()
#if a basepath has been specified, build dependent templates:
if options.basepath and options.baseoutpath:
print deps
depstack = deps
while len(depstack) > 0:
deps = depstack
depstack = {}
for key in deps.keys():
basename = options.basepath + key
if options.type and options.type == 'php':
newPath = basename.replace(options.basepath, options.baseoutpath + "php_translated/") + ".inc"
else:
newPath = basename.replace(options.basepath, options.baseoutpath + "js_translated/") + ".js"
newPathDir = os.path.dirname(newPath)
if not os.path.exists(newPathDir) :
os.makedirs(newPathDir)
print "+ Processing dependency "+key+" into "+newPath
#print(compile_template(sys.argv[1]))
f = open(newPath, 'w')
f.write(compile_template(options.basepath + key + ".mu"))
f.close()
depstack.update(deps)
elif not options.rollup:
print "WARNING: not rollup, and no dependencies generated (basepath and baseoutpath must both be specified to generate dependencies)" | false | true |
1c45828c2da100de725a4b389922ca6abe3ce11d | 1,901 | py | Python | setup.py | cajfisher/vasppy | a460db14163b7db3bce54d754dd476c45a3ed85b | [
"MIT"
] | 28 | 2017-02-16T13:22:34.000Z | 2021-04-29T06:10:10.000Z | setup.py | cajfisher/vasppy | a460db14163b7db3bce54d754dd476c45a3ed85b | [
"MIT"
] | 15 | 2016-05-09T13:08:42.000Z | 2021-08-09T10:59:58.000Z | setup.py | cajfisher/vasppy | a460db14163b7db3bce54d754dd476c45a3ed85b | [
"MIT"
] | 25 | 2015-10-12T11:29:22.000Z | 2021-08-20T17:33:27.000Z | """
vasppy: Python utilities for working with VASP inputs and outputs.
"""
from setuptools import setup, find_packages
from vasppy.version import __version__ as VERSION
readme = 'README.md'
long_description = open(readme).read()
scripts = ['check_species',
'murnfit',
'vasp_summary',
'poscar_to_cif',
'potcar_spec',
'effective_mass',
'fat_bands',
'pimaim_to_poscar',
'pimaim_to_xtl',
'poscar_sort',
'poscar_to_pimaim',
'poscar_to_xtl',
'proc_poscar',
'rotate_poscar',
'spacegroup',
'vasp_grid',
'xdatcar_to_disp',
'xdatcar_to_poscart',
'xdatcar_to_rdf']
setup(
name='vasppy',
version=VERSION,
description='Python utilities for working with VASP inputs and outputs',
long_description=long_description,
long_description_content_type="text/markdown",
author='Benjamin J. Morgan',
author_email='bjm42@bath.ac.uk',
url='https://github.com/bjmorgan/vasppy',
download_url='https://github.com/bjmorgan/vasppy/archive/{}.tar.gz'.format(VERSION),
keywords=['vasp'], # keywords
packages=find_packages(exclude=['docs', 'tests*']),
package_data={'vasppy': ['data/*.yaml']},
entry_points={'console_scripts':[
'{} = vasppy.scripts.{}:main'.format(s, s) for s in scripts]},
license='MIT',
install_requires=['monty',
'numpy>=1.16.2',
'pandas',
'pymatgen>=2022.0.0',
'PyYAML',
'coverage==4.3.4',
'codeclimate-test-reporter',
'fortranformat',
'scipy>=1.4.1',
'tqdm',
'lxml'],
python_requires='>=3.7'
)
| 31.683333 | 88 | 0.538138 |
from setuptools import setup, find_packages
from vasppy.version import __version__ as VERSION
readme = 'README.md'
long_description = open(readme).read()
scripts = ['check_species',
'murnfit',
'vasp_summary',
'poscar_to_cif',
'potcar_spec',
'effective_mass',
'fat_bands',
'pimaim_to_poscar',
'pimaim_to_xtl',
'poscar_sort',
'poscar_to_pimaim',
'poscar_to_xtl',
'proc_poscar',
'rotate_poscar',
'spacegroup',
'vasp_grid',
'xdatcar_to_disp',
'xdatcar_to_poscart',
'xdatcar_to_rdf']
setup(
name='vasppy',
version=VERSION,
description='Python utilities for working with VASP inputs and outputs',
long_description=long_description,
long_description_content_type="text/markdown",
author='Benjamin J. Morgan',
author_email='bjm42@bath.ac.uk',
url='https://github.com/bjmorgan/vasppy',
download_url='https://github.com/bjmorgan/vasppy/archive/{}.tar.gz'.format(VERSION),
keywords=['vasp'],
packages=find_packages(exclude=['docs', 'tests*']),
package_data={'vasppy': ['data/*.yaml']},
entry_points={'console_scripts':[
'{} = vasppy.scripts.{}:main'.format(s, s) for s in scripts]},
license='MIT',
install_requires=['monty',
'numpy>=1.16.2',
'pandas',
'pymatgen>=2022.0.0',
'PyYAML',
'coverage==4.3.4',
'codeclimate-test-reporter',
'fortranformat',
'scipy>=1.4.1',
'tqdm',
'lxml'],
python_requires='>=3.7'
)
| true | true |
1c4582bb37d8bf82a9eadb8ac9e0bbddd1dde76a | 7,194 | py | Python | hack/boilerplate/boilerplate.py | moelsayed/kubeone | bec424b09d2d0cb5d97347469c947ab66c5c1d91 | [
"Apache-2.0"
] | 1 | 2020-02-13T17:46:28.000Z | 2020-02-13T17:46:28.000Z | hack/boilerplate/boilerplate.py | moelsayed/kubeone | bec424b09d2d0cb5d97347469c947ab66c5c1d91 | [
"Apache-2.0"
] | null | null | null | hack/boilerplate/boilerplate.py | moelsayed/kubeone | bec424b09d2d0cb5d97347469c947ab66c5c1d91 | [
"Apache-2.0"
] | 1 | 2020-05-06T15:33:38.000Z | 2020-05-06T15:33:38.000Z | #!/usr/bin/env python
# Copyright 2019 The KubeOne Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import argparse
import datetime
import difflib
import glob
import os
import re
import sys
parser = argparse.ArgumentParser()
parser.add_argument(
"filenames",
help="list of files to check, all files if unspecified",
nargs='*')
rootdir = os.path.dirname(__file__) + "/../../"
rootdir = os.path.abspath(rootdir)
parser.add_argument(
"--rootdir", default=rootdir, help="root directory to examine")
default_boilerplate_dir = os.path.join(rootdir, "hack/boilerplate")
parser.add_argument(
"--boilerplate-dir", default=default_boilerplate_dir)
parser.add_argument(
"-v", "--verbose",
help="give verbose output regarding why a file does not pass",
action="store_true")
args = parser.parse_args()
verbose_out = sys.stderr if args.verbose else open("/dev/null", "w")
def get_refs():
refs = {}
for path in glob.glob(os.path.join(args.boilerplate_dir, "boilerplate.*.txt")):
extension = os.path.basename(path).split(".")[1]
ref_file = open(path, 'r')
ref = ref_file.read().splitlines()
ref_file.close()
refs[extension] = ref
return refs
def is_generated_file(filename, data, regexs):
for d in skipped_ungenerated_files:
if d in filename:
return False
p = regexs["generated"]
return p.search(data)
def file_passes(filename, refs, regexs):
try:
f = open(filename, 'r')
except Exception as exc:
print("Unable to open %s: %s" % (filename, exc), file=verbose_out)
return False
data = f.read()
f.close()
# determine if the file is automatically generated
generated = is_generated_file(filename, data, regexs)
basename = os.path.basename(filename)
if generated:
extension = "generatego"
else:
extension = file_extension(filename)
if extension != "":
ref = refs[extension]
else:
ref = refs[basename]
# remove extra content from the top of files
if extension == "go" or extension == "generatego":
p = regexs["go_build_constraints"]
(data, found) = p.subn("", data, 1)
elif extension == "sh":
p = regexs["shebang"]
(data, found) = p.subn("", data, 1)
data = data.splitlines()
# if our test file is smaller than the reference it surely fails!
if len(ref) > len(data):
print('File %s smaller than reference (%d < %d)' %
(filename, len(data), len(ref)),
file=verbose_out)
return False
# trim our file to the same number of lines as the reference file
data = data[:len(ref)]
p = regexs["year"]
for d in data:
if p.search(d):
if generated:
print('File %s has the YEAR field, but it should not be in generated file' % filename, file=verbose_out)
else:
print('File %s has the YEAR field, but missing the year of date' % filename, file=verbose_out)
return False
if not generated:
# Replace all occurrences of the regex "2014|2015|2016|2017|2018" with "YEAR"
p = regexs["date"]
for i, d in enumerate(data):
(data[i], found) = p.subn('YEAR', d)
if found != 0:
break
# if we don't match the reference at this point, fail
if ref != data:
print("Header in %s does not match reference, diff:" % filename, file=verbose_out)
if args.verbose:
print(file=verbose_out)
for line in difflib.unified_diff(ref, data, 'reference', filename, lineterm=''):
print(line, file=verbose_out)
print(file=verbose_out)
return False
return True
def file_extension(filename):
return os.path.splitext(filename)[1].split(".")[-1].lower()
skipped_dirs = [
'bin',
'Godeps',
'.git',
'vendor',
'hack/boilerplate/test',
'pkg/apis/kubeadm/v1beta1',
'pkg/apis/kubeadm/v1beta2',
]
# list all the files contain 'DO NOT EDIT', but are not generated
skipped_ungenerated_files = ['hack/boilerplate/boilerplate.py']
def normalize_files(files):
newfiles = []
for pathname in files:
if any(x in pathname for x in skipped_dirs):
continue
newfiles.append(pathname)
for i, pathname in enumerate(newfiles):
if not os.path.isabs(pathname):
newfiles[i] = os.path.join(args.rootdir, pathname)
return newfiles
def get_files(extensions):
files = []
if len(args.filenames) > 0:
files = args.filenames
else:
for root, dirs, walkfiles in os.walk(args.rootdir):
# don't visit certain dirs. This is just a performance improvement
# as we would prune these later in normalize_files(). But doing it
# cuts down the amount of filesystem walking we do and cuts down
# the size of the file list
for d in skipped_dirs:
if d in dirs:
dirs.remove(d)
for name in walkfiles:
pathname = os.path.join(root, name)
files.append(pathname)
files = normalize_files(files)
outfiles = []
for pathname in files:
basename = os.path.basename(pathname)
extension = file_extension(pathname)
if extension in extensions or basename in extensions:
outfiles.append(pathname)
return outfiles
def get_dates():
years = datetime.datetime.now().year
return '(%s)' % '|'.join((str(year) for year in range(2014, years+1)))
def get_regexs():
regexs = {}
# Search for "YEAR" which exists in the boilerplate, but shouldn't in the real thing
regexs["year"] = re.compile('YEAR')
# get_dates return 2014, 2015, 2016, 2017, or 2018 until the current year as a regex like: "(2014|2015|2016|2017|2018)";
# company holder names can be anything
regexs["date"] = re.compile(get_dates())
# strip // +build \n\n build constraints
regexs["go_build_constraints"] = re.compile(
r"^(// \+build.*\n)+\n", re.MULTILINE)
# strip #!.* from shell scripts
regexs["shebang"] = re.compile(r"^(#!.*\n)\n*", re.MULTILINE)
# Search for generated files
regexs["generated"] = re.compile('DO NOT EDIT')
return regexs
def main():
regexs = get_regexs()
refs = get_refs()
filenames = get_files(refs.keys())
for filename in filenames:
if not file_passes(filename, refs, regexs):
print(filename, file=sys.stdout)
return 0
if __name__ == "__main__":
sys.exit(main())
| 29.604938 | 124 | 0.630108 |
from __future__ import print_function
import argparse
import datetime
import difflib
import glob
import os
import re
import sys
parser = argparse.ArgumentParser()
parser.add_argument(
"filenames",
help="list of files to check, all files if unspecified",
nargs='*')
rootdir = os.path.dirname(__file__) + "/../../"
rootdir = os.path.abspath(rootdir)
parser.add_argument(
"--rootdir", default=rootdir, help="root directory to examine")
default_boilerplate_dir = os.path.join(rootdir, "hack/boilerplate")
parser.add_argument(
"--boilerplate-dir", default=default_boilerplate_dir)
parser.add_argument(
"-v", "--verbose",
help="give verbose output regarding why a file does not pass",
action="store_true")
args = parser.parse_args()
verbose_out = sys.stderr if args.verbose else open("/dev/null", "w")
def get_refs():
refs = {}
for path in glob.glob(os.path.join(args.boilerplate_dir, "boilerplate.*.txt")):
extension = os.path.basename(path).split(".")[1]
ref_file = open(path, 'r')
ref = ref_file.read().splitlines()
ref_file.close()
refs[extension] = ref
return refs
def is_generated_file(filename, data, regexs):
for d in skipped_ungenerated_files:
if d in filename:
return False
p = regexs["generated"]
return p.search(data)
def file_passes(filename, refs, regexs):
try:
f = open(filename, 'r')
except Exception as exc:
print("Unable to open %s: %s" % (filename, exc), file=verbose_out)
return False
data = f.read()
f.close()
generated = is_generated_file(filename, data, regexs)
basename = os.path.basename(filename)
if generated:
extension = "generatego"
else:
extension = file_extension(filename)
if extension != "":
ref = refs[extension]
else:
ref = refs[basename]
if extension == "go" or extension == "generatego":
p = regexs["go_build_constraints"]
(data, found) = p.subn("", data, 1)
elif extension == "sh":
p = regexs["shebang"]
(data, found) = p.subn("", data, 1)
data = data.splitlines()
if len(ref) > len(data):
print('File %s smaller than reference (%d < %d)' %
(filename, len(data), len(ref)),
file=verbose_out)
return False
data = data[:len(ref)]
p = regexs["year"]
for d in data:
if p.search(d):
if generated:
print('File %s has the YEAR field, but it should not be in generated file' % filename, file=verbose_out)
else:
print('File %s has the YEAR field, but missing the year of date' % filename, file=verbose_out)
return False
if not generated:
p = regexs["date"]
for i, d in enumerate(data):
(data[i], found) = p.subn('YEAR', d)
if found != 0:
break
if ref != data:
print("Header in %s does not match reference, diff:" % filename, file=verbose_out)
if args.verbose:
print(file=verbose_out)
for line in difflib.unified_diff(ref, data, 'reference', filename, lineterm=''):
print(line, file=verbose_out)
print(file=verbose_out)
return False
return True
def file_extension(filename):
return os.path.splitext(filename)[1].split(".")[-1].lower()
skipped_dirs = [
'bin',
'Godeps',
'.git',
'vendor',
'hack/boilerplate/test',
'pkg/apis/kubeadm/v1beta1',
'pkg/apis/kubeadm/v1beta2',
]
# list all the files contain 'DO NOT EDIT', but are not generated
skipped_ungenerated_files = ['hack/boilerplate/boilerplate.py']
def normalize_files(files):
newfiles = []
for pathname in files:
if any(x in pathname for x in skipped_dirs):
continue
newfiles.append(pathname)
for i, pathname in enumerate(newfiles):
if not os.path.isabs(pathname):
newfiles[i] = os.path.join(args.rootdir, pathname)
return newfiles
def get_files(extensions):
files = []
if len(args.filenames) > 0:
files = args.filenames
else:
for root, dirs, walkfiles in os.walk(args.rootdir):
# don't visit certain dirs. This is just a performance improvement
for d in skipped_dirs:
if d in dirs:
dirs.remove(d)
for name in walkfiles:
pathname = os.path.join(root, name)
files.append(pathname)
files = normalize_files(files)
outfiles = []
for pathname in files:
basename = os.path.basename(pathname)
extension = file_extension(pathname)
if extension in extensions or basename in extensions:
outfiles.append(pathname)
return outfiles
def get_dates():
years = datetime.datetime.now().year
return '(%s)' % '|'.join((str(year) for year in range(2014, years+1)))
def get_regexs():
regexs = {}
regexs["year"] = re.compile('YEAR')
# get_dates return 2014, 2015, 2016, 2017, or 2018 until the current year as a regex like: "(2014|2015|2016|2017|2018)";
# company holder names can be anything
regexs["date"] = re.compile(get_dates())
# strip // +build \n\n build constraints
regexs["go_build_constraints"] = re.compile(
r"^(// \+build.*\n)+\n", re.MULTILINE)
# strip #!.* from shell scripts
regexs["shebang"] = re.compile(r"^(#!.*\n)\n*", re.MULTILINE)
# Search for generated files
regexs["generated"] = re.compile('DO NOT EDIT')
return regexs
def main():
regexs = get_regexs()
refs = get_refs()
filenames = get_files(refs.keys())
for filename in filenames:
if not file_passes(filename, refs, regexs):
print(filename, file=sys.stdout)
return 0
if __name__ == "__main__":
sys.exit(main())
| true | true |
1c45839120b9c193c462707af258c3c9bfffdfa3 | 568 | py | Python | tests/test_level1/test_visited.py | kianmeng/soupsieve | a8640aad6ae0476e6b62f4f15e12ad4efc7605c4 | [
"MIT"
] | 130 | 2018-12-27T06:00:32.000Z | 2022-03-29T05:47:18.000Z | tests/test_level1/test_visited.py | kianmeng/soupsieve | a8640aad6ae0476e6b62f4f15e12ad4efc7605c4 | [
"MIT"
] | 157 | 2018-12-07T07:44:15.000Z | 2022-02-05T16:20:08.000Z | tests/test_level1/test_visited.py | kianmeng/soupsieve | a8640aad6ae0476e6b62f4f15e12ad4efc7605c4 | [
"MIT"
] | 32 | 2018-12-31T03:11:55.000Z | 2022-03-06T09:06:43.000Z | """Test visited selectors."""
from .. import util
class TestVisited(util.TestCase):
"""Test visited selectors."""
def test_visited(self):
"""Test visited."""
markup = """
<div>
<p>Some text <span id="1" class="foo:bar:foobar"> in a paragraph</span>.
<a id="2" class="bar" href="http://google.com">Link</a>
<a id="3">Placeholder text.</a>
</p>
</div>
"""
self.assert_selector(
markup,
"a:visited",
[],
flags=util.HTML
)
| 21.846154 | 80 | 0.482394 | from .. import util
class TestVisited(util.TestCase):
def test_visited(self):
markup = """
<div>
<p>Some text <span id="1" class="foo:bar:foobar"> in a paragraph</span>.
<a id="2" class="bar" href="http://google.com">Link</a>
<a id="3">Placeholder text.</a>
</p>
</div>
"""
self.assert_selector(
markup,
"a:visited",
[],
flags=util.HTML
)
| true | true |
1c4584ac1bc01ab917fbb00db92b230e45196a27 | 5,228 | py | Python | export.py | OleksandrBlack/safecoinnodes | 0021edc8e72e078fcd7bedb465292c96caeeb148 | [
"MIT"
] | null | null | null | export.py | OleksandrBlack/safecoinnodes | 0021edc8e72e078fcd7bedb465292c96caeeb148 | [
"MIT"
] | null | null | null | export.py | OleksandrBlack/safecoinnodes | 0021edc8e72e078fcd7bedb465292c96caeeb148 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# export.py - Exports enumerated data for reachable nodes into a JSON file.
#
# Copyright (c) Addy Yeow Chin Heng <ayeowch@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
Exports enumerated data for reachable nodes into a JSON file.
"""
import json
import logging
import os
import sys
import time
from binascii import hexlify, unhexlify
from ConfigParser import ConfigParser
from utils import new_redis_conn
REDIS_CONN = None
CONF = {}
def get_row(node):
"""
Returns enumerated row data from Redis for the specified node.
"""
# address, port, version, user_agent, timestamp, services
node = eval(node)
address = node[0]
port = node[1]
services = node[-1]
height = REDIS_CONN.get('height:{}-{}-{}'.format(address, port, services))
if height is None:
height = (0,)
else:
height = (int(height),)
hostname = REDIS_CONN.hget('resolve:{}'.format(address), 'hostname')
hostname = (hostname,)
geoip = REDIS_CONN.hget('resolve:{}'.format(address), 'geoip')
if geoip is None:
# city, country, latitude, longitude, timezone, asn, org
geoip = (None, None, 0.0, 0.0, None, None, None)
else:
geoip = eval(geoip)
return node + height + hostname + geoip
MAX_DUMPED_SNAPSHOTS = 500
def export_nodes(nodes, timestamp):
"""
Merges enumerated data for the specified nodes and exports them into
timestamp-prefixed JSON file.
"""
rows = []
start = time.time()
for node in nodes:
row = get_row(node)
rows.append(row)
end = time.time()
elapsed = end - start
logging.info("Elapsed: %d", elapsed)
dump = os.path.join(CONF['export_dir'], "{}.json".format(timestamp))
open(dump, 'w').write(json.dumps(rows, encoding="latin-1"))
REDIS_CONN.lpush('dumped_snapshots', timestamp)
REDIS_CONN.ltrim('dumped_snapshots', 0, MAX_DUMPED_SNAPSHOTS)
logging.info("Wrote %s", dump)
def init_conf(argv):
"""
Populates CONF with key-value pairs from configuration file.
"""
conf = ConfigParser()
conf.read(argv[1])
CONF['logfile'] = conf.get('export', 'logfile')
CONF['magic_number'] = unhexlify(conf.get('export', 'magic_number'))
CONF['db'] = conf.getint('export', 'db')
CONF['debug'] = conf.getboolean('export', 'debug')
CONF['export_dir'] = conf.get('export', 'export_dir')
if not os.path.exists(CONF['export_dir']):
os.makedirs(CONF['export_dir'])
def main(argv):
if len(argv) < 2 or not os.path.exists(argv[1]):
print("Usage: export.py [config]")
return 1
# Initialize global conf
init_conf(argv)
# Initialize logger
loglevel = logging.INFO
if CONF['debug']:
loglevel = logging.DEBUG
logformat = ("%(asctime)s,%(msecs)05.1f %(levelname)s (%(funcName)s) "
"%(message)s")
logging.basicConfig(level=loglevel,
format=logformat,
filename=CONF['logfile'],
filemode='w')
print("Log: {}, press CTRL+C to terminate..".format(CONF['logfile']))
global REDIS_CONN
REDIS_CONN = new_redis_conn(db=CONF['db'])
subscribe_key = 'resolve:{}'.format(hexlify(CONF['magic_number']))
publish_key = 'export:{}'.format(hexlify(CONF['magic_number']))
pubsub = REDIS_CONN.pubsub()
pubsub.subscribe(subscribe_key)
while True:
msg = pubsub.get_message()
if msg is None:
time.sleep(0.001) # 1 ms artificial intrinsic latency.
continue
# 'resolve' message is published by resolve.py after resolving hostname
# and GeoIP data for all reachable nodes.
if msg['channel'] == subscribe_key and msg['type'] == 'message':
timestamp = int(msg['data']) # From ping.py's 'snapshot' message
logging.info("Timestamp: %d", timestamp)
nodes = REDIS_CONN.smembers('opendata')
logging.info("Nodes: %d", len(nodes))
export_nodes(nodes, timestamp)
REDIS_CONN.publish(publish_key, timestamp)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
| 32.271605 | 79 | 0.655318 |
import json
import logging
import os
import sys
import time
from binascii import hexlify, unhexlify
from ConfigParser import ConfigParser
from utils import new_redis_conn
REDIS_CONN = None
CONF = {}
def get_row(node):
node = eval(node)
address = node[0]
port = node[1]
services = node[-1]
height = REDIS_CONN.get('height:{}-{}-{}'.format(address, port, services))
if height is None:
height = (0,)
else:
height = (int(height),)
hostname = REDIS_CONN.hget('resolve:{}'.format(address), 'hostname')
hostname = (hostname,)
geoip = REDIS_CONN.hget('resolve:{}'.format(address), 'geoip')
if geoip is None:
geoip = (None, None, 0.0, 0.0, None, None, None)
else:
geoip = eval(geoip)
return node + height + hostname + geoip
MAX_DUMPED_SNAPSHOTS = 500
def export_nodes(nodes, timestamp):
rows = []
start = time.time()
for node in nodes:
row = get_row(node)
rows.append(row)
end = time.time()
elapsed = end - start
logging.info("Elapsed: %d", elapsed)
dump = os.path.join(CONF['export_dir'], "{}.json".format(timestamp))
open(dump, 'w').write(json.dumps(rows, encoding="latin-1"))
REDIS_CONN.lpush('dumped_snapshots', timestamp)
REDIS_CONN.ltrim('dumped_snapshots', 0, MAX_DUMPED_SNAPSHOTS)
logging.info("Wrote %s", dump)
def init_conf(argv):
conf = ConfigParser()
conf.read(argv[1])
CONF['logfile'] = conf.get('export', 'logfile')
CONF['magic_number'] = unhexlify(conf.get('export', 'magic_number'))
CONF['db'] = conf.getint('export', 'db')
CONF['debug'] = conf.getboolean('export', 'debug')
CONF['export_dir'] = conf.get('export', 'export_dir')
if not os.path.exists(CONF['export_dir']):
os.makedirs(CONF['export_dir'])
def main(argv):
    """Run the export loop: wait for 'resolve' messages and dump snapshots.

    :param argv: sys.argv; argv[1] must be the config file path
    :returns: process exit status (1 on usage error; the loop never exits)
    """
    if len(argv) < 2 or not os.path.exists(argv[1]):
        print("Usage: export.py [config]")
        return 1
    init_conf(argv)
    loglevel = logging.INFO
    if CONF['debug']:
        loglevel = logging.DEBUG
    logformat = ("%(asctime)s,%(msecs)05.1f %(levelname)s (%(funcName)s) "
                 "%(message)s")
    logging.basicConfig(level=loglevel,
                        format=logformat,
                        filename=CONF['logfile'],
                        filemode='w')
    print("Log: {}, press CTRL+C to terminate..".format(CONF['logfile']))
    global REDIS_CONN
    REDIS_CONN = new_redis_conn(db=CONF['db'])
    # Pub/sub channels are namespaced by the network magic number.
    subscribe_key = 'resolve:{}'.format(hexlify(CONF['magic_number']))
    publish_key = 'export:{}'.format(hexlify(CONF['magic_number']))
    pubsub = REDIS_CONN.pubsub()
    pubsub.subscribe(subscribe_key)
    while True:
        msg = pubsub.get_message()
        if msg is None:
            time.sleep(0.001)  # avoid busy-waiting on an empty channel
            continue
        if msg['channel'] == subscribe_key and msg['type'] == 'message':
            # Payload is the snapshot timestamp (from ping's 'snapshot' message).
            timestamp = int(msg['data'])
            logging.info("Timestamp: %d", timestamp)
            nodes = REDIS_CONN.smembers('opendata')
            logging.info("Nodes: %d", len(nodes))
            export_nodes(nodes, timestamp)
            # Notify downstream consumers that this snapshot was dumped.
            REDIS_CONN.publish(publish_key, timestamp)
    return 0  # unreachable: the loop above never exits normally


if __name__ == '__main__':
    sys.exit(main(sys.argv))
| true | true |
1c4584cb83547e7b831785c85e43413291a71a8c | 2,135 | py | Python | Stopwatch.py | arapawa/stopwatch-game | 5ee64e04a8dc15ead2dcd8a661105ae1c9087317 | [
"MIT"
] | null | null | null | Stopwatch.py | arapawa/stopwatch-game | 5ee64e04a8dc15ead2dcd8a661105ae1c9087317 | [
"MIT"
] | 1 | 2016-12-30T06:59:12.000Z | 2016-12-30T06:59:12.000Z | Stopwatch.py | arapawa/stopwatch-game | 5ee64e04a8dc15ead2dcd8a661105ae1c9087317 | [
"MIT"
] | null | null | null | # "Stopwatch: The Game"
# tenth of a second between every tick
# every time timer ticks, it will update a global variable by one
import simplegui
# define global variables
time = 0       # elapsed time in tenths of a second
success = 0    # number of stops exactly on a whole second
attempts = 0   # total number of stops while the watch was running
counter = 0    # NOTE(review): never used anywhere in this script
# variable to ensure score can only be increased after stopwatch was running
stopwatch_running = False
# define helper function format that converts time
# in tenths of seconds into formatted string A:BC.D
def format(t):
    """Convert *t* (elapsed time in tenths of a second) to "A:BC.D".

    A is whole minutes, BC the zero-padded seconds within the minute,
    and D the remaining tenths.  Floor division (//) is used throughout
    so the result stays an integer string on Python 3 as well (the
    original used true division, which yields floats there).
    """
    minutes = t // 600
    seconds = (t // 10) % 60
    tenths = t % 10
    return (str(minutes) + ":" + str(seconds // 10) + str(seconds % 10) +
            "." + str(tenths))
# define event handlers for buttons; "Start", "Stop", "Reset"
def button_start():
    """Handler for the "Start" button: kick off the stopwatch timer."""
    stopwatch_timer()
def button_stop():
    """Handler for "Stop": halt the timer and score the attempt.

    A stop counts as a success only when it lands exactly on a whole
    second, and only if the stopwatch was actually running.
    """
    timer.stop()
    global success, attempts, stopwatch_running
    if stopwatch_running == True:
        if (time % 10) == 0:
            # Stopped exactly on a whole second: successful attempt.
            success += 1
            attempts += 1
        else:
            attempts += 1
    else:
        # Watch was not running; pressing Stop again scores nothing.
        return
    stopwatch_running = False
def button_reset():
    """Handler for "Reset": zero the elapsed time and both score counters."""
    global time, success, attempts
    time, success, attempts = 0, 0, 0
    return time, success, attempts
# define event handler for timer with 0.1 sec interval
# stopwatch timer event handler
def stopwatch_timer():
    """Timer tick handler: (re)start the timer and advance time by one tenth."""
    timer.start()
    global time, stopwatch_running
    time += 1
    stopwatch_running = True
    return time, stopwatch_running
# define draw handler
def draw_handler(canvas):
    """Draw the formatted stopwatch time and the success/attempts score."""
    # stopwatch display on canvas
    canvas.draw_text(format(time), [90, 140], 50, "White")
    # score display
    canvas.draw_text(str(success) + "/" + str(attempts), [220, 50], 25, "Red")
# create frame
frame = simplegui.create_frame("Stopwatch: The Game", 300, 200)
# register event handlers
# NOTE(review): the 100 ms timer's handler is button_start, which calls
# stopwatch_timer (and timer.start()) again on every tick -- confirm intended.
timer = simplegui.create_timer(100, button_start)
start = frame.add_button("Start", button_start, 100)
stop = frame.add_button("Stop", button_stop, 100)
reset = frame.add_button("Reset", button_reset, 100)
frame.set_draw_handler(draw_handler)
# start frame
frame.start()
| 26.036585 | 79 | 0.640281 |
import simplegui
time = 0       # elapsed time in tenths of a second
success = 0    # stops that landed exactly on a whole second
attempts = 0   # total stops made while the watch was running
counter = 0    # NOTE(review): never used anywhere in this script
stopwatch_running = False  # guards scoring: only count stops while running
def format(t):
    """Render *t* tenths of a second as the string "A:BC.D" (min:sec.tenth).

    Uses floor division so every component stays an integer on Python 3;
    the original's true division produced floats there, corrupting the
    output (identical behavior on Python 2 integer input).
    """
    whole_minutes = t // 600
    secs_in_minute = (t // 10) % 60
    return "{}:{}{}.{}".format(whole_minutes,
                               secs_in_minute // 10,
                               secs_in_minute % 10,
                               t % 10)
def button_start():
    """Handler for the "Start" button: kick off the stopwatch timer."""
    stopwatch_timer()
def button_stop():
    """Handler for "Stop": halt the timer and score the attempt.

    Counts a success only when stopped exactly on a whole second, and
    only if the stopwatch was actually running.
    """
    timer.stop()
    global success, attempts, stopwatch_running
    if stopwatch_running == True:
        if (time % 10) == 0:
            # Exactly on a whole second: successful attempt.
            success += 1
            attempts += 1
        else:
            attempts += 1
    else:
        # Watch was not running; pressing Stop again scores nothing.
        return
    stopwatch_running = False
def button_reset():
    """Handler for "Reset": clear the elapsed time and the score."""
    global time, success, attempts
    time = success = attempts = 0
    return time, success, attempts
def stopwatch_timer():
    """Timer tick handler: (re)start the timer and advance time by one tenth."""
    timer.start()
    global time, stopwatch_running
    time += 1
    stopwatch_running = True
    return time, stopwatch_running
def draw_handler(canvas):
    """Draw the formatted stopwatch time and the success/attempts score."""
    canvas.draw_text(format(time), [90, 140], 50, "White")
    canvas.draw_text(str(success) + "/" + str(attempts), [220, 50], 25, "Red")
# Build the GUI frame and wire up all event handlers.
frame = simplegui.create_frame("Stopwatch: The Game", 300, 200)
# NOTE(review): the 100 ms timer's handler is button_start, which calls
# stopwatch_timer (and timer.start()) again on every tick -- confirm intended.
timer = simplegui.create_timer(100, button_start)
start = frame.add_button("Start", button_start, 100)
stop = frame.add_button("Stop", button_stop, 100)
reset = frame.add_button("Reset", button_reset, 100)
frame.set_draw_handler(draw_handler)
frame.start()
| true | true |
1c458641abbee4ca565c0de49e6620d72012ccb6 | 20,836 | py | Python | cripts/relationships/handlers.py | lakiw/cripts | 43f62891a3724e1ec60629887d97c421fb302163 | [
"MIT"
] | 2 | 2017-04-06T12:26:11.000Z | 2018-11-05T19:17:15.000Z | cripts/relationships/handlers.py | lakiw/cripts | 43f62891a3724e1ec60629887d97c421fb302163 | [
"MIT"
] | 9 | 2016-09-28T10:19:10.000Z | 2017-02-24T17:58:43.000Z | cripts/relationships/handlers.py | lakiw/cripts | 43f62891a3724e1ec60629887d97c421fb302163 | [
"MIT"
] | null | null | null | import datetime
from dateutil.parser import parse
from cripts.core.class_mapper import class_from_id
def get_relationships(obj=None, type_=None, id_=None, analyst=None):
    """
    Get relationships for a top-level object.

    :param obj: The top-level object to get relationships for.
    :type obj: :class:`cripts.core.cripts_mongoengine.CriptsBaseAttributes`
    :param type_: The top-level object type to get relationships for.
    :type type_: str
    :param id_: The ObjectId of the top-level object.
    :type id_: str
    :param analyst: The user requesting the relationships.
    :type analyst: str
    :returns: dict
    """
    # Resolve the object from type/id when it was not handed in directly;
    # bail out with an empty dict if it cannot be resolved.
    if not obj:
        if not (type_ and id_):
            return {}
        obj = class_from_id(type_, id_)
        if not obj:
            return {}
    return obj.sort_relationships("%s" % analyst, meta=True)
def forge_relationship(type_=None, id_=None,
                       class_=None, right_type=None,
                       right_id=None, right_class=None,
                       rel_type=None, rel_date=None,
                       user=None, rel_reason="",
                       rel_confidence='unknown', get_rels=False, **kwargs):
    """
    Forge a relationship between two top-level objects.

    Each side may be passed directly (``class_``/``right_class``) or looked
    up via its type and ObjectId (``type_``/``id_``, ``right_type``/``right_id``).

    :param rel_type: The type of relationship.
    :param rel_date: The date this relationship applies (str or datetime).
    :param user: The user forging this relationship.
    :param rel_reason: The reason for the relationship.
    :param rel_confidence: The confidence of the relationship.
    :param get_rels: Return the relationships after forging.
    :returns: dict with keys "success" (boolean), "message" (str if fail,
              EmbeddedObject if success) and optionally "relationships" (dict).
    """
    # Normalize rel_date to a datetime or None.
    if rel_date == 'None':
        rel_date = None
    elif isinstance(rel_date, basestring) and rel_date != '':
        rel_date = parse(rel_date, fuzzy=True)
    elif not isinstance(rel_date, datetime.datetime):
        rel_date = None
    # Resolve the left-hand TLO if it was not passed in directly.
    if not class_:
        if type_ and id_:
            class_ = class_from_id(type_, id_)
        if not class_:
            return {'success': False, 'message': "Failed to get left TLO"}
    # Resolve the right-hand TLO if it was not passed in directly.
    # (Stray debug print statements removed from this lookup path.)
    if not right_class:
        if right_type and right_id:
            right_class = class_from_id(right_type, right_id)
        if not right_class:
            return {'success': False, 'message': "Failed to get right TLO"}
    try:
        # forge relationship
        results = class_.add_relationship(right_class, rel_type, rel_date,
                                          user, rel_confidence, rel_reason)
    except Exception as e:
        return {'success': False, 'message': e}
    if results['success']:
        class_.update(add_to_set__relationships=results['message'])
        if get_rels:
            results['relationships'] = class_.sort_relationships("%s" % user,
                                                                 meta=True)
    return results
def delete_all_relationships(left_class=None, left_type=None,
                             left_id=None, analyst=None):
    """
    Delete all relationships for this top-level object.

    :param left_class: The top-level object to delete relationships for.
    :type left_class: :class:`cripts.core.cripts_mongoengine.CriptsBaseAttributes`
    :param left_type: The type of the top-level object.
    :type left_type: str
    :param left_id: The ObjectId of the top-level object.
    :type left_id: str
    :param analyst: The user deleting these relationships.
    :type analyst: str
    :returns: dict with keys "success" (boolean) and "message" (str)
    """
    # Resolve the object from type/id when it was not handed in directly.
    if not left_class:
        if not (left_type and left_id):
            return {'success': False,
                    'message': "Need a valid left type and id"}
        left_class = class_from_id(left_type, left_id)
        if not left_class:
            return {'success': False,
                    'message': "Unable to get object."}
    return left_class.delete_all_relationships()
def delete_relationship(left_class=None, right_class=None,
                        left_type=None, left_id=None,
                        right_type=None, right_id=None,
                        rel_type=None, rel_date=None,
                        analyst=None, get_rels=True):
    """
    Delete a relationship between two top-level objects.

    Each side may be passed directly (``left_class``/``right_class``) or
    resolved from its type and ObjectId.

    :param rel_type: The type of relationship.
    :param rel_date: The date this relationship applies (str or datetime).
    :param analyst: The user deleting this relationship.
    :param get_rels: Include the refreshed relationships in the result.
    :returns: dict with keys "success" (boolean) and "message" (str if
              failed, dict if successful)
    """
    # Normalize rel_date to a datetime or None.
    if rel_date is None or rel_date == 'None':
        rel_date = None
    elif isinstance(rel_date, basestring) and rel_date != '':
        rel_date = parse(rel_date, fuzzy=True)
    elif not isinstance(rel_date, datetime.datetime):
        rel_date = None
    # Resolve the left-hand object if it was not passed in directly.
    if not left_class:
        if left_type and left_id:
            left_class = class_from_id(left_type, left_id)
            if not left_class:
                return {'success': False,
                        'message': "Unable to get object."}
        else:
            return {'success': False,
                    'message': "Need a valid left type and id"}
    # delete relationship
    if right_class:
        results = left_class.delete_relationship(rel_item=right_class,
                                                 rel_type=rel_type,
                                                 rel_date=rel_date,
                                                 analyst=analyst)
    else:
        if right_type and right_id:
            results = left_class.delete_relationship(type_=right_type,
                                                     rel_id=right_id,
                                                     rel_type=rel_type,
                                                     rel_date=rel_date,
                                                     analyst=analyst)
        else:
            return {'success': False,
                    'message': "Need a valid right type and id"}
    # Persist and optionally return the refreshed relationship listing.
    if results['success']:
        left_class.save(username=analyst)
        if get_rels:
            results['relationships'] = left_class.sort_relationships("%s" % analyst, meta=True)
    return results
def update_relationship_types(left_class=None, right_class=None,
                              left_type=None, left_id=None,
                              right_type=None, right_id=None,
                              rel_type=None, rel_date=None,
                              new_type=None,analyst=None):
    """
    Update the relationship type between two top-level objects.

    Each side may be passed directly (``left_class``/``right_class``) or
    resolved from its type and ObjectId.

    :param rel_type: The current type of relationship.
    :param rel_date: The date this relationship applies (str or datetime).
    :param new_type: The new type of relationship.
    :param analyst: The user updating this relationship.
    :returns: dict with keys "success" (boolean) and "message" (str)
    """
    # Normalize rel_date to a datetime or None.
    if rel_date is None or rel_date == 'None':
        rel_date = None
    elif isinstance(rel_date, basestring) and rel_date != '':
        rel_date = parse(rel_date, fuzzy=True)
    elif not isinstance(rel_date, datetime.datetime):
        rel_date = None
    # Resolve the left-hand object if it was not passed in directly.
    if not left_class:
        if left_type and left_id:
            left_class = class_from_id(left_type, left_id)
            if not left_class:
                return {'success': False,
                        'message': "Unable to get object."}
        else:
            return {'success': False,
                    'message': "Need a valid left type and id"}
    # update relationship
    if right_class:
        results = left_class.edit_relationship_type(rel_item=right_class,
                                                    rel_type=rel_type,
                                                    rel_date=rel_date,
                                                    new_type=new_type,
                                                    analyst=analyst)
        left_class.save(username=analyst)
        right_class.save(username=analyst)
    else:
        if right_type and right_id:
            results = left_class.edit_relationship_type(type_=right_type,
                                                        rel_id=right_id,
                                                        rel_type=rel_type,
                                                        rel_date=rel_date,
                                                        new_type=new_type,
                                                        analyst=analyst)
            left_class.save(username=analyst)
        else:
            return {'success': False,
                    'message': "Need a valid right type and id"}
    return results
def update_relationship_confidences(left_class=None, right_class=None,
                                    left_type=None, left_id=None,
                                    right_type=None, right_id=None,
                                    rel_type=None, rel_date=None,
                                    new_type=None,analyst=None,
                                    new_confidence='unknown'):
    """
    Update the relationship confidence between two top-level objects.

    Each side may be passed directly (``left_class``/``right_class``) or
    resolved from its type and ObjectId.

    :param rel_type: The type of relationship.
    :param rel_date: The date this relationship applies (str or datetime).
    :param analyst: The user updating this relationship.
    :param new_confidence: The new confidence level.
    :returns: dict with keys "success" (boolean) and "message" (str)
    """
    # Normalize rel_date to a datetime or None.
    if rel_date is None or rel_date == 'None':
        rel_date = None
    elif isinstance(rel_date, basestring) and rel_date != '':
        rel_date = parse(rel_date, fuzzy=True)
    elif not isinstance(rel_date, datetime.datetime):
        rel_date = None
    # Resolve the left-hand object if it was not passed in directly.
    if not left_class:
        if left_type and left_id:
            left_class = class_from_id(left_type, left_id)
            # Consistency fix: siblings (e.g. update_relationship_types)
            # report a failed lookup instead of crashing on None below.
            if not left_class:
                return {'success': False,
                        'message': "Unable to get object."}
        else:
            return {'success': False,
                    'message': "Need a valid left type and id"}
    # update relationship
    if right_class:
        results = left_class.edit_relationship_confidence(rel_item=right_class,
                                                          rel_type=rel_type,
                                                          rel_date=rel_date,
                                                          new_confidence=new_confidence,
                                                          analyst=analyst)
        left_class.save(username=analyst)
        right_class.save(username=analyst)
    else:
        if right_type and right_id:
            results = left_class.edit_relationship_confidence(type_=right_type,
                                                              rel_id=right_id,
                                                              rel_type=rel_type,
                                                              rel_date=rel_date,
                                                              new_confidence=new_confidence,
                                                              analyst=analyst)
            left_class.save(username=analyst)
        else:
            return {'success': False,
                    'message': "Need a valid right type and id"}
    return results
def update_relationship_reasons(left_class=None, right_class=None,
                                left_type=None, left_id=None,
                                right_type=None, right_id=None,
                                rel_type=None, rel_date=None,
                                new_type=None,analyst=None, new_reason="N/A"):
    """
    Update the relationship reason between two top-level objects.

    Each side may be passed directly (``left_class``/``right_class``) or
    resolved from its type and ObjectId.

    :param rel_type: The type of relationship.
    :param rel_date: The date this relationship applies (str or datetime).
    :param analyst: The user updating this relationship.
    :param new_reason: The new reason for the relationship.
    :returns: dict with keys "success" (boolean) and "message" (str)
    """
    # Normalize rel_date to a datetime or None.
    if rel_date is None or rel_date == 'None':
        rel_date = None
    elif isinstance(rel_date, basestring) and rel_date != '':
        rel_date = parse(rel_date, fuzzy=True)
    elif not isinstance(rel_date, datetime.datetime):
        rel_date = None
    # Resolve the left-hand object if it was not passed in directly.
    if not left_class:
        if left_type and left_id:
            left_class = class_from_id(left_type, left_id)
            # Consistency fix: siblings (e.g. update_relationship_types)
            # report a failed lookup instead of crashing on None below.
            if not left_class:
                return {'success': False,
                        'message': "Unable to get object."}
        else:
            return {'success': False,
                    'message': "Need a valid left type and id"}
    # update relationship
    if right_class:
        results = left_class.edit_relationship_reason(rel_item=right_class,
                                                      rel_type=rel_type,
                                                      rel_date=rel_date,
                                                      new_reason=new_reason,
                                                      analyst=analyst)
        left_class.save(username=analyst)
        right_class.save(username=analyst)
    else:
        if right_type and right_id:
            results = left_class.edit_relationship_reason(type_=right_type,
                                                          rel_id=right_id,
                                                          rel_type=rel_type,
                                                          rel_date=rel_date,
                                                          new_reason=new_reason,
                                                          analyst=analyst)
            left_class.save(username=analyst)
        else:
            return {'success': False,
                    'message': "Need a valid right type and id"}
    return results
def update_relationship_dates(left_class=None, right_class=None,
                              left_type=None, left_id=None,
                              right_type=None, right_id=None,
                              rel_type=None, rel_date=None,
                              new_date=None,analyst=None):
    """
    Update the relationship date between two top-level objects.

    Each side may be passed directly (``left_class``/``right_class``) or
    resolved from its type and ObjectId.

    :param rel_type: The type of relationship.
    :param rel_date: The current date of the relationship (str or datetime).
    :param new_date: The new date of the relationship (str or datetime).
    :param analyst: The user updating this relationship.
    :returns: dict with keys "success" (boolean) and "message" (str)
    """
    # Normalize rel_date to a datetime or None.
    if rel_date is None or rel_date == 'None':
        rel_date = None
    elif isinstance(rel_date, basestring) and rel_date != '':
        rel_date = parse(rel_date, fuzzy=True)
    elif not isinstance(rel_date, datetime.datetime):
        rel_date = None
    # Normalize new_date the same way.
    if new_date is None or new_date == 'None':
        new_date = None
    elif isinstance(new_date, basestring) and new_date != '':
        new_date = parse(new_date, fuzzy=True)
    elif not isinstance(new_date, datetime.datetime):
        new_date = None
    # Resolve the left-hand object if it was not passed in directly.
    if not left_class:
        if left_type and left_id:
            left_class = class_from_id(left_type, left_id)
            if not left_class:
                return {'success': False,
                        'message': "Unable to get object."}
        else:
            return {'success': False,
                    'message': "Need a valid left type and id"}
    # update relationship
    if right_class:
        results = left_class.edit_relationship_date(rel_item=right_class,
                                                    rel_type=rel_type,
                                                    rel_date=rel_date,
                                                    new_date=new_date,
                                                    analyst=analyst)
        left_class.save(username=analyst)
        right_class.save(username=analyst)
    else:
        if right_type and right_id:
            results = left_class.edit_relationship_date(type_=right_type,
                                                        rel_id=right_id,
                                                        rel_type=rel_type,
                                                        rel_date=rel_date,
                                                        new_date=new_date,
                                                        analyst=analyst)
            left_class.save(username=analyst)
        else:
            return {'success': False,
                    'message': "Need a valid right type and id"}
    return results
| 42.522449 | 95 | 0.578038 | import datetime
from dateutil.parser import parse
from cripts.core.class_mapper import class_from_id
def get_relationships(obj=None, type_=None, id_=None, analyst=None):
    """Return sorted relationship metadata for a top-level object.

    The object is either passed directly (*obj*) or fetched via
    *type_*/*id_*; an empty dict is returned when it cannot be resolved.
    """
    if obj:
        return obj.sort_relationships("%s" % analyst, meta=True)
    elif type_ and id_:
        obj = class_from_id(type_, id_)
        if not obj:
            return {}
        return obj.sort_relationships("%s" % analyst, meta=True)
    else:
        return {}
def forge_relationship(type_=None, id_=None,
                       class_=None, right_type=None,
                       right_id=None, right_class=None,
                       rel_type=None, rel_date=None,
                       user=None, rel_reason="",
                       rel_confidence='unknown', get_rels=False, **kwargs):
    """Create a relationship of *rel_type* between two top-level objects.

    Each side is given either directly (``class_``/``right_class``) or via
    type and ObjectId.  Returns a dict with "success" and "message" keys,
    plus "relationships" when *get_rels* is True and the forge succeeded.
    """
    # Normalize rel_date to a datetime or None.
    if rel_date == 'None':
        rel_date = None
    elif isinstance(rel_date, basestring) and rel_date != '':
        rel_date = parse(rel_date, fuzzy=True)
    elif not isinstance(rel_date, datetime.datetime):
        rel_date = None
    if not class_:
        if type_ and id_:
            class_ = class_from_id(type_, id_)
        if not class_:
            return {'success': False, 'message': "Failed to get left TLO"}
    if not right_class:
        if right_type and right_id:
            # NOTE(review): debug prints left in library code.
            print ("right type:" + str(right_type))
            print ("right id:" + str(right_id))
            right_class = class_from_id(right_type, right_id)
        if not right_class:
            return {'success': False, 'message': "Failed to get right TLO"}
    try:
        results = class_.add_relationship(right_class, rel_type, rel_date,
                                          user, rel_confidence, rel_reason)
    except Exception as e:
        return {'success': False, 'message': e}
    if results['success']:
        class_.update(add_to_set__relationships=results['message'])
        if get_rels:
            results['relationships'] = class_.sort_relationships("%s" % user,
                                                                 meta=True)
    return results
def delete_all_relationships(left_class=None, left_type=None,
                             left_id=None, analyst=None):
    """Delete every relationship attached to one top-level object.

    The object is passed directly (*left_class*) or resolved from
    *left_type*/*left_id*.  Returns a dict with "success" and "message".
    """
    if not left_class:
        if left_type and left_id:
            left_class = class_from_id(left_type, left_id)
            if not left_class:
                return {'success': False,
                        'message': "Unable to get object."}
        else:
            return {'success': False,
                    'message': "Need a valid left type and id"}
    return left_class.delete_all_relationships()
def delete_relationship(left_class=None, right_class=None,
                        left_type=None, left_id=None,
                        right_type=None, right_id=None,
                        rel_type=None, rel_date=None,
                        analyst=None, get_rels=True):
    """Delete a relationship between two top-level objects.

    Each side may be passed directly or resolved from its type and
    ObjectId.  Returns a dict with "success" and "message"; on success
    and with *get_rels* True, the refreshed relationships are included.
    """
    # Normalize rel_date to a datetime or None.
    if rel_date is None or rel_date == 'None':
        rel_date = None
    elif isinstance(rel_date, basestring) and rel_date != '':
        rel_date = parse(rel_date, fuzzy=True)
    elif not isinstance(rel_date, datetime.datetime):
        rel_date = None
    if not left_class:
        if left_type and left_id:
            left_class = class_from_id(left_type, left_id)
            if not left_class:
                return {'success': False,
                        'message': "Unable to get object."}
        else:
            return {'success': False,
                    'message': "Need a valid left type and id"}
    if right_class:
        results = left_class.delete_relationship(rel_item=right_class,
                                                 rel_type=rel_type,
                                                 rel_date=rel_date,
                                                 analyst=analyst)
    else:
        if right_type and right_id:
            results = left_class.delete_relationship(type_=right_type,
                                                     rel_id=right_id,
                                                     rel_type=rel_type,
                                                     rel_date=rel_date,
                                                     analyst=analyst)
        else:
            return {'success': False,
                    'message': "Need a valid right type and id"}
    if results['success']:
        left_class.save(username=analyst)
        if get_rels:
            results['relationships'] = left_class.sort_relationships("%s" % analyst, meta=True)
    return results
def update_relationship_types(left_class=None, right_class=None,
                              left_type=None, left_id=None,
                              right_type=None, right_id=None,
                              rel_type=None, rel_date=None,
                              new_type=None,analyst=None):
    """Change the type of an existing relationship to *new_type*.

    Each side may be passed directly or resolved from its type and
    ObjectId.  Returns a dict with "success" and "message".
    """
    # Normalize rel_date to a datetime or None.
    if rel_date is None or rel_date == 'None':
        rel_date = None
    elif isinstance(rel_date, basestring) and rel_date != '':
        rel_date = parse(rel_date, fuzzy=True)
    elif not isinstance(rel_date, datetime.datetime):
        rel_date = None
    if not left_class:
        if left_type and left_id:
            left_class = class_from_id(left_type, left_id)
            if not left_class:
                return {'success': False,
                        'message': "Unable to get object."}
        else:
            return {'success': False,
                    'message': "Need a valid left type and id"}
    if right_class:
        results = left_class.edit_relationship_type(rel_item=right_class,
                                                    rel_type=rel_type,
                                                    rel_date=rel_date,
                                                    new_type=new_type,
                                                    analyst=analyst)
        left_class.save(username=analyst)
        right_class.save(username=analyst)
    else:
        if right_type and right_id:
            results = left_class.edit_relationship_type(type_=right_type,
                                                        rel_id=right_id,
                                                        rel_type=rel_type,
                                                        rel_date=rel_date,
                                                        new_type=new_type,
                                                        analyst=analyst)
            left_class.save(username=analyst)
        else:
            return {'success': False,
                    'message': "Need a valid right type and id"}
    return results
def update_relationship_confidences(left_class=None, right_class=None,
                                    left_type=None, left_id=None,
                                    right_type=None, right_id=None,
                                    rel_type=None, rel_date=None,
                                    new_type=None,analyst=None,
                                    new_confidence='unknown'):
    """Change the confidence of an existing relationship.

    Each side may be passed directly or resolved from its type and
    ObjectId.  Returns a dict with "success" and "message".
    """
    # Normalize rel_date to a datetime or None.
    if rel_date is None or rel_date == 'None':
        rel_date = None
    elif isinstance(rel_date, basestring) and rel_date != '':
        rel_date = parse(rel_date, fuzzy=True)
    elif not isinstance(rel_date, datetime.datetime):
        rel_date = None
    if not left_class:
        if left_type and left_id:
            # NOTE(review): unlike sibling functions, a failed lookup here
            # is not checked, so left_class may be None below.
            left_class = class_from_id(left_type, left_id)
        else:
            return {'success': False,
                    'message': "Need a valid left type and id"}
    if right_class:
        results = left_class.edit_relationship_confidence(rel_item=right_class,
                                                          rel_type=rel_type,
                                                          rel_date=rel_date,
                                                          new_confidence=new_confidence,
                                                          analyst=analyst)
        left_class.save(username=analyst)
        right_class.save(username=analyst)
    else:
        if right_type and right_id:
            results = left_class.edit_relationship_confidence(type_=right_type,
                                                              rel_id=right_id,
                                                              rel_type=rel_type,
                                                              rel_date=rel_date,
                                                              new_confidence=new_confidence,
                                                              analyst=analyst)
            left_class.save(username=analyst)
        else:
            return {'success': False,
                    'message': "Need a valid right type and id"}
    return results
def update_relationship_reasons(left_class=None, right_class=None,
                                left_type=None, left_id=None,
                                right_type=None, right_id=None,
                                rel_type=None, rel_date=None,
                                new_type=None,analyst=None, new_reason="N/A"):
    """Change the reason recorded on an existing relationship.

    Each side may be passed directly or resolved from its type and
    ObjectId.  Returns a dict with "success" and "message".
    """
    # Normalize rel_date to a datetime or None.
    if rel_date is None or rel_date == 'None':
        rel_date = None
    elif isinstance(rel_date, basestring) and rel_date != '':
        rel_date = parse(rel_date, fuzzy=True)
    elif not isinstance(rel_date, datetime.datetime):
        rel_date = None
    if not left_class:
        if left_type and left_id:
            # NOTE(review): unlike sibling functions, a failed lookup here
            # is not checked, so left_class may be None below.
            left_class = class_from_id(left_type, left_id)
        else:
            return {'success': False,
                    'message': "Need a valid left type and id"}
    if right_class:
        results = left_class.edit_relationship_reason(rel_item=right_class,
                                                      rel_type=rel_type,
                                                      rel_date=rel_date,
                                                      new_reason=new_reason,
                                                      analyst=analyst)
        left_class.save(username=analyst)
        right_class.save(username=analyst)
    else:
        if right_type and right_id:
            results = left_class.edit_relationship_reason(type_=right_type,
                                                          rel_id=right_id,
                                                          rel_type=rel_type,
                                                          rel_date=rel_date,
                                                          new_reason=new_reason,
                                                          analyst=analyst)
            left_class.save(username=analyst)
        else:
            return {'success': False,
                    'message': "Need a valid right type and id"}
    return results
def update_relationship_dates(left_class=None, right_class=None,
                              left_type=None, left_id=None,
                              right_type=None, right_id=None,
                              rel_type=None, rel_date=None,
                              new_date=None,analyst=None):
    """Change the date recorded on an existing relationship to *new_date*.

    Each side may be passed directly or resolved from its type and
    ObjectId.  Returns a dict with "success" and "message".
    """
    # Normalize rel_date to a datetime or None.
    if rel_date is None or rel_date == 'None':
        rel_date = None
    elif isinstance(rel_date, basestring) and rel_date != '':
        rel_date = parse(rel_date, fuzzy=True)
    elif not isinstance(rel_date, datetime.datetime):
        rel_date = None
    # Normalize new_date the same way.
    if new_date is None or new_date == 'None':
        new_date = None
    elif isinstance(new_date, basestring) and new_date != '':
        new_date = parse(new_date, fuzzy=True)
    elif not isinstance(new_date, datetime.datetime):
        new_date = None
    if not left_class:
        if left_type and left_id:
            left_class = class_from_id(left_type, left_id)
            if not left_class:
                return {'success': False,
                        'message': "Unable to get object."}
        else:
            return {'success': False,
                    'message': "Need a valid left type and id"}
    if right_class:
        results = left_class.edit_relationship_date(rel_item=right_class,
                                                    rel_type=rel_type,
                                                    rel_date=rel_date,
                                                    new_date=new_date,
                                                    analyst=analyst)
        left_class.save(username=analyst)
        right_class.save(username=analyst)
    else:
        if right_type and right_id:
            results = left_class.edit_relationship_date(type_=right_type,
                                                        rel_id=right_id,
                                                        rel_type=rel_type,
                                                        rel_date=rel_date,
                                                        new_date=new_date,
                                                        analyst=analyst)
            left_class.save(username=analyst)
        else:
            return {'success': False,
                    'message': "Need a valid right type and id"}
    return results
| true | true |
1c45866e5a644fc50a8ed3659b45f9a0dee3b769 | 1,731 | py | Python | pytorch/skin_lesion_classification/plots.py | deephealthproject/use-case-pipelines | ea9c8aedfbc9084e1a5350f6f73def2578258c77 | [
"MIT"
] | 1 | 2020-05-20T16:57:11.000Z | 2020-05-20T16:57:11.000Z | pytorch/skin_lesion_classification/plots.py | deephealthproject/use-case-pipelines | ea9c8aedfbc9084e1a5350f6f73def2578258c77 | [
"MIT"
] | 5 | 2021-03-26T16:01:51.000Z | 2021-09-20T13:53:22.000Z | pytorch/skin_lesion_classification/plots.py | deephealthproject/use-case-pipelines | ea9c8aedfbc9084e1a5350f6f73def2578258c77 | [
"MIT"
] | 5 | 2020-05-18T09:44:03.000Z | 2020-11-29T12:58:28.000Z | import itertools
import matplotlib.pyplot as plt
import numpy as np
def plot_sequence(filename, sequences, legend=None):
    """Plots one or more sequences of values into a file

    :param filename: output filename
    :param sequences: (M x N) array-like structure containing M sequences of N values
    :param legend: (M) array-like legend
    :return:
    """
    fig = plt.figure()
    for sequence in sequences:
        # x axis is simply the sample index within the sequence
        plt.plot(range(len(sequence)), sequence)
    if legend:
        plt.legend(legend)
    plt.savefig(filename)
    # Close the figure to free matplotlib resources between calls.
    plt.close(fig)
def plot_confusion_matrix(cm, classes,
                          normalize=False,
                          title='Confusion matrix',
                          cmap=plt.cm.Blues):
    """
    This function prints and plots the confusion matrix.
    Normalization can be applied by setting `normalize=True`.

    :param cm: (C x C) confusion matrix (numpy array)
    :param classes: sequence of C class labels for the axes
    """
    if normalize:
        # Normalize each row (true class) to sum to 1.
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
        print("Normalized confusion matrix")
    else:
        print('Confusion matrix, without normalization')
    print(cm)
    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.colorbar()
    tick_marks = np.arange(len(classes))
    plt.xticks(tick_marks, classes, rotation=45)
    plt.yticks(tick_marks, classes)
    plt.ylim(-0.5, len(classes) - 0.5)
    # Two decimals for normalized values, plain integers otherwise.
    fmt = '.2f' if normalize else 'd'
    thresh = cm.max() / 2.
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        # White text on dark cells, black on light cells, for contrast.
        plt.text(j, i, format(cm[i, j], fmt),
                 horizontalalignment="center",
                 color="white" if cm[i, j] > thresh else "black")
    plt.tight_layout()
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
| 29.844828 | 85 | 0.617562 | import itertools
import matplotlib.pyplot as plt
import numpy as np
def plot_sequence(filename, sequences, legend=None):
    """Plot each sequence in *sequences* against its sample index and save
    the figure to *filename*; *legend* optionally labels the M sequences."""
    fig = plt.figure()
    for sequence in sequences:
        plt.plot(range(len(sequence)), sequence)
    if legend:
        plt.legend(legend)
    plt.savefig(filename)
    # Close the figure to free matplotlib resources between calls.
    plt.close(fig)
def plot_confusion_matrix(cm, classes,
                          normalize=False,
                          title='Confusion matrix',
                          cmap=plt.cm.Blues):
    """Print and draw the (C x C) confusion matrix *cm* with *classes* as
    axis labels; rows are normalized to sum to 1 when *normalize* is True."""
    if normalize:
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
        print("Normalized confusion matrix")
    else:
        print('Confusion matrix, without normalization')
    print(cm)
    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.colorbar()
    tick_marks = np.arange(len(classes))
    plt.xticks(tick_marks, classes, rotation=45)
    plt.yticks(tick_marks, classes)
    plt.ylim(-0.5, len(classes) - 0.5)
    # Two decimals for normalized values, plain integers otherwise.
    fmt = '.2f' if normalize else 'd'
    thresh = cm.max() / 2.
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        # White text on dark cells, black on light cells, for contrast.
        plt.text(j, i, format(cm[i, j], fmt),
                 horizontalalignment="center",
                 color="white" if cm[i, j] > thresh else "black")
    plt.tight_layout()
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
| true | true |
1c4586bd66611dfec7cf8c2a805839086f354af2 | 232 | py | Python | lib/JumpScale/baselib/dnsman/dnsFactory.py | rudecs/jumpscale_core7 | 30c03f26f1cdad3edbb9d79d50fbada8acc974f5 | [
"Apache-2.0"
] | null | null | null | lib/JumpScale/baselib/dnsman/dnsFactory.py | rudecs/jumpscale_core7 | 30c03f26f1cdad3edbb9d79d50fbada8acc974f5 | [
"Apache-2.0"
] | 4 | 2016-08-25T12:08:39.000Z | 2018-04-12T12:36:01.000Z | lib/JumpScale/baselib/dnsman/dnsFactory.py | rudecs/jumpscale_core7 | 30c03f26f1cdad3edbb9d79d50fbada8acc974f5 | [
"Apache-2.0"
] | 3 | 2016-03-08T07:49:34.000Z | 2018-10-19T13:56:43.000Z | from bind import BindDNS
class DNSFactory(object):
    """Entry point for DNS helpers; the Bind backend is built lazily."""

    def __init__(self):
        # Cached BindDNS helper; stays None until first access via ``bind``.
        self.bindObj = None

    @property
    def bind(self):
        """Return the shared BindDNS helper, creating it on first use."""
        cached = self.bindObj
        if not cached:
            cached = BindDNS()
            self.bindObj = cached
        return cached
class DNSFactory(object):
def __init__(self):
self.bindObj = None
@property
def bind(self):
if not self.bindObj:
self.bindObj = BindDNS()
return self.bindObj | true | true |
1c45875f0c9405efffecbefbf3c272cc94cee782 | 7,536 | py | Python | main/cloudfoundry_client/v2/entities.py | subhash12/cf-python-client | c0ecbb8ec85040fc2f74b6c52e1f9a6c6c16c4b0 | [
"Apache-2.0"
] | null | null | null | main/cloudfoundry_client/v2/entities.py | subhash12/cf-python-client | c0ecbb8ec85040fc2f74b6c52e1f9a6c6c16c4b0 | [
"Apache-2.0"
] | null | null | null | main/cloudfoundry_client/v2/entities.py | subhash12/cf-python-client | c0ecbb8ec85040fc2f74b6c52e1f9a6c6c16c4b0 | [
"Apache-2.0"
] | null | null | null | from functools import partial, reduce
from typing import Callable, List, Tuple, Any, Optional, Generator, TYPE_CHECKING
from urllib.parse import quote
from requests import Response
from cloudfoundry_client.errors import InvalidEntity
from cloudfoundry_client.json_object import JsonObject
from cloudfoundry_client.request_object import Request
if TYPE_CHECKING:
from cloudfoundry_client.client import CloudFoundryClient
class Entity(JsonObject):
    """A v2 API resource that exposes its ``*_url`` links as navigation methods.

    For every ``<name>_url`` key found under ``entity``, a method called
    ``<name>`` is attached to the instance: plural names yield a paginating
    list call, singular names a single-entity fetch.
    """

    def __init__(self, target_endpoint: str, client: "CloudFoundryClient", *args, **kwargs):
        super(Entity, self).__init__(*args, **kwargs)
        self.target_endpoint = target_endpoint
        self.client = client
        try:
            if not (isinstance(self.get("entity"), dict)):
                raise InvalidEntity(**self)
            for attribute, value in list(self["entity"].items()):
                domain_name, suffix = attribute.rpartition("_")[::2]
                if suffix != "url":
                    continue
                plural = domain_name.endswith("s")
                manager_name = domain_name if plural else "%ss" % domain_name
                try:
                    manager = getattr(client.v2, manager_name)
                except AttributeError:
                    # No dedicated manager registered: fall back to a generic one.
                    manager = EntityManager(target_endpoint, client, "")
                # Plural links paginate a collection; singular links fetch one.
                navigate = partial(manager._list if plural else manager._get, value)
                navigate.__name__ = domain_name
                setattr(self, domain_name, navigate)
        except KeyError:
            raise InvalidEntity(**self)
EntityBuilder = Callable[[List[Tuple[str, Any]]], Entity]
PaginateEntities = Generator[Entity, None, None]
class EntityManager(object):
    """Generic accessor for one v2 API resource collection rooted at ``entity_uri``.

    Provides list/get/create/update/delete helpers with transparent pagination;
    ``entity_builder`` converts raw JSON pairs into :class:`Entity` objects.
    """

    # Query parameters passed through verbatim (single-valued).
    list_query_parameters = ["page", "results-per-page", "order-direction"]
    # Query parameters that may be repeated in the query string.
    list_multi_parameters = ["order-by"]
    # Parameters encoded as ``q=<name><op><value>`` filter expressions.
    timestamp_parameters = ["timestamp"]

    def __init__(
        self, target_endpoint: str, client: "CloudFoundryClient", entity_uri: str, entity_builder: Optional[EntityBuilder] = None
    ):
        """Bind the manager to an API endpoint, HTTP client and resource path.

        ``entity_builder`` defaults to constructing a plain :class:`Entity`.
        """
        self.target_endpoint = target_endpoint
        self.entity_uri = entity_uri
        self.client = client
        self.entity_builder = (
            entity_builder if entity_builder is not None else lambda pairs: Entity(target_endpoint, client, pairs)
        )

    def _list(self, requested_path: str, entity_builder: Optional[EntityBuilder] = None, **kwargs) -> PaginateEntities:
        """Yield entities from ``requested_path``, following ``next_url`` pages."""
        url_requested = self._get_url_filtered("%s%s" % (self.target_endpoint, requested_path), **kwargs)
        response = self.client.get(url_requested)
        entity_builder = self._get_entity_builder(entity_builder)
        while True:
            response_json = self._read_response(response, JsonObject)
            for resource in response_json["resources"]:
                yield entity_builder(list(resource.items()))
            if response_json["next_url"] is None:
                break
            else:
                url_requested = "%s%s" % (self.target_endpoint, response_json["next_url"])
                response = self.client.get(url_requested)

    def _create(self, data: dict, **kwargs) -> Entity:
        """POST ``data`` to the collection root, returning the created entity."""
        url = "%s%s" % (self.target_endpoint, self.entity_uri)
        return self._post(url, data, **kwargs)

    def _update(self, resource_id: str, data: dict, **kwargs):
        """PUT ``data`` onto the entity identified by ``resource_id``."""
        url = "%s%s/%s" % (self.target_endpoint, self.entity_uri, resource_id)
        return self._put(url, data, **kwargs)

    def _remove(self, resource_id: str, **kwargs):
        """DELETE the entity identified by ``resource_id``."""
        url = "%s%s/%s" % (self.target_endpoint, self.entity_uri, resource_id)
        self._delete(url, **kwargs)

    def _get(self, requested_path: str, entity_builder: Optional[EntityBuilder] = None) -> Entity:
        """GET a single entity from ``requested_path``."""
        url = "%s%s" % (self.target_endpoint, requested_path)
        response = self.client.get(url)
        return self._read_response(response, entity_builder)

    def _post(self, url: str, data: Optional[dict] = None, **kwargs):
        """POST ``data`` as JSON and parse the response into an entity."""
        response = self.client.post(url, json=data, **kwargs)
        return self._read_response(response)

    def _put(self, url: str, data: Optional[dict] = None, **kwargs):
        """PUT ``data`` as JSON and parse the response into an entity."""
        response = self.client.put(url, json=data, **kwargs)
        return self._read_response(response)

    def _delete(self, url: str, **kwargs):
        """DELETE ``url``; the response body is discarded."""
        self.client.delete(url, **kwargs)

    def __iter__(self) -> PaginateEntities:
        """Iterating the manager lists the whole (unfiltered) collection."""
        return self.list()

    def __getitem__(self, entity_guid) -> Entity:
        """``manager[guid]`` is shorthand for :meth:`get`."""
        return self.get(entity_guid)

    def list(self, **kwargs) -> PaginateEntities:
        """List the collection, encoding ``kwargs`` as query/filter parameters."""
        return self._list(self.entity_uri, **kwargs)

    def get_first(self, **kwargs) -> Optional[Entity]:
        """Return the first matching entity, or None when nothing matches."""
        # Ask the API for a single-result page to avoid fetching extra rows.
        kwargs.setdefault("results-per-page", 1)
        for entity in self._list(self.entity_uri, **kwargs):
            return entity
        return None

    def get(self, entity_id: str, *extra_paths) -> Entity:
        """Fetch one entity; ``extra_paths`` address nested sub-resources."""
        if len(extra_paths) == 0:
            requested_path = "%s/%s" % (self.entity_uri, entity_id)
        else:
            requested_path = "%s/%s/%s" % (self.entity_uri, entity_id, "/".join(extra_paths))
        return self._get(requested_path)

    def _read_response(self, response: Response, other_entity_builder: Optional[EntityBuilder] = None):
        """Parse ``response`` JSON through the (possibly overridden) builder."""
        entity_builder = self._get_entity_builder(other_entity_builder)
        result = response.json(object_pairs_hook=JsonObject)
        return entity_builder(list(result.items()))

    @staticmethod
    def _request(**mandatory_parameters) -> Request:
        """Build a request body object from keyword parameters."""
        return Request(**mandatory_parameters)

    def _get_entity_builder(self, entity_builder: Optional[EntityBuilder]) -> EntityBuilder:
        """Return ``entity_builder`` if given, else this manager's default."""
        if entity_builder is None:
            return self.entity_builder
        else:
            return entity_builder

    def _get_url_filtered(self, url: str, **kwargs) -> str:
        """Encode ``kwargs`` as v2 query/filter parameters appended to ``url``."""

        def _append_encoded_parameter(parameters: List[str], args: Tuple[str, Any]) -> List[str]:
            # Dispatch on parameter class; anything unrecognized becomes a
            # ``q=name:value`` (or ``q=name IN v1,v2``) filter expression.
            parameter_name, parameter_value = args[0], args[1]
            if parameter_name in self.list_query_parameters:
                parameters.append("%s=%s" % (parameter_name, str(parameter_value)))
            elif parameter_name in self.list_multi_parameters:
                value_list = parameter_value
                if not isinstance(value_list, (list, tuple)):
                    value_list = [value_list]
                for value in value_list:
                    parameters.append("%s=%s" % (parameter_name, str(value)))
            elif parameter_name in self.timestamp_parameters:
                if isinstance(args[1], dict):
                    # Mapping form {op: value} -> q=timestamp<op><value>,
                    # e.g. {">": "2020-01-01"} -> q=timestamp>2020-01-01
                    operator_list = args[1].keys()
                    for operator in operator_list:
                        parameters.append("q=%s" % quote("%s%s%s" % (parameter_name, operator, args[1][operator])))
                else:
                    parameters.append("q=%s" % quote("%s:%s" % (parameter_name, str(parameter_value))))
            elif isinstance(parameter_value, (list, tuple)):
                parameters.append("q=%s" % quote("%s IN %s" % (parameter_name, ",".join(parameter_value))))
            else:
                parameters.append("q=%s" % quote("%s:%s" % (parameter_name, str(parameter_value))))
            return parameters

        if len(kwargs) > 0:
            # Parameters are sorted so the generated URL is deterministic.
            return "%s?%s" % (url, "&".join(reduce(_append_encoded_parameter, sorted(list(kwargs.items())), [])))
        else:
            return url
| 43.813953 | 129 | 0.625398 | from functools import partial, reduce
from typing import Callable, List, Tuple, Any, Optional, Generator, TYPE_CHECKING
from urllib.parse import quote
from requests import Response
from cloudfoundry_client.errors import InvalidEntity
from cloudfoundry_client.json_object import JsonObject
from cloudfoundry_client.request_object import Request
if TYPE_CHECKING:
from cloudfoundry_client.client import CloudFoundryClient
class Entity(JsonObject):
def __init__(self, target_endpoint: str, client: "CloudFoundryClient", *args, **kwargs):
super(Entity, self).__init__(*args, **kwargs)
self.target_endpoint = target_endpoint
self.client = client
try:
if not (isinstance(self.get("entity"), dict)):
raise InvalidEntity(**self)
for attribute, value in list(self["entity"].items()):
domain_name, suffix = attribute.rpartition("_")[::2]
if suffix == "url":
manager_name = domain_name if domain_name.endswith("s") else "%ss" % domain_name
try:
other_manager = getattr(client.v2, manager_name)
except AttributeError:
other_manager = EntityManager(target_endpoint, client, "")
if domain_name.endswith("s"):
new_method = partial(other_manager._list, value)
else:
new_method = partial(other_manager._get, value)
new_method.__name__ = domain_name
setattr(self, domain_name, new_method)
except KeyError:
raise InvalidEntity(**self)
EntityBuilder = Callable[[List[Tuple[str, Any]]], Entity]
PaginateEntities = Generator[Entity, None, None]
class EntityManager(object):
list_query_parameters = ["page", "results-per-page", "order-direction"]
list_multi_parameters = ["order-by"]
timestamp_parameters = ["timestamp"]
def __init__(
self, target_endpoint: str, client: "CloudFoundryClient", entity_uri: str, entity_builder: Optional[EntityBuilder] = None
):
self.target_endpoint = target_endpoint
self.entity_uri = entity_uri
self.client = client
self.entity_builder = (
entity_builder if entity_builder is not None else lambda pairs: Entity(target_endpoint, client, pairs)
)
def _list(self, requested_path: str, entity_builder: Optional[EntityBuilder] = None, **kwargs) -> PaginateEntities:
url_requested = self._get_url_filtered("%s%s" % (self.target_endpoint, requested_path), **kwargs)
response = self.client.get(url_requested)
entity_builder = self._get_entity_builder(entity_builder)
while True:
response_json = self._read_response(response, JsonObject)
for resource in response_json["resources"]:
yield entity_builder(list(resource.items()))
if response_json["next_url"] is None:
break
else:
url_requested = "%s%s" % (self.target_endpoint, response_json["next_url"])
response = self.client.get(url_requested)
def _create(self, data: dict, **kwargs) -> Entity:
url = "%s%s" % (self.target_endpoint, self.entity_uri)
return self._post(url, data, **kwargs)
def _update(self, resource_id: str, data: dict, **kwargs):
url = "%s%s/%s" % (self.target_endpoint, self.entity_uri, resource_id)
return self._put(url, data, **kwargs)
def _remove(self, resource_id: str, **kwargs):
url = "%s%s/%s" % (self.target_endpoint, self.entity_uri, resource_id)
self._delete(url, **kwargs)
def _get(self, requested_path: str, entity_builder: Optional[EntityBuilder] = None) -> Entity:
url = "%s%s" % (self.target_endpoint, requested_path)
response = self.client.get(url)
return self._read_response(response, entity_builder)
def _post(self, url: str, data: Optional[dict] = None, **kwargs):
response = self.client.post(url, json=data, **kwargs)
return self._read_response(response)
def _put(self, url: str, data: Optional[dict] = None, **kwargs):
response = self.client.put(url, json=data, **kwargs)
return self._read_response(response)
def _delete(self, url: str, **kwargs):
self.client.delete(url, **kwargs)
def __iter__(self) -> PaginateEntities:
return self.list()
def __getitem__(self, entity_guid) -> Entity:
return self.get(entity_guid)
def list(self, **kwargs) -> PaginateEntities:
return self._list(self.entity_uri, **kwargs)
def get_first(self, **kwargs) -> Optional[Entity]:
kwargs.setdefault("results-per-page", 1)
for entity in self._list(self.entity_uri, **kwargs):
return entity
return None
def get(self, entity_id: str, *extra_paths) -> Entity:
if len(extra_paths) == 0:
requested_path = "%s/%s" % (self.entity_uri, entity_id)
else:
requested_path = "%s/%s/%s" % (self.entity_uri, entity_id, "/".join(extra_paths))
return self._get(requested_path)
def _read_response(self, response: Response, other_entity_builder: Optional[EntityBuilder] = None):
entity_builder = self._get_entity_builder(other_entity_builder)
result = response.json(object_pairs_hook=JsonObject)
return entity_builder(list(result.items()))
@staticmethod
def _request(**mandatory_parameters) -> Request:
return Request(**mandatory_parameters)
def _get_entity_builder(self, entity_builder: Optional[EntityBuilder]) -> EntityBuilder:
if entity_builder is None:
return self.entity_builder
else:
return entity_builder
def _get_url_filtered(self, url: str, **kwargs) -> str:
def _append_encoded_parameter(parameters: List[str], args: Tuple[str, Any]) -> List[str]:
parameter_name, parameter_value = args[0], args[1]
if parameter_name in self.list_query_parameters:
parameters.append("%s=%s" % (parameter_name, str(parameter_value)))
elif parameter_name in self.list_multi_parameters:
value_list = parameter_value
if not isinstance(value_list, (list, tuple)):
value_list = [value_list]
for value in value_list:
parameters.append("%s=%s" % (parameter_name, str(value)))
elif parameter_name in self.timestamp_parameters:
if isinstance(args[1], dict):
operator_list = args[1].keys()
for operator in operator_list:
parameters.append("q=%s" % quote("%s%s%s" % (parameter_name, operator, args[1][operator])))
else:
parameters.append("q=%s" % quote("%s:%s" % (parameter_name, str(parameter_value))))
elif isinstance(parameter_value, (list, tuple)):
parameters.append("q=%s" % quote("%s IN %s" % (parameter_name, ",".join(parameter_value))))
else:
parameters.append("q=%s" % quote("%s:%s" % (parameter_name, str(parameter_value))))
return parameters
if len(kwargs) > 0:
return "%s?%s" % (url, "&".join(reduce(_append_encoded_parameter, sorted(list(kwargs.items())), [])))
else:
return url
| true | true |
1c4587d7f261fcbda3642a50322883ae48f591a2 | 8,013 | py | Python | karton/config_extractor/config_extractor.py | kscieslinski/karton-config-extractor | c0eb0bddeed2b217abe517ca1b8a20e679506dba | [
"BSD-3-Clause"
] | null | null | null | karton/config_extractor/config_extractor.py | kscieslinski/karton-config-extractor | c0eb0bddeed2b217abe517ca1b8a20e679506dba | [
"BSD-3-Clause"
] | null | null | null | karton/config_extractor/config_extractor.py | kscieslinski/karton-config-extractor | c0eb0bddeed2b217abe517ca1b8a20e679506dba | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/python3
import gc
import hashlib
import json
import os
import re
from karton.core import Config, Karton, Resource, Task
from karton.core.resource import ResourceBase
from malduck.extractor import ExtractManager, ExtractorModules
from .__version__ import __version__
class AnalysisExtractManager(ExtractManager):
    """ExtractManager variant bound to a ConfigExtractor karton service.

    Reuses the extractor modules the karton instance was configured with and
    keeps a back-reference to that instance.
    """

    def __init__(self, karton: "ConfigExtractor") -> None:
        super().__init__(karton.modules)
        self.karton = karton
def create_extractor(karton: "ConfigExtractor") -> AnalysisExtractManager:
    """Build a fresh AnalysisExtractManager bound to *karton*."""
    manager = AnalysisExtractManager(karton)
    return manager
class ConfigExtractor(Karton):
    """
    Extracts configuration from samples and Drakvuf Sandbox analyses.

    Consumes recognized runnable samples and drakrun analysis artifacts,
    runs malduck extractor modules over them, and reports any configuration
    found as ``type: config`` karton tasks.
    """

    identity = "karton.config-extractor"
    version = __version__
    persistent = True

    # Accept runnable samples for all supported platforms plus both flavours
    # of drakrun analyses.
    filters = [
        {
            "type": "sample",
            "stage": "recognized",
            "kind": "runnable",
            "platform": "win32",
        },
        {
            "type": "sample",
            "stage": "recognized",
            "kind": "runnable",
            "platform": "win64",
        },
        {
            "type": "sample",
            "stage": "recognized",
            "kind": "runnable",
            "platform": "linux",
        },
        {"type": "analysis", "kind": "drakrun-prod"},
        {"type": "analysis", "kind": "drakrun"},
    ]

    @classmethod
    def args_parser(cls):
        """Extend the base karton argument parser with the modules directory."""
        parser = super().args_parser()
        parser.add_argument(
            "--modules",
            help="Malduck extractor modules directory",
            default="extractor/modules",
        )
        return parser

    @classmethod
    def main(cls):
        """Command-line entry point: parse args, build the service, run it."""
        parser = cls.args_parser()
        args = parser.parse_args()

        config = Config(args.config_file)
        service = ConfigExtractor(config, modules=args.modules)
        service.loop()

    def __init__(self, config: Config, modules: str) -> None:
        """Load malduck extractor modules from *modules* and init karton."""
        super().__init__(config)
        self.modules = ExtractorModules(modules)

    def report_config(self, config, sample, parent=None):
        """Convert *config* to the legacy format and emit it as a karton task.

        ``parent`` is the resource the config was ripped from (e.g. a memory
        dump); it defaults to the sample itself.
        """
        # Legacy format uses "type" where the extractor emits "family".
        legacy_config = dict(config)
        legacy_config["type"] = config["family"]
        del legacy_config["family"]

        # This allows us to spawn karton tasks for special config handling
        if "store-in-karton" in legacy_config:
            self.log.info("Karton tasks found in config, sending")

            for karton_task in legacy_config["store-in-karton"]:
                task_data = karton_task["task"]
                payload_data = karton_task["payload"]
                payload_data["parent"] = parent or sample

                task = Task(headers=task_data, payload=payload_data)
                self.send_task(task)
                self.log.info("Sending ripped task %s", task.uid)

            del legacy_config["store-in-karton"]

        # Only the "type" key remains -> nothing of substance was extracted.
        if len(legacy_config.items()) == 1:
            self.log.info("Final config is empty, not sending it to the reporter")
            return

        task = Task(
            {
                "type": "config",
                "kind": "static",
                "family": config["family"],
                "quality": self.current_task.headers.get("quality", "high"),
            },
            payload={
                "config": legacy_config,
                "sample": sample,
                "parent": parent or sample,
            },
        )
        self.send_task(task)

    # analyze a standard, non-dump sample
    def analyze_sample(self, sample: ResourceBase) -> None:
        """Run extractor modules directly over the sample file on disk."""
        extractor = create_extractor(self)
        with sample.download_temporary_file() as temp:  # type: ignore
            extractor.push_file(temp.name)
        configs = extractor.config

        if configs:
            # push_file produced at most one merged config for a single file.
            config = configs[0]
            self.log.info("Got config: {}".format(json.dumps(config)))
            self.report_config(config, sample)
        else:
            self.log.info("Failed to get config")

    # analyze a drakrun analysis
    def analyze_drakrun(self, sample, path):
        """Scan every memory dump under ``path``/dumps and report merged configs."""
        extractor = create_extractor(self)
        dumps_path = os.path.join(path, "dumps")
        dump_candidates = {}

        results = {
            "analysed": 0,
            "crashed": 0,
        }

        analysis_dumps = sorted(os.listdir(dumps_path))
        for i, dump in enumerate(analysis_dumps):
            # catch only dumps (named <base-address>_<hash>, both hex)
            if re.match(r"^[a-f0-9]{4,16}_[a-f0-9]{16}$", dump):
                results["analysed"] += 1
                self.log.debug(
                    "Analyzing dump %d/%d %s", i, len(analysis_dumps), str(dump)
                )
                dump_path = os.path.join(dumps_path, dump)

                with open(dump_path, "rb") as f:
                    dump_data = f.read()

                if not dump_data:
                    self.log.warning("Dump {} is empty".format(dump))
                    continue

                # The dump's load address is encoded in the filename prefix.
                base = int(dump.split("_")[0], 16)

                try:
                    family = extractor.push_file(dump_path, base=base)
                    if family:
                        self.log.info("Found better %s config in %s", family, dump)
                        # Remember the dump that produced the best config so
                        # it can be attached as the config's parent resource.
                        dump_candidates[family] = (dump, dump_data)
                except Exception:
                    self.log.exception("Error while extracting from {}".format(dump))
                    results["crashed"] += 1

                self.log.debug("Finished analysing dump no. %d", i)

        self.log.info("Merging and reporting extracted configs")

        # extractor.configs holds one merged config per family seen.
        for family, config in extractor.configs.items():
            dump, dump_data = dump_candidates[family]
            self.log.info("* (%s) %s => %s", family, dump, json.dumps(config))
            parent = Resource(name=dump, content=dump_data)
            task = Task(
                {
                    "type": "sample",
                    "stage": "analyzed",
                    "kind": "dump",
                    "platform": "win32",
                    "extension": "exe",
                },
                payload={
                    "sample": parent,
                    "parent": sample,
                    "tags": ["dump:win32:exe"],
                },
            )
            self.send_task(task)
            self.report_config(config, sample, parent=parent)

        self.log.info("done analysing, results: {}".format(json.dumps(results)))

    def process(self, task: Task) -> None:  # type: ignore
        """Dispatch the incoming task to the matching analysis routine."""
        sample = task.get_resource("sample")
        headers = task.headers

        if headers["type"] == "sample":
            self.log.info("Analyzing original binary")
            self.analyze_sample(sample)
        elif headers["type"] == "analysis" and headers["kind"] == "drakrun-prod":
            # Legacy drakrun: the whole analysis directory arrives as one
            # archive resource; guard against oversized archives (128 MiB).
            analysis = task.get_resource("analysis")
            if analysis.size > 1024 * 1024 * 128:
                self.log.info("Analysis is too large, aborting")
                return

            with analysis.extract_temporary() as fpath:  # type: ignore
                with open(os.path.join(fpath, "sample.txt"), "r") as f:
                    sample_hash = f.read()

                self.log.info(
                    "Processing drakmon analysis, sample: {}".format(sample_hash)
                )
                self.analyze_drakrun(sample, fpath)
        elif headers["type"] == "analysis" and headers["kind"] == "drakrun":
            # DRAKVUF Sandbox (codename: drakmon OSS)
            sample_hash = hashlib.sha256(sample.content or b"").hexdigest()

            self.log.info(
                "Processing drakmon OSS analysis, sample: {}".format(sample_hash)
            )

            dumps = task.get_resource("dumps.zip")

            with dumps.extract_temporary() as tmpdir:  # type: ignore
                self.analyze_drakrun(sample, tmpdir)

        self.log.debug("Printing gc stats")
        self.log.debug(gc.get_stats())
| 34.097872 | 85 | 0.539998 |
import gc
import hashlib
import json
import os
import re
from karton.core import Config, Karton, Resource, Task
from karton.core.resource import ResourceBase
from malduck.extractor import ExtractManager, ExtractorModules
from .__version__ import __version__
class AnalysisExtractManager(ExtractManager):
def __init__(self, karton: "ConfigExtractor") -> None:
super(AnalysisExtractManager, self).__init__(karton.modules)
self.karton = karton
def create_extractor(karton: "ConfigExtractor") -> AnalysisExtractManager:
return AnalysisExtractManager(karton)
class ConfigExtractor(Karton):
identity = "karton.config-extractor"
version = __version__
persistent = True
filters = [
{
"type": "sample",
"stage": "recognized",
"kind": "runnable",
"platform": "win32",
},
{
"type": "sample",
"stage": "recognized",
"kind": "runnable",
"platform": "win64",
},
{
"type": "sample",
"stage": "recognized",
"kind": "runnable",
"platform": "linux",
},
{"type": "analysis", "kind": "drakrun-prod"},
{"type": "analysis", "kind": "drakrun"},
]
@classmethod
def args_parser(cls):
parser = super().args_parser()
parser.add_argument(
"--modules",
help="Malduck extractor modules directory",
default="extractor/modules",
)
return parser
@classmethod
def main(cls):
parser = cls.args_parser()
args = parser.parse_args()
config = Config(args.config_file)
service = ConfigExtractor(config, modules=args.modules)
service.loop()
def __init__(self, config: Config, modules: str) -> None:
super().__init__(config)
self.modules = ExtractorModules(modules)
def report_config(self, config, sample, parent=None):
legacy_config = dict(config)
legacy_config["type"] = config["family"]
del legacy_config["family"]
if "store-in-karton" in legacy_config:
self.log.info("Karton tasks found in config, sending")
for karton_task in legacy_config["store-in-karton"]:
task_data = karton_task["task"]
payload_data = karton_task["payload"]
payload_data["parent"] = parent or sample
task = Task(headers=task_data, payload=payload_data)
self.send_task(task)
self.log.info("Sending ripped task %s", task.uid)
del legacy_config["store-in-karton"]
if len(legacy_config.items()) == 1:
self.log.info("Final config is empty, not sending it to the reporter")
return
task = Task(
{
"type": "config",
"kind": "static",
"family": config["family"],
"quality": self.current_task.headers.get("quality", "high"),
},
payload={
"config": legacy_config,
"sample": sample,
"parent": parent or sample,
},
)
self.send_task(task)
def analyze_sample(self, sample: ResourceBase) -> None:
extractor = create_extractor(self)
with sample.download_temporary_file() as temp:
extractor.push_file(temp.name)
configs = extractor.config
if configs:
config = configs[0]
self.log.info("Got config: {}".format(json.dumps(config)))
self.report_config(config, sample)
else:
self.log.info("Failed to get config")
def analyze_drakrun(self, sample, path):
extractor = create_extractor(self)
dumps_path = os.path.join(path, "dumps")
dump_candidates = {}
results = {
"analysed": 0,
"crashed": 0,
}
analysis_dumps = sorted(os.listdir(dumps_path))
for i, dump in enumerate(analysis_dumps):
if re.match(r"^[a-f0-9]{4,16}_[a-f0-9]{16}$", dump):
results["analysed"] += 1
self.log.debug(
"Analyzing dump %d/%d %s", i, len(analysis_dumps), str(dump)
)
dump_path = os.path.join(dumps_path, dump)
with open(dump_path, "rb") as f:
dump_data = f.read()
if not dump_data:
self.log.warning("Dump {} is empty".format(dump))
continue
base = int(dump.split("_")[0], 16)
try:
family = extractor.push_file(dump_path, base=base)
if family:
self.log.info("Found better %s config in %s", family, dump)
dump_candidates[family] = (dump, dump_data)
except Exception:
self.log.exception("Error while extracting from {}".format(dump))
results["crashed"] += 1
self.log.debug("Finished analysing dump no. %d", i)
self.log.info("Merging and reporting extracted configs")
for family, config in extractor.configs.items():
dump, dump_data = dump_candidates[family]
self.log.info("* (%s) %s => %s", family, dump, json.dumps(config))
parent = Resource(name=dump, content=dump_data)
task = Task(
{
"type": "sample",
"stage": "analyzed",
"kind": "dump",
"platform": "win32",
"extension": "exe",
},
payload={
"sample": parent,
"parent": sample,
"tags": ["dump:win32:exe"],
},
)
self.send_task(task)
self.report_config(config, sample, parent=parent)
self.log.info("done analysing, results: {}".format(json.dumps(results)))
def process(self, task: Task) -> None:
sample = task.get_resource("sample")
headers = task.headers
if headers["type"] == "sample":
self.log.info("Analyzing original binary")
self.analyze_sample(sample)
elif headers["type"] == "analysis" and headers["kind"] == "drakrun-prod":
analysis = task.get_resource("analysis")
if analysis.size > 1024 * 1024 * 128:
self.log.info("Analysis is too large, aborting")
return
with analysis.extract_temporary() as fpath:
with open(os.path.join(fpath, "sample.txt"), "r") as f:
sample_hash = f.read()
self.log.info(
"Processing drakmon analysis, sample: {}".format(sample_hash)
)
self.analyze_drakrun(sample, fpath)
elif headers["type"] == "analysis" and headers["kind"] == "drakrun":
sample_hash = hashlib.sha256(sample.content or b"").hexdigest()
self.log.info(
"Processing drakmon OSS analysis, sample: {}".format(sample_hash)
)
dumps = task.get_resource("dumps.zip")
with dumps.extract_temporary() as tmpdir:
self.analyze_drakrun(sample, tmpdir)
self.log.debug("Printing gc stats")
self.log.debug(gc.get_stats())
| true | true |
1c458914cb33dd348d349ab2d97c4bf9208ef056 | 6,011 | py | Python | Code/PrepareTables/SelectedROICorrs_positionVar.py | cirmuw/functional-twin-analysis | b6730f09f2143d5372f1a90d5fac47e3385e54fb | [
"Apache-2.0"
] | null | null | null | Code/PrepareTables/SelectedROICorrs_positionVar.py | cirmuw/functional-twin-analysis | b6730f09f2143d5372f1a90d5fac47e3385e54fb | [
"Apache-2.0"
] | null | null | null | Code/PrepareTables/SelectedROICorrs_positionVar.py | cirmuw/functional-twin-analysis | b6730f09f2143d5372f1a90d5fac47e3385e54fb | [
"Apache-2.0"
] | null | null | null | #script to create tabels containig x, y and z coordinates of functionally corresponding vertices (position variability) for each twin, one table per vertex
#input:id of functionally corresponding vetices of each twin to reference
#output: tables with vertex position in each subject, one table per vetex
import numpy as np
import nibabel as nib
import pandas as pd
from glob import glob
import os, sys
currentdir = os.path.dirname(os.path.realpath(__file__))
parentdir = os.path.dirname(currentdir)
sys.path.append(parentdir)
import settings as s
import pickle
#paths to subject data,id of vertices without signal, surface file, parcelation, chosen rois
infile =s.HCP_information_sheet_path #\
subjectpath1=s.HCProot+'HCP_3T_RESTA_fmri/'# used obtain subject ids
subjectpath2=s.HCProot+'HCP_3T_RESTB_fmri/'#/
source_dir=s.projectfolder+'7NETS_vertex/5_7nets_corresponding/' # path containing id of functionally corresponding vetices of each twin to reference
target_dir=s.projectfolder+'/7NETS_vertex/10_PositionVar_cosine/'# output tables with vertex position in each subject
if not os.path.exists(target_dir):
os.mkdir(target_dir)
zerovertexlh=np.load('../../Deliveries/0verticeslh.npy')#ids of vertices without signal
zerovertexrh=np.load('../../Deliveries/0verticesrh.npy')
surfacedirlh='../../Deliveries/fsaverage4/lh.inflated' # surface on which vertex coordinates are based
surfacedirrh='../../Deliveries/fsaverage4/rh.inflated'
lhsurf=nib.freesurfer.io.read_geometry(surfacedirlh)
rhsurf=nib.freesurfer.io.read_geometry(surfacedirrh)
lhsurf=lhsurf[0]
lhsurf=np.delete(lhsurf,zerovertexlh,0)
rhsurf=rhsurf[0]
rhsurf=np.delete(rhsurf,zerovertexrh,0)
surf=np.concatenate([lhsurf,rhsurf],axis=0)
lhparpath='../../Deliveries/lh.Schaefer2018_600Parcels_7Networks_order.annot'
rhparpath='../../Deliveries/rh.Schaefer2018_600Parcels_7Networks_order.annot'
lhannot=nib.freesurfer.io.read_annot(lhparpath)
lhlabels=lhannot[0]
rhannot=nib.freesurfer.io.read_annot(rhparpath)
rhlabels=rhannot[0]
labelslh=np.delete(lhlabels,zerovertexlh,0)
labelsrh=np.delete(rhlabels,zerovertexrh,0)
lhrois=list(np.load('../../Deliveries/chosenroislh.npy'))#save id of chosen rois
rhrois=list(np.load('../../Deliveries/chosenroisrh.npy'))
lhrois=lhrois[1:]
rhrois=rhrois[1:]
nameslhrois=['l_'+str(s) for s in lhrois]
namesrhrois=['r_'+str(s) for s in rhrois]
# Derive parcel-index boundaries of the 7 Yeo networks from the annotation
# color table: a jump of more than 5 in the red channel between consecutive
# color-table entries is treated as the start of a new network.
# NOTE(review): the >5 red-channel-jump heuristic and the hard-coded 301
# sentinel assume the specific Schaefer 600-parcel color table — confirm if
# the parcellation file ever changes.
lhnetwork=np.zeros((9))
rhnetwork=np.zeros((9))
# Slot 8 holds the one-past-the-end parcel index (300 parcels per hemisphere).
lhnetwork[8]=301
rhnetwork[8]=301
c1=1
c2=1
for i in range(1,301):
    if abs(lhannot[1][i][0]-lhannot[1][i-1][0])>5:
        lhnetwork[c1]=int(i)
        c1=c1+1
    if abs(rhannot[1][i][0]-rhannot[1][i-1][0])>5:
        rhnetwork[c2]=int(i)
        c2=c2+1
# Discover which twin subjects actually have fMRI data on disk: keep only
# twins from the information sheet, then probe the REST-A and REST-B session
# directories for each subject id.
xl=pd.ExcelFile(infile)
dataframe1=xl.parse('Sheet1')
isNotTwin=dataframe1['Twin_Stat']=='NotTwin'
isNotTwin=np.where(isNotTwin)[0]
dataframe2=dataframe1.drop(isNotTwin,0)
Subjects=dataframe2['Subject'].values
path1=[]
path2=[]
for i in range(Subjects.shape[0]):
    path1.append(subjectpath1+str(Subjects[i]))
    path2.append(subjectpath2+str(Subjects[i]))
truesubjects=[]
# NOTE(review): a subject present in BOTH session directories is appended
# twice and therefore gets two rows in every output table — confirm this is
# intended (one row per available session) rather than an accidental
# duplicate.
for i in range(Subjects.shape[0]):
    if os.path.isdir(path1[i])==True:
        truesubjects.append(Subjects[i])
    if os.path.isdir(path2[i])==True:
        truesubjects.append(Subjects[i])
# Per-subject metadata columns and the (subjects x 3) array they are kept in.
name=['Subject','Zygosity','Mother_ID']
nonvertexdat=np.zeros((len(truesubjects),3),dtype=object)
def _export_vertex_tables(prefix, labels, fill_metadata):
    """Write one <prefix>_<j+1>_mean_position.csv.gz per nonzero-label vertex.

    For every subject, the functionally corresponding vertex id is looked up
    in the precomputed correspondence pickle and its x/y/z coordinates on the
    concatenated fsaverage4 surface are recorded; rows are sorted by zygosity
    and family (Mother_ID) before writing.

    Args:
        prefix: Hemisphere tag used in file names, 'lh' or 'rh'.
        labels: Per-vertex parcel labels; label 0 (unassigned) is skipped.
        fill_metadata: When True, populate the shared ``nonvertexdat`` array
            with subject id / zygosity / mother id on the first vertex.
    """
    for j in range(len(labels)):
        if labels[j] != 0:
            # The correspondence file depends only on the vertex, so load it
            # once per vertex instead of once per subject (the original
            # reloaded it inside the subject loop and never closed the file
            # handle); ``with`` closes it deterministically.
            with open(source_dir + prefix + '_' + str(j + 1) + 'correspondingvertices.p', 'rb') as fh:
                functional = pickle.load(fh)
            # The reference vertex is flagged with -1 in functional[1].
            ref = np.where(functional[1] == -1)[0]
            positionvar = []
            for i in range(len(truesubjects)):
                vertex_id = functional[0][i][ref]
                vertex_id = vertex_id[0]
                coords = surf[vertex_id]
                # NOTE(review): pd.DataFrame(coords) relies on the shape of
                # the stored correspondence arrays yielding three columns —
                # confirm against the files produced by step 5.
                positionframe = pd.DataFrame(coords)
                positionframe.columns = ['x', 'y', 'z']
                positionvar.append(positionframe)
                if fill_metadata and j == 0:
                    # Subject metadata is identical for every vertex, so it
                    # is filled only once (first processed hemisphere/vertex).
                    row = dataframe2[dataframe2['Subject'] == truesubjects[i]].index.tolist()
                    tmp1 = np.array([str(truesubjects[i]),
                                     dataframe2['Zygosity'][row].values[0],
                                     str(dataframe2['Mother_ID'][row].values[0])])
                    nonvertexdat[i, :] = tmp1
            nonvertextable = pd.DataFrame(data=nonvertexdat)
            nonvertextable.columns = name
            positionframe = pd.concat(positionvar, axis=0, ignore_index=True)
            table = pd.concat([nonvertextable, positionframe], axis=1)
            # Sort twins by zygosity and family so twin pairs are adjacent.
            table = table.sort_values(['Zygosity', 'Mother_ID'], axis=0, ascending=[True, True])
            table.reset_index(inplace=True)
            table = table.drop('index', axis=1)
            writefile = target_dir + prefix + '_' + str(j + 1) + '_mean_position.csv.gz'
            table.to_csv(writefile, compression='gzip')


# Left hemisphere also fills the shared subject-metadata array.
_export_vertex_tables('lh', labelslh, fill_metadata=True)
_export_vertex_tables('rh', labelsrh, fill_metadata=False)
print('Finished')
| 38.044304 | 155 | 0.683081 |
# Build per-parcel vertex-position tables from HCP twin data: load the
# fsaverage4 inflated surfaces, drop zero-signal vertices, and (below)
# write one gzipped CSV of coordinates per parcel and hemisphere.
import numpy as np
import nibabel as nib
import pandas as pd
from glob import glob
import os, sys
# Make the project root importable so the shared `settings` module resolves.
currentdir = os.path.dirname(os.path.realpath(__file__))
parentdir = os.path.dirname(currentdir)
sys.path.append(parentdir)
import settings as s
import pickle
# Subject information sheet plus the two per-session fMRI directories.
infile =s.HCP_information_sheet_path
subjectpath1=s.HCProot+'HCP_3T_RESTA_fmri/'
subjectpath2=s.HCProot+'HCP_3T_RESTB_fmri/'
source_dir=s.projectfolder+'7NETS_vertex/5_7nets_corresponding/'
target_dir=s.projectfolder+'/7NETS_vertex/10_PositionVar_cosine/'
if not os.path.exists(target_dir):
    os.mkdir(target_dir)
# Indices of vertices flagged as unusable; removed from both hemispheres.
zerovertexlh=np.load('../../Deliveries/0verticeslh.npy')
zerovertexrh=np.load('../../Deliveries/0verticesrh.npy')
surfacedirlh='../../Deliveries/fsaverage4/lh.inflated'
surfacedirrh='../../Deliveries/fsaverage4/rh.inflated'
# read_geometry returns (coords, faces); only vertex coordinates are kept.
lhsurf=nib.freesurfer.io.read_geometry(surfacedirlh)
rhsurf=nib.freesurfer.io.read_geometry(surfacedirrh)
lhsurf=lhsurf[0]
lhsurf=np.delete(lhsurf,zerovertexlh,0)
rhsurf=rhsurf[0]
rhsurf=np.delete(rhsurf,zerovertexrh,0)
# One coordinate array: left-hemisphere vertices first, then right.
surf=np.concatenate([lhsurf,rhsurf],axis=0)
# Schaefer 600-parcel / 7-network annotation for each hemisphere.
lhparpath='../../Deliveries/lh.Schaefer2018_600Parcels_7Networks_order.annot'
rhparpath='../../Deliveries/rh.Schaefer2018_600Parcels_7Networks_order.annot'
lhannot=nib.freesurfer.io.read_annot(lhparpath)
lhlabels=lhannot[0]
rhannot=nib.freesurfer.io.read_annot(rhparpath)
rhlabels=rhannot[0]
# Per-vertex parcel labels with the zero-signal vertices removed, so the
# label arrays stay aligned with `surf` above.
labelslh=np.delete(lhlabels,zerovertexlh,0)
labelsrh=np.delete(rhlabels,zerovertexrh,0)
lhrois=list(np.load('../../Deliveries/chosenroislh.npy'))
rhrois=list(np.load('../../Deliveries/chosenroisrh.npy'))
lhrois=lhrois[1:]
rhrois=rhrois[1:]
# NOTE(review): the comprehension variable `s` shadows the `settings` alias
# imported above (harmless under Python 3 comprehension scoping, but worth
# renaming); nameslhrois/namesrhrois are not referenced later in this chunk.
nameslhrois=['l_'+str(s) for s in lhrois]
namesrhrois=['r_'+str(s) for s in rhrois]
# Network boundary indices per hemisphere; slot 8 holds the end marker.
lhnetwork=np.zeros((9))
rhnetwork=np.zeros((9))
lhnetwork[8]=301
rhnetwork[8]=301
c1=1
c2=1
# A jump (>5) in the annotation colour table's first channel between
# consecutive parcels marks the start of a new network.
for i in range(1,301):
    if abs(lhannot[1][i][0]-lhannot[1][i-1][0])>5:
        lhnetwork[c1]=int(i)
        c1=c1+1
    if abs(rhannot[1][i][0]-rhannot[1][i-1][0])>5:
        rhnetwork[c2]=int(i)
        c2=c2+1
# Subject selection: keep only twins whose fMRI directory actually exists.
xl=pd.ExcelFile(infile)
dataframe1=xl.parse('Sheet1')
isNotTwin=dataframe1['Twin_Stat']=='NotTwin'
isNotTwin=np.where(isNotTwin)[0]
dataframe2=dataframe1.drop(isNotTwin,0)
Subjects=dataframe2['Subject'].values
path1=[]
path2=[]
for i in range(Subjects.shape[0]):
    path1.append(subjectpath1+str(Subjects[i]))
    path2.append(subjectpath2+str(Subjects[i]))
truesubjects=[]
# NOTE(review): a subject with both a RESTA and a RESTB directory is
# appended twice (two independent ifs, not elif) -- presumably one entry
# per scan session; confirm this duplication is intended.
for i in range(Subjects.shape[0]):
    if os.path.isdir(path1[i])==True:
        truesubjects.append(Subjects[i])
    if os.path.isdir(path2[i])==True:
        truesubjects.append(Subjects[i])
# Non-vertex columns of the output tables; filled on the first parcel pass.
name=['Subject','Zygosity','Mother_ID']
nonvertexdat=np.zeros((len(truesubjects),3),dtype=object)
# Per-parcel output, left hemisphere: for every non-zero parcel label,
# gather each subject's matched-vertex coordinates into one table and
# write it as a gzipped CSV.
for j in range(len(labelslh)):
    if labelslh[j]!=0:
        positionvar=[]
        for i in range(len(truesubjects)):
            # NOTE(review): this pickle path depends only on j, not i -- it
            # is re-loaded once per subject, and the handle from open() is
            # never closed; hoisting the load above the i-loop (inside a
            # `with`) would behave identically.
            functional=pickle.load(open(source_dir+'lh_'+str(j+1)+'correspondingvertices.p','rb'))
            index=np.where(functional[1]==-1)[0]
            index=functional[0][i][index]
            index=index[0]
            coords=surf[index]
            positionframe=pd.DataFrame(coords)
            positionframe.columns=['x','y','z']
            positionvar.append(positionframe)
            # Subject/zygosity/mother-id columns only need filling once.
            if j==0:
                index=dataframe2[dataframe2['Subject']==truesubjects[i]].index.tolist()
                tmp1=np.array([str(truesubjects[i]),dataframe2['Zygosity'][index].values[0], str(dataframe2['Mother_ID'][index].values[0])])
                nonvertexdat[i,:]=tmp1
        nonvertextable=pd.DataFrame(data=nonvertexdat)
        nonvertextable.columns=name
        positionframe=pd.concat(positionvar,axis=0,ignore_index=True)
        table=pd.concat([nonvertextable,positionframe],axis=1)
        # Order rows by zygosity, then mother id, and renumber the index.
        table=table.sort_values(['Zygosity', 'Mother_ID'], axis=0, ascending=[True,True])
        table.reset_index(inplace=True)
        table=table.drop('index',axis=1)
        writefile=target_dir+'lh_'+str(j+1)+'_mean_position.csv.gz'
        table.to_csv(writefile, compression='gzip')
# Same pass for the right hemisphere (subject columns already filled above).
for j in range(len(labelsrh)):
    if labelsrh[j]!=0:
        positionvar=[]
        for i in range(len(truesubjects)):
            functional=pickle.load(open(source_dir+'rh_'+str(j+1)+'correspondingvertices.p','rb'))
            index=np.where(functional[1]==-1)[0]
            index=functional[0][i][index]
            index=index[0]
            coords=surf[index]
            positionframe=pd.DataFrame(coords)
            positionframe.columns=['x','y','z']
            positionvar.append(positionframe)
        nonvertextable=pd.DataFrame(data=nonvertexdat)
        nonvertextable.columns=name
        positionframe=pd.concat(positionvar,axis=0,ignore_index=True)
        table=pd.concat([nonvertextable,positionframe],axis=1)
        table=table.sort_values(['Zygosity', 'Mother_ID'], axis=0, ascending=[True,True])
        table.reset_index(inplace=True)
        table=table.drop('index',axis=1)
        writefile=target_dir+'rh_'+str(j+1)+'_mean_position.csv.gz'
        table.to_csv(writefile, compression='gzip')
print('Finished')
| true | true |
1c458b871efd7083878ac19fd4aba0ef100e5f65 | 12,721 | py | Python | libs/curveLib/open_all.py | ledummy/CoMPlEx | f315df7a1b13cfcbdafd9879ff93a974f2e2c38b | [
"MIT"
] | null | null | null | libs/curveLib/open_all.py | ledummy/CoMPlEx | f315df7a1b13cfcbdafd9879ff93a974f2e2c38b | [
"MIT"
] | 1 | 2020-04-08T12:55:50.000Z | 2020-04-08T12:55:50.000Z | libs/curveLib/open_all.py | ledummy/CoMPlEx | f315df7a1b13cfcbdafd9879ff93a974f2e2c38b | [
"MIT"
] | 1 | 2020-04-08T12:44:47.000Z | 2020-04-08T12:44:47.000Z | import segment
import logging
import string
class openWorker():
    """Base class for force-curve file readers.

    Subclasses implement :meth:`open` to parse ``fname`` and populate
    ``self.parameters`` (calibration values), ``self.info`` (metadata)
    and ``self.segments`` (list of ``segment.segment`` objects).
    """

    def __init__(self, fname):
        self.fname = fname
        self.parameters = {}
        self.info = {}
        self.segments = []

    def parseConfigLine(self, cline, newline='\r\n'):
        """Parse one '# key: value' header line.

        Example inputs::

            # columns: vDeflection strainGaugeHeight
            # fancyNames: "Vertical deflection" "Height (measured)"

        Returns ``(name, value)`` where value is a single string, or a
        list when several values are present (quoted values are split on
        '" "', unquoted ones on spaces).  Returns ``False`` when the
        line contains no ':' separator.
        """
        line = cline[2:-len(newline)]
        if line.find(':') == -1:
            return False
        fragments = line.split(':')
        name = fragments[0]
        # Re-join the remainder so values containing ':' survive intact.
        # ':'.join() works on both Python 2 and 3; the original
        # string.join() helper was removed in Python 3.
        post = ':'.join(fragments[1:]).strip()
        if post.find('"') == -1:
            val = post.split(' ')
        else:
            val = post[1:-1].split('" "')
        if len(val) == 1:
            val = val[0]
        return name, val

    def getFile(self):
        """Read the whole file and guess its newline convention.

        Line 11 is sampled to decide between '\\n', '\\r\\n' and '\\r';
        the result is stored in ``self.newline``.  Returns the list of
        raw lines, or ``False`` for files shorter than 11 lines (treated
        as non-text input).
        """
        in_file = open(str(self.fname), "r")
        righe = in_file.readlines()
        in_file.close()
        self.newline = '\n'
        try:
            if righe[10][-2:] == '\r\n':
                self.newline = '\r\n'
            elif righe[10][-1:] == '\r':
                self.newline = '\r'
        except IndexError:
            # Fewer than 11 lines: not a recognisable curve file.
            logging.error('File is not an ascii file')
            return False
        return righe

    def getAll(self):
        """Return the (parameters, info, segments) triple."""
        return self.parameters, self.info, self.segments

    def open(self):
        """Parse the file; subclasses override.  The base returns False."""
        return False
class opener:
    """Dispatcher that picks the right reader class for a curve file.

    ``EXT[i]`` maps a filename extension onto the reader class named in
    ``OPN[i]``; :meth:`getOpener` instantiates the matching reader and,
    if its ``open()`` succeeds, returns its (parameters, info, segments).
    """
    EXT = ['txt','itx','dat','nano','r9c']
    OPN = ['jpktxt','igoritx','igortxt','nanoscopetxt','r9Curves']
    # NOTE(review): 'r9Curves' is not defined in this module -- opening a
    # .r9c file would raise NameError from eval(); confirm where that
    # reader class lives.
    def __init__(self,fname):
        self.fname = fname
    def getOpener(self,driver=None):
        # With no explicit driver, try the reader registered for the file
        # extension; otherwise instantiate the named driver directly.
        # NOTE(review): dispatch goes through eval() on a constructed class
        # name, so it only works for names visible in this module.
        if driver == None:
            import os
            extension = os.path.splitext(self.fname)[1][1:].lower()
            for i in range(len(self.EXT)):
                if self.EXT[i]==extension:
                    dr = eval(self.OPN[i]+'(self.fname)')
                    if dr.open()==True:
                        return dr.getAll()
        else:
            dr = eval(driver+'(self.fname)')
            if dr.open()==True:
                return dr.getAll()
        # Fallback: empty parameters, info and segments.
        return [],[],[]
class jpktxt(openWorker):
    def open(self):
        """
        Open JPK exported TXT files

        Header lines begin with '#' and are handled by parseConfigLine;
        other rows supply the height (column chZ) and vertical deflection
        (column chF).  Heights are stored in nm (*1e9) and deflections in
        pN with inverted sign (*-1e12).  Every 'segmentIndex' header
        closes the segment collected so far.  Returns True on success.
        """
        righe = self.getFile()
        x=[]
        y=[]
        direction = None
        # Default column indices, overridden by a 'columns' header line.
        chZ = 0
        chF = 1
        k = 1.0
        # parse is False inside a 'pause' segment (its data rows are skipped).
        parse = True
        try:
            speed = 0.0
            for rigo in righe:
                if rigo[0] != '#' and len(rigo) > len(self.newline) and parse and rigo[0] != ' ':
                    # Data row: columns separated by spaces or tabs.
                    separator = ' '
                    if rigo.find(separator)==-1:
                        separator='\t'
                    datas = rigo[:-len(self.newline)].split(separator)
                    xi = datas[chZ]
                    yi = datas[chF]
                    x.append(float(xi)*1e9)
                    y.append(-1.0*float(yi)*1e12)
                else:
                    ex = self.parseConfigLine(rigo,self.newline)
                    if ex != False:
                        name,val = ex
                        if name == 'units':
                            self.info['units'] = val
                        elif name == 'segmentIndex':
                            # Flush the finished segment before the next one.
                            if len(x)>0 and len(y)>0:
                                self.segments.append(segment.segment(x,y))
                                self.segments[-1].speed = speed
                                self.segments[-1].k = k
                                if direction != None:
                                    self.segments[-1].direction = direction
                                direction = None
                                # NOTE(review): speed resets to 1.0 here but
                                # is initialised to 0.0 above -- confirm
                                # which default is intended.
                                speed = 1.0
                                x = []
                                y = []
                        elif name == 'springConstant':
                            self.parameters['k'] = 1000.0*float(val) #internally k is in pN/nm
                            k = self.parameters['k']
                        elif name=='segment':
                            # Map JPK segment names onto internal directions.
                            direction = val
                            if val == 'extend':
                                direction='near'
                                parse = True
                            elif val == 'retract':
                                direction = 'far'
                                parse = True
                            elif val=='pause':
                                parse = False
                        elif name == 'columns':
                            # columns: height vDeflection smoothedCapacitiveSensorHeight capacitiveSensorHeight seriesTime time
                            # fancyNames: "Height" "Vertical deflection" "Height (measured & smoothed)" "Height (measured)" "Series Time" "Segment Time"
                            # Scan zs in reverse so the earliest listed name
                            # present in `val` wins (later assignments
                            # overwrite earlier ones).
                            zs = ['smoothedCapacitiveSensorHeight','height','capacitiveSensorHeight','strainGaugeHeight']
                            for s in zs[::-1]:
                                if s in val:
                                    chZ = val.index(s)
                            if 'vDeflection' in val:
                                chF = val.index('vDeflection')
                        elif name == 'fzfd':
                            if val == '1' or val == 'True':
                                self.parameters['fzfd'] = True
                        elif name == 'fancyNames':
                            self.info['fancyNames'] = val
                        elif name == 'sensitivity':
                            self.parameters['sensitivity'] = 1.0e9*float(val) #internally in nm/V
                        elif name == 'speed':
                            speed = 1.0e9*float(val) #internally in nm/s
        except:
            # Debug hook left disabled; parse errors propagate unchanged.
            #if logging.getDEBUG :
            #    logging.error('File cannot be interpreted as JPK FD curve')
            #    return False
            #else:
            raise
        # Flush the trailing segment (files need not end with a header).
        if len(x)>0 and len(y)>0:
            self.segments.append(segment.segment(x,y))
            self.segments[-1].speed = speed
            self.segments[-1].k = k
            if direction != None:
                self.segments[-1].direction = direction
        return True
class igoritx(openWorker):
    def open(self):
        """
        Open internal Igor Text File ITX

        Data rows carry four columns (ffb, eeb, fff, eef; the names
        suggest forward/backward force and extension pairs) and 'NAN'
        entries are skipped.  The last line holds ';'-separated
        'key=value' settings, quoted, from which the spring constant and
        pulling rate are read.  Returns True.
        """
        self.parameters['k'] = 1.0
        speed = 0.0
        righe = self.getFile()
        newline = self.newline
        y1=[]
        y2=[]
        x1=[]
        x2=[]
        # NOTE(review): duplicate initialisation; speed is already 0.0.
        speed = 0.0
        # Skip the three-line ITX preamble.
        del righe[0:3]
        for rigo in righe:
            r = rigo.strip(newline)
            if r.strip() =='END':
                break
            (ffb,eeb,fff,eef)= r.split()
            # Skip the column-header row.
            if ffb.strip()=='ffb':
                continue
            if eef.strip() != 'NAN':
                x1.append(float(eef))
                y1.append(float(fff))
            if eeb.strip() != 'NAN':
                x2.append(float(eeb))
                y2.append(float(ffb))
        self.segments.append(segment.segment(x1, y1))
        self.segments.append(segment.segment(x2, y2))
        # Parse the quoted settings trailer on the final line.
        r = righe[-1].strip(newline)
        r = r[r.find('"')+1:-1]
        sl = r.split(';')
        for var in sl:
            nm,val = var.split('=')
            if nm.strip() =='SC(pN/nm)':
                self.parameters['k'] = float(val)
            if nm.strip() == 'PullingRate(nm/s)':
                # NOTE(review): dividing an nm/s value by 1e9 yields m/s,
                # while jpktxt stores speed in nm/s -- confirm the units.
                speed = float(val)/1.0e9
        for p in self.segments:
            p.speed = speed
        return True
class igortxt(openWorker):
    def open(self):
        """
        Open Igor exported TXT files

        Same four-column layout as igoritx, but without the ITX
        preamble, END marker or settings trailer.  Returns True.
        """
        # NOTE(review): k stays at the 1.0 default and speed at 0.0 -- this
        # format carries no calibration metadata to read.
        self.parameters['k'] = 1.0
        speed = 0.0
        righe = self.getFile()
        newline = self.newline
        y1=[]
        y2=[]
        x1=[]
        x2=[]
        for rigo in righe:
            r = rigo.strip(newline)
            (ffb,eeb,fff,eef)= r.split()
            # Skip the column-header row.
            if ffb.strip()=='ffb':
                continue
            if eef.strip() != 'NAN':
                x1.append(float(eef))
                y1.append(float(fff))
            if eeb.strip() != 'NAN':
                x2.append(float(eeb))
                y2.append(float(ffb))
        self.segments.append(segment.segment(x1, y1))
        self.segments.append(segment.segment(x2, y2))
        for p in self.segments:
            p.speed = speed
        return True
class nanoscopetxt(openWorker):
    def open(self):
        import numpy as np
        # NOTE(review): the string below follows the import, so it is a
        # bare expression statement, not the method docstring.
        """
        Open exported text files from nanoscope (versions ? Implementation is not robust)
        """
        self.parameters['k'] = 1.0
        righe = self.getFile()
        newline = self.newline
        # o tracks the header section we are in (1 = Scanner list,
        # 2 = Ciao scan list, 3 = Ciao force list, 4 = Ciao force image
        # list); i is the current line index.
        o = 0
        i = 0
        r = righe[0].strip(newline)
        r = r.strip('"')
        r = r.strip('\\')
        while r != '*File list end':
            r = righe[i].strip(newline)
            r = r.strip('"')
            r = r.strip('\\')
            if o==0 and r=='*Scanner list':
                o+=1
            elif o==1 and r=='*Ciao scan list':
                o+=1
            elif o==2 and r=='*Ciao force list':
                o+=1
            elif o==3 and r=='*Ciao force image list':
                o+=1
            if r.find(':') > 0:
                # Split 'key: value'; keys may contain one ':' themselves.
                g = r.split(':')
                if len(g)==2:
                    (pre,post) = g
                else:
                    pre=g[0]+':'+g[1]
                    post=g[2]
                pre = pre.strip()
                post=post.strip()
                if o == 1:
                    if pre == '@Sens. Zsens':
                        post=post.split()
                        zsens = float(post[-2])
                        self.info['zsens']=zsens
                elif o==2:
                    if pre=='@Sens. DeflSens':
                        post=post.split()
                        deflsens = float(post[-2])
                        self.info['deflsens']=deflsens
                elif o==3:
                    if pre == 'Scan rate':
                        scanrate = float(post)
                        self.info['scanrate']=scanrate
                    elif pre=='@4:Ramp size Zsweep':
                        post=post.split()
                        rampsize = float(post[-2])
                    elif pre=='@Z scan start':
                        post=post.split()
                        zstart = float(post[-2])
                        self.info['zstart']=zstart
                    elif pre=='@Z scan size':
                        post=post.split()
                        zsize = float(post[-2])
                        self.info['zsize']=zsize
                    if pre == 'Forward vel.':
                        fspeed = float(post)
                        self.info['fspeed']=fspeed
                    if pre == 'Reverse vel.':
                        bspeed = float(post)
                        self.info['bspeed']=bspeed
                elif o==4:
                    if pre=='Samps/line':
                        post=post.split()
                        sampline = int(post[-1])
                    elif pre=='Spring Constant':
                        self.parameters['k'] = 1000.0*float(post)
                    elif pre=='@4:Z scale':
                        post=post.split()
                        zscale = float(post[-2])
                        self.info['zscale']=zscale
            i+=1
        # Data rows start two lines past the header terminator.
        # NOTE(review): zsens/rampsize/sampline/fspeed/bspeed are only bound
        # when their header lines were present -- a malformed file raises
        # NameError below (matching the docstring's robustness caveat).
        y1=[]
        y2=[]
        x=[]
        for j in range(i+2,len(righe)):
            rigo = righe[j].split()
            x.append((j-i-2)*zsens*rampsize/float(sampline))
            y1.append(-self.parameters['k']*float(rigo[0]))
            y2.append(-self.parameters['k']*float(rigo[1]))
        x = np.array(x)
        y1.reverse()
        y1 = np.array(y1)
        y2 = np.array(y2)
        #test whether some points at the end/beginning of the curves are saturating
        if y1[0]==y1[1]:
            a = y1[0]
            i=0
            # NOTE(review): i ends one past the first non-constant sample,
            # so the first valid point is trimmed too -- confirm off-by-one.
            for yy in y1:
                i+=1
                if yy!=a:
                    break
            y1 = y1[i:]
            y2 = y2[i:]
            x = x[i:]
        self.segments.append(segment.segment(x, y1))
        self.segments.append(segment.segment(x, y2))
        self.segments[0].speed = fspeed
        self.segments[1].speed = bspeed
        return True
if __name__ == "__main__":
    # This module is a library of file readers; nothing to run directly.
    # print() with a single argument behaves identically on Python 2 and 3,
    # unlike the original Python-2-only `print 'not for direct use'`.
    print('not for direct use')
import logging
import string
class openWorker():
def __init__(self,fname):
self.fname = fname
self.parameters={}
self.info={}
self.segments=[]
def parseConfigLine(self,cline,newline='\r\n'):
line = cline[2:-len(newline)]
if line.find(':')==-1:
return False
fragments = line.split(':')
name = fragments[0]
post = string.join(fragments[1:],':').strip()
if post.find('"')==-1:
val = post.split(' ')
else:
val = post[1:-1].split('" "')
if len(val)==1:
val = val[0]
return name,val
def getFile(self):
in_file = open(str(self.fname),"r")
righe = in_file.readlines()
in_file.close()
self.newline = '\n'
try:
if righe[10][-2:]=='\r\n':
self.newline = '\r\n'
elif righe[10][-1:]=='\r':
self.newline = '\r'
except:
logging.error('File is not an ascii file')
return False
return righe
def getAll(self):
return self.parameters,self.info,self.segments
def open(self):
return False
class opener:
EXT = ['txt','itx','dat','nano','r9c']
OPN = ['jpktxt','igoritx','igortxt','nanoscopetxt','r9Curves']
def __init__(self,fname):
self.fname = fname
def getOpener(self,driver=None):
if driver == None:
import os
extension = os.path.splitext(self.fname)[1][1:].lower()
for i in range(len(self.EXT)):
if self.EXT[i]==extension:
dr = eval(self.OPN[i]+'(self.fname)')
if dr.open()==True:
return dr.getAll()
else:
dr = eval(driver+'(self.fname)')
if dr.open()==True:
return dr.getAll()
return [],[],[]
class jpktxt(openWorker):
def open(self):
"""
Open JPK exported TXT files
"""
righe = self.getFile()
x=[]
y=[]
direction = None
chZ = 0
chF = 1
k = 1.0
parse = True
try:
speed = 0.0
for rigo in righe:
if rigo[0] != '#' and len(rigo) > len(self.newline) and parse and rigo[0] != ' ':
separator = ' '
if rigo.find(separator)==-1:
separator='\t'
datas = rigo[:-len(self.newline)].split(separator)
xi = datas[chZ]
yi = datas[chF]
x.append(float(xi)*1e9)
y.append(-1.0*float(yi)*1e12)
else:
ex = self.parseConfigLine(rigo,self.newline)
if ex != False:
name,val = ex
if name == 'units':
self.info['units'] = val
elif name == 'segmentIndex':
if len(x)>0 and len(y)>0:
self.segments.append(segment.segment(x,y))
self.segments[-1].speed = speed
self.segments[-1].k = k
if direction != None:
self.segments[-1].direction = direction
direction = None
speed = 1.0
x = []
y = []
elif name == 'springConstant':
self.parameters['k'] = 1000.0*float(val) #internally k is in pN/nm
k = self.parameters['k']
elif name=='segment':
direction = val
if val == 'extend':
direction='near'
parse = True
elif val == 'retract':
direction = 'far'
parse = True
elif val=='pause':
parse = False
elif name == 'columns':
# columns: height vDeflection smoothedCapacitiveSensorHeight capacitiveSensorHeight seriesTime time
# fancyNames: "Height" "Vertical deflection" "Height (measured & smoothed)" "Height (measured)" "Series Time" "Segment Time"
zs = ['smoothedCapacitiveSensorHeight','height','capacitiveSensorHeight','strainGaugeHeight']
for s in zs[::-1]:
if s in val:
chZ = val.index(s)
if 'vDeflection' in val:
chF = val.index('vDeflection')
elif name == 'fzfd':
if val == '1' or val == 'True':
self.parameters['fzfd'] = True
elif name == 'fancyNames':
self.info['fancyNames'] = val
elif name == 'sensitivity':
self.parameters['sensitivity'] = 1.0e9*float(val) #internally in nm/V
elif name == 'speed':
speed = 1.0e9*float(val) #internally in nm/s
except:
#if logging.getDEBUG :
# logging.error('File cannot be interpreted as JPK FD curve')
# return False
#else:
raise
if len(x)>0 and len(y)>0:
self.segments.append(segment.segment(x,y))
self.segments[-1].speed = speed
self.segments[-1].k = k
if direction != None:
self.segments[-1].direction = direction
return True
class igoritx(openWorker):
def open(self):
"""
Open internal Igor Text File ITX
"""
self.parameters['k'] = 1.0
speed = 0.0
righe = self.getFile()
newline = self.newline
y1=[]
y2=[]
x1=[]
x2=[]
speed = 0.0
del righe[0:3]
for rigo in righe:
r = rigo.strip(newline)
if r.strip() =='END':
break
(ffb,eeb,fff,eef)= r.split()
if ffb.strip()=='ffb':
continue
if eef.strip() != 'NAN':
x1.append(float(eef))
y1.append(float(fff))
if eeb.strip() != 'NAN':
x2.append(float(eeb))
y2.append(float(ffb))
self.segments.append(segment.segment(x1, y1))
self.segments.append(segment.segment(x2, y2))
r = righe[-1].strip(newline)
r = r[r.find('"')+1:-1]
sl = r.split(';')
for var in sl:
nm,val = var.split('=')
if nm.strip() =='SC(pN/nm)':
self.parameters['k'] = float(val)
if nm.strip() == 'PullingRate(nm/s)':
speed = float(val)/1.0e9
for p in self.segments:
p.speed = speed
return True
class igortxt(openWorker):
def open(self):
"""
Open Igor exported TXT files
"""
self.parameters['k'] = 1.0
speed = 0.0
righe = self.getFile()
newline = self.newline
y1=[]
y2=[]
x1=[]
x2=[]
for rigo in righe:
r = rigo.strip(newline)
(ffb,eeb,fff,eef)= r.split()
if ffb.strip()=='ffb':
continue
if eef.strip() != 'NAN':
x1.append(float(eef))
y1.append(float(fff))
if eeb.strip() != 'NAN':
x2.append(float(eeb))
y2.append(float(ffb))
self.segments.append(segment.segment(x1, y1))
self.segments.append(segment.segment(x2, y2))
for p in self.segments:
p.speed = speed
return True
class nanoscopetxt(openWorker):
def open(self):
import numpy as np
"""
Open exported text files from nanoscope (versions ? Implementation is not robust)
"""
self.parameters['k'] = 1.0
righe = self.getFile()
newline = self.newline
o = 0
i = 0
r = righe[0].strip(newline)
r = r.strip('"')
r = r.strip('\\')
while r != '*File list end':
r = righe[i].strip(newline)
r = r.strip('"')
r = r.strip('\\')
if o==0 and r=='*Scanner list':
o+=1
elif o==1 and r=='*Ciao scan list':
o+=1
elif o==2 and r=='*Ciao force list':
o+=1
elif o==3 and r=='*Ciao force image list':
o+=1
if r.find(':') > 0:
g = r.split(':')
if len(g)==2:
(pre,post) = g
else:
pre=g[0]+':'+g[1]
post=g[2]
pre = pre.strip()
post=post.strip()
if o == 1:
if pre == '@Sens. Zsens':
post=post.split()
zsens = float(post[-2])
self.info['zsens']=zsens
elif o==2:
if pre=='@Sens. DeflSens':
post=post.split()
deflsens = float(post[-2])
self.info['deflsens']=deflsens
elif o==3:
if pre == 'Scan rate':
scanrate = float(post)
self.info['scanrate']=scanrate
elif pre=='@4:Ramp size Zsweep':
post=post.split()
rampsize = float(post[-2])
elif pre=='@Z scan start':
post=post.split()
zstart = float(post[-2])
self.info['zstart']=zstart
elif pre=='@Z scan size':
post=post.split()
zsize = float(post[-2])
self.info['zsize']=zsize
if pre == 'Forward vel.':
fspeed = float(post)
self.info['fspeed']=fspeed
if pre == 'Reverse vel.':
bspeed = float(post)
self.info['bspeed']=bspeed
elif o==4:
if pre=='Samps/line':
post=post.split()
sampline = int(post[-1])
elif pre=='Spring Constant':
self.parameters['k'] = 1000.0*float(post)
elif pre=='@4:Z scale':
post=post.split()
zscale = float(post[-2])
self.info['zscale']=zscale
i+=1
y1=[]
y2=[]
x=[]
for j in range(i+2,len(righe)):
rigo = righe[j].split()
x.append((j-i-2)*zsens*rampsize/float(sampline))
y1.append(-self.parameters['k']*float(rigo[0]))
y2.append(-self.parameters['k']*float(rigo[1]))
x = np.array(x)
y1.reverse()
y1 = np.array(y1)
y2 = np.array(y2)
if y1[0]==y1[1]:
a = y1[0]
i=0
for yy in y1:
i+=1
if yy!=a:
break
y1 = y1[i:]
y2 = y2[i:]
x = x[i:]
self.segments.append(segment.segment(x, y1))
self.segments.append(segment.segment(x, y2))
self.segments[0].speed = fspeed
self.segments[1].speed = bspeed
return True
if __name__ == "__main__":
print 'not for direct use' | false | true |
1c458bedfb80717a0139eb3f7187e74d5601bb56 | 477 | py | Python | {{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/basic/templatetags/form_tags.py | cubicuboctahedron/cookiecutter-django-wagtail | d7f668ce09ba2c4a3f98045ab8a6fcd286d36553 | [
"Apache-2.0"
] | null | null | null | {{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/basic/templatetags/form_tags.py | cubicuboctahedron/cookiecutter-django-wagtail | d7f668ce09ba2c4a3f98045ab8a6fcd286d36553 | [
"Apache-2.0"
] | null | null | null | {{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/basic/templatetags/form_tags.py | cubicuboctahedron/cookiecutter-django-wagtail | d7f668ce09ba2c4a3f98045ab8a6fcd286d36553 | [
"Apache-2.0"
] | 1 | 2020-04-07T10:07:07.000Z | 2020-04-07T10:07:07.000Z | from django import template
register = template.Library()
@register.filter(name='addcss')
def addcss(field, css):
    """Render *field*'s widget with ``css`` as its HTML class attribute."""
    widget_attrs = {"class": css}
    return field.as_widget(attrs=widget_attrs)
@register.filter(name='add_attributes')
def add_attributes(field, css):
    """Render *field*'s widget with extra HTML attributes.

    ``css`` is a comma-separated list of entries: a bare entry sets the
    ``class`` attribute, while a ``name:value`` entry sets ``name``.
    Example: ``{{ form.email|add_attributes:"wide,placeholder:Email" }}``.
    """
    attrs = {}
    for entry in css.split(','):
        if ':' not in entry:
            # Bare token: treated as the CSS class.  A later bare token
            # overwrites an earlier one, matching the original behaviour.
            attrs['class'] = entry
        else:
            # Split on the first ':' only, so attribute values may contain
            # colons themselves (e.g. "data-url:http://example.com", which
            # previously raised ValueError from the unbounded split).
            name, value = entry.split(':', 1)
            attrs[name] = value
    return field.as_widget(attrs=attrs)
| 21.681818 | 46 | 0.597484 | from django import template
register = template.Library()
@register.filter(name='addcss')
def addcss(field, css):
return field.as_widget(attrs={"class":css})
@register.filter(name='add_attributes')
def add_attributes(field, css):
attrs = {}
definition = css.split(',')
for d in definition:
if ':' not in d:
attrs['class'] = d
else:
t, v = d.split(':')
attrs[t] = v
return field.as_widget(attrs=attrs)
| true | true |
1c458c303d4a0d97db1662628a538701eb8cf2dd | 1,049 | py | Python | test/hlt/pytest/python/com/huawei/iotplatform/client/dto/BatchTaskCreateInDTO.py | yuanyi-thu/AIOT- | 27f67d98324593c4c6c66bbd5e2a4aa7b9a4ac1e | [
"BSD-3-Clause"
] | 128 | 2018-10-29T04:11:47.000Z | 2022-03-07T02:19:14.000Z | test/hlt/pytest/python/com/huawei/iotplatform/client/dto/BatchTaskCreateInDTO.py | yuanyi-thu/AIOT- | 27f67d98324593c4c6c66bbd5e2a4aa7b9a4ac1e | [
"BSD-3-Clause"
] | 40 | 2018-11-02T00:40:48.000Z | 2021-12-07T09:33:56.000Z | test/hlt/pytest/python/com/huawei/iotplatform/client/dto/BatchTaskCreateInDTO.py | yuanyi-thu/AIOT- | 27f67d98324593c4c6c66bbd5e2a4aa7b9a4ac1e | [
"BSD-3-Clause"
] | 118 | 2018-10-29T08:43:57.000Z | 2022-01-07T06:49:25.000Z | from com.huawei.iotplatform.client.dto.ObjectNode import ObjectNode
from com.huawei.iotplatform.client.dto.TagDTO2 import TagDTO2
class BatchTaskCreateInDTO(object):
    """Request DTO for creating a batch task on the IoT platform.

    Plain data holder exposed through Java-style get/set accessors:
    application id, task name/type, timeout, device-selection tags and a
    free-form parameter object.
    """
    # NOTE(review): these class attributes are bound to the imported DTO
    # *classes* themselves (shared across instances) until setTags/setParam
    # assign an instance value -- they read as type placeholders rather
    # than real defaults; confirm consumers expect this.
    tags = TagDTO2
    param = ObjectNode
    def __init__(self):
        # Scalar fields default to None and are filled via the setters.
        self.appId = None
        self.taskName = None
        self.taskType = None
        self.timeout = None
    def getAppId(self):
        return self.appId
    def setAppId(self, appId):
        self.appId = appId
    def getTaskName(self):
        return self.taskName
    def setTaskName(self, taskName):
        self.taskName = taskName
    def getTaskType(self):
        return self.taskType
    def setTaskType(self, taskType):
        self.taskType = taskType
    def getTimeout(self):
        return self.timeout
    def setTimeout(self, timeout):
        self.timeout = timeout
    def getTags(self):
        return self.tags
    def setTags(self, tags):
        self.tags = tags
    def getParam(self):
        return self.param
    def setParam(self, param):
        self.param = param
| 20.98 | 67 | 0.638704 | from com.huawei.iotplatform.client.dto.ObjectNode import ObjectNode
from com.huawei.iotplatform.client.dto.TagDTO2 import TagDTO2
class BatchTaskCreateInDTO(object):
tags = TagDTO2
param = ObjectNode
def __init__(self):
self.appId = None
self.taskName = None
self.taskType = None
self.timeout = None
def getAppId(self):
return self.appId
def setAppId(self, appId):
self.appId = appId
def getTaskName(self):
return self.taskName
def setTaskName(self, taskName):
self.taskName = taskName
def getTaskType(self):
return self.taskType
def setTaskType(self, taskType):
self.taskType = taskType
def getTimeout(self):
return self.timeout
def setTimeout(self, timeout):
self.timeout = timeout
def getTags(self):
return self.tags
def setTags(self, tags):
self.tags = tags
def getParam(self):
return self.param
def setParam(self, param):
self.param = param
| true | true |
1c458e127c7a31bedae9e99bb85864dbcdac3092 | 20,718 | py | Python | nova/api/openstack/compute/legacy_v2/contrib/security_groups.py | ebalduf/nova-backports | 6bf97ec73467de522d34ab7a17ca0e0874baa7f9 | [
"Apache-2.0"
] | 5 | 2016-04-28T16:20:38.000Z | 2021-04-25T11:19:03.000Z | nova/api/openstack/compute/legacy_v2/contrib/security_groups.py | ebalduf/nova-backports | 6bf97ec73467de522d34ab7a17ca0e0874baa7f9 | [
"Apache-2.0"
] | null | null | null | nova/api/openstack/compute/legacy_v2/contrib/security_groups.py | ebalduf/nova-backports | 6bf97ec73467de522d34ab7a17ca0e0874baa7f9 | [
"Apache-2.0"
] | 5 | 2020-04-08T20:24:45.000Z | 2020-10-05T19:02:13.000Z | # Copyright 2011 OpenStack Foundation
# Copyright 2012 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The security groups extension."""
import contextlib
from xml.dom import minidom
from oslo_log import log as logging
from oslo_serialization import jsonutils
import six
import webob
from webob import exc
from nova.api.openstack import common
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import compute
from nova import exception
from nova.i18n import _
from nova.network.security_group import openstack_driver
from nova.virt import netutils
LOG = logging.getLogger(__name__)
# Policy check for this extension; called for effect (not its return value)
# in _authorize_context below.
authorize = extensions.extension_authorizer('compute', 'security_groups')
# "Soft" authorizer variant -- presumably reports permission as a boolean
# instead of raising; confirm against nova.api.openstack.extensions.
softauth = extensions.soft_extension_authorizer('compute', 'security_groups')
def _authorize_context(req):
    """Extract the nova request context from the WSGI environ, run the
    security-groups policy check on it, and return it."""
    ctxt = req.environ['nova.context']
    authorize(ctxt)
    return ctxt
@contextlib.contextmanager
def translate_exceptions():
    """Translate nova exceptions to http exceptions."""
    # Maps domain-level nova exceptions raised inside the `with` block onto
    # the webob HTTP errors the API layer should return:
    #   Invalid                    -> 400 Bad Request
    #   SecurityGroupNotFound      -> 404 Not Found
    #   InstanceNotFound           -> 404 Not Found
    #   SecurityGroupLimitExceeded -> 403 Forbidden
    #   NoUniqueMatch              -> 409 Conflict
    # Any other exception propagates unchanged.
    try:
        yield
    except exception.Invalid as exp:
        msg = exp.format_message()
        raise exc.HTTPBadRequest(explanation=msg)
    except exception.SecurityGroupNotFound as exp:
        msg = exp.format_message()
        raise exc.HTTPNotFound(explanation=msg)
    except exception.InstanceNotFound as exp:
        msg = exp.format_message()
        raise exc.HTTPNotFound(explanation=msg)
    except exception.SecurityGroupLimitExceeded as exp:
        msg = exp.format_message()
        raise exc.HTTPForbidden(explanation=msg)
    except exception.NoUniqueMatch as exp:
        msg = exp.format_message()
        raise exc.HTTPConflict(explanation=msg)
class SecurityGroupControllerBase(object):
    """Base class for Security Group controllers."""
    def __init__(self):
        # Resolve the configured security-group backend driver and build a
        # compute API wired to use it.
        self.security_group_api = (
            openstack_driver.get_openstack_security_group_driver())
        self.compute_api = compute.API(
            security_group_api=self.security_group_api)
    def _format_security_group_rule(self, context, rule, group_rule_data=None):
        """Return a security group rule in desired API response format.
        If group_rule_data is passed in that is used rather than querying
        for it.

        Returns None (implicitly) when the rule references a source group
        that no longer exists; callers skip such rules.
        """
        sg_rule = {}
        sg_rule['id'] = rule['id']
        sg_rule['parent_group_id'] = rule['parent_group_id']
        sg_rule['ip_protocol'] = rule['protocol']
        sg_rule['from_port'] = rule['from_port']
        sg_rule['to_port'] = rule['to_port']
        sg_rule['group'] = {}
        sg_rule['ip_range'] = {}
        if rule['group_id']:
            with translate_exceptions():
                try:
                    source_group = self.security_group_api.get(
                        context, id=rule['group_id'])
                except exception.SecurityGroupNotFound:
                    # NOTE(arosen): There is a possible race condition that can
                    # occur here if two api calls occur concurrently: one that
                    # lists the security groups and another one that deletes a
                    # security group rule that has a group_id before the
                    # group_id is fetched. To handle this if
                    # SecurityGroupNotFound is raised we return None instead
                    # of the rule and the caller should ignore the rule.
                    LOG.debug("Security Group ID %s does not exist",
                              rule['group_id'])
                    return
            sg_rule['group'] = {'name': source_group.get('name'),
                                'tenant_id': source_group.get('project_id')}
        elif group_rule_data:
            sg_rule['group'] = group_rule_data
        else:
            sg_rule['ip_range'] = {'cidr': rule['cidr']}
        return sg_rule
    def _format_security_group(self, context, group):
        # Build the API-facing dict for one security group; rules that fail
        # to format (racing delete, see above) are silently skipped.
        security_group = {}
        security_group['id'] = group['id']
        security_group['description'] = group['description']
        security_group['name'] = group['name']
        security_group['tenant_id'] = group['project_id']
        security_group['rules'] = []
        for rule in group['rules']:
            formatted_rule = self._format_security_group_rule(context, rule)
            if formatted_rule:
                security_group['rules'] += [formatted_rule]
        return security_group
    def _from_body(self, body, key):
        # Fetch the top-level `key` object from a request body, translating
        # a missing body or key into 400 Bad Request.
        if not body:
            raise exc.HTTPBadRequest(
                explanation=_("The request body can't be empty"))
        value = body.get(key, None)
        if value is None:
            raise exc.HTTPBadRequest(
                explanation=_("Missing parameter %s") % key)
        return value
class SecurityGroupController(SecurityGroupControllerBase):
    """The Security group API controller for the OpenStack API."""
    def show(self, req, id):
        """Return data about the given security group."""
        context = _authorize_context(req)
        with translate_exceptions():
            id = self.security_group_api.validate_id(id)
            security_group = self.security_group_api.get(context, None, id,
                                                         map_exception=True)
        return {'security_group': self._format_security_group(context,
                                                              security_group)}
    def delete(self, req, id):
        """Delete a security group."""
        context = _authorize_context(req)
        with translate_exceptions():
            id = self.security_group_api.validate_id(id)
            security_group = self.security_group_api.get(context, None, id,
                                                         map_exception=True)
            self.security_group_api.destroy(context, security_group)
        # 202 Accepted with an empty body.
        return webob.Response(status_int=202)
    def index(self, req):
        """Returns a list of security groups."""
        context = _authorize_context(req)
        # Pass all query-string parameters through as search options.
        search_opts = {}
        search_opts.update(req.GET)
        with translate_exceptions():
            project_id = context.project_id
            raw_groups = self.security_group_api.list(context,
                                                      project=project_id,
                                                      search_opts=search_opts)
        # Apply the API's standard paging before formatting each group.
        limited_list = common.limited(raw_groups, req)
        result = [self._format_security_group(context, group)
                  for group in limited_list]
        # Deterministic ordering: by tenant id, then by group name.
        return {'security_groups':
                list(sorted(result,
                            key=lambda k: (k['tenant_id'], k['name'])))}
    def create(self, req, body):
        """Creates a new security group."""
        context = _authorize_context(req)
        security_group = self._from_body(body, 'security_group')
        group_name = security_group.get('name', None)
        group_description = security_group.get('description', None)
        with translate_exceptions():
            self.security_group_api.validate_property(group_name, 'name', None)
            self.security_group_api.validate_property(group_description,
                                                      'description', None)
            group_ref = self.security_group_api.create_security_group(
                context, group_name, group_description)
        return {'security_group': self._format_security_group(context,
                                                              group_ref)}
    def update(self, req, id, body):
        """Update a security group."""
        context = _authorize_context(req)
        with translate_exceptions():
            id = self.security_group_api.validate_id(id)
            security_group = self.security_group_api.get(context, None, id,
                                                         map_exception=True)
        security_group_data = self._from_body(body, 'security_group')
        group_name = security_group_data.get('name', None)
        group_description = security_group_data.get('description', None)
        with translate_exceptions():
            self.security_group_api.validate_property(group_name, 'name', None)
            self.security_group_api.validate_property(group_description,
                                                      'description', None)
            group_ref = self.security_group_api.update_security_group(
                context, security_group, group_name, group_description)
        return {'security_group': self._format_security_group(context,
                                                              group_ref)}
class SecurityGroupRulesController(SecurityGroupControllerBase):
    """API controller for creating and deleting security group rules."""

    def create(self, req, body):
        """Create a new ingress rule under an existing security group.

        The rule may target either a CIDR or another security group
        (``group_id``); ``_rule_args_to_dict`` builds the backend rule
        from whichever was supplied.
        """
        context = _authorize_context(req)
        sg_rule = self._from_body(body, 'security_group_rule')
        # The parent group must exist before we try to build the rule.
        with translate_exceptions():
            parent_group_id = self.security_group_api.validate_id(
                sg_rule.get('parent_group_id', None))
            security_group = self.security_group_api.get(context, None,
                                                         parent_group_id,
                                                         map_exception=True)
        try:
            new_rule = self._rule_args_to_dict(context,
                to_port=sg_rule.get('to_port'),
                from_port=sg_rule.get('from_port'),
                ip_protocol=sg_rule.get('ip_protocol'),
                cidr=sg_rule.get('cidr'),
                group_id=sg_rule.get('group_id'))
        except exception.SecurityGroupNotFound as e:
            raise exc.HTTPNotFound(explanation=e.format_message())
        except Exception as exp:
            # Any other backend validation failure becomes a 400.
            raise exc.HTTPBadRequest(explanation=six.text_type(exp))
        if new_rule is None:
            msg = _("Not enough parameters to build a valid rule.")
            raise exc.HTTPBadRequest(explanation=msg)
        new_rule['parent_group_id'] = security_group['id']
        if 'cidr' in new_rule:
            # Reject a /0 prefix unless it is the true "any" address.
            net, prefixlen = netutils.get_net_and_prefixlen(new_rule['cidr'])
            if net not in ('0.0.0.0', '::') and prefixlen == '0':
                msg = _("Bad prefix for network in cidr %s") % new_rule['cidr']
                raise exc.HTTPBadRequest(explanation=msg)
        group_rule_data = None
        with translate_exceptions():
            if sg_rule.get('group_id'):
                source_group = self.security_group_api.get(
                    context, id=sg_rule['group_id'])
                group_rule_data = {'name': source_group.get('name'),
                                   'tenant_id': source_group.get('project_id')}
            security_group_rule = (
                self.security_group_api.create_security_group_rule(
                    context, security_group, new_rule))
        formatted_rule = self._format_security_group_rule(context,
                                                          security_group_rule,
                                                          group_rule_data)
        return {"security_group_rule": formatted_rule}

    def _rule_args_to_dict(self, context, to_port=None, from_port=None,
                           ip_protocol=None, cidr=None, group_id=None):
        """Build a backend rule dict from the request arguments.

        A non-None ``group_id`` takes precedence over ``cidr``.
        """
        if group_id is not None:
            group_id = self.security_group_api.validate_id(group_id)
            # check if groupId exists
            self.security_group_api.get(context, id=group_id)
            return self.security_group_api.new_group_ingress_rule(
                group_id, ip_protocol, from_port, to_port)
        else:
            cidr = self.security_group_api.parse_cidr(cidr)
            return self.security_group_api.new_cidr_ingress_rule(
                cidr, ip_protocol, from_port, to_port)

    def delete(self, req, id):
        """Delete a single rule, looked up via its parent group."""
        context = _authorize_context(req)
        with translate_exceptions():
            id = self.security_group_api.validate_id(id)
            rule = self.security_group_api.get_rule(context, id)
            group_id = rule['parent_group_id']
            security_group = self.security_group_api.get(context, None,
                                                         group_id,
                                                         map_exception=True)
            self.security_group_api.remove_rules(context, security_group,
                                                 [rule['id']])
        return webob.Response(status_int=202)
class ServerSecurityGroupController(SecurityGroupControllerBase):
    """Expose the security groups attached to a single server."""

    def index(self, req, server_id):
        """Return the security groups associated with *server_id*."""
        context = _authorize_context(req)
        # Make sure the tenant's default group exists before listing.
        self.security_group_api.ensure_default(context)
        with translate_exceptions():
            instance = common.get_instance(self.compute_api, context,
                                           server_id)
            groups = self.security_group_api.get_instance_security_groups(
                context, instance, True)
        formatted = [self._format_security_group(context, group)
                     for group in groups]
        formatted.sort(key=lambda sg: (sg['tenant_id'], sg['name']))
        return {'security_groups': formatted}
class SecurityGroupActionController(wsgi.Controller):
    """Server actions that attach or detach a security group by name."""

    def __init__(self, *args, **kwargs):
        super(SecurityGroupActionController, self).__init__(*args, **kwargs)
        self.security_group_api = (
            openstack_driver.get_openstack_security_group_driver())
        self.compute_api = compute.API(
            security_group_api=self.security_group_api)

    def _parse(self, body, action):
        """Extract and validate the group name from an action body."""
        try:
            group_name = body[action]['name']
        except TypeError:
            raise webob.exc.HTTPBadRequest(
                explanation=_("Missing parameter dict"))
        except KeyError:
            raise webob.exc.HTTPBadRequest(
                explanation=_("Security group not specified"))
        if not group_name or group_name.strip() == '':
            raise webob.exc.HTTPBadRequest(
                explanation=_("Security group name cannot be empty"))
        return group_name

    def _invoke(self, method, context, id, group_name):
        """Look up the instance and apply *method* to it."""
        with translate_exceptions():
            instance = common.get_instance(self.compute_api, context, id)
            method(context, instance, group_name)
        return webob.Response(status_int=202)

    @wsgi.action('addSecurityGroup')
    def _addSecurityGroup(self, req, id, body):
        context = req.environ['nova.context']
        authorize(context)
        name = self._parse(body, 'addSecurityGroup')
        return self._invoke(self.security_group_api.add_to_instance,
                            context, id, name)

    @wsgi.action('removeSecurityGroup')
    def _removeSecurityGroup(self, req, id, body):
        context = req.environ['nova.context']
        authorize(context)
        name = self._parse(body, 'removeSecurityGroup')
        return self._invoke(self.security_group_api.remove_from_instance,
                            context, id, name)
class SecurityGroupsOutputController(wsgi.Controller):
    """Response extension that injects security groups into server views."""

    def __init__(self, *args, **kwargs):
        super(SecurityGroupsOutputController, self).__init__(*args, **kwargs)
        self.compute_api = compute.API()
        self.security_group_api = (
            openstack_driver.get_openstack_security_group_driver())

    def _extend_servers(self, req, servers):
        """Attach a ``security_groups`` entry to each server dict.

        Non-neutron deployments read the groups from the cached DB
        instance; neutron deployments look them up (GET) or echo back
        the groups requested in the body (POST).
        """
        # TODO(arosen) this function should be refactored to reduce duplicate
        # code and use get_instance_security_groups instead of get_db_instance.
        if not len(servers):
            return
        key = "security_groups"
        context = _authorize_context(req)
        if not openstack_driver.is_neutron_security_groups():
            for server in servers:
                instance = req.get_db_instance(server['id'])
                groups = instance.get(key)
                if groups:
                    server[key] = [{"name": group["name"]} for group in groups]
        else:
            # If method is a POST we get the security groups intended for an
            # instance from the request. The reason for this is if using
            # neutron security groups the requested security groups for the
            # instance are not in the db and have not been sent to neutron yet.
            if req.method != 'POST':
                sg_instance_bindings = (
                    self.security_group_api
                    .get_instances_security_groups_bindings(context,
                                                            servers))
                for server in servers:
                    groups = sg_instance_bindings.get(server['id'])
                    if groups:
                        server[key] = groups
            # In this section of code len(servers) == 1 as you can only POST
            # one server in an API request.
            else:
                try:
                    # try converting to json
                    req_obj = jsonutils.loads(req.body)
                    # Add security group to server, if no security group was in
                    # request add default since that is the group it is part of
                    servers[0][key] = req_obj['server'].get(
                        key, [{'name': 'default'}])
                except ValueError:
                    # Body was not JSON; fall back to parsing it as XML.
                    root = minidom.parseString(req.body)
                    sg_root = root.getElementsByTagName(key)
                    groups = []
                    if sg_root:
                        security_groups = sg_root[0].getElementsByTagName(
                            'security_group')
                        for security_group in security_groups:
                            groups.append(
                                {'name': security_group.getAttribute('name')})
                    if not groups:
                        groups = [{'name': 'default'}]
                    servers[0][key] = groups

    def _show(self, req, resp_obj):
        """Extend a single-server response, if policy allows."""
        if not softauth(req.environ['nova.context']):
            return
        if 'server' in resp_obj.obj:
            self._extend_servers(req, [resp_obj.obj['server']])

    @wsgi.extends
    def show(self, req, resp_obj, id):
        return self._show(req, resp_obj)

    @wsgi.extends
    def create(self, req, resp_obj, body):
        return self._show(req, resp_obj)

    @wsgi.extends
    def detail(self, req, resp_obj):
        """Extend every server in a detail listing, if policy allows."""
        if not softauth(req.environ['nova.context']):
            return
        self._extend_servers(req, list(resp_obj.obj['servers']))
class Security_groups(extensions.ExtensionDescriptor):
    """Security group support."""

    name = "SecurityGroups"
    alias = "os-security-groups"
    namespace = "http://docs.openstack.org/compute/ext/securitygroups/api/v1.1"
    updated = "2013-05-28T00:00:00Z"

    def get_controller_extensions(self):
        """Attach the action and output controllers to /servers."""
        action_ext = extensions.ControllerExtension(
            self, 'servers', SecurityGroupActionController())
        output_ext = extensions.ControllerExtension(
            self, 'servers', SecurityGroupsOutputController())
        return [action_ext, output_ext]

    def get_resources(self):
        """Register the top-level and per-server security-group resources."""
        secgroups = extensions.ResourceExtension(
            'os-security-groups',
            controller=SecurityGroupController())
        rules = extensions.ResourceExtension(
            'os-security-group-rules',
            controller=SecurityGroupRulesController())
        server_groups = extensions.ResourceExtension(
            'os-security-groups',
            controller=ServerSecurityGroupController(),
            parent=dict(member_name='server', collection_name='servers'))
        return [secgroups, rules, server_groups]
| 40.863905 | 79 | 0.592383 |
import contextlib
from xml.dom import minidom
from oslo_log import log as logging
from oslo_serialization import jsonutils
import six
import webob
from webob import exc
from nova.api.openstack import common
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import compute
from nova import exception
from nova.i18n import _
from nova.network.security_group import openstack_driver
from nova.virt import netutils
# Module logger plus the policy checks for this extension.  `authorize`
# raises on denial; `softauth` is used as a boolean gate (see
# SecurityGroupsOutputController._show / .detail).
LOG = logging.getLogger(__name__)
authorize = extensions.extension_authorizer('compute', 'security_groups')
softauth = extensions.soft_extension_authorizer('compute', 'security_groups')
def _authorize_context(req):
    """Pull the nova request context from *req*, enforce the
    security-groups policy on it, and return it.
    """
    context = req.environ['nova.context']
    authorize(context)
    return context
@contextlib.contextmanager
def translate_exceptions():
    """Map nova exceptions raised in the wrapped block onto webob HTTP
    errors so API handlers respond with the proper status code:
    Invalid -> 400, *NotFound -> 404, LimitExceeded -> 403,
    NoUniqueMatch -> 409.
    """
    try:
        yield
    except exception.Invalid as exp:
        msg = exp.format_message()
        raise exc.HTTPBadRequest(explanation=msg)
    except exception.SecurityGroupNotFound as exp:
        msg = exp.format_message()
        raise exc.HTTPNotFound(explanation=msg)
    except exception.InstanceNotFound as exp:
        msg = exp.format_message()
        raise exc.HTTPNotFound(explanation=msg)
    except exception.SecurityGroupLimitExceeded as exp:
        msg = exp.format_message()
        raise exc.HTTPForbidden(explanation=msg)
    except exception.NoUniqueMatch as exp:
        msg = exp.format_message()
        raise exc.HTTPConflict(explanation=msg)
class SecurityGroupControllerBase(object):
    """Shared plumbing for the security-group controllers: backend API
    handles plus dict-formatting helpers for groups and rules.
    """

    def __init__(self):
        self.security_group_api = (
            openstack_driver.get_openstack_security_group_driver())
        self.compute_api = compute.API(
            security_group_api=self.security_group_api)

    def _format_security_group_rule(self, context, rule, group_rule_data=None):
        """Format a rule dict for API output.

        Returns None when the rule references a source group that no
        longer exists; callers skip such rules.
        """
        sg_rule = {}
        sg_rule['id'] = rule['id']
        sg_rule['parent_group_id'] = rule['parent_group_id']
        sg_rule['ip_protocol'] = rule['protocol']
        sg_rule['from_port'] = rule['from_port']
        sg_rule['to_port'] = rule['to_port']
        sg_rule['group'] = {}
        sg_rule['ip_range'] = {}
        if rule['group_id']:
            with translate_exceptions():
                try:
                    source_group = self.security_group_api.get(
                        context, id=rule['group_id'])
                except exception.SecurityGroupNotFound:
                    # Referenced group vanished; drop the rule (return None).
                    LOG.debug("Security Group ID %s does not exist",
                              rule['group_id'])
                    return
            sg_rule['group'] = {'name': source_group.get('name'),
                                'tenant_id': source_group.get('project_id')}
        elif group_rule_data:
            sg_rule['group'] = group_rule_data
        else:
            sg_rule['ip_range'] = {'cidr': rule['cidr']}
        return sg_rule

    def _format_security_group(self, context, group):
        """Format a security group (including its rules) for API output."""
        security_group = {}
        security_group['id'] = group['id']
        security_group['description'] = group['description']
        security_group['name'] = group['name']
        security_group['tenant_id'] = group['project_id']
        security_group['rules'] = []
        for rule in group['rules']:
            formatted_rule = self._format_security_group_rule(context, rule)
            if formatted_rule:
                security_group['rules'] += [formatted_rule]
        return security_group

    def _from_body(self, body, key):
        """Return body[key], raising 400 on a missing body or key."""
        if not body:
            raise exc.HTTPBadRequest(
                explanation=_("The request body can't be empty"))
        value = body.get(key, None)
        if value is None:
            raise exc.HTTPBadRequest(
                explanation=_("Missing parameter %s") % key)
        return value
class SecurityGroupController(SecurityGroupControllerBase):
    """CRUD API controller for top-level security groups."""

    def show(self, req, id):
        """Return the security group with the given id."""
        context = _authorize_context(req)
        with translate_exceptions():
            id = self.security_group_api.validate_id(id)
            security_group = self.security_group_api.get(context, None, id,
                                                         map_exception=True)
        return {'security_group': self._format_security_group(context,
                                                              security_group)}

    def delete(self, req, id):
        """Delete a security group; responds 202 on success."""
        context = _authorize_context(req)
        with translate_exceptions():
            id = self.security_group_api.validate_id(id)
            security_group = self.security_group_api.get(context, None, id,
                                                         map_exception=True)
            self.security_group_api.destroy(context, security_group)
        return webob.Response(status_int=202)

    def index(self, req):
        """List security groups, filtered by query string, sorted by
        (tenant_id, name).
        """
        context = _authorize_context(req)
        search_opts = {}
        search_opts.update(req.GET)
        with translate_exceptions():
            project_id = context.project_id
            raw_groups = self.security_group_api.list(context,
                                                      project=project_id,
                                                      search_opts=search_opts)
        limited_list = common.limited(raw_groups, req)
        result = [self._format_security_group(context, group)
                  for group in limited_list]
        return {'security_groups':
                list(sorted(result,
                            key=lambda k: (k['tenant_id'], k['name'])))}

    def create(self, req, body):
        """Create a new security group after validating name/description."""
        context = _authorize_context(req)
        security_group = self._from_body(body, 'security_group')
        group_name = security_group.get('name', None)
        group_description = security_group.get('description', None)
        with translate_exceptions():
            self.security_group_api.validate_property(group_name, 'name', None)
            self.security_group_api.validate_property(group_description,
                                                      'description', None)
            group_ref = self.security_group_api.create_security_group(
                context, group_name, group_description)
        return {'security_group': self._format_security_group(context,
                                                              group_ref)}

    def update(self, req, id, body):
        """Update an existing group's name/description."""
        context = _authorize_context(req)
        with translate_exceptions():
            id = self.security_group_api.validate_id(id)
            security_group = self.security_group_api.get(context, None, id,
                                                         map_exception=True)
        security_group_data = self._from_body(body, 'security_group')
        group_name = security_group_data.get('name', None)
        group_description = security_group_data.get('description', None)
        with translate_exceptions():
            self.security_group_api.validate_property(group_name, 'name', None)
            self.security_group_api.validate_property(group_description,
                                                      'description', None)
            group_ref = self.security_group_api.update_security_group(
                context, security_group, group_name, group_description)
        return {'security_group': self._format_security_group(context,
                                                              group_ref)}
class SecurityGroupRulesController(SecurityGroupControllerBase):
def create(self, req, body):
context = _authorize_context(req)
sg_rule = self._from_body(body, 'security_group_rule')
with translate_exceptions():
parent_group_id = self.security_group_api.validate_id(
sg_rule.get('parent_group_id', None))
security_group = self.security_group_api.get(context, None,
parent_group_id,
map_exception=True)
try:
new_rule = self._rule_args_to_dict(context,
to_port=sg_rule.get('to_port'),
from_port=sg_rule.get('from_port'),
ip_protocol=sg_rule.get('ip_protocol'),
cidr=sg_rule.get('cidr'),
group_id=sg_rule.get('group_id'))
except exception.SecurityGroupNotFound as e:
raise exc.HTTPNotFound(explanation=e.format_message())
except Exception as exp:
raise exc.HTTPBadRequest(explanation=six.text_type(exp))
if new_rule is None:
msg = _("Not enough parameters to build a valid rule.")
raise exc.HTTPBadRequest(explanation=msg)
new_rule['parent_group_id'] = security_group['id']
if 'cidr' in new_rule:
net, prefixlen = netutils.get_net_and_prefixlen(new_rule['cidr'])
if net not in ('0.0.0.0', '::') and prefixlen == '0':
msg = _("Bad prefix for network in cidr %s") % new_rule['cidr']
raise exc.HTTPBadRequest(explanation=msg)
group_rule_data = None
with translate_exceptions():
if sg_rule.get('group_id'):
source_group = self.security_group_api.get(
context, id=sg_rule['group_id'])
group_rule_data = {'name': source_group.get('name'),
'tenant_id': source_group.get('project_id')}
security_group_rule = (
self.security_group_api.create_security_group_rule(
context, security_group, new_rule))
formatted_rule = self._format_security_group_rule(context,
security_group_rule,
group_rule_data)
return {"security_group_rule": formatted_rule}
def _rule_args_to_dict(self, context, to_port=None, from_port=None,
ip_protocol=None, cidr=None, group_id=None):
if group_id is not None:
group_id = self.security_group_api.validate_id(group_id)
# check if groupId exists
self.security_group_api.get(context, id=group_id)
return self.security_group_api.new_group_ingress_rule(
group_id, ip_protocol, from_port, to_port)
else:
cidr = self.security_group_api.parse_cidr(cidr)
return self.security_group_api.new_cidr_ingress_rule(
cidr, ip_protocol, from_port, to_port)
def delete(self, req, id):
context = _authorize_context(req)
with translate_exceptions():
id = self.security_group_api.validate_id(id)
rule = self.security_group_api.get_rule(context, id)
group_id = rule['parent_group_id']
security_group = self.security_group_api.get(context, None,
group_id,
map_exception=True)
self.security_group_api.remove_rules(context, security_group,
[rule['id']])
return webob.Response(status_int=202)
class ServerSecurityGroupController(SecurityGroupControllerBase):
def index(self, req, server_id):
context = _authorize_context(req)
self.security_group_api.ensure_default(context)
with translate_exceptions():
instance = common.get_instance(self.compute_api, context,
server_id)
groups = self.security_group_api.get_instance_security_groups(
context, instance, True)
result = [self._format_security_group(context, group)
for group in groups]
return {'security_groups':
list(sorted(result,
key=lambda k: (k['tenant_id'], k['name'])))}
class SecurityGroupActionController(wsgi.Controller):
def __init__(self, *args, **kwargs):
super(SecurityGroupActionController, self).__init__(*args, **kwargs)
self.security_group_api = (
openstack_driver.get_openstack_security_group_driver())
self.compute_api = compute.API(
security_group_api=self.security_group_api)
def _parse(self, body, action):
try:
body = body[action]
group_name = body['name']
except TypeError:
msg = _("Missing parameter dict")
raise webob.exc.HTTPBadRequest(explanation=msg)
except KeyError:
msg = _("Security group not specified")
raise webob.exc.HTTPBadRequest(explanation=msg)
if not group_name or group_name.strip() == '':
msg = _("Security group name cannot be empty")
raise webob.exc.HTTPBadRequest(explanation=msg)
return group_name
def _invoke(self, method, context, id, group_name):
with translate_exceptions():
instance = common.get_instance(self.compute_api, context, id)
method(context, instance, group_name)
return webob.Response(status_int=202)
@wsgi.action('addSecurityGroup')
def _addSecurityGroup(self, req, id, body):
context = req.environ['nova.context']
authorize(context)
group_name = self._parse(body, 'addSecurityGroup')
return self._invoke(self.security_group_api.add_to_instance,
context, id, group_name)
@wsgi.action('removeSecurityGroup')
def _removeSecurityGroup(self, req, id, body):
context = req.environ['nova.context']
authorize(context)
group_name = self._parse(body, 'removeSecurityGroup')
return self._invoke(self.security_group_api.remove_from_instance,
context, id, group_name)
class SecurityGroupsOutputController(wsgi.Controller):
def __init__(self, *args, **kwargs):
super(SecurityGroupsOutputController, self).__init__(*args, **kwargs)
self.compute_api = compute.API()
self.security_group_api = (
openstack_driver.get_openstack_security_group_driver())
def _extend_servers(self, req, servers):
# TODO(arosen) this function should be refactored to reduce duplicate
# code and use get_instance_security_groups instead of get_db_instance.
if not len(servers):
return
key = "security_groups"
context = _authorize_context(req)
if not openstack_driver.is_neutron_security_groups():
for server in servers:
instance = req.get_db_instance(server['id'])
groups = instance.get(key)
if groups:
server[key] = [{"name": group["name"]} for group in groups]
else:
# If method is a POST we get the security groups intended for an
# instance from the request. The reason for this is if using
# neutron security groups the requested security groups for the
# instance are not in the db and have not been sent to neutron yet.
if req.method != 'POST':
sg_instance_bindings = (
self.security_group_api
.get_instances_security_groups_bindings(context,
servers))
for server in servers:
groups = sg_instance_bindings.get(server['id'])
if groups:
server[key] = groups
# In this section of code len(servers) == 1 as you can only POST
# one server in an API request.
else:
try:
# try converting to json
req_obj = jsonutils.loads(req.body)
# Add security group to server, if no security group was in
# request add default since that is the group it is part of
servers[0][key] = req_obj['server'].get(
key, [{'name': 'default'}])
except ValueError:
root = minidom.parseString(req.body)
sg_root = root.getElementsByTagName(key)
groups = []
if sg_root:
security_groups = sg_root[0].getElementsByTagName(
'security_group')
for security_group in security_groups:
groups.append(
{'name': security_group.getAttribute('name')})
if not groups:
groups = [{'name': 'default'}]
servers[0][key] = groups
def _show(self, req, resp_obj):
if not softauth(req.environ['nova.context']):
return
if 'server' in resp_obj.obj:
self._extend_servers(req, [resp_obj.obj['server']])
@wsgi.extends
def show(self, req, resp_obj, id):
return self._show(req, resp_obj)
@wsgi.extends
def create(self, req, resp_obj, body):
return self._show(req, resp_obj)
@wsgi.extends
def detail(self, req, resp_obj):
if not softauth(req.environ['nova.context']):
return
self._extend_servers(req, list(resp_obj.obj['servers']))
class Security_groups(extensions.ExtensionDescriptor):
name = "SecurityGroups"
alias = "os-security-groups"
namespace = "http://docs.openstack.org/compute/ext/securitygroups/api/v1.1"
updated = "2013-05-28T00:00:00Z"
def get_controller_extensions(self):
controller = SecurityGroupActionController()
actions = extensions.ControllerExtension(self, 'servers', controller)
controller = SecurityGroupsOutputController()
output = extensions.ControllerExtension(self, 'servers', controller)
return [actions, output]
def get_resources(self):
resources = []
res = extensions.ResourceExtension('os-security-groups',
controller=SecurityGroupController())
resources.append(res)
res = extensions.ResourceExtension('os-security-group-rules',
controller=SecurityGroupRulesController())
resources.append(res)
res = extensions.ResourceExtension(
'os-security-groups',
controller=ServerSecurityGroupController(),
parent=dict(member_name='server', collection_name='servers'))
resources.append(res)
return resources
| true | true |
1c458edcdd1b7f3f78cef784442634fe79c4c946 | 78 | py | Python | ass_17.py | Divyanshi0409/Python-Programs | 7fb8ab2159cc69de7168bf19f91325b9c7a908c7 | [
"MIT"
] | null | null | null | ass_17.py | Divyanshi0409/Python-Programs | 7fb8ab2159cc69de7168bf19f91325b9c7a908c7 | [
"MIT"
] | null | null | null | ass_17.py | Divyanshi0409/Python-Programs | 7fb8ab2159cc69de7168bf19f91325b9c7a908c7 | [
"MIT"
] | null | null | null | for i in range(50,81):
if i%2==0:
print(i)
else:
break | 15.6 | 22 | 0.448718 | for i in range(50,81):
if i%2==0:
print(i)
else:
break | true | true |
1c458f5175cf9bf35887e6e17a55a96733dcd698 | 2,954 | py | Python | pint/testing.py | fernandezc/pint | 37a61ede6fbd628c7dc160eb36278cf41c96484c | [
"BSD-3-Clause"
] | null | null | null | pint/testing.py | fernandezc/pint | 37a61ede6fbd628c7dc160eb36278cf41c96484c | [
"BSD-3-Clause"
] | null | null | null | pint/testing.py | fernandezc/pint | 37a61ede6fbd628c7dc160eb36278cf41c96484c | [
"BSD-3-Clause"
] | null | null | null | from __future__ import annotations
import math
import warnings
from numbers import Number
from . import Quantity
from .compat import ndarray
try:
import numpy as np
except ImportError:
np = None
def _get_comparable_magnitudes(first, second, msg):
    """Strip units from *first* and *second* so their magnitudes can be
    compared directly.

    When both are quantities, *second* is converted to the units of
    *first*; a quantity compared against a bare number must be
    dimensionless.
    """
    first_is_q = isinstance(first, Quantity)
    second_is_q = isinstance(second, Quantity)
    if first_is_q and second_is_q:
        ctx = first._REGISTRY._active_ctx.contexts
        if first.is_compatible_with(second, *ctx):
            second = second.to(first)
        assert first.units == second.units, msg + " Units are not equal."
        return first.magnitude, second.magnitude
    if first_is_q:
        assert first.dimensionless, msg + " The first is not dimensionless."
        return first.to("").magnitude, second
    if second_is_q:
        assert second.dimensionless, msg + " The second is not dimensionless."
        return first, second.to("").magnitude
    return first, second
def assert_equal(first, second, msg=None):
    """Assert two (possibly unit-bearing) values are exactly equal.

    NaN on one side requires NaN on the other; non-numeric magnitudes
    only emit a RuntimeWarning instead of failing.
    """
    if msg is None:
        msg = "Comparing %r and %r. " % (first, second)
    m1, m2 = _get_comparable_magnitudes(first, second, msg)
    msg += " (Converted to %r and %r): Magnitudes are not equal" % (m1, m2)
    if isinstance(m1, ndarray) or isinstance(m2, ndarray):
        np.testing.assert_array_equal(m1, m2, err_msg=msg)
        return
    if not isinstance(m1, Number) or not isinstance(m2, Number):
        warnings.warn(RuntimeWarning)
        return
    if math.isnan(m1) or math.isnan(m2):
        assert math.isnan(m1) and math.isnan(m2), msg
        return
    assert m1 == m2, msg
def assert_allclose(first, second, rtol=1e-07, atol=0, msg=None):
    """Assert two values agree within a relative/absolute tolerance.

    NaN must match NaN and inf must match inf (sign is not checked, as
    in the original); non-numeric magnitudes only warn.
    """
    if msg is None:
        try:
            msg = "Comparing %r and %r. " % (first, second)
        except TypeError:
            try:
                msg = "Comparing %s and %s. " % (first, second)
            except Exception:
                msg = "Comparing"
    m1, m2 = _get_comparable_magnitudes(first, second, msg)
    msg += " (Converted to %r and %r)" % (m1, m2)
    if isinstance(m1, ndarray) or isinstance(m2, ndarray):
        np.testing.assert_allclose(m1, m2, rtol=rtol, atol=atol, err_msg=msg)
        return
    if not isinstance(m1, Number) or not isinstance(m2, Number):
        warnings.warn(RuntimeWarning)
        return
    if math.isnan(m1) or math.isnan(m2):
        assert math.isnan(m1) and math.isnan(m2), msg
        return
    if math.isinf(m1) or math.isinf(m2):
        assert math.isinf(m1) and math.isinf(m2), msg
        return
    # Symmetric tolerance check (unlike numpy's asymmetric formula).
    assert abs(m1 - m2) <= max(rtol * max(abs(m1), abs(m2)), atol), msg
| 31.763441 | 78 | 0.618822 | from __future__ import annotations
import math
import warnings
from numbers import Number
from . import Quantity
from .compat import ndarray
try:
import numpy as np
except ImportError:
np = None
def _get_comparable_magnitudes(first, second, msg):
if isinstance(first, Quantity) and isinstance(second, Quantity):
ctx = first._REGISTRY._active_ctx.contexts
if first.is_compatible_with(second, *ctx):
second = second.to(first)
assert first.units == second.units, msg + " Units are not equal."
m1, m2 = first.magnitude, second.magnitude
elif isinstance(first, Quantity):
assert first.dimensionless, msg + " The first is not dimensionless."
first = first.to("")
m1, m2 = first.magnitude, second
elif isinstance(second, Quantity):
assert second.dimensionless, msg + " The second is not dimensionless."
second = second.to("")
m1, m2 = first, second.magnitude
else:
m1, m2 = first, second
return m1, m2
def assert_equal(first, second, msg=None):
if msg is None:
msg = "Comparing %r and %r. " % (first, second)
m1, m2 = _get_comparable_magnitudes(first, second, msg)
msg += " (Converted to %r and %r): Magnitudes are not equal" % (m1, m2)
if isinstance(m1, ndarray) or isinstance(m2, ndarray):
np.testing.assert_array_equal(m1, m2, err_msg=msg)
elif not isinstance(m1, Number):
warnings.warn(RuntimeWarning)
return
elif not isinstance(m2, Number):
warnings.warn(RuntimeWarning)
return
elif math.isnan(m1):
assert math.isnan(m2), msg
elif math.isnan(m2):
assert math.isnan(m1), msg
else:
assert m1 == m2, msg
def assert_allclose(first, second, rtol=1e-07, atol=0, msg=None):
if msg is None:
try:
msg = "Comparing %r and %r. " % (first, second)
except TypeError:
try:
msg = "Comparing %s and %s. " % (first, second)
except Exception:
msg = "Comparing"
m1, m2 = _get_comparable_magnitudes(first, second, msg)
msg += " (Converted to %r and %r)" % (m1, m2)
if isinstance(m1, ndarray) or isinstance(m2, ndarray):
np.testing.assert_allclose(m1, m2, rtol=rtol, atol=atol, err_msg=msg)
elif not isinstance(m1, Number):
warnings.warn(RuntimeWarning)
return
elif not isinstance(m2, Number):
warnings.warn(RuntimeWarning)
return
elif math.isnan(m1):
assert math.isnan(m2), msg
elif math.isnan(m2):
assert math.isnan(m1), msg
elif math.isinf(m1):
assert math.isinf(m2), msg
elif math.isinf(m2):
assert math.isinf(m1), msg
else:
# assert abs(m1 - m2) <= atol + rtol * abs(m2), msg
assert abs(m1 - m2) <= max(rtol * max(abs(m1), abs(m2)), atol), msg
| true | true |
1c458f9ed188a3d53e4a024d3cb10478bdd12173 | 4,733 | py | Python | sudoku/sudoku/gensudoku.py | PoojithRachakada/sudoku-django | 723de992821e54b63259c00fb949fdfa1e05ac04 | [
"MIT"
] | null | null | null | sudoku/sudoku/gensudoku.py | PoojithRachakada/sudoku-django | 723de992821e54b63259c00fb949fdfa1e05ac04 | [
"MIT"
] | 5 | 2020-12-31T09:42:57.000Z | 2021-01-05T13:59:14.000Z | sudoku/sudoku/gensudoku.py | PoojithRachakada/sudoku-django | 723de992821e54b63259c00fb949fdfa1e05ac04 | [
"MIT"
] | null | null | null | # pylint: disable=unused-variable
import os
import sys
from io import BytesIO, IOBase
import math
import itertools as ITER
from collections import defaultdict as D
from collections import Counter as CO
from collections import deque as Q
import threading
from functools import lru_cache, reduce
from functools import cmp_to_key as CMP
from bisect import bisect_left as BL
from bisect import bisect_right as BR
import random as RA
import cmath, time
# ? Variables
MOD = (10 ** 9) + 7  # common competitive-programming modulus (unused below)
MA = float("inf")  # positive-infinity sentinel
MI = float("-inf")  # negative-infinity sentinel
# * gui will be here
# * backend code for sudoku
start_time = time.time()  # wall-clock timestamp taken at import time
class Sudoku:
    """Sudoku validity checks plus a randomized full-board generator.

    Boards are 9x9 lists of ints; 0 marks an empty cell.
    """

    def check_row(self, i, board):
        """Return True if row *i* has no repeated non-zero value."""
        values = set()
        for k in range(0, 9):
            p = board[i][k]
            if p == 0:
                continue
            if p in values:
                return False
            values.add(p)
        return True

    def check_col(self, j, board):
        """Return True if column *j* has no repeated non-zero value."""
        values = set()
        for k in range(0, 9):
            p = board[k][j]
            if p == 0:
                continue
            if p in values:
                return False
            values.add(p)
        return True

    def check_sgrid(self, i, j, board):
        """Return True if the 3x3 sub-grid containing cell (i, j) has no
        repeated non-zero value.

        Bug fixes vs. the original: the sub-grid origin is
        ``((i // 3) * 3, (j // 3) * 3)`` (the old ``i // 3, j // 3``
        indexed the wrong cells for any i, j >= 3), and empty cells (0)
        are skipped instead of being counted as duplicates.
        """
        x, y = (i // 3) * 3, (j // 3) * 3
        seen = set()
        for di in range(3):
            for dj in range(3):
                ele = board[x + di][y + dj]
                if ele == 0:
                    continue
                if ele in seen:
                    return False
                seen.add(ele)
        return True

    def IsValidSudoku(self, board):
        """Return True if *board* violates no sudoku constraint.

        Rows/columns are validated while sweeping each block's diagonal
        cells (m == n only occurs in the blocks where i == j), so every
        row and column is still checked exactly once overall.
        """
        def check_sub_grid(i, j):
            # Checks the 3x3 block at columns i..i+2, rows j..j+2.
            values = set()
            for m in range(i, i + 3):
                for n in range(j, j + 3):
                    p = board[n][m]
                    if m == n:
                        if not self.check_row(m, board):
                            return False
                        if not self.check_col(n, board):
                            return False
                    if p == 0:
                        continue
                    if p in values:
                        return False
                    values.add(p)
            return True

        for i in range(0, 9, 3):
            for j in range(0, 9, 3):
                if not check_sub_grid(i, j):
                    return False
        return True

    # * this is the sudoku generator
    def Sudoku_generator(self, board):
        """Fill *board* in place into a complete valid grid; returns it.

        Backtracking solver that tries candidate digits in one randomly
        shuffled order, so repeated calls on empty boards give
        different grids.
        """
        def next_pos(grid, store):
            # Locate the next empty cell; write its coords into store.
            for i in range(9):
                for j in range(9):
                    if grid[i][j] == 0:
                        store[0] = i
                        store[1] = j
                        return True
            return False

        def create(grid, row, col):
            # Seed the per-row/per-column "used digits" sets from any
            # pre-filled cells.
            for i in range(9):
                for j in range(9):
                    w = grid[i][j]
                    if w != 0:
                        row[i].add(w)
                        col[j].add(w)

        def is_valid(i, j, key, row, col, grid):
            # key may be placed at (i, j) iff unused in its row, column
            # and 3x3 block.
            if key in row[i]:
                return False
            if key in col[j]:
                return False
            p = (i // 3) * 3
            q = (j // 3) * 3
            for x in range(3):
                for y in range(3):
                    if grid[x + p][y + q] == key:
                        return False
            return True

        arr = [1, 2, 3, 4, 5, 6, 7, 8, 9]
        RA.shuffle(arr)

        def sudoku_solver(row, col, grid):
            store = [0, 0]
            if not next_pos(grid, store):
                return True  # no empty cell left: solved
            r = store[0]
            c = store[1]
            for i in arr:
                if is_valid(r, c, i, row, col, grid):
                    grid[r][c] = i
                    row[r].add(i)
                    col[c].add(i)
                    if sudoku_solver(row, col, grid):
                        return True
                    # Backtrack.
                    grid[r][c] = 0
                    row[r].remove(i)
                    col[c].remove(i)
            return False

        row = D(set)
        col = D(set)
        create(board, row, col)
        sudoku_solver(row, col, board)
        return board
def question(board):
    """Derive a puzzle grid from a solved *board*.

    Picks a random number of clue cells (18..30 inclusive) and returns a
    new 9x9 grid keeping only those cells, zeroing everything else.
    Uses ``random.sample`` so the chosen positions are distinct — the
    original ``random.choices`` sampled with replacement and could keep
    fewer clues than requested.
    """
    hint = RA.randint(18, 30)
    totalpos = [(i, j) for i in range(9) for j in range(9)]
    wanted = RA.sample(totalpos, hint)
    qs = [[0] * 9 for _ in range(9)]
    for i, j in wanted:
        qs[i][j] = board[i][j]
    return qs
def valid(arr):
    """Return True when *arr* is a valid (possibly partial) sudoku board."""
    return Sudoku().IsValidSudoku(arr)
def all():
    """Generate a fresh puzzle; returns (question_grid, solved_grid)."""
    # NOTE(review): this shadows the builtin `all()` at module scope —
    # confirm no later code in this module needs the builtin before
    # renaming (renaming would break external callers).
    board = [[0] * 9 for i in range(9)]
    sudokuobj = Sudoku()
    ans = sudokuobj.Sudoku_generator((board))
    return question(ans), ans
| 27.358382 | 60 | 0.427002 |
import os
import sys
from io import BytesIO, IOBase
import math
import itertools as ITER
from collections import defaultdict as D
from collections import Counter as CO
from collections import deque as Q
import threading
from functools import lru_cache, reduce
from functools import cmp_to_key as CMP
from bisect import bisect_left as BL
from bisect import bisect_right as BR
import random as RA
import cmath, time
MOD = (10 ** 9) + 7
MA = float("inf")
MI = float("-inf")
start_time = time.time()
class Sudoku:
    """Validation and generation helpers for 9x9 Sudoku grids.

    A grid is a list of nine 9-element rows of ints; 0 marks an empty cell.
    """

    def check_row(self, i, board):
        """Return True when row ``i`` holds no repeated non-zero digit."""
        values = set()
        for k in range(9):
            p = board[i][k]
            if p == 0:
                continue  # empty cells never conflict
            if p in values:
                return False
            values.add(p)
        return True

    def check_col(self, j, board):
        """Return True when column ``j`` holds no repeated non-zero digit."""
        values = set()
        for k in range(9):
            p = board[k][j]
            if p == 0:
                continue
            if p in values:
                return False
            values.add(p)
        return True

    def check_sgrid(self, i, j, board):
        """Return True when the 3x3 block containing cell (i, j) holds no
        repeated non-zero digit.

        Fixes two defects in the previous version: the block origin is now
        ``((i // 3) * 3, (j // 3) * 3)`` rather than ``(i // 3, j // 3)``,
        and empty cells (0) are skipped instead of being treated as
        duplicates of one another (matching check_row / check_col).
        """
        top, left = (i // 3) * 3, (j // 3) * 3
        seen = set()
        for di in range(3):
            for dj in range(3):
                ele = board[top + di][left + dj]
                if ele == 0:
                    continue
                if ele in seen:
                    return False
                seen.add(ele)
        return True

    def IsValidSudoku(self, board):
        """Return True when ``board`` breaks no Sudoku rule.

        Zeros are ignored, so partially filled boards validate too. Row and
        column checks are piggy-backed on the sub-grid sweep.
        """

        def check_sub_grid(i, j):
            # Checks one 3x3 block for duplicates. On diagonal visits
            # (m == n) the full row m and column n are checked as well.
            # Note the block is indexed board[n][m] (transposed); over the
            # full i/j sweep every block is still covered exactly once.
            values = set()
            for m in range(i, i + 3):
                for n in range(j, j + 3):
                    p = board[n][m]
                    if m == n:
                        if not self.check_row(m, board):
                            return False
                        if not self.check_col(n, board):
                            return False
                    if p == 0:
                        continue
                    if p in values:
                        return False
                    values.add(p)
            return True

        for i in range(0, 9, 3):
            for j in range(0, 9, 3):
                if not check_sub_grid(i, j):
                    return False
        return True

    def Sudoku_generator(self, board):
        """Fill ``board`` in place with a complete random solution using
        randomized backtracking, and return it."""

        def next_pos(grid, store):
            # First empty cell in row-major order; (row, col) goes in store.
            for i in range(9):
                for j in range(9):
                    if grid[i][j] == 0:
                        store[0] = i
                        store[1] = j
                        return True
            return False

        def create(grid, row, col):
            # Seed per-row / per-column "used digit" sets from existing values.
            for i in range(9):
                for j in range(9):
                    w = grid[i][j]
                    if w != 0:
                        row[i].add(w)
                        col[j].add(w)

        def is_valid(i, j, key, row, col, grid):
            # ``key`` fits at (i, j) iff absent from row, column and block.
            if key in row[i] or key in col[j]:
                return False
            p = (i // 3) * 3
            q = (j // 3) * 3
            for x in range(3):
                for y in range(3):
                    if grid[x + p][y + q] == key:
                        return False
            return True

        arr = [1, 2, 3, 4, 5, 6, 7, 8, 9]
        RA.shuffle(arr)  # random digit order -> a different board every call

        def sudoku_solver(row, col, grid):
            # Backtracking: place a digit in the next empty cell, recurse,
            # and undo the placement (grid + both sets) on failure.
            store = [0, 0]
            if not next_pos(grid, store):
                return True
            r, c = store
            for digit in arr:
                if is_valid(r, c, digit, row, col, grid):
                    grid[r][c] = digit
                    row[r].add(digit)
                    col[c].add(digit)
                    if sudoku_solver(row, col, grid):
                        return True
                    grid[r][c] = 0
                    row[r].remove(digit)
                    col[c].remove(digit)
            return False

        row = D(set)  # defaultdict(set): digits already used per row/column
        col = D(set)
        create(board, row, col)
        sudoku_solver(row, col, board)
        return board
def question(board):
    """Turn a solved ``board`` into a puzzle by copying 18-30 randomly
    chosen cells into an otherwise-empty grid (duplicate picks simply
    re-reveal the same cell)."""
    hint = RA.randint(18, 30)
    totalpos = [(i, j) for i in range(9) for j in range(9)]
    wanted = RA.choices(totalpos, k=hint)
    qs = [[0] * 9 for i in range(9)]
    for i, j in wanted:
        qs[i][j] = board[i][j]
    return qs
def valid(arr):
    """Return True when ``arr`` passes Sudoku validation (0 = empty cell)."""
    sudokuobj = Sudoku()
    return sudokuobj.IsValidSudoku(arr)
def all():
    """Generate a fresh game; returns (puzzle_grid, solved_grid).

    NOTE(review): shadows the builtin ``all`` within this module.
    """
    board = [[0] * 9 for i in range(9)]
    sudokuobj = Sudoku()
    ans = sudokuobj.Sudoku_generator((board))
    return question(ans), ans
| true | true |
1c4590d51df3d7bf9eea558bb224c176d93b580d | 4,832 | py | Python | fastmot/utils/visualization.py | rafcy/FastMOT | 9aee101b1ac83a5fea8cece1f8cfda8030adb743 | [
"MIT"
] | null | null | null | fastmot/utils/visualization.py | rafcy/FastMOT | 9aee101b1ac83a5fea8cece1f8cfda8030adb743 | [
"MIT"
] | null | null | null | fastmot/utils/visualization.py | rafcy/FastMOT | 9aee101b1ac83a5fea8cece1f8cfda8030adb743 | [
"MIT"
] | null | null | null | import colorsys
import numpy as np
import cv2
GOLDEN_RATIO = 0.618033988749895
def draw_tracks(frame, tracks, show_flow=False, show_cov=False):
    """Draw every track's labelled bounding box (coloured by track id),
    optionally with its keypoint flow and covariance ellipses."""
    for trk in tracks:
        label = str(trk.trk_id)
        draw_bbox(frame, trk.tlbr, get_color(trk.trk_id), 2, label)
        if show_flow:
            draw_feature_match(frame, trk.prev_keypoints, trk.keypoints, (0, 255, 255))
        if show_cov:
            draw_covariance(frame, trk.tlbr, trk.state[1])
def draw_detections(frame, detections, color=(255, 255, 255), show_conf=False):
    """Draw thin boxes for raw detections, labelled 'label: conf' on demand."""
    for det in detections:
        if show_conf:
            caption = f'{det.label}: {det.conf:.2f}'
        else:
            caption = None
        draw_bbox(frame, det.tlbr, color, 1, caption)
def draw_klt_bboxes(frame, klt_bboxes, color=(0, 0, 0)):
    """Outline each KLT-tracked bounding box with a thin rectangle."""
    for box in klt_bboxes:
        draw_bbox(frame, box, color, 1)
def draw_tiles(frame, tiles, scale_factor, color=(0, 0, 0)):
    """Draw detector tiles after scaling them back to frame coordinates."""
    scale = np.tile(scale_factor, 2)  # apply to both (x1, y1) and (x2, y2)
    for tile in tiles:
        draw_bbox(frame, np.rint(tile * scale), color, 1)
def draw_background_flow(frame, prev_bg_keypoints, bg_keypoints, color=(0, 0, 255)):
    """Visualize camera-motion (background) keypoint matches."""
    draw_feature_match(frame, prev_bg_keypoints, bg_keypoints, color)
def get_color(idx, s=0.8, vmin=0.7):
    """Map an integer id to a stable, well-spread BGR colour tuple.

    The hue walks the colour wheel in golden-ratio steps so consecutive ids
    get visually distinct colours; value stays above ``vmin`` to keep them
    bright.
    """
    step = idx * GOLDEN_RATIO
    hue = np.fmod(step, 1.)
    value = 1. - np.fmod(step, 1. - vmin)
    red, green, blue = colorsys.hsv_to_rgb(hue, s, value)
    return int(255 * blue), int(255 * green), int(255 * red)
def draw_bbox(frame, tlbr, color, thickness, text=None):
    """Draw a box given as [x1, y1, x2, y2]; when ``text`` is supplied,
    render it on a filled badge anchored at the top-left corner."""
    coords = tlbr.astype(int)
    top_left = tuple(coords[:2])
    bottom_right = tuple(coords[2:])
    cv2.rectangle(frame, top_left, bottom_right, color, thickness)
    if text is None:
        return
    (txt_w, txt_h), _ = cv2.getTextSize(text, cv2.FONT_HERSHEY_DUPLEX, 0.5, 1)
    badge_br = (top_left[0] + txt_w - 1, top_left[1] + txt_h - 1)
    cv2.rectangle(frame, top_left, badge_br, color, cv2.FILLED)
    anchor = (top_left[0], top_left[1] + txt_h - 1)
    cv2.putText(frame, text, anchor, cv2.FONT_HERSHEY_DUPLEX, 0.5, 0, 1, cv2.LINE_AA)
def draw_feature_match(frame, prev_pts, cur_pts, color):
    """Plot current keypoints as filled dots and join each one to its
    previous position with a thin anti-aliased line."""
    if len(cur_pts) > 0:
        cur_pts = np.rint(cur_pts).astype(np.int32)
        for point in cur_pts:
            cv2.circle(frame, tuple(point), 1, color, cv2.FILLED)
        if len(prev_pts) > 0:
            prev_pts = np.rint(prev_pts).astype(np.int32)
            for start, end in zip(prev_pts, cur_pts):
                cv2.line(frame, tuple(start), tuple(end), color, 1, cv2.LINE_AA)
def draw_covariance(frame, tlbr, covariance):
    """Overlay 95% confidence ellipses of the state covariance at the box's
    top-left (rows/cols 0-1) and bottom-right (rows/cols 2-3) corners."""
    coords = tlbr.astype(int)
    corners = (tuple(coords[:2]), tuple(coords[2:]))

    def to_ellipse(cov):
        # Eigen-decompose the 2x2 covariance; axis lengths use the 95%
        # chi-square quantile (5.9915) of the eigenvalues.
        vals, vecs = np.linalg.eigh(cov)
        order = vals.argsort()[::-1]
        vals, vecs = np.sqrt(vals[order] * 5.9915), vecs[:, order]
        axes = int(vals[0] + 0.5), int(vals[1] + 0.5)
        angle = np.degrees(np.arctan2(vecs[1, 0], vecs[0, 0]))
        return axes, angle

    for corner, block in zip(corners, (covariance[:2, :2], covariance[2:4, 2:4])):
        axes, angle = to_ellipse(block)
        cv2.ellipse(frame, corner, axes, angle, 0, 360, (255, 255, 255), 1, cv2.LINE_AA)
class Visualizer:
    """Bundles drawing options and renders the enabled overlays on frames."""

    def __init__(self,
                 draw_detections=False,
                 draw_confidence=False,
                 draw_covariance=False,
                 draw_klt=False,
                 draw_obj_flow=False,
                 draw_bg_flow=False):
        """Class for visualization.

        Parameters
        ----------
        draw_detections : bool, optional
            Enable drawing detections.
        draw_confidence : bool, optional
            Enable drawing detection confidence, ignored if `draw_detections`
            is disabled.
        draw_covariance : bool, optional
            Enable drawing Kalman filter position covariance.
        draw_klt : bool, optional
            Enable drawing KLT bounding boxes.
        draw_obj_flow : bool, optional
            Enable drawing object flow matches.
        draw_bg_flow : bool, optional
            Enable drawing background flow matches.
        """
        self.draw_klt = draw_klt
        self.draw_obj_flow = draw_obj_flow
        self.draw_bg_flow = draw_bg_flow
        self.draw_detections = draw_detections
        self.draw_confidence = draw_confidence
        self.draw_covariance = draw_covariance

    def render(self, frame, tracks, detections, klt_bboxes, prev_bg_keypoints, bg_keypoints):
        """Render visualizations onto the frame."""
        draw_tracks(frame, tracks,
                    show_flow=self.draw_obj_flow,
                    show_cov=self.draw_covariance)
        if self.draw_detections:
            draw_detections(frame, detections, show_conf=self.draw_confidence)
        if self.draw_klt:
            draw_klt_bboxes(frame, klt_bboxes)
        if self.draw_bg_flow:
            draw_background_flow(frame, prev_bg_keypoints, bg_keypoints)
| 37.457364 | 95 | 0.627276 | import colorsys
import numpy as np
import cv2
GOLDEN_RATIO = 0.618033988749895
def draw_tracks(frame, tracks, show_flow=False, show_cov=False):
    """Draw each track's labelled box (coloured by id), optionally with its
    keypoint flow and covariance ellipses."""
    for track in tracks:
        draw_bbox(frame, track.tlbr, get_color(track.trk_id), 2, str(track.trk_id))
        if show_flow:
            draw_feature_match(frame, track.prev_keypoints, track.keypoints, (0, 255, 255))
        if show_cov:
            draw_covariance(frame, track.tlbr, track.state[1])
def draw_detections(frame, detections, color=(255, 255, 255), show_conf=False):
    """Draw thin boxes for raw detections, labelled 'label: conf' on demand."""
    for det in detections:
        text = f'{det.label}: {det.conf:.2f}' if show_conf else None
        draw_bbox(frame, det.tlbr, color, 1, text)
def draw_klt_bboxes(frame, klt_bboxes, color=(0, 0, 0)):
    """Outline each KLT-tracked bounding box with a thin rectangle."""
    for tlbr in klt_bboxes:
        draw_bbox(frame, tlbr, color, 1)
def draw_tiles(frame, tiles, scale_factor, color=(0, 0, 0)):
    """Draw detector tiles after scaling them back to frame coordinates."""
    for tile in tiles:
        # np.tile duplicates the (sx, sy) factor for both box corners.
        tlbr = np.rint(tile * np.tile(scale_factor, 2))
        draw_bbox(frame, tlbr, color, 1)
def draw_background_flow(frame, prev_bg_keypoints, bg_keypoints, color=(0, 0, 255)):
    """Visualize camera-motion (background) keypoint matches."""
    draw_feature_match(frame, prev_bg_keypoints, bg_keypoints, color)
def get_color(idx, s=0.8, vmin=0.7):
    """Map an integer id to a stable, well-spread BGR colour tuple; hue walks
    the colour wheel in golden-ratio steps, value stays above ``vmin``."""
    h = np.fmod(idx * GOLDEN_RATIO, 1.)
    v = 1. - np.fmod(idx * GOLDEN_RATIO, 1. - vmin)
    r, g, b = colorsys.hsv_to_rgb(h, s, v)
    # Returned in BGR order (OpenCV convention).
    return int(255 * b), int(255 * g), int(255 * r)
def draw_bbox(frame, tlbr, color, thickness, text=None):
    """Draw a box given as [x1, y1, x2, y2]; optional ``text`` is rendered
    on a filled badge anchored at the top-left corner."""
    tlbr = tlbr.astype(int)
    tl, br = tuple(tlbr[:2]), tuple(tlbr[2:])
    cv2.rectangle(frame, tl, br, color, thickness)
    if text is not None:
        (text_width, text_height), _ = cv2.getTextSize(text, cv2.FONT_HERSHEY_DUPLEX, 0.5, 1)
        # Filled badge sized to the text, then the text on top of it.
        cv2.rectangle(frame, tl, (tl[0] + text_width - 1, tl[1] + text_height - 1),
                      color, cv2.FILLED)
        cv2.putText(frame, text, (tl[0], tl[1] + text_height - 1), cv2.FONT_HERSHEY_DUPLEX,
                    0.5, 0, 1, cv2.LINE_AA)
def draw_feature_match(frame, prev_pts, cur_pts, color):
    """Plot current keypoints as dots and join each one to its previous
    position with a thin anti-aliased line."""
    if len(cur_pts) > 0:
        cur_pts = np.rint(cur_pts).astype(np.int32)
        for pt in cur_pts:
            cv2.circle(frame, tuple(pt), 1, color, cv2.FILLED)
        if len(prev_pts) > 0:
            prev_pts = np.rint(prev_pts).astype(np.int32)
            for pt1, pt2 in zip(prev_pts, cur_pts):
                cv2.line(frame, tuple(pt1), tuple(pt2), color, 1, cv2.LINE_AA)
def draw_covariance(frame, tlbr, covariance):
    """Overlay confidence ellipses of the state covariance at the box's
    top-left (rows/cols 0-1) and bottom-right (rows/cols 2-3) corners."""
    tlbr = tlbr.astype(int)
    tl, br = tuple(tlbr[:2]), tuple(tlbr[2:])
    def ellipse(cov):
        # Eigen-decompose the 2x2 covariance; 5.9915 is the 95% chi-square
        # quantile, so the axes span a 95% confidence ellipse.
        vals, vecs = np.linalg.eigh(cov)
        order = vals.argsort()[::-1]
        vals, vecs = np.sqrt(vals[order] * 5.9915), vecs[:, order]
        axes = int(vals[0] + 0.5), int(vals[1] + 0.5)
        angle = np.degrees(np.arctan2(vecs[1, 0], vecs[0, 0]))
        return axes, angle
    axes, angle = ellipse(covariance[:2, :2])
    cv2.ellipse(frame, tl, axes, angle, 0, 360, (255, 255, 255), 1, cv2.LINE_AA)
    axes, angle = ellipse(covariance[2:4, 2:4])
    cv2.ellipse(frame, br, axes, angle, 0, 360, (255, 255, 255), 1, cv2.LINE_AA)
class Visualizer:
    """Bundles drawing options and renders the enabled overlays on frames."""
    def __init__(self,
                 draw_detections=False,
                 draw_confidence=False,
                 draw_covariance=False,
                 draw_klt=False,
                 draw_obj_flow=False,
                 draw_bg_flow=False):
        """Store which overlays to draw; ``draw_confidence`` only has an
        effect when ``draw_detections`` is enabled."""
        self.draw_detections = draw_detections
        self.draw_confidence = draw_confidence
        self.draw_covariance = draw_covariance
        self.draw_klt = draw_klt
        self.draw_obj_flow = draw_obj_flow
        self.draw_bg_flow = draw_bg_flow
    def render(self, frame, tracks, detections, klt_bboxes, prev_bg_keypoints, bg_keypoints):
        """Render the enabled visualizations onto ``frame`` in place."""
        draw_tracks(frame, tracks, show_flow=self.draw_obj_flow, show_cov=self.draw_covariance)
        if self.draw_detections:
            draw_detections(frame, detections, show_conf=self.draw_confidence)
        if self.draw_klt:
            draw_klt_bboxes(frame, klt_bboxes)
        if self.draw_bg_flow:
            draw_background_flow(frame, prev_bg_keypoints, bg_keypoints)
| true | true |
1c4591a6e22722c8a1760289f625d852a5960577 | 2,354 | py | Python | tests/io/simple_process.py | rajgiriUW/pyUSID | 064dcd81d9c42f4eb4782f0a41fd437b3f56f50c | [
"MIT"
] | 25 | 2018-07-11T21:43:56.000Z | 2021-11-17T11:40:00.000Z | tests/io/simple_process.py | rajgiriUW/pyUSID | 064dcd81d9c42f4eb4782f0a41fd437b3f56f50c | [
"MIT"
] | 62 | 2018-07-05T20:28:52.000Z | 2021-12-14T09:49:35.000Z | tests/io/simple_process.py | rajgiriUW/pyUSID | 064dcd81d9c42f4eb4782f0a41fd437b3f56f50c | [
"MIT"
] | 15 | 2019-03-27T22:28:47.000Z | 2021-01-03T20:23:42.000Z | """
Simple process class for purpose of testing.
Created on: Jul 19, 2019
Author: Emily Costa
"""
import h5py
from pyUSID.processing.process import Process
import numpy as np
from pyUSID import hdf_utils
import matplotlib.pyplot as plt
class SimpleProcess(Process):
    """Minimal pyUSID ``Process`` used for testing: the per-chunk unit
    computation is a shifted FFT along the spectroscopic axis.

    Removes commented-out dead code and two ``print(type(...))`` debug
    leftovers from the chunk-writing path.
    """

    def __init__(self, h5_main, verbose=True, **kwargs):
        """Set up book-keeping attributes; no computation happens here."""
        super(SimpleProcess, self).__init__(h5_main, verbose, **kwargs)
        self.data = None          # current chunk handed over by the base class
        self.test_data = None     # result of test() on one random pixel
        self.results = None       # HDF5 dataset receiving computed chunks
        self.chunk_amount = 0     # number of chunks written so far
        self.process_name = 'Simple_Process'
        if self.verbose: print('Done with initializing book-keepings')

    def test(self):
        """Run the unit computation on one random pixel (MPI rank 0 only)."""
        if self.mpi_rank > 0:
            return
        ran_ind = np.random.randint(0, high=self.h5_main.shape[0])
        self.test_data = np.fft.fftshift(np.fft.fft(self.h5_main[ran_ind]))

    def _create_results_datasets(self):
        """Create the results group plus an empty dataset mirroring h5_main."""
        self.h5_results_grp = hdf_utils.create_results_group(self.h5_main, self.process_name)
        assert isinstance(self.h5_results_grp, h5py.Group)
        if self.verbose: print('Results group created.')
        self.results = hdf_utils.create_empty_dataset(self.h5_main, self.h5_main.dtype, 'Filtered_Data',
                                                      h5_group=self.h5_results_grp)
        if self.verbose: print('Empty main dataset for results written')

    def _write_results_chunk(self):
        """Write the freshly computed chunk back to the rows it came from."""
        pos_in_batch = self._get_pixels_in_current_batch()
        self.results[pos_in_batch, :] = self.data
        self.chunk_amount = self.chunk_amount + 1
        if self.verbose: print('Chunk {} written.'.format(self.chunk_amount))

    def _unit_computation(self):
        """Apply fftshift(fft(...)) to every row of the current chunk."""
        self.data = np.fft.fftshift(np.fft.fft(self.data, axis=1), axes=1)

    def plot_test(self):
        """Plot the test() spectrum and save it as 'test_partial.png'."""
        fig, axis = plt.subplots()
        axis.plot(self.test_data)
        plt.savefig('test_partial.png')
        if self.verbose: print('Test image created.')
| 39.898305 | 149 | 0.666525 |
import h5py
from pyUSID.processing.process import Process
import numpy as np
from pyUSID import hdf_utils
import matplotlib.pyplot as plt
class SimpleProcess(Process):
    """Minimal pyUSID Process for testing: shifted FFT applied per chunk."""
    def __init__(self, h5_main, verbose=True, **kwargs):
        # Pure book-keeping; the heavy lifting lives in the Process base class.
        super(SimpleProcess, self).__init__(h5_main, verbose, **kwargs)
        self.data = None
        self.test_data = None
        self.results = None
        self.chunk_amount = 0
        self.process_name = 'Simple_Process'
        if self.verbose: print('Done with initializing book-keepings')
    def test(self):
        # Only MPI rank 0 runs the spot check, on a single random pixel.
        if self.mpi_rank > 0:
            return
        ran_ind = np.random.randint(0, high=self.h5_main.shape[0])
        self.test_data = np.fft.fftshift(np.fft.fft(self.h5_main[ran_ind]))
    def _create_results_datasets(self):
        # Results group + empty dataset shaped like h5_main for the output.
        self.h5_results_grp = hdf_utils.create_results_group(self.h5_main, self.process_name)
        assert isinstance(self.h5_results_grp, h5py.Group)
        if self.verbose: print('Results group created.')
        self.results = hdf_utils.create_empty_dataset(self.h5_main, self.h5_main.dtype, 'Filtered_Data',
                                                      h5_group=self.h5_results_grp)
        if self.verbose: print('Empty main dataset for results written')
    def _write_results_chunk(self):
        # Write the computed chunk back to the rows it came from.
        pos_in_batch = self._get_pixels_in_current_batch()
        # NOTE(review): the two type() prints below look like debug leftovers.
        print(type(self.data))
        print(type(self.results))
        self.results[pos_in_batch, :] = self.data
        self.chunk_amount = self.chunk_amount + 1
        if self.verbose: print('Chunk {} written.'.format(self.chunk_amount))
    def _unit_computation(self):
        # fftshift(fft(...)) row-wise over the current chunk.
        self.data = np.fft.fftshift(np.fft.fft(self.data, axis=1), axes=1)
    def plot_test(self):
        # Persist the spot-check spectrum for manual inspection.
        fig, axis = plt.subplots()
        axis.plot(self.test_data)
        plt.savefig('test_partial.png')
        if self.verbose: print('Test image created.')
| true | true |
1c4591b85ef0cb783c72ba1b6a6beb97dbfb0aa3 | 2,482 | py | Python | pysnmp/CISCO-SCTP-CAPABILITY.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/CISCO-SCTP-CAPABILITY.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/CISCO-SCTP-CAPABILITY.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module CISCO-SCTP-CAPABILITY (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-SCTP-CAPABILITY
# Produced by pysmi-0.3.4 at Mon Apr 29 17:54:50 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# Generated pysnmp bindings: every symbol below is resolved through
# ``mibBuilder``, which the pysnmp MIB-loading machinery injects into this
# module's namespace at load time (the module is exec'd, not imported).
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsUnion", "ValueRangeConstraint")
ciscoAgentCapability, = mibBuilder.importSymbols("CISCO-SMI", "ciscoAgentCapability")
AgentCapabilities, NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "AgentCapabilities", "NotificationGroup", "ModuleCompliance")
Counter64, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity, ModuleIdentity, Bits, MibIdentifier, Gauge32, TimeTicks, NotificationType, iso, IpAddress, Unsigned32, Counter32, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity", "ModuleIdentity", "Bits", "MibIdentifier", "Gauge32", "TimeTicks", "NotificationType", "iso", "IpAddress", "Unsigned32", "Counter32", "Integer32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# Module identity for the CISCO-SCTP-CAPABILITY MIB (OID 1.3.6.1.4.1.9.7.190).
ceSctpCapability = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 7, 190))
ceSctpCapability.setRevisions(('2001-06-05 00:00',))
if mibBuilder.loadTexts: ceSctpCapability.setLastUpdated('200106050000Z')
if mibBuilder.loadTexts: ceSctpCapability.setOrganization('Cisco Systems, Inc.')
# Agent capability statement for Cisco IOS 12.2(1)MB1; the extra setters are
# only available on newer pysmi/pysnmp versions, hence the version guards.
ceSctpCapabilityV12R021MB1 = AgentCapabilities((1, 3, 6, 1, 4, 1, 9, 7, 190, 1))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ceSctpCapabilityV12R021MB1 = ceSctpCapabilityV12R021MB1.setProductRelease('Cisco IOS 12.2(1)MB1')
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ceSctpCapabilityV12R021MB1 = ceSctpCapabilityV12R021MB1.setStatus('current')
mibBuilder.exportSymbols("CISCO-SCTP-CAPABILITY", ceSctpCapability=ceSctpCapability, ceSctpCapabilityV12R021MB1=ceSctpCapabilityV12R021MB1, PYSNMP_MODULE_ID=ceSctpCapability)
| 99.28 | 477 | 0.787671 |
# Generated pysnmp MIB module (CISCO-SCTP-CAPABILITY); ``mibBuilder`` is
# injected by the pysnmp runtime that loads this file.
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsUnion", "ValueRangeConstraint")
ciscoAgentCapability, = mibBuilder.importSymbols("CISCO-SMI", "ciscoAgentCapability")
AgentCapabilities, NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "AgentCapabilities", "NotificationGroup", "ModuleCompliance")
Counter64, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity, ModuleIdentity, Bits, MibIdentifier, Gauge32, TimeTicks, NotificationType, iso, IpAddress, Unsigned32, Counter32, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity", "ModuleIdentity", "Bits", "MibIdentifier", "Gauge32", "TimeTicks", "NotificationType", "iso", "IpAddress", "Unsigned32", "Counter32", "Integer32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
ceSctpCapability = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 7, 190))
ceSctpCapability.setRevisions(('2001-06-05 00:00',))
if mibBuilder.loadTexts: ceSctpCapability.setLastUpdated('200106050000Z')
if mibBuilder.loadTexts: ceSctpCapability.setOrganization('Cisco Systems, Inc.')
# Capability statement for Cisco IOS 12.2(1)MB1; guarded setters require
# a sufficiently new mibBuilder.
ceSctpCapabilityV12R021MB1 = AgentCapabilities((1, 3, 6, 1, 4, 1, 9, 7, 190, 1))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ceSctpCapabilityV12R021MB1 = ceSctpCapabilityV12R021MB1.setProductRelease('Cisco IOS 12.2(1)MB1')
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ceSctpCapabilityV12R021MB1 = ceSctpCapabilityV12R021MB1.setStatus('current')
mibBuilder.exportSymbols("CISCO-SCTP-CAPABILITY", ceSctpCapability=ceSctpCapability, ceSctpCapabilityV12R021MB1=ceSctpCapabilityV12R021MB1, PYSNMP_MODULE_ID=ceSctpCapability)
| true | true |
1c45922460b3274c214c472b39912156f5a9ae77 | 1,632 | py | Python | game/startMk2.py | Penniling/launchpad-dont-choose-the-wrong | 490e814a531168ae3b4cbbd0db89a9887b5d0bb3 | [
"MIT"
] | null | null | null | game/startMk2.py | Penniling/launchpad-dont-choose-the-wrong | 490e814a531168ae3b4cbbd0db89a9887b5d0bb3 | [
"MIT"
] | null | null | null | game/startMk2.py | Penniling/launchpad-dont-choose-the-wrong | 490e814a531168ae3b4cbbd0db89a9887b5d0bb3 | [
"MIT"
] | null | null | null | import LaunchpadMk2
import atexit
import os
import random
def on_exit():
    # Registered with atexit in the main guard: respawns this script so the
    # game restarts automatically once the current round's process exits.
    os.system(f"python {os.getcwd()}/startMk2.py")
class Game:
    """'Don't press the wrong pad' game for a Launchpad MK2.

    ``n`` pads on the 8x8 grid (x: 0-7, y: 1-8) are secretly wrong; pressing
    one ends the game, clearing every safe pad wins it.
    """

    def __init__(self):
        self.n = int(input("Please choose a number of wrong pads: "))
        self.lp = LaunchpadMk2.LaunchpadMk2()
        self.lp.Reset()
        self.lp.register_on_button_press(on_button=self.on_button_press)
        self.wrong = []    # coordinates of the losing pads
        self.pres = []     # safe pads pressed so far
        self.isDead = False
        self.start_game()

    def start_game(self):
        """Light the grid green, pick ``n`` distinct wrong pads, then wait
        until every safe pad has been pressed."""
        self.lp.LedAllOn(colorcode=self.lp.COLORS["green"])
        for _ in range(self.n):
            x = (random.randint(0, 7), random.randint(1, 8))
            while x in self.wrong:
                x = (random.randint(0, 7), random.randint(1, 8))
            self.wrong.append(x)
        # 64 pads total, n of them wrong -> the player wins after pressing
        # the 64 - n safe ones. (The previous version waited for 64 presses,
        # which is unreachable once duplicate presses are rejected.)
        while len(self.pres) < 64 - self.n:
            pass
        self.on_win()

    def on_button_press(self, x, y, pres):
        """Launchpad callback; ``pres`` > 0 means press (not release).

        Fix: the old guard compared the tuple against the whole list
        (``(x, y) != self.pres`` is always True), so repeated presses of the
        same safe pad were counted multiple times.
        """
        if pres > 0 and (x, y) not in self.pres:
            if (x, y) in self.wrong:
                self.on_death()
            else:
                self.pres.append((x, y))
                self.lp.LedCtrlXY(x, y, 0, 0, 0)

    def on_win(self):
        """Scroll 'Win' across the pads, stop listening and quit."""
        self.lp.Reset()
        self.lp.LedCtrlString("Win", 0, 255, 0, direction=self.lp.SCROLL_LEFT, waitms=50)
        self.lp.continue_listener = False
        self.lp.Close()
        exit()

    def on_death(self):
        """Reveal every wrong pad in red, stop listening and quit."""
        self.lp.Reset()
        for i in self.wrong:
            self.lp.LedCtrlXY(i[0], i[1], 255, 0, 0)
        self.lp.continue_listener = False
        self.lp.Close()
        exit()
if __name__ == "__main__":
    # Relaunch the script once this round's process exits, then start a game.
    atexit.register(on_exit)
    Game()
| 26.754098 | 89 | 0.550858 | import LaunchpadMk2
import atexit
import os
import random
def on_exit():
    # atexit hook: respawns this script so the game restarts automatically.
    os.system(f"python {os.getcwd()}/startMk2.py")
class Game:
    """'Don't press the wrong pad' game for a Launchpad MK2."""
    def __init__(self):
        self.n = int(input("Please choose a number of wrong pads: "))
        self.lp = LaunchpadMk2.LaunchpadMk2()
        self.lp.Reset()
        self.lp.register_on_button_press(on_button=self.on_button_press)
        self.wrong = []  # losing pad coordinates
        self.pres = []  # safe pads pressed so far
        self.isDead = False  # NOTE(review): never read anywhere
        self.start_game()
    def start_game(self):
        # Light everything green, choose n distinct wrong pads, then busy-wait
        # until 64 presses have been recorded before declaring a win.
        self.lp.LedAllOn(colorcode=self.lp.COLORS["green"])
        for i in range(self.n):
            x = (random.randint(0, 7), random.randint(1, 8))
            while x in self.wrong:
                x = (random.randint(0, 7), random.randint(1, 8))
            self.wrong.append(x)
        while len(self.pres) <= 63:
            pass
        self.on_win()
    def on_button_press(self, x, y, pres):
        # NOTE(review): "(x, y) != self.pres" compares a tuple with a list
        # and is always True, so duplicate presses are counted; this was
        # likely meant to be "(x, y) not in self.pres".
        if pres > 0 and (x, y) != self.pres:
            if (x, y) in self.wrong:
                self.on_death()
            else:
                self.pres.append((x, y))
                self.lp.LedCtrlXY(x, y, 0, 0, 0)
    def on_win(self):
        # Scroll "Win" across the pads, stop listening and quit.
        self.lp.Reset()
        self.lp.LedCtrlString("Win", 0, 255, 0, direction=self.lp.SCROLL_LEFT, waitms=50)
        self.lp.continue_listener = False
        self.lp.Close()
        exit()
    def on_death(self):
        # Reveal all wrong pads in red, stop listening and quit.
        self.lp.Reset()
        for i in self.wrong:
            self.lp.LedCtrlXY(i[0], i[1], 255, 0, 0)
        self.lp.continue_listener = False
        self.lp.Close()
        exit()
if __name__ == "__main__":
    # Relaunch the script on exit, then start a game.
    atexit.register(on_exit)
    Game()
| true | true |
1c4592dbfd3957588d06fd935ce4c485dc1377a0 | 7,268 | py | Python | pennylane/interfaces/batch/tensorflow.py | ral9000/pennylane | 0afbd155d044730af546c6d90cef9d01f931632d | [
"Apache-2.0"
] | 712 | 2020-07-29T03:46:52.000Z | 2022-03-27T11:21:51.000Z | pennylane/interfaces/batch/tensorflow.py | ral9000/pennylane | 0afbd155d044730af546c6d90cef9d01f931632d | [
"Apache-2.0"
] | 1,627 | 2020-07-28T13:07:58.000Z | 2022-03-31T21:47:29.000Z | pennylane/interfaces/batch/tensorflow.py | ral9000/pennylane | 0afbd155d044730af546c6d90cef9d01f931632d | [
"Apache-2.0"
] | 249 | 2020-07-29T03:26:18.000Z | 2022-03-31T19:59:48.000Z | # Copyright 2018-2021 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module contains functions for adding the TensorFlow interface
to a PennyLane Device class.
"""
# pylint: disable=too-many-arguments,too-many-branches
import numpy as np
import tensorflow as tf
from tensorflow.python.eager import context
import pennylane as qml
def _compute_vjp(dy, jacs):
    """Contract each output gradient with its Jacobian (dy @ jac) and return
    the flattened list of per-parameter VJP entries."""
    vjps = []
    for grad_out, jacobian in zip(dy, jacs):
        vjp = qml.gradients.compute_vjp(grad_out, jacobian)
        if not context.executing_eagerly():
            # In graph mode the VJP must be split into per-parameter tensors.
            vjp = qml.math.unstack(vjp)
        vjps.extend(vjp)
    return vjps
def execute(tapes, device, execute_fn, gradient_fn, gradient_kwargs, _n=1, max_diff=2):
    """Execute a batch of tapes with TensorFlow parameters on a device.
    Args:
        tapes (Sequence[.QuantumTape]): batch of tapes to execute
        device (.Device): Device to use to execute the batch of tapes.
            If the device does not provide a ``batch_execute`` method,
            by default the tapes will be executed in serial.
        execute_fn (callable): The execution function used to execute the tapes
            during the forward pass. This function must return a tuple ``(results, jacobians)``.
            If ``jacobians`` is an empty list, then ``gradient_fn`` is used to
            compute the gradients during the backwards pass.
        gradient_kwargs (dict): dictionary of keyword arguments to pass when
            determining the gradients of tapes
        gradient_fn (callable): the gradient function to use to compute quantum gradients
        _n (int): a positive integer used to track nesting of derivatives, for example
            if the nth-order derivative is requested.
        max_diff (int): If ``gradient_fn`` is a gradient transform, this option specifies
            the maximum number of derivatives to support. Increasing this value allows
            for higher order derivatives to be extracted, at the cost of additional
            (classical) computational overhead during the backwards pass.
    Returns:
        list[list[tf.Tensor]]: A nested list of tape results. Each element in
        the returned list corresponds in order to the provided tapes.
    """
    parameters = []
    params_unwrapped = []
    for i, tape in enumerate(tapes):
        # store the trainable parameters
        params = tape.get_parameters(trainable_only=False)
        tape.trainable_params = qml.math.get_trainable_indices(params)
        parameters += [p for i, p in enumerate(params) if i in tape.trainable_params]
        # store all unwrapped parameters (plain numpy/python values) so the
        # backward pass can re-populate the tapes without TF tensors
        params_unwrapped.append(
            [i.numpy() if isinstance(i, (tf.Variable, tf.Tensor)) else i for i in params]
        )
    with qml.tape.Unwrap(*tapes, set_trainable=False):
        # Forward pass: execute the tapes
        res, jacs = execute_fn(tapes, **gradient_kwargs)
    for i, tape in enumerate(tapes):
        # convert output to TensorFlow tensors
        r = np.hstack(res[i]) if res[i].dtype == np.dtype("object") else res[i]
        res[i] = tf.convert_to_tensor(r)
    # ``parameters`` only ties the trainable TF variables into the graph;
    # the forward results were already computed above.
    @tf.custom_gradient
    def _execute(*parameters):  # pylint:disable=unused-argument
        def grad_fn(*dy, **tfkwargs):
            """Returns the vector-Jacobian product with given
            parameter values and output gradient dy"""
            dy = [qml.math.T(d) for d in dy]
            if jacs:
                # Jacobians were computed on the forward pass (mode="forward")
                # No additional quantum evaluations needed; simply compute the VJPs directly.
                vjps = _compute_vjp(dy, jacs)
            else:
                # Need to compute the Jacobians on the backward pass (accumulation="backward")
                if isinstance(gradient_fn, qml.gradients.gradient_transform):
                    # Gradient function is a gradient transform.
                    # Generate and execute the required gradient tapes
                    if _n == max_diff or not context.executing_eagerly():
                        with qml.tape.Unwrap(*tapes, params=params_unwrapped, set_trainable=False):
                            vjp_tapes, processing_fn = qml.gradients.batch_vjp(
                                tapes,
                                dy,
                                gradient_fn,
                                reduction=lambda vjps, x: vjps.extend(qml.math.unstack(x)),
                                gradient_kwargs=gradient_kwargs,
                            )
                            vjps = processing_fn(execute_fn(vjp_tapes)[0])
                    else:
                        vjp_tapes, processing_fn = qml.gradients.batch_vjp(
                            tapes,
                            dy,
                            gradient_fn,
                            reduction="extend",
                            gradient_kwargs=gradient_kwargs,
                        )
                        # This is where the magic happens. Note that we call ``execute``.
                        # This recursion, coupled with the fact that the gradient transforms
                        # are differentiable, allows for arbitrary order differentiation.
                        vjps = processing_fn(
                            execute(
                                vjp_tapes,
                                device,
                                execute_fn,
                                gradient_fn,
                                gradient_kwargs,
                                _n=_n + 1,
                                max_diff=max_diff,
                            )
                        )
                else:
                    # Gradient function is not a gradient transform
                    # (e.g., it might be a device method).
                    # Note that unlike the previous branch:
                    #
                    # - there is no recursion here
                    # - gradient_fn is not differentiable
                    #
                    # so we cannot support higher-order derivatives.
                    with qml.tape.Unwrap(*tapes, params=params_unwrapped, set_trainable=False):
                        vjps = _compute_vjp(dy, gradient_fn(tapes, **gradient_kwargs))
            # Keras passes a ``variables`` kwarg that must be echoed back.
            variables = tfkwargs.get("variables", None)
            return (vjps, variables) if variables is not None else vjps
        return res, grad_fn
    return _execute(*parameters)
| 43.261905 | 100 | 0.569895 |
import numpy as np
import tensorflow as tf
from tensorflow.python.eager import context
import pennylane as qml
def _compute_vjp(dy, jacs):
    """Compute the vector-Jacobian product dy @ jac for a list of output
    gradients and Jacobian matrices; returns the flattened list of VJPs."""
    vjps = []
    for d, jac in zip(dy, jacs):
        vjp = qml.gradients.compute_vjp(d, jac)
        if not context.executing_eagerly():
            # Graph mode needs per-parameter tensors, not one stacked tensor.
            vjp = qml.math.unstack(vjp)
        vjps.extend(vjp)
    return vjps
def execute(tapes, device, execute_fn, gradient_fn, gradient_kwargs, _n=1, max_diff=2):
    """Execute a batch of tapes with TensorFlow parameters on a device.

    ``execute_fn`` performs the forward pass and must return
    ``(results, jacobians)``; when ``jacobians`` is empty, ``gradient_fn``
    is used on the backward pass instead. ``_n`` tracks derivative nesting
    and ``max_diff`` caps it when ``gradient_fn`` is a gradient transform.
    Returns one TF tensor of results per tape, wired into TF autodiff via
    ``tf.custom_gradient``.
    """
    parameters = []
    params_unwrapped = []
    for i, tape in enumerate(tapes):
        # store the trainable parameters
        params = tape.get_parameters(trainable_only=False)
        tape.trainable_params = qml.math.get_trainable_indices(params)
        parameters += [p for i, p in enumerate(params) if i in tape.trainable_params]
        # store all unwrapped parameters
        params_unwrapped.append(
            [i.numpy() if isinstance(i, (tf.Variable, tf.Tensor)) else i for i in params]
        )
    with qml.tape.Unwrap(*tapes, set_trainable=False):
        # Forward pass: execute the tapes
        res, jacs = execute_fn(tapes, **gradient_kwargs)
    for i, tape in enumerate(tapes):
        # convert output to TensorFlow tensors
        r = np.hstack(res[i]) if res[i].dtype == np.dtype("object") else res[i]
        res[i] = tf.convert_to_tensor(r)
    @tf.custom_gradient
    def _execute(*parameters): # pylint:disable=unused-argument
        def grad_fn(*dy, **tfkwargs):
            """Return the vector-Jacobian product for output gradients ``dy``."""
            dy = [qml.math.T(d) for d in dy]
            if jacs:
                # Jacobians were computed on the forward pass (mode="forward")
                # No additional quantum evaluations needed; simply compute the VJPs directly.
                vjps = _compute_vjp(dy, jacs)
            else:
                # Need to compute the Jacobians on the backward pass (accumulation="backward")
                if isinstance(gradient_fn, qml.gradients.gradient_transform):
                    # Gradient function is a gradient transform.
                    # Generate and execute the required gradient tapes
                    if _n == max_diff or not context.executing_eagerly():
                        with qml.tape.Unwrap(*tapes, params=params_unwrapped, set_trainable=False):
                            vjp_tapes, processing_fn = qml.gradients.batch_vjp(
                                tapes,
                                dy,
                                gradient_fn,
                                reduction=lambda vjps, x: vjps.extend(qml.math.unstack(x)),
                                gradient_kwargs=gradient_kwargs,
                            )
                            vjps = processing_fn(execute_fn(vjp_tapes)[0])
                    else:
                        vjp_tapes, processing_fn = qml.gradients.batch_vjp(
                            tapes,
                            dy,
                            gradient_fn,
                            reduction="extend",
                            gradient_kwargs=gradient_kwargs,
                        )
                        # This is where the magic happens. Note that we call ``execute``.
                        # This recursion, coupled with the fact that the gradient transforms
                        # are differentiable, allows for arbitrary order differentiation.
                        vjps = processing_fn(
                            execute(
                                vjp_tapes,
                                device,
                                execute_fn,
                                gradient_fn,
                                gradient_kwargs,
                                _n=_n + 1,
                                max_diff=max_diff,
                            )
                        )
                else:
                    # Gradient function is not a gradient transform
                    # (e.g., it might be a device method).
                    # Note that unlike the previous branch:
                    #
                    # - there is no recursion here
                    # - gradient_fn is not differentiable
                    #
                    # so we cannot support higher-order derivatives.
                    with qml.tape.Unwrap(*tapes, params=params_unwrapped, set_trainable=False):
                        vjps = _compute_vjp(dy, gradient_fn(tapes, **gradient_kwargs))
            # Keras passes a ``variables`` kwarg that must be echoed back.
            variables = tfkwargs.get("variables", None)
            return (vjps, variables) if variables is not None else vjps
        return res, grad_fn
    return _execute(*parameters)
| true | true |
1c459309ba1a81398fc095a2ca8f6f6f4053e120 | 990 | py | Python | linkv_sdk/config/bindings/ffi.py | linkv-io/python2-sdk | 45699372ffcf6e3e745d870cfca004fc885ee15f | [
"Apache-2.0"
] | null | null | null | linkv_sdk/config/bindings/ffi.py | linkv-io/python2-sdk | 45699372ffcf6e3e745d870cfca004fc885ee15f | [
"Apache-2.0"
] | null | null | null | linkv_sdk/config/bindings/ffi.py | linkv-io/python2-sdk | 45699372ffcf6e3e745d870cfca004fc885ee15f | [
"Apache-2.0"
] | null | null | null | # -*- coding: UTF-8 -*-
import platform
import os
from requests import get
from tempfile import gettempdir
from ctypes import CDLL
def _platform_file(name):
ext = ''
if platform.uname()[0] == "Linux":
ext = 'so'
elif platform.uname()[0] == "Darwin":
ext = 'dylib'
elif platform.uname()[0] == "Windows":
ext = 'dll'
return "lib{}.{}".format(name, ext)
def dlopen_platform_specific(name, path):
    """Load the platform-specific shared library *name* via ctypes.

    :param name: base library name (without ``lib`` prefix or extension)
    :param path: directory holding the library; the system temp directory
        is used when *path* is the empty string
    :return: a ``ctypes.CDLL`` handle for the loaded library
    """
    return CDLL('{}/{}'.format(gettempdir() if path == "" else path, _platform_file(name)))
# Base URL from which the native server libraries are downloaded.
DownloadURL = 'http://dl.linkv.fun/static/server'
def download(name, path, version):
    """Fetch the shared library *name* at *version* into *path*.

    Skips the download when the file already exists locally.

    :param name: base library name passed to ``_platform_file``
    :param path: target directory; empty string means the system temp dir
    :param version: version segment of the download URL
    :return: True on success or if the file already exists, False when the
        server does not answer with HTTP 200
    """
    filepath = '{}/{}'.format(gettempdir() if path == "" else path, _platform_file(name))
    if os.path.exists(filepath):
        return True
    r = get('{}/{}/{}'.format(DownloadURL, version, _platform_file(name)), stream=True)
    try:
        if r.status_code != 200:
            # Bug fix: the response object was previously leaked on this path
            # (``r.close()`` was only reached after a successful write).
            return False
        with open(filepath, 'wb') as f:
            # Stream in chunks instead of buffering the whole body in memory.
            for chunk in r.iter_content(chunk_size=8192):
                if chunk:
                    f.write(chunk)
    finally:
        r.close()
    return True
| 22 | 91 | 0.611111 |
import platform
import os
from requests import get
from tempfile import gettempdir
from ctypes import CDLL
def _platform_file(name):
ext = ''
if platform.uname()[0] == "Linux":
ext = 'so'
elif platform.uname()[0] == "Darwin":
ext = 'dylib'
elif platform.uname()[0] == "Windows":
ext = 'dll'
return "lib{}.{}".format(name, ext)
def dlopen_platform_specific(name, path):
return CDLL('{}/{}'.format(gettempdir() if path == "" else path, _platform_file(name)))
DownloadURL = 'http://dl.linkv.fun/static/server'
def download(name, path, version):
filepath = '{}/{}'.format(gettempdir() if path == "" else path, _platform_file(name))
if os.path.exists(filepath):
return True
r = get('{}/{}/{}'.format(DownloadURL, version, _platform_file(name)))
if r.status_code != 200:
return False
with open(filepath, 'wb') as f:
f.write(r.content)
r.close()
return True
| true | true |
1c4595dae899b6160a00fb35d2139755cf007c2b | 2,254 | py | Python | backend/pyrogram/raw/functions/messages/get_attached_stickers.py | appheap/social-media-analyzer | 0f9da098bfb0b4f9eb38e0244aa3a168cf97d51c | [
"Apache-2.0"
] | 5 | 2021-09-11T22:01:15.000Z | 2022-03-16T21:33:42.000Z | backend/pyrogram/raw/functions/messages/get_attached_stickers.py | iamatlasss/social-media-analyzer | 429d1d2bbd8bfce80c50c5f8edda58f87ace668d | [
"Apache-2.0"
] | null | null | null | backend/pyrogram/raw/functions/messages/get_attached_stickers.py | iamatlasss/social-media-analyzer | 429d1d2bbd8bfce80c50c5f8edda58f87ace668d | [
"Apache-2.0"
] | 3 | 2022-01-18T11:06:22.000Z | 2022-02-26T13:39:28.000Z | # Pyrogram - Telegram MTProto API Client Library for Python
# Copyright (C) 2017-2021 Dan <https://github.com/delivrance>
#
# This file is part of Pyrogram.
#
# Pyrogram is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyrogram is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pyrogram. If not, see <http://www.gnu.org/licenses/>.
from io import BytesIO
from pyrogram.raw.core.primitives import Int, Long, Int128, Int256, Bool, Bytes, String, Double, Vector
from pyrogram.raw.core import TLObject
from pyrogram import raw
from typing import List, Union, Any
# # # # # # # # # # # # # # # # # # # # # # # #
# !!! WARNING !!! #
# This is a generated file! #
# All changes made in this file will be lost! #
# # # # # # # # # # # # # # # # # # # # # # # #
class GetAttachedStickers(TLObject):  # type: ignore
    """Telegram API method.
    Details:
        - Layer: ``123``
        - ID: ``0xcc5b67cc``
    Parameters:
        media: :obj:`InputStickeredMedia <pyrogram.raw.base.InputStickeredMedia>`
    Returns:
        List of :obj:`StickerSetCovered <pyrogram.raw.base.StickerSetCovered>`
    """
    __slots__: List[str] = ["media"]
    # TL constructor id for this method (matches the Layer 123 schema).
    ID = 0xcc5b67cc
    QUALNAME = "functions.messages.GetAttachedStickers"
    def __init__(self, *, media: "raw.base.InputStickeredMedia") -> None:
        self.media = media  # InputStickeredMedia
    @staticmethod
    def read(data: BytesIO, *args: Any) -> "GetAttachedStickers":
        """Deserialize this object from a TL byte stream."""
        # No flags
        media = TLObject.read(data)
        return GetAttachedStickers(media=media)
    def write(self) -> bytes:
        """Serialize this object to TL bytes: constructor id, then media."""
        data = BytesIO()
        data.write(Int(self.ID, False))
        # No flags
        data.write(self.media.write())
        return data.getvalue()
| 31.305556 | 103 | 0.645519 |
from io import BytesIO
from pyrogram.raw.core.primitives import Int, Long, Int128, Int256, Bool, Bytes, String, Double, Vector
from pyrogram.raw.core import TLObject
from pyrogram import raw
from typing import List, Union, Any
| true | true |
1c459686e0c6196509dccaf4fcbecf5fdc393fc7 | 41,713 | py | Python | xform/models.py | alisonamerico/Django-XForm | ad2e96455307b57ef3c485a006db478fe4352a36 | [
"MIT"
] | 3 | 2019-07-25T14:46:14.000Z | 2020-12-14T22:43:46.000Z | xform/models.py | alisonamerico/Django-XForm | ad2e96455307b57ef3c485a006db478fe4352a36 | [
"MIT"
] | 4 | 2019-09-04T17:39:04.000Z | 2021-11-05T23:14:58.000Z | xform/models.py | alisonamerico/Django-XForm | ad2e96455307b57ef3c485a006db478fe4352a36 | [
"MIT"
] | 1 | 2021-11-05T23:05:48.000Z | 2021-11-05T23:05:48.000Z | import csv
import json
import mimetypes
import os
import random
import re
import requests
import xlrd
from contextlib import closing
from hashlib import md5
from io import BytesIO
from io import StringIO
from pyxform import SurveyElementBuilder
from pyxform.builder import create_survey_element_from_dict
from pyxform.utils import has_external_choices
from pyxform.xform2json import create_survey_element_from_xml
from pyxform.xls2json import parse_file_to_json
from xml.dom import Node
from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.fields import GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.contrib.gis.db import models
from django.contrib.gis.geos import GeometryCollection, Point
from django.core.exceptions import ValidationError
from django.core.files.temp import NamedTemporaryFile
from django.core.files.uploadedfile import InMemoryUploadedFile
from django.core.validators import URLValidator
from django.db.models.signals import post_save
from django.utils import timezone
from .tags import (
UUID, ID, ATTACHMENTS, STATUS, NOTES, VERSION, DURATION, XFORM_ID_STRING,
XFORM_ID, GEOLOCATION, SUBMITTED_BY, SUBMISSION_TIME, TOTAL_MEDIA,
MEDIA_COUNT, MEDIA_ALL_RECEIVED, EDITED, LAST_EDITED, KNOWN_MEDIA_TYPES,
START, END
)
from .utils import (
get_values_matching_key, get_uuid_from_xml, set_uuid, XFormInstanceParser,
clean_and_parse_xml, get_numeric_fields, numeric_checker,
_get_tag_or_element_type_xpath, calculate_duration
)
if 'postg' in settings.DATABASES['default']['ENGINE']:
from django.contrib.postgres.fields import JSONField
else:
from jsonfield import JSONField
# Bytes fetched per iteration when streaming remote media files (create_media).
CHUNK_SIZE = 1024
# Maximum length stored in XForm.title (matches the model field max_length).
XFORM_TITLE_LENGTH = 255
# Extracts the form title from the XForm XML document.
title_pattern = re.compile(r"<h:title>(.*?)</h:title>")
def contains_xml_invalid_char(text, invalids=('&', '>', '<')):
    """Check whether 'text' contains ANY invalid xml chars.

    :param text: string to scan
    :param invalids: iterable of forbidden substrings; defaults to the XML
        markup characters ``&``, ``>`` and ``<``
    :return: True if any invalid character occurs in *text*
    """
    # ``any`` replaces the original, obscure ``1 in [c in text ...]`` trick;
    # a tuple default avoids the shared-mutable-default pitfall.
    return any(c in text for c in invalids)
def convert_to_serializable_date(date):
    """Return the ISO-8601 string for date/datetime values.

    Values without an ``isoformat`` method are returned unchanged.
    """
    return date.isoformat() if hasattr(date, 'isoformat') else date
def _get_attachments_from_instance(instance):
attachments = []
for a in instance.attachments.all():
attachment = dict()
attachment['download_url'] = a.media_file.url
attachment['small_download_url'] = a.media_file.url
attachment['medium_download_url'] = a.media_file.url
attachment['mimetype'] = a.mimetype
attachment['filename'] = a.media_file.name
attachment['name'] = a.name
attachment['instance'] = a.instance.pk
attachment['xform'] = instance.xform.id
attachment['id'] = a.id
attachments.append(attachment)
return attachments
def get_default_content_type():
    """Return the ContentType id for the ``xform.XForm`` model.

    Creates the ContentType row if it does not exist yet; used as the
    default for ``MetaData.content_type``.
    """
    content_object, created = ContentType.objects.get_or_create(
        app_label="xform", model='xform')
    return content_object.id
def upload_to(instance, filename):
    """Build the storage path for an uploaded file.

    Handles two callers:
      * ``XForm.xls`` uploads -> ``<username>/xls/<basename>``
      * ``Attachment.media_file`` uploads ->
        ``<form owner>/attachments/<xform id>_<id_string>/<basename>``

    :param instance: an XForm (has ``.user``) or an Attachment
        (reaches the form owner via ``.instance.xform.user``)
    :param filename: original file name; only its basename is kept
    :return: the relative storage path as a string
    """
    try:
        return os.path.join(
            instance.user.username, 'xls',
            os.path.split(filename)[1])
    except AttributeError:
        # Not an XForm (no ``user`` attribute): treat it as an Attachment.
        # Narrowed from ``except Exception`` so unrelated errors still surface.
        folder = "{}_{}".format(instance.instance.xform.id,
                                instance.instance.xform.id_string)
        return os.path.join(
            instance.instance.xform.user.username, 'attachments', folder,
            os.path.split(filename)[1])
class XLSFormError(Exception):
    """Raised when an uploaded XLSForm/XForm definition is invalid."""
    pass
class FormInactiveError(Exception):
    """Raised when a submission targets a form that is not active."""
    pass
class XForm(models.Model):
    """An uploaded XLSForm/XForm definition and its derived metadata.

    Stores the original XLS file, the compiled XML/JSON representations,
    and bookkeeping fields (submission counts, hash, version).
    """
    dynamic_choices = True
    xls = models.FileField(upload_to=upload_to, null=True)
    json = models.TextField(default=u'')
    description = models.TextField(default=u'', null=True, blank=True)
    xml = models.TextField()
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL, related_name='xforms', null=True, on_delete=models.CASCADE)
    id_string = models.SlugField(
        editable=False,
        verbose_name="ID",
        max_length=100)
    title = models.CharField(editable=False, max_length=255)
    date_created = models.DateTimeField(auto_now_add=True)
    date_modified = models.DateTimeField(auto_now=True)
    last_submission_time = models.DateTimeField(blank=True, null=True)
    has_start_time = models.BooleanField(default=False)
    uuid = models.CharField(max_length=36, default=u'')
    # Regexes used to locate the main <instance> node and extract the form
    # id from the XForm XML.
    uuid_regex = re.compile(r'(<instance>.*?id="[^"]+">)(.*</instance>)(.*)',
                            re.DOTALL)
    instance_id_regex = re.compile(r'<instance>.*?id="([^"]+)".*</instance>',
                                   re.DOTALL)
    instances_with_geopoints = models.BooleanField(default=False)
    num_of_submissions = models.IntegerField(default=0)
    version = models.CharField(
        max_length=255, null=True, blank=True)
    created_by = models.ForeignKey(
        settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.CASCADE)
    metadata_set = GenericRelation(
        'MetaData',
        content_type_field='content_type_id',
        object_id_field="object_id")
    has_hxl_support = models.BooleanField(default=False)
    last_updated_at = models.DateTimeField(auto_now=True)
    hash = models.CharField("Hash", max_length=36, blank=True, null=True,
                            default=None)
    class Meta:
        unique_together = ("user", "id_string",)
        verbose_name = "XForm"
        verbose_name_plural = "XForms"
        ordering = ("pk", )
    def get_osm_survey_xpaths(self):
        """
        Returns abbreviated_xpath for OSM question types in the survey.
        """
        return [
            elem.get_abbreviated_xpath()
            for elem in self.get_survey_elements_of_type('osm')]
    def get_media_survey_xpaths(self):
        """Return abbreviated xpaths of every media question in the survey."""
        return [
            e.get_abbreviated_xpath()
            for e in sum([
                self.get_survey_elements_of_type(m) for m in KNOWN_MEDIA_TYPES
            ], [])
        ]
    def file_name(self):
        """Return the XML file name derived from the form's id_string."""
        return self.id_string + ".xml"
    def get_survey_elements_of_type(self, element_type):
        """Return all survey elements whose ``type`` equals *element_type*."""
        return [
            e for e in self.get_survey_elements() if e.type == element_type
        ]
    def _set_uuid_in_xml(self, file_name=None):
        """
        Add bind to automatically set UUID node in XML.
        """
        if not file_name:
            file_name = self.file_name()
        file_name, file_ext = os.path.splitext(file_name)
        doc = clean_and_parse_xml(self.xml)
        model_nodes = doc.getElementsByTagName("model")
        if len(model_nodes) != 1:
            raise Exception(u"xml contains multiple model nodes")
        model_node = model_nodes[0]
        instance_nodes = [
            node for node in model_node.childNodes
            if node.nodeType == Node.ELEMENT_NODE and
            node.tagName.lower() == "instance" and not node.hasAttribute("id")
        ]
        if len(instance_nodes) != 1:
            raise Exception("Multiple instance nodes without the id "
                            "attribute, can't tell which is the main one")
        instance_node = instance_nodes[0]
        # get the first child whose id attribute matches our id_string
        survey_nodes = [
            node for node in instance_node.childNodes
            if node.nodeType == Node.ELEMENT_NODE and
            (node.tagName == file_name or node.attributes.get('id'))
        ]
        if len(survey_nodes) != 1:
            raise Exception(
                "Multiple survey nodes with the id '%s'" % self.id_string)
        survey_node = survey_nodes[0]
        formhub_nodes = [
            n for n in survey_node.childNodes
            if n.nodeType == Node.ELEMENT_NODE and n.tagName == "formhub"
        ]
        if len(formhub_nodes) > 1:
            raise Exception(
                "Multiple formhub nodes within main instance node")
        elif len(formhub_nodes) == 1:
            formhub_node = formhub_nodes[0]
        else:
            # No <formhub> node yet: create one as the first child.
            formhub_node = survey_node.insertBefore(
                doc.createElement("formhub"), survey_node.firstChild)
        uuid_nodes = [
            node for node in formhub_node.childNodes
            if node.nodeType == Node.ELEMENT_NODE and node.tagName == "uuid"
        ]
        if len(uuid_nodes) == 0:
            formhub_node.appendChild(doc.createElement("uuid"))
        if len(formhub_nodes) == 0:
            # append the calculate bind node
            calculate_node = doc.createElement("bind")
            calculate_node.setAttribute(
                "nodeset", "/%s/formhub/uuid" % survey_node.tagName)
            calculate_node.setAttribute("type", "string")
            calculate_node.setAttribute("calculate", "'%s'" % self.uuid)
            model_node.appendChild(calculate_node)
        self.xml = doc.toprettyxml(indent=" ", encoding='utf-8')
        # hack
        # http://ronrothman.com/public/leftbraned/xml-dom-minidom-toprettyxml-\
        # and-silly-whitespace/
        text_re = re.compile('(>)\n\s*(\s[^<>\s].*?)\n\s*(\s</)', re.DOTALL)
        output_re = re.compile('\n.*(<output.*>)\n( )*')
        pretty_xml = text_re.sub(lambda m: ''.join(m.group(1, 2, 3)),
                                 self.xml.decode('utf-8'))
        inline_output = output_re.sub('\g<1>', pretty_xml)
        inline_output = re.compile('<label>\s*\n*\s*\n*\s*</label>').sub(
            '<label></label>', inline_output)
        self.xml = inline_output
    def _mark_start_time_boolean(self):
        # Flag whether the form preloads a "start" timestamp.
        starttime_substring = 'jr:preloadParams="start"'
        if self.xml.find(starttime_substring) != -1:
            self.has_start_time = True
        else:
            self.has_start_time = False
    def _id_string_already_exists_in_account(self, id_string):
        """Return True if any XForm already uses *id_string* (case-insensitive)."""
        try:
            XForm.objects.get(id_string__iexact=id_string)
        except XForm.DoesNotExist:
            return False
        return True
    def get_unique_id_string(self, id_string, count=0):
        # used to generate a new id_string for new data_dictionary object if
        # id_string already existed
        if self._id_string_already_exists_in_account(id_string):
            if count != 0:
                if re.match(r'\w+_\d+$', id_string):
                    a = id_string.split('_')
                    id_string = "_".join(a[:-1])
            count += 1
            id_string = "{}_{}".format(id_string, count)
            return self.get_unique_id_string(id_string, count)
        return id_string
    def _set_title(self):
        """Extract/sync the form title between ``self.title`` and the XML."""
        xml = re.sub(r"\s+", " ", self.xml)
        matches = title_pattern.findall(xml)
        if len(matches) != 1:
            raise XLSFormError(("There should be a single title."), matches)
        if matches:
            title_xml = matches[0][:XFORM_TITLE_LENGTH]
        else:
            title_xml = self.title[:XFORM_TITLE_LENGTH] if self.title else ''
        if self.title and title_xml != self.title:
            # self.title wins: rewrite the XML's <h:title> and refresh hash.
            title_xml = self.title[:XFORM_TITLE_LENGTH]
            if isinstance(self.xml, bytes):
                self.xml = self.xml.decode('utf-8')
            self.xml = title_pattern.sub(u"<h:title>%s</h:title>" % title_xml,
                                         self.xml)
            self._set_hash()
        if contains_xml_invalid_char(title_xml):
            raise XLSFormError(
                "Title shouldn't have any invalid xml "
                "characters ('>' '&' '<')"
            )
        self.title = title_xml
    def get_hash(self):
        """Return the ``md5:`` prefixed hash of the current XML."""
        return u'md5:%s' % md5(self.xml.encode('utf8')).hexdigest()
    def get_random_hash(self):
        """Return a salted (non-deterministic) hash of the XML."""
        return u'md5:%s' % md5(
            ("%s-%s" % (
                self.xml,
                random.randint(0, 25101991)
            )).encode('utf8')
        ).hexdigest()
    @property
    def random_hash(self):
        return self.get_random_hash()
    def _set_hash(self):
        self.hash = self.get_hash()
    def _set_id_string(self):
        """Set ``id_string`` from the main instance node's id attribute."""
        matches = self.instance_id_regex.findall(self.xml)
        if len(matches) != 1:
            raise XLSFormError("There should be a single id string.")
        self.id_string = matches[0]
    def save(self, *args, **kwargs):
        """Persist the form, keeping title/id_string/hash consistent.

        Raises XLSFormError when the id_string changes on a form that
        already has submissions, or is not a valid slug in STRICT mode.
        """
        update_fields = kwargs.get('update_fields')
        if update_fields:
            kwargs['update_fields'] = list(
                set(list(update_fields) + ['date_modified']))
        if update_fields is None or 'title' in update_fields:
            self._set_title()
        if self.pk is None:
            self._set_hash()
        if update_fields is None or 'id_string' in update_fields:
            old_id_string = self.id_string
            self._set_id_string()
            # check if we have an existing id_string,
            # if so, the one must match but only if xform is NOT new
            if self.pk and old_id_string and old_id_string != self.id_string \
                    and self.num_of_submissions > 0:
                raise XLSFormError(
                    "Your updated form's id_string '%(new_id)s' must match "
                    "the existing forms' id_string '%(old_id)s'." % {
                        'new_id': self.id_string,
                        'old_id': old_id_string
                    })
            if getattr(settings, 'STRICT', True) and \
                    not re.search(r"^[\w-]+$", self.id_string):
                raise XLSFormError(
                    'In strict mode, the XForm ID must be a '
                    'valid slug and contain no spaces.')
        if 'skip_xls_read' in kwargs:
            # Consumed by DataDictionary.save(); Django's save() must not see it.
            del kwargs['skip_xls_read']
        super(XForm, self).save(*args, **kwargs)
    def get_survey(self):
        """Return (and cache) the pyxform survey object for this form.

        Builds from the stored JSON, falling back to the XML when the JSON
        cannot be parsed.
        """
        if not hasattr(self, "_survey"):
            try:
                builder = SurveyElementBuilder()
                self._survey = \
                    builder.create_survey_element_from_json(self.json)
            except ValueError:
                xml = bytes(bytearray(self.xml, encoding='utf-8'))
                self._survey = create_survey_element_from_xml(xml)
        return self._survey
    survey = property(get_survey)
    def get_survey_elements(self):
        """Iterate over all survey elements (depth-first descendants)."""
        return self.survey.iter_descendants()
    def geopoint_xpaths(self):
        """Return abbreviated xpaths of all geopoint questions."""
        survey_elements = self.get_survey_elements()
        return [
            e.get_abbreviated_xpath() for e in survey_elements
            if e.bind.get(u'type') == u'geopoint'
        ]
    def __str__(self):
        return self.id_string
def type_for_form(content_object, data_type):
    """Return the MetaData queryset of *data_type* attached to *content_object*."""
    content_type = ContentType.objects.get_for_model(content_object)
    return MetaData.objects.filter(object_id=content_object.id,
                                   content_type=content_type,
                                   data_type=data_type)
def is_valid_url(uri):
    """Return True if *uri* is a syntactically valid URL, False otherwise."""
    try:
        # Bug fix: ``URLValidator(uri)`` only CONSTRUCTED a validator (uri was
        # taken as the ``schemes`` argument) and never validated anything, so
        # every input was reported as valid.  The validator instance must be
        # called with the value.
        URLValidator()(uri)
    except ValidationError:
        return False
    return True
def create_media(media):
    """Download media link.

    When ``media.data_value`` is a valid URL, stream its content into an
    in-memory uploaded file and attach it to *media*.

    :param media: a MetaData object whose ``data_value`` may be a URL
    :return: the updated MetaData object, or None when the value is not a URL
    """
    if is_valid_url(media.data_value):
        filename = media.data_value.split('/')[-1]
        data_file = NamedTemporaryFile()
        content_type = mimetypes.guess_type(filename)
        with closing(requests.get(media.data_value, stream=True)) as r:
            for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
                if chunk:
                    data_file.write(chunk)
        # Determine the downloaded size, then rewind for later reads.
        data_file.seek(os.SEEK_SET, os.SEEK_END)
        size = os.path.getsize(data_file.name)
        data_file.seek(os.SEEK_SET)
        media.data_value = filename
        media.data_file = InMemoryUploadedFile(
            data_file, 'data_file', filename, content_type,
            size, charset=None)
        return media
    return None
def media_resources(media_list, download=False):
    """List of MetaData objects of type media.

    :param media_list: list of MetaData objects of type ``media``
    :param download: boolean, when True downloads media files when
        ``media.data_value`` is a valid url and no file is stored yet
    :return: a list of MetaData objects
    """
    resolved = []
    for item in media_list:
        needs_fetch = download and item.data_file.name == ''
        if not needs_fetch:
            resolved.append(item)
            continue
        fetched = create_media(item)
        if fetched:
            resolved.append(fetched)
    return resolved
def meta_data_upload_to(instance, filename):
    """Build the storage path for a MetaData file upload.

    Media files land in ``<username>/formid-media/``; everything else in
    ``<username>/docs/``.
    """
    owner = instance.content_object.user
    if owner is None and instance.content_type.model == 'instance':
        # Submissions have no direct owner; fall back to the form's owner.
        username = instance.content_object.xform.user.username
    else:
        username = owner.username
    subdir = 'formid-media' if instance.data_type == 'media' else 'docs'
    return os.path.join(username, subdir, filename)
class MetaData(models.Model):
    """Arbitrary metadata (media files, URLs, docs) attached to an object.

    Uses a generic foreign key so metadata can hang off an XForm or, per
    ``meta_data_upload_to``, an Instance.
    """
    data_type = models.CharField(max_length=255)
    data_value = models.CharField(max_length=255)
    data_file = models.FileField(
        upload_to=meta_data_upload_to, blank=True, null=True)
    data_file_type = models.CharField(max_length=255, blank=True, null=True)
    file_hash = models.CharField(max_length=50, blank=True, null=True)
    date_created = models.DateTimeField(null=True, auto_now_add=True)
    date_modified = models.DateTimeField(null=True, auto_now=True)
    deleted_at = models.DateTimeField(null=True, default=None)
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE,
                                     default=get_default_content_type)
    object_id = models.PositiveIntegerField(null=True, blank=True)
    content_object = GenericForeignKey('content_type', 'object_id')
    objects = models.Manager()
    class Meta:
        unique_together = ('object_id', 'data_type', 'data_value',
                           'content_type')
    def __str__(self):
        return self.data_value
    def file(self, username=None):
        """Fetch and cache the remote file referenced by ``data_value``.

        :param username: optional query parameter appended to the URL
        """
        if hasattr(self, '_file'):
            return self._file
        # Build the final URL (with the username query param) without sending.
        url = requests.Request(
            'GET', self.data_value, params={
                'username': username
            }
        ).prepare().url
        self._file = MetaData.get_file(url)
        return self._file
    @staticmethod
    def media_upload(content_object, data_file=None, download=False):
        """Store *data_file* as 'media' metadata for *content_object*.

        Only files whose content type is in
        ``settings.XFORM_SUPPORTED_MEDIA_UPLOAD_TYPES`` are accepted.
        Returns the object's media MetaData list (see ``media_resources``)
        when a file was provided; returns None otherwise.
        """
        data_type = 'media'
        if data_file:
            allowed_types = settings.XFORM_SUPPORTED_MEDIA_UPLOAD_TYPES
            data_content_type = data_file.content_type \
                if data_file.content_type in allowed_types else \
                mimetypes.guess_type(data_file.name)[0]
            if data_content_type in allowed_types:
                content_type = ContentType.objects.get_for_model(
                    content_object)
                media, created = MetaData.objects.update_or_create(
                    data_type=data_type,
                    content_type=content_type,
                    object_id=content_object.id,
                    data_value=data_file.name,
                    defaults={
                        'data_file': data_file,
                        'data_file_type': data_content_type
                    })
            return media_resources(
                type_for_form(content_object, data_type), download)
    @staticmethod
    def get_md5(data_file):
        """Return the ``md5:`` prefixed digest of *data_file*, read in chunks."""
        hash_md5 = md5()
        for chunk in iter(lambda: data_file.read(4096), b""):
            hash_md5.update(chunk)
        return 'md5:%s' % hash_md5.hexdigest()
    @staticmethod
    def get_file(url):
        """Download *url* and wrap the response body as an uploaded file.

        The file name is taken from the Content-Disposition header.
        """
        data_file = None
        output = BytesIO()
        def getsize(f):
            # Size of an in-memory file: read to the end, note the offset.
            f.seek(0)
            f.read()
            s = f.tell()
            f.seek(0)
            return s
        r = requests.get(url, allow_redirects=True)
        d = r.headers['content-disposition']
        fname = re.findall("filename=\"(.+)\"", d)[0]
        content_type = r.headers.get('content-type')
        output.write(r.content)
        size = getsize(output)
        data_file = InMemoryUploadedFile(
            file=output, name=fname,
            field_name=None,
            content_type=content_type,
            charset='utf-8', size=size
        )
        return data_file
    @staticmethod
    def add_url(content_object, url=None, download=False):
        """Store *url* as 'url' metadata for *content_object*.

        The URL is fetched once to learn its content type; unsupported or
        unreachable URLs yield None.
        """
        data_type = 'url'
        try:
            data_file = MetaData.get_file(url)
        except Exception:
            return None
        allowed_types = settings.XFORM_SUPPORTED_MEDIA_UPLOAD_TYPES
        data_content_type = data_file.content_type \
            if data_file.content_type in allowed_types else \
            mimetypes.guess_type(data_file.name)[0]
        if data_content_type in allowed_types:
            content_type = ContentType.objects.get_for_model(
                content_object)
            media, created = MetaData.objects.update_or_create(
                data_type=data_type,
                content_type=content_type,
                object_id=content_object.id,
                data_value=url,
                defaults={
                    'data_file': None,
                    'data_file_type': data_content_type
                })
        return media_resources(
            type_for_form(content_object, data_type), download)
    def save(self, *args, **kwargs):
        # Keep file_hash in sync with the stored file on every save.
        self._set_hash()
        super(MetaData, self).save(*args, **kwargs)
    @property
    def hash(self):
        # Prefer the stored hash; recompute lazily when it is missing.
        if self.file_hash is not None and self.file_hash != '':
            return self.file_hash
        else:
            return self._set_hash()
    def _set_hash(self):
        """Compute and store the md5 hash of ``data_file``.

        Returns None when there is no file, '' when it cannot be read,
        otherwise the ``md5:`` prefixed digest.
        """
        if not self.data_file:
            return None
        file_exists = self.data_file.storage.exists(self.data_file.name)
        if (file_exists and self.data_file.name != '') \
                or (not file_exists and self.data_file):
            try:
                self.data_file.seek(os.SEEK_SET)
            except IOError:
                return ''
            else:
                self.file_hash = 'md5:%s' % md5(
                    self.data_file.read()).hexdigest()
                return self.file_hash
        return ''
class Instance(models.Model):
    """
    Model representing a single submission to an XForm
    """
    json = JSONField(default=dict, null=False)
    xml = models.TextField()
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL, related_name='instances', null=True, on_delete=models.CASCADE)
    xform = models.ForeignKey('xform.XForm', null=False,
                              related_name='instances', on_delete=models.CASCADE)
    # shows when we first received this instance
    date_created = models.DateTimeField(auto_now_add=True)
    # this will end up representing "date last parsed"
    date_modified = models.DateTimeField(auto_now=True)
    # this will be edited when we need to create a new InstanceHistory object
    last_edited = models.DateTimeField(null=True, default=None)
    # ODK keeps track of three statuses for an instance:
    # incomplete, submitted, complete
    # we add a fourth status: submitted_via_web
    status = models.CharField(max_length=20,
                              default=u'submitted_via_web')
    uuid = models.CharField(max_length=249, default=u'', db_index=True)
    version = models.CharField(max_length=255, null=True)
    # store a geographic objects associated with this instance
    geom = models.GeometryCollectionField(null=True)
    # Keep track of whether all media attachments have been received
    media_all_received = models.NullBooleanField(
        "Received All Media Attachemts",
        null=True,
        default=True)
    total_media = models.PositiveIntegerField("Total Media Attachments",
                                              null=True,
                                              default=0)
    media_count = models.PositiveIntegerField("Received Media Attachments",
                                              null=True,
                                              default=0)
    checksum = models.CharField(max_length=64, null=True, blank=True,
                                db_index=True)
    class Meta:
        unique_together = ('xform', 'uuid')
    def __str__(self):
        return "Status: %s" % self.status
    @property
    def point(self):
        # First point of the geometry collection, or None when empty.
        gc = self.geom
        if gc and len(gc):
            return gc[0]
    def get_duration(self):
        """Return the duration between the form's start and end timestamps."""
        data = self.get_dict()
        # pylint: disable=no-member
        start_name = _get_tag_or_element_type_xpath(self.xform, START)
        end_name = _get_tag_or_element_type_xpath(self.xform, END)
        start_time, end_time = data.get(start_name), data.get(end_name)
        return calculate_duration(start_time, end_time)
    @property
    def num_of_media(self):
        """
        Returns number of media attachments expected in the submission.
        """
        if not hasattr(self, '_num_of_media'):
            # pylint: disable=attribute-defined-outside-init
            self._num_of_media = len(self.get_expected_media())
        return self._num_of_media
    @property
    def attachments_count(self):
        # Count of distinct expected media files actually received.
        return len(set(self.attachments.filter(
            name__in=self.get_expected_media()
        ).values_list('name', flat=True)))
    def get_expected_media(self):
        """
        Returns a list of expected media files from the submission data.
        """
        if not hasattr(self, '_expected_media'):
            # pylint: disable=no-member
            data = self.get_dict()
            media_list = []
            if 'encryptedXmlFile' in data and self.xform.encrypted:
                media_list.append(data['encryptedXmlFile'])
                if 'media' in data:
                    # pylint: disable=no-member
                    media_list.extend([i['media/file'] for i in data['media']])
            else:
                media_xpaths = (self.xform.get_media_survey_xpaths() +
                                self.xform.get_osm_survey_xpaths())
                for media_xpath in media_xpaths:
                    media_list.extend(
                        get_values_matching_key(data, media_xpath))
            # pylint: disable=attribute-defined-outside-init
            self._expected_media = list(set(media_list))
        return self._expected_media
    def numeric_converter(self, json_dict, numeric_fields=None):
        """Recursively coerce string values of numeric fields to numbers.

        Mutates and returns *json_dict*; nested dicts and lists are handled.
        """
        if numeric_fields is None:
            # pylint: disable=no-member
            numeric_fields = get_numeric_fields(self.xform)
        for key, value in json_dict.items():
            if isinstance(value, (str, bytes)) and key in numeric_fields:
                converted_value = numeric_checker(value)
                if converted_value:
                    json_dict[key] = converted_value
            elif isinstance(value, dict):
                json_dict[key] = self.numeric_converter(
                    value, numeric_fields)
            elif isinstance(value, list):
                for k, v in enumerate(value):
                    if isinstance(v, (str, bytes)) and key in numeric_fields:
                        converted_value = numeric_checker(v)
                        if converted_value:
                            json_dict[key] = converted_value
                    elif isinstance(v, dict):
                        value[k] = self.numeric_converter(
                            v, numeric_fields)
        return json_dict
    def _set_geom(self):
        """Collect all geopoint answers into ``self.geom``.

        GPS values are "<lat> <lng> ..." strings; note Point takes (lng, lat).
        Bails out entirely on the first unparseable value.
        """
        # pylint: disable=no-member
        xform = self.xform
        geo_xpaths = xform.geopoint_xpaths()
        doc = self.get_dict()
        points = []
        if geo_xpaths:
            for xpath in geo_xpaths:
                for gps in get_values_matching_key(doc, xpath):
                    try:
                        geometry = [float(s) for s in gps.split()]
                        lat, lng = geometry[0:2]
                        points.append(Point(lng, lat))
                    except ValueError:
                        return
            if not xform.instances_with_geopoints and len(points):
                xform.instances_with_geopoints = True
                xform.save()
            self.geom = GeometryCollection(points)
    def _check_active(self, force):
        """Check that form is active and raise exception if not.
        :param force: Ignore restrictions on saving.
        """
        # pylint: disable=no-member
        # if not force and self.xform and not self.xform.downloadable:
        #     raise FormInactiveError()
        pass
    def _set_json(self):
        self.json = self.get_full_dict()
    def get_full_dict(self, load_existing=True):
        """Return the submission data dict enriched with system metadata.

        Metadata keys (uuid, id, status, duration, geolocation, media
        counters, edit flags) are only added once the row has a primary key.
        """
        doc = self.json or {} if load_existing else {}
        # Get latest dict
        doc = self.get_dict()
        # pylint: disable=no-member
        if self.id:
            doc.update({
                UUID: self.uuid,
                ID: self.id,
                # BAMBOO_DATASET_ID: self.xform.bamboo_dataset,
                ATTACHMENTS: _get_attachments_from_instance(self),
                STATUS: self.status,
                # TAGS: list(self.tags.names()),
                NOTES: [],
                VERSION: self.version,
                DURATION: self.get_duration(),
                XFORM_ID_STRING: self._parser.get_xform_id_string(),
                XFORM_ID: self.xform.pk,
                GEOLOCATION: [self.point.y, self.point.x] if self.point
                else [None, None],
                SUBMITTED_BY: self.user.username if self.user else None
            })
            # for osm in self.osm_data.all():
            #     doc.update(osm.get_tags_with_prefix())
            if not self.date_created:
                self.date_created = timezone.now()
            doc[SUBMISSION_TIME] = self.date_created.strftime(
                '%Y-%m-%dT%H:%M:%S')
            doc[TOTAL_MEDIA] = self.total_media
            doc[MEDIA_COUNT] = self.media_count
            doc[MEDIA_ALL_RECEIVED] = self.media_all_received
            edited = False
            if hasattr(self, 'last_edited'):
                edited = self.last_edited is not None
            doc[EDITED] = edited
            # Short-circuit idiom: only record LAST_EDITED when edited.
            edited and doc.update({
                LAST_EDITED: convert_to_serializable_date(self.last_edited)
            })
        return doc
    def get_dict(self, force_new=False, flat=True):
        """Return a python object representation of this instance's XML."""
        self._set_parser()
        instance_dict = self._parser.get_flat_dict_with_attributes() if flat \
            else self._parser.to_dict()
        return self.numeric_converter(instance_dict)
    def _set_survey_type(self):
        # NOTE(review): ``survey_type`` is not a declared field on this model
        # in the visible code — presumably consumed elsewhere; confirm.
        self.survey_type = self.get_root_node_name()
    def _set_parser(self):
        # Lazily build and cache the XML parser for this submission.
        if not hasattr(self, "_parser"):
            # pylint: disable=no-member
            self._parser = XFormInstanceParser(self.xml, self.xform)
    def get_root_node_name(self):
        self._set_parser()
        return self._parser.get_root_node_name()
    def _set_uuid(self):
        """Extract the uuid from the XML when not already set."""
        # pylint: disable=no-member, attribute-defined-outside-init
        if self.xml and not self.uuid:
            # pylint: disable=no-member
            uuid = get_uuid_from_xml(self.xml)
            if uuid is not None:
                self.uuid = uuid
        set_uuid(self)
    def save(self, *args, **kwargs):
        """Persist the submission after deriving geom/json/uuid from the XML.

        Accepts a non-Django ``force`` kwarg which is consumed here and
        forwarded to ``_check_active``.
        """
        force = kwargs.get('force')
        if force:
            del kwargs['force']
        # self._check_is_merged_dataset()
        self._check_active(force)
        self._set_geom()
        self._set_json()
        self._set_survey_type()
        self._set_uuid()
        # pylint: disable=no-member
        self.version = self.json.get(VERSION, self.xform.version)
        super(Instance, self).save(*args, **kwargs)
class Attachment(models.Model):
    """A media file attached to a submission (Instance)."""
    OSM = 'osm'
    instance = models.ForeignKey(
        Instance, related_name="attachments", on_delete=models.CASCADE)
    media_file = models.FileField(
        max_length=255, upload_to=upload_to)
    mimetype = models.CharField(
        max_length=100, null=False, blank=True, default='')
    extension = models.CharField(
        max_length=10, null=False, blank=False, default=u"non", db_index=True)
    date_created = models.DateTimeField(null=True, auto_now_add=True)
    date_modified = models.DateTimeField(null=True, auto_now=True)
    file_size = models.PositiveIntegerField(default=0)
    name = models.CharField(max_length=100, null=True, blank=True)
    class Meta:
        ordering = ("pk", )
    def save(self, *args, **kwargs):
        """Derive mimetype, size, name and extension from the file, then save."""
        if self.media_file and self.mimetype == '':
            # guess mimetype
            mimetype, encoding = mimetypes.guess_type(self.media_file.name)
            if mimetype:
                self.mimetype = mimetype
        if self.media_file and len(self.media_file.name) > 255:
            raise ValueError(
                "Length of the media file should be less or equal to 255")
        try:
            f_size = self.media_file.size
            if f_size:
                self.file_size = f_size
        except (OSError, AttributeError):
            # Size is best-effort; missing/unreadable files keep the default.
            pass
        try:
            self.name = self.filename
            self.extension = self.name.rsplit('.', 1)[1]
        except Exception:
            # Best-effort: files without an extension keep the defaults.
            pass
        super(Attachment, self).save(*args, **kwargs)
    @property
    def file_hash(self):
        # md5 hex digest of the stored file, or '' when it does not exist.
        if self.media_file.storage.exists(self.media_file.name):
            return u'%s' % md5(self.media_file.read()).hexdigest()
        return u''
    @property
    def filename(self):
        # Basename of the stored file, or None when there is no file.
        if self.media_file:
            return os.path.basename(self.media_file.name)
def is_newline_error(e):
    """
    Return True if e is a new line error based on the error text.
    Otherwise return False.
    """
    # Exact message csv raises when a file needs universal-newline mode.
    newline_error = u'new-line character seen in unquoted field - do you need'\
                    u' to open the file in universal-newline mode?'
    return newline_error == str(e)
def process_xlsform(xls, default_name):
    """
    Process XLSForm file and return the survey dictionary for the XLSForm.

    :param xls: uploaded XLSForm/CSV file object
    :param default_name: fallback form name forwarded to pyxform
    :return: the parsed survey dictionary
    """
    # FLOW Results package is a JSON file.
    file_object = None
    if xls.name.endswith('csv'):
        # a csv file gets closed in pyxform, make a copy
        xls.seek(0)
        file_object = BytesIO()
        file_object.write(xls.read())
        file_object.seek(0)
        xls.seek(0)
    try:
        # Bug fix: ``default_name`` was computed by callers but never
        # forwarded on the happy path (only the csv.Error retry passed it),
        # so pyxform could not fall back to it.
        return parse_file_to_json(
            xls.name, default_name=default_name,
            file_object=file_object or xls)
    except csv.Error as e:
        if is_newline_error(e):
            # Retry after normalising newlines (universal-newline mode).
            xls.seek(0)
            file_object = StringIO(
                u'\n'.join(xls.read().splitlines()))
            return parse_file_to_json(
                xls.name, default_name=default_name, file_object=file_object)
        raise e
def get_columns_with_hxl(survey_elements):
    '''
    Map xform field names to their ``instance::hxl`` values.

    :param survey_elements: survey elements of an xform (dict-like objects).
    :return: dict of ``{field name: hxl value}`` when elements are given;
        a falsy input is returned unchanged.
    '''
    if not survey_elements:
        return survey_elements
    hxl_columns = {}
    for element in survey_elements:
        instance_meta = element.get('instance')
        if instance_meta and 'hxl' in instance_meta:
            hxl_columns[element.get('name')] = instance_meta.get('hxl')
    return hxl_columns
def check_version_set(survey):
    """
    Ensure the survey carries a version.

    When the XLSForm did not set one, stamp the current UTC time in
    yyyymmddhhmm format and rebuild the survey element from json.
    """
    survey_json = json.loads(survey.to_json())
    if survey_json.get("version"):
        return survey
    # no version in the XLSForm: default to utc now and rebuild
    survey_json['version'] = timezone.now().strftime("%Y%m%d%H%M")
    builder = SurveyElementBuilder()
    return builder.create_survey_element_from_json(json.dumps(survey_json))
class DataDictionary(XForm):  # pylint: disable=too-many-instance-attributes
    """
    XForm subclass that rebuilds the derived fields (json, xml, version,
    title, uuid, hash) from an uploaded XLSForm on save.
    """
    def __init__(self, *args, **kwargs):
        # callable used by export code to select the instances to export
        self.instances_for_export = lambda d: d.instances.all()
        self.has_external_choices = False
        self._id_string_changed = False
        super(DataDictionary, self).__init__(*args, **kwargs)

    def __str__(self):
        return getattr(self, "id_string", "")

    def save(self, *args, **kwargs):
        """Parse the uploaded XLSForm (unless ``skip_xls_read`` is passed)
        and refresh all derived fields before delegating to XForm.save()."""
        skip_xls_read = kwargs.get('skip_xls_read')
        if self.xls and not skip_xls_read:
            # on update, keep the existing xml root node name as the default
            default_name = None \
                if not self.pk else self.survey.xml_instance().tagName
            survey_dict = process_xlsform(self.xls, default_name)
            if has_external_choices(survey_dict):
                self.has_external_choices = True
            survey = create_survey_element_from_dict(survey_dict)
            survey = check_version_set(survey)
            if get_columns_with_hxl(survey.get('children')):
                self.has_hxl_support = True
            # if form is being replaced, don't check for id_string uniqueness
            if self.pk is None:
                # new form: deduplicate the id_string within the account
                new_id_string = self.get_unique_id_string(
                    survey.get('id_string'))
                self._id_string_changed = \
                    new_id_string != survey.get('id_string')
                survey['id_string'] = new_id_string
            # For flow results packages use the user defined id/uuid
            elif self.id_string != survey.get('id_string'):
                # replacing an existing form must keep the same id_string
                raise XLSFormError(
                    ("Your updated form's id_string '%(new_id)s' must match "
                     "the existing forms' id_string '%(old_id)s'." % {
                         'new_id': survey.get('id_string'),
                         'old_id': self.id_string}))
            elif default_name and default_name != survey.get('name'):
                # keep the original root node name on replacement
                survey['name'] = default_name
            else:
                survey['id_string'] = self.id_string
            # refresh every field derived from the parsed survey
            self.json = survey.to_json()
            self.xml = survey.to_xml()
            self.version = survey.get('version')
            self.last_updated_at = timezone.now()
            self.title = survey.get('title')
            self._mark_start_time_boolean()
            set_uuid(self)
            self._set_uuid_in_xml()
            self._set_hash()
        if 'skip_xls_read' in kwargs:
            # not a Django save() kwarg; strip before delegating
            del kwargs['skip_xls_read']
        super(DataDictionary, self).save(*args, **kwargs)

    def file_name(self):
        # base name of the uploaded XLS file
        return os.path.split(self.xls.name)[-1]
def sheet_to_csv(xls_content, sheet_name):
    """Write a csv file of a specified sheet from an excel file.

    :param xls_content: Excel file contents (bytes).
    :param sheet_name: the name of the excel sheet to generate the csv from.
    :returns: a BytesIO csv file object.
    """
    workbook = xlrd.open_workbook(file_contents=xls_content)
    sheet = workbook.sheet_by_name(sheet_name)
    if not sheet or sheet.nrows < 2:
        raise Exception("Sheet <'%(sheet_name)s'> has no data." % {
            'sheet_name': sheet_name})
    csv_file = BytesIO()
    # NOTE(review): the stdlib csv.writer has no `encoding` argument — this
    # relies on `csv` being unicodecsv (or similar); confirm the import.
    writer = csv.writer(csv_file, encoding='utf-8', quoting=csv.QUOTE_ALL)
    # mask off columns whose header cell is empty
    mask = [v and len(v.strip()) > 0 for v in sheet.row_values(0)]
    header = [v for v, m in zip(sheet.row_values(0), mask) if m]
    writer.writerow(header)
    name_column = None
    try:
        # NOTE(review): index into the *filtered* header is used below as a
        # sheet column index; only safe while no empty headers precede 'name'
        name_column = header.index('name')
    except ValueError:
        pass
    integer_fields = False
    date_fields = False
    # bug fix: `if name_column:` skipped the scan whenever 'name' was the
    # first column (index 0); compare against None instead.
    if name_column is not None:
        for index in range(sheet.nrows):
            cell_type = sheet.cell_type(index, name_column)
            if cell_type == xlrd.XL_CELL_NUMBER:
                integer_fields = True
            elif cell_type == xlrd.XL_CELL_DATE:
                date_fields = True
    for row in range(1, sheet.nrows):
        if integer_fields or date_fields:
            # convert integers to string/datetime if name has numbers/dates
            row_values = []
            for index, val in enumerate(sheet.row_values(row)):
                cell_type = sheet.cell_type(row, index)
                if cell_type == xlrd.XL_CELL_NUMBER:
                    try:
                        val = str(
                            float(val) if (
                                float(val) > int(val)
                            ) else int(val)
                        )
                    except ValueError:
                        pass
                elif cell_type == xlrd.XL_CELL_DATE:
                    val = xlrd.xldate_as_datetime(
                        val, workbook.datemode).isoformat()
                row_values.append(val)
            writer.writerow([v for v, m in zip(row_values, mask) if m])
        else:
            writer.writerow(
                [v for v, m in zip(sheet.row_values(row), mask) if m])
    return csv_file
def set_object_permissions(sender, instance=None, created=False, **kwargs):
    """
    post_save handler for DataDictionary.

    The visible body handles external choices: when the saved form has an
    ``external_choices`` sheet, it is exported to csv and uploaded as the
    form's ``itemsets.csv`` media file.

    NOTE(review): despite the name, no object-permission assignment is
    visible here — presumably handled elsewhere; verify against the signal's
    other connections.
    """
    # seems the super is not called, have to get xform from here
    xform = XForm.objects.get(pk=instance.pk)
    if hasattr(instance, 'has_external_choices') \
            and instance.has_external_choices:
        instance.xls.seek(0)
        f = sheet_to_csv(instance.xls.read(), 'external_choices')
        # measure the generated csv, then rewind for the upload
        f.seek(0, os.SEEK_END)
        size = f.tell()
        f.seek(0)
        data_file = InMemoryUploadedFile(
            file=f,
            field_name='data_file',
            name='itemsets.csv',
            content_type='text/csv',
            size=size,
            charset=None
        )
        MetaData.media_upload(xform, data_file)
# regenerate itemsets.csv media whenever a DataDictionary is saved
post_save.connect(set_object_permissions, sender=DataDictionary,
                  dispatch_uid='xform_object_permissions')
| 34.818865 | 96 | 0.601995 | import csv
import json
import mimetypes
import os
import random
import re
import requests
import xlrd
from contextlib import closing
from hashlib import md5
from io import BytesIO
from io import StringIO
from pyxform import SurveyElementBuilder
from pyxform.builder import create_survey_element_from_dict
from pyxform.utils import has_external_choices
from pyxform.xform2json import create_survey_element_from_xml
from pyxform.xls2json import parse_file_to_json
from xml.dom import Node
from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.fields import GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.contrib.gis.db import models
from django.contrib.gis.geos import GeometryCollection, Point
from django.core.exceptions import ValidationError
from django.core.files.temp import NamedTemporaryFile
from django.core.files.uploadedfile import InMemoryUploadedFile
from django.core.validators import URLValidator
from django.db.models.signals import post_save
from django.utils import timezone
from .tags import (
UUID, ID, ATTACHMENTS, STATUS, NOTES, VERSION, DURATION, XFORM_ID_STRING,
XFORM_ID, GEOLOCATION, SUBMITTED_BY, SUBMISSION_TIME, TOTAL_MEDIA,
MEDIA_COUNT, MEDIA_ALL_RECEIVED, EDITED, LAST_EDITED, KNOWN_MEDIA_TYPES,
START, END
)
from .utils import (
get_values_matching_key, get_uuid_from_xml, set_uuid, XFormInstanceParser,
clean_and_parse_xml, get_numeric_fields, numeric_checker,
_get_tag_or_element_type_xpath, calculate_duration
)
if 'postg' in settings.DATABASES['default']['ENGINE']:
from django.contrib.postgres.fields import JSONField
else:
from jsonfield import JSONField
CHUNK_SIZE = 1024
XFORM_TITLE_LENGTH = 255
title_pattern = re.compile(r"<h:title>(.*?)</h:title>")
def contains_xml_invalid_char(text, invalids=['&', '>', '<']):
    """Return True when *text* contains any xml-invalid character.

    The default list is never mutated, so the mutable default is safe;
    kept as-is for signature compatibility.
    """
    # was the cryptic `1 in [c in text for c in invalids]`
    return any(c in text for c in invalids)
def convert_to_serializable_date(date):
    """Return ISO-8601 text for date/datetime-like values; pass through
    anything without an ``isoformat`` method unchanged."""
    isoformat = getattr(date, 'isoformat', None)
    if isoformat is None:
        return date
    return isoformat()
def _get_attachments_from_instance(instance):
attachments = []
for a in instance.attachments.all():
attachment = dict()
attachment['download_url'] = a.media_file.url
attachment['small_download_url'] = a.media_file.url
attachment['medium_download_url'] = a.media_file.url
attachment['mimetype'] = a.mimetype
attachment['filename'] = a.media_file.name
attachment['name'] = a.name
attachment['instance'] = a.instance.pk
attachment['xform'] = instance.xform.id
attachment['id'] = a.id
attachments.append(attachment)
return attachments
def get_default_content_type():
content_object, created = ContentType.objects.get_or_create(
app_label="xform", model='xform')
return content_object.id
def upload_to(instance, filename):
    """
    Build the storage path for an uploaded file.

    XForm XLS files go to ``<username>/xls/<name>``; Attachment media goes
    to ``<username>/attachments/<xform id>_<id_string>/<name>``.
    """
    try:
        return os.path.join(
            instance.user.username, 'xls',
            os.path.split(filename)[1])
    except AttributeError:
        # not an XForm (was a blanket `except Exception`): Attachments
        # reach the owning user via instance.instance.xform
        folder = "{}_{}".format(instance.instance.xform.id,
                                instance.instance.xform.id_string)
        return os.path.join(
            instance.instance.xform.user.username, 'attachments', folder,
            os.path.split(filename)[1])
class XLSFormError(Exception):
    """Raised when an uploaded XLSForm is invalid or inconsistent."""
    pass
class FormInactiveError(Exception):
    """Raised on submission to a form that is not active — TODO confirm;
    no raise site is visible in this chunk."""
    pass
class XForm(models.Model):
dynamic_choices = True
xls = models.FileField(upload_to=upload_to, null=True)
json = models.TextField(default=u'')
description = models.TextField(default=u'', null=True, blank=True)
xml = models.TextField()
user = models.ForeignKey(
settings.AUTH_USER_MODEL, related_name='xforms', null=True, on_delete=models.CASCADE)
id_string = models.SlugField(
editable=False,
verbose_name="ID",
max_length=100)
title = models.CharField(editable=False, max_length=255)
date_created = models.DateTimeField(auto_now_add=True)
date_modified = models.DateTimeField(auto_now=True)
last_submission_time = models.DateTimeField(blank=True, null=True)
has_start_time = models.BooleanField(default=False)
uuid = models.CharField(max_length=36, default=u'')
uuid_regex = re.compile(r'(<instance>.*?id="[^"]+">)(.*</instance>)(.*)',
re.DOTALL)
instance_id_regex = re.compile(r'<instance>.*?id="([^"]+)".*</instance>',
re.DOTALL)
instances_with_geopoints = models.BooleanField(default=False)
num_of_submissions = models.IntegerField(default=0)
version = models.CharField(
max_length=255, null=True, blank=True)
created_by = models.ForeignKey(
settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.CASCADE)
metadata_set = GenericRelation(
'MetaData',
content_type_field='content_type_id',
object_id_field="object_id")
has_hxl_support = models.BooleanField(default=False)
last_updated_at = models.DateTimeField(auto_now=True)
hash = models.CharField("Hash", max_length=36, blank=True, null=True,
default=None)
class Meta:
unique_together = ("user", "id_string",)
verbose_name = "XForm"
verbose_name_plural = "XForms"
ordering = ("pk", )
def get_osm_survey_xpaths(self):
return [
elem.get_abbreviated_xpath()
for elem in self.get_survey_elements_of_type('osm')]
def get_media_survey_xpaths(self):
return [
e.get_abbreviated_xpath()
for e in sum([
self.get_survey_elements_of_type(m) for m in KNOWN_MEDIA_TYPES
], [])
]
def file_name(self):
return self.id_string + ".xml"
def get_survey_elements_of_type(self, element_type):
return [
e for e in self.get_survey_elements() if e.type == element_type
]
def _set_uuid_in_xml(self, file_name=None):
if not file_name:
file_name = self.file_name()
file_name, file_ext = os.path.splitext(file_name)
doc = clean_and_parse_xml(self.xml)
model_nodes = doc.getElementsByTagName("model")
if len(model_nodes) != 1:
raise Exception(u"xml contains multiple model nodes")
model_node = model_nodes[0]
instance_nodes = [
node for node in model_node.childNodes
if node.nodeType == Node.ELEMENT_NODE and
node.tagName.lower() == "instance" and not node.hasAttribute("id")
]
if len(instance_nodes) != 1:
raise Exception("Multiple instance nodes without the id "
"attribute, can't tell which is the main one")
instance_node = instance_nodes[0]
# get the first child whose id attribute matches our id_string
survey_nodes = [
node for node in instance_node.childNodes
if node.nodeType == Node.ELEMENT_NODE and
(node.tagName == file_name or node.attributes.get('id'))
]
if len(survey_nodes) != 1:
raise Exception(
"Multiple survey nodes with the id '%s'" % self.id_string)
survey_node = survey_nodes[0]
formhub_nodes = [
n for n in survey_node.childNodes
if n.nodeType == Node.ELEMENT_NODE and n.tagName == "formhub"
]
if len(formhub_nodes) > 1:
raise Exception(
"Multiple formhub nodes within main instance node")
elif len(formhub_nodes) == 1:
formhub_node = formhub_nodes[0]
else:
formhub_node = survey_node.insertBefore(
doc.createElement("formhub"), survey_node.firstChild)
uuid_nodes = [
node for node in formhub_node.childNodes
if node.nodeType == Node.ELEMENT_NODE and node.tagName == "uuid"
]
if len(uuid_nodes) == 0:
formhub_node.appendChild(doc.createElement("uuid"))
if len(formhub_nodes) == 0:
# append the calculate bind node
calculate_node = doc.createElement("bind")
calculate_node.setAttribute(
"nodeset", "/%s/formhub/uuid" % survey_node.tagName)
calculate_node.setAttribute("type", "string")
calculate_node.setAttribute("calculate", "'%s'" % self.uuid)
model_node.appendChild(calculate_node)
self.xml = doc.toprettyxml(indent=" ", encoding='utf-8')
# hack
# http://ronrothman.com/public/leftbraned/xml-dom-minidom-toprettyxml-\
# and-silly-whitespace/
text_re = re.compile('(>)\n\s*(\s[^<>\s].*?)\n\s*(\s</)', re.DOTALL)
output_re = re.compile('\n.*(<output.*>)\n( )*')
pretty_xml = text_re.sub(lambda m: ''.join(m.group(1, 2, 3)),
self.xml.decode('utf-8'))
inline_output = output_re.sub('\g<1>', pretty_xml)
inline_output = re.compile('<label>\s*\n*\s*\n*\s*</label>').sub(
'<label></label>', inline_output)
self.xml = inline_output
def _mark_start_time_boolean(self):
starttime_substring = 'jr:preloadParams="start"'
if self.xml.find(starttime_substring) != -1:
self.has_start_time = True
else:
self.has_start_time = False
def _id_string_already_exists_in_account(self, id_string):
try:
XForm.objects.get(id_string__iexact=id_string)
except XForm.DoesNotExist:
return False
return True
def get_unique_id_string(self, id_string, count=0):
# used to generate a new id_string for new data_dictionary object if
# id_string already existed
if self._id_string_already_exists_in_account(id_string):
if count != 0:
if re.match(r'\w+_\d+$', id_string):
a = id_string.split('_')
id_string = "_".join(a[:-1])
count += 1
id_string = "{}_{}".format(id_string, count)
return self.get_unique_id_string(id_string, count)
return id_string
def _set_title(self):
xml = re.sub(r"\s+", " ", self.xml)
matches = title_pattern.findall(xml)
if len(matches) != 1:
raise XLSFormError(("There should be a single title."), matches)
if matches:
title_xml = matches[0][:XFORM_TITLE_LENGTH]
else:
title_xml = self.title[:XFORM_TITLE_LENGTH] if self.title else ''
if self.title and title_xml != self.title:
title_xml = self.title[:XFORM_TITLE_LENGTH]
if isinstance(self.xml, bytes):
self.xml = self.xml.decode('utf-8')
self.xml = title_pattern.sub(u"<h:title>%s</h:title>" % title_xml,
self.xml)
self._set_hash()
if contains_xml_invalid_char(title_xml):
raise XLSFormError(
"Title shouldn't have any invalid xml "
"characters ('>' '&' '<')"
)
self.title = title_xml
def get_hash(self):
return u'md5:%s' % md5(self.xml.encode('utf8')).hexdigest()
def get_random_hash(self):
return u'md5:%s' % md5(
("%s-%s" % (
self.xml,
random.randint(0, 25101991)
)).encode('utf8')
).hexdigest()
@property
def random_hash(self):
return self.get_random_hash()
def _set_hash(self):
self.hash = self.get_hash()
def _set_id_string(self):
matches = self.instance_id_regex.findall(self.xml)
if len(matches) != 1:
raise XLSFormError("There should be a single id string.")
self.id_string = matches[0]
def save(self, *args, **kwargs):
update_fields = kwargs.get('update_fields')
if update_fields:
kwargs['update_fields'] = list(
set(list(update_fields) + ['date_modified']))
if update_fields is None or 'title' in update_fields:
self._set_title()
if self.pk is None:
self._set_hash()
if update_fields is None or 'id_string' in update_fields:
old_id_string = self.id_string
self._set_id_string()
if self.pk and old_id_string and old_id_string != self.id_string \
and self.num_of_submissions > 0:
raise XLSFormError(
"Your updated form's id_string '%(new_id)s' must match "
"the existing forms' id_string '%(old_id)s'." % {
'new_id': self.id_string,
'old_id': old_id_string
})
if getattr(settings, 'STRICT', True) and \
not re.search(r"^[\w-]+$", self.id_string):
raise XLSFormError(
'In strict mode, the XForm ID must be a '
'valid slug and contain no spaces.')
if 'skip_xls_read' in kwargs:
del kwargs['skip_xls_read']
super(XForm, self).save(*args, **kwargs)
def get_survey(self):
if not hasattr(self, "_survey"):
try:
builder = SurveyElementBuilder()
self._survey = \
builder.create_survey_element_from_json(self.json)
except ValueError:
xml = bytes(bytearray(self.xml, encoding='utf-8'))
self._survey = create_survey_element_from_xml(xml)
return self._survey
survey = property(get_survey)
def get_survey_elements(self):
return self.survey.iter_descendants()
def geopoint_xpaths(self):
survey_elements = self.get_survey_elements()
return [
e.get_abbreviated_xpath() for e in survey_elements
if e.bind.get(u'type') == u'geopoint'
]
def __str__(self):
return self.id_string
def type_for_form(content_object, data_type):
    """Return the MetaData queryset of *data_type* rows attached to
    *content_object* via the generic foreign key."""
    content_type = ContentType.objects.get_for_model(content_object)
    return MetaData.objects.filter(object_id=content_object.id,
                                   content_type=content_type,
                                   data_type=data_type)
def is_valid_url(uri):
    """Return True when *uri* passes Django's URLValidator, else False."""
    try:
        # bug fix: `URLValidator(uri)` only constructed a validator (the
        # uri was passed as the `schemes` init argument) and never
        # validated anything, so this function always returned True.
        URLValidator()(uri)
    except ValidationError:
        return False
    return True
def create_media(media):
    """Download a MetaData media whose ``data_value`` is a URL.

    :param media: MetaData-like object with ``data_value``/``data_file``.
    :returns: the media object with ``data_file`` populated, or None when
        ``data_value`` is not a valid URL.
    """
    if is_valid_url(media.data_value):
        filename = media.data_value.split('/')[-1]
        data_file = NamedTemporaryFile()
        # bug fix: guess_type() returns a (type, encoding) tuple — only
        # the mimetype string must be passed on as content_type.
        content_type = mimetypes.guess_type(filename)[0]
        with closing(requests.get(media.data_value, stream=True)) as r:
            for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
                if chunk:
                    data_file.write(chunk)
        # bug fix: was seek(os.SEEK_SET, os.SEEK_END), which only worked
        # because SEEK_SET == 0; spell offset and whence correctly.
        data_file.seek(0, os.SEEK_END)
        size = os.path.getsize(data_file.name)
        data_file.seek(0)
        media.data_value = filename
        media.data_file = InMemoryUploadedFile(
            data_file, 'data_file', filename, content_type,
            size, charset=None)
        return media
    return None
def media_resources(media_list, download=False):
    """
    Return the given media objects, optionally fetching URL-backed media.

    A media object without a stored file is re-fetched via ``create_media``
    (and only included when the fetch succeeds) when *download* is True;
    otherwise every object passes through untouched.
    """
    data = []
    for media in media_list:
        needs_download = download and media.data_file.name == ''
        if not needs_download:
            data.append(media)
            continue
        fetched = create_media(media)
        if fetched:
            data.append(fetched)
    return data
def meta_data_upload_to(instance, filename):
    """
    Storage path for MetaData files: 'media' files live under
    ``<username>/formid-media/``, everything else under ``<username>/docs/``.
    """
    owner = instance.content_object.user
    if owner is None and instance.content_type.model == 'instance':
        # submissions carry no direct user; go through the parent xform
        username = instance.content_object.xform.user.username
    else:
        username = owner.username
    subdir = 'formid-media' if instance.data_type == 'media' else 'docs'
    return os.path.join(username, subdir, filename)
class MetaData(models.Model):
data_type = models.CharField(max_length=255)
data_value = models.CharField(max_length=255)
data_file = models.FileField(
upload_to=meta_data_upload_to, blank=True, null=True)
data_file_type = models.CharField(max_length=255, blank=True, null=True)
file_hash = models.CharField(max_length=50, blank=True, null=True)
date_created = models.DateTimeField(null=True, auto_now_add=True)
date_modified = models.DateTimeField(null=True, auto_now=True)
deleted_at = models.DateTimeField(null=True, default=None)
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE,
default=get_default_content_type)
object_id = models.PositiveIntegerField(null=True, blank=True)
content_object = GenericForeignKey('content_type', 'object_id')
objects = models.Manager()
class Meta:
unique_together = ('object_id', 'data_type', 'data_value',
'content_type')
def __str__(self):
return self.data_value
def file(self, username=None):
if hasattr(self, '_file'):
return self._file
url = requests.Request(
'GET', self.data_value, params={
'username': username
}
).prepare().url
self._file = MetaData.get_file(url)
return self._file
@staticmethod
def media_upload(content_object, data_file=None, download=False):
data_type = 'media'
if data_file:
allowed_types = settings.XFORM_SUPPORTED_MEDIA_UPLOAD_TYPES
data_content_type = data_file.content_type \
if data_file.content_type in allowed_types else \
mimetypes.guess_type(data_file.name)[0]
if data_content_type in allowed_types:
content_type = ContentType.objects.get_for_model(
content_object)
media, created = MetaData.objects.update_or_create(
data_type=data_type,
content_type=content_type,
object_id=content_object.id,
data_value=data_file.name,
defaults={
'data_file': data_file,
'data_file_type': data_content_type
})
return media_resources(
type_for_form(content_object, data_type), download)
@staticmethod
def get_md5(data_file):
hash_md5 = md5()
for chunk in iter(lambda: data_file.read(4096), b""):
hash_md5.update(chunk)
return 'md5:%s' % hash_md5.hexdigest()
@staticmethod
def get_file(url):
data_file = None
output = BytesIO()
def getsize(f):
f.seek(0)
f.read()
s = f.tell()
f.seek(0)
return s
r = requests.get(url, allow_redirects=True)
d = r.headers['content-disposition']
fname = re.findall("filename=\"(.+)\"", d)[0]
content_type = r.headers.get('content-type')
output.write(r.content)
size = getsize(output)
data_file = InMemoryUploadedFile(
file=output, name=fname,
field_name=None,
content_type=content_type,
charset='utf-8', size=size
)
return data_file
@staticmethod
def add_url(content_object, url=None, download=False):
data_type = 'url'
try:
data_file = MetaData.get_file(url)
except Exception:
return None
allowed_types = settings.XFORM_SUPPORTED_MEDIA_UPLOAD_TYPES
data_content_type = data_file.content_type \
if data_file.content_type in allowed_types else \
mimetypes.guess_type(data_file.name)[0]
if data_content_type in allowed_types:
content_type = ContentType.objects.get_for_model(
content_object)
media, created = MetaData.objects.update_or_create(
data_type=data_type,
content_type=content_type,
object_id=content_object.id,
data_value=url,
defaults={
'data_file': None,
'data_file_type': data_content_type
})
return media_resources(
type_for_form(content_object, data_type), download)
def save(self, *args, **kwargs):
self._set_hash()
super(MetaData, self).save(*args, **kwargs)
@property
def hash(self):
if self.file_hash is not None and self.file_hash != '':
return self.file_hash
else:
return self._set_hash()
def _set_hash(self):
if not self.data_file:
return None
file_exists = self.data_file.storage.exists(self.data_file.name)
if (file_exists and self.data_file.name != '') \
or (not file_exists and self.data_file):
try:
self.data_file.seek(os.SEEK_SET)
except IOError:
return ''
else:
self.file_hash = 'md5:%s' % md5(
self.data_file.read()).hexdigest()
return self.file_hash
return ''
class Instance(models.Model):
json = JSONField(default=dict, null=False)
xml = models.TextField()
user = models.ForeignKey(
settings.AUTH_USER_MODEL, related_name='instances', null=True, on_delete=models.CASCADE)
xform = models.ForeignKey('xform.XForm', null=False,
related_name='instances', on_delete=models.CASCADE)
date_created = models.DateTimeField(auto_now_add=True)
date_modified = models.DateTimeField(auto_now=True)
last_edited = models.DateTimeField(null=True, default=None)
status = models.CharField(max_length=20,
default=u'submitted_via_web')
uuid = models.CharField(max_length=249, default=u'', db_index=True)
version = models.CharField(max_length=255, null=True)
geom = models.GeometryCollectionField(null=True)
media_all_received = models.NullBooleanField(
"Received All Media Attachemts",
null=True,
default=True)
total_media = models.PositiveIntegerField("Total Media Attachments",
null=True,
default=0)
media_count = models.PositiveIntegerField("Received Media Attachments",
null=True,
default=0)
checksum = models.CharField(max_length=64, null=True, blank=True,
db_index=True)
class Meta:
unique_together = ('xform', 'uuid')
def __str__(self):
return "Status: %s" % self.status
@property
def point(self):
gc = self.geom
if gc and len(gc):
return gc[0]
def get_duration(self):
data = self.get_dict()
start_name = _get_tag_or_element_type_xpath(self.xform, START)
end_name = _get_tag_or_element_type_xpath(self.xform, END)
start_time, end_time = data.get(start_name), data.get(end_name)
return calculate_duration(start_time, end_time)
@property
def num_of_media(self):
if not hasattr(self, '_num_of_media'):
self._num_of_media = len(self.get_expected_media())
return self._num_of_media
@property
def attachments_count(self):
return len(set(self.attachments.filter(
name__in=self.get_expected_media()
).values_list('name', flat=True)))
def get_expected_media(self):
if not hasattr(self, '_expected_media'):
data = self.get_dict()
media_list = []
if 'encryptedXmlFile' in data and self.xform.encrypted:
media_list.append(data['encryptedXmlFile'])
if 'media' in data:
media_list.extend([i['media/file'] for i in data['media']])
else:
media_xpaths = (self.xform.get_media_survey_xpaths() +
self.xform.get_osm_survey_xpaths())
for media_xpath in media_xpaths:
media_list.extend(
get_values_matching_key(data, media_xpath))
self._expected_media = list(set(media_list))
return self._expected_media
def numeric_converter(self, json_dict, numeric_fields=None):
if numeric_fields is None:
numeric_fields = get_numeric_fields(self.xform)
for key, value in json_dict.items():
if isinstance(value, (str, bytes)) and key in numeric_fields:
converted_value = numeric_checker(value)
if converted_value:
json_dict[key] = converted_value
elif isinstance(value, dict):
json_dict[key] = self.numeric_converter(
value, numeric_fields)
elif isinstance(value, list):
for k, v in enumerate(value):
if isinstance(v, (str, bytes)) and key in numeric_fields:
converted_value = numeric_checker(v)
if converted_value:
json_dict[key] = converted_value
elif isinstance(v, dict):
value[k] = self.numeric_converter(
v, numeric_fields)
return json_dict
def _set_geom(self):
xform = self.xform
geo_xpaths = xform.geopoint_xpaths()
doc = self.get_dict()
points = []
if geo_xpaths:
for xpath in geo_xpaths:
for gps in get_values_matching_key(doc, xpath):
try:
geometry = [float(s) for s in gps.split()]
lat, lng = geometry[0:2]
points.append(Point(lng, lat))
except ValueError:
return
if not xform.instances_with_geopoints and len(points):
xform.instances_with_geopoints = True
xform.save()
self.geom = GeometryCollection(points)
def _check_active(self, force):
pass
def _set_json(self):
self.json = self.get_full_dict()
def get_full_dict(self, load_existing=True):
doc = self.json or {} if load_existing else {}
doc = self.get_dict()
if self.id:
doc.update({
UUID: self.uuid,
ID: self.id,
ATTACHMENTS: _get_attachments_from_instance(self),
STATUS: self.status,
NOTES: [],
VERSION: self.version,
DURATION: self.get_duration(),
XFORM_ID_STRING: self._parser.get_xform_id_string(),
XFORM_ID: self.xform.pk,
GEOLOCATION: [self.point.y, self.point.x] if self.point
else [None, None],
SUBMITTED_BY: self.user.username if self.user else None
})
if not self.date_created:
self.date_created = timezone.now()
doc[SUBMISSION_TIME] = self.date_created.strftime(
'%Y-%m-%dT%H:%M:%S')
doc[TOTAL_MEDIA] = self.total_media
doc[MEDIA_COUNT] = self.media_count
doc[MEDIA_ALL_RECEIVED] = self.media_all_received
edited = False
if hasattr(self, 'last_edited'):
edited = self.last_edited is not None
doc[EDITED] = edited
edited and doc.update({
LAST_EDITED: convert_to_serializable_date(self.last_edited)
})
return doc
def get_dict(self, force_new=False, flat=True):
self._set_parser()
instance_dict = self._parser.get_flat_dict_with_attributes() if flat \
else self._parser.to_dict()
return self.numeric_converter(instance_dict)
def _set_survey_type(self):
self.survey_type = self.get_root_node_name()
def _set_parser(self):
if not hasattr(self, "_parser"):
self._parser = XFormInstanceParser(self.xml, self.xform)
def get_root_node_name(self):
self._set_parser()
return self._parser.get_root_node_name()
def _set_uuid(self):
if self.xml and not self.uuid:
uuid = get_uuid_from_xml(self.xml)
if uuid is not None:
self.uuid = uuid
set_uuid(self)
def save(self, *args, **kwargs):
force = kwargs.get('force')
if force:
del kwargs['force']
self._check_active(force)
self._set_geom()
self._set_json()
self._set_survey_type()
self._set_uuid()
self.version = self.json.get(VERSION, self.xform.version)
super(Instance, self).save(*args, **kwargs)
class Attachment(models.Model):
OSM = 'osm'
instance = models.ForeignKey(
Instance, related_name="attachments", on_delete=models.CASCADE)
media_file = models.FileField(
max_length=255, upload_to=upload_to)
mimetype = models.CharField(
max_length=100, null=False, blank=True, default='')
extension = models.CharField(
max_length=10, null=False, blank=False, default=u"non", db_index=True)
date_created = models.DateTimeField(null=True, auto_now_add=True)
date_modified = models.DateTimeField(null=True, auto_now=True)
file_size = models.PositiveIntegerField(default=0)
name = models.CharField(max_length=100, null=True, blank=True)
class Meta:
ordering = ("pk", )
def save(self, *args, **kwargs):
if self.media_file and self.mimetype == '':
mimetype, encoding = mimetypes.guess_type(self.media_file.name)
if mimetype:
self.mimetype = mimetype
if self.media_file and len(self.media_file.name) > 255:
raise ValueError(
"Length of the media file should be less or equal to 255")
try:
f_size = self.media_file.size
if f_size:
self.file_size = f_size
except (OSError, AttributeError):
pass
try:
self.name = self.filename
self.extension = self.name.rsplit('.', 1)[1]
except Exception:
pass
super(Attachment, self).save(*args, **kwargs)
@property
def file_hash(self):
if self.media_file.storage.exists(self.media_file.name):
return u'%s' % md5(self.media_file.read()).hexdigest()
return u''
@property
def filename(self):
if self.media_file:
return os.path.basename(self.media_file.name)
def is_newline_error(e):
newline_error = u'new-line character seen in unquoted field - do you need'\
u' to open the file in universal-newline mode?'
return newline_error == str(e)
def process_xlsform(xls, default_name):
file_object = None
if xls.name.endswith('csv'):
xls.seek(0)
file_object = BytesIO()
file_object.write(xls.read())
file_object.seek(0)
xls.seek(0)
try:
return parse_file_to_json(xls.name, file_object=file_object or xls)
except csv.Error as e:
if is_newline_error(e):
xls.seek(0)
file_object = StringIO(
u'\n'.join(xls.read().splitlines()))
return parse_file_to_json(
xls.name, default_name=default_name, file_object=file_object)
raise e
def get_columns_with_hxl(survey_elements):
return survey_elements and {
se.get('name'): val.get('hxl')
for se in survey_elements
for key, val in se.items()
if key == 'instance' and val and 'hxl' in val
}
def check_version_set(survey):
survey_json = json.loads(survey.to_json())
if not survey_json.get("version"):
survey_json['version'] = \
timezone.now().strftime("%Y%m%d%H%M")
builder = SurveyElementBuilder()
survey = builder.create_survey_element_from_json(
json.dumps(survey_json))
return survey
class DataDictionary(XForm):
def __init__(self, *args, **kwargs):
self.instances_for_export = lambda d: d.instances.all()
self.has_external_choices = False
self._id_string_changed = False
super(DataDictionary, self).__init__(*args, **kwargs)
def __str__(self):
return getattr(self, "id_string", "")
def save(self, *args, **kwargs):
skip_xls_read = kwargs.get('skip_xls_read')
if self.xls and not skip_xls_read:
default_name = None \
if not self.pk else self.survey.xml_instance().tagName
survey_dict = process_xlsform(self.xls, default_name)
if has_external_choices(survey_dict):
self.has_external_choices = True
survey = create_survey_element_from_dict(survey_dict)
survey = check_version_set(survey)
if get_columns_with_hxl(survey.get('children')):
self.has_hxl_support = True
if self.pk is None:
new_id_string = self.get_unique_id_string(
survey.get('id_string'))
self._id_string_changed = \
new_id_string != survey.get('id_string')
survey['id_string'] = new_id_string
# For flow results packages use the user defined id/uuid
elif self.id_string != survey.get('id_string'):
raise XLSFormError(
("Your updated form's id_string '%(new_id)s' must match "
"the existing forms' id_string '%(old_id)s'." % {
'new_id': survey.get('id_string'),
'old_id': self.id_string}))
elif default_name and default_name != survey.get('name'):
survey['name'] = default_name
else:
survey['id_string'] = self.id_string
self.json = survey.to_json()
self.xml = survey.to_xml()
self.version = survey.get('version')
self.last_updated_at = timezone.now()
self.title = survey.get('title')
self._mark_start_time_boolean()
set_uuid(self)
self._set_uuid_in_xml()
self._set_hash()
if 'skip_xls_read' in kwargs:
del kwargs['skip_xls_read']
super(DataDictionary, self).save(*args, **kwargs)
def file_name(self):
return os.path.split(self.xls.name)[-1]
def sheet_to_csv(xls_content, sheet_name):
workbook = xlrd.open_workbook(file_contents=xls_content)
sheet = workbook.sheet_by_name(sheet_name)
if not sheet or sheet.nrows < 2:
raise Exception("Sheet <'%(sheet_name)s'> has no data." % {
'sheet_name': sheet_name})
csv_file = BytesIO()
writer = csv.writer(csv_file, encoding='utf-8', quoting=csv.QUOTE_ALL)
mask = [v and len(v.strip()) > 0 for v in sheet.row_values(0)]
header = [v for v, m in zip(sheet.row_values(0), mask) if m]
writer.writerow(header)
name_column = None
try:
name_column = header.index('name')
except ValueError:
pass
integer_fields = False
date_fields = False
if name_column:
name_column_values = sheet.col_values(name_column)
for index in range(len(name_column_values)):
if sheet.cell_type(index, name_column) == xlrd.XL_CELL_NUMBER:
integer_fields = True
elif sheet.cell_type(index, name_column) == xlrd.XL_CELL_DATE:
date_fields = True
for row in range(1, sheet.nrows):
if integer_fields or date_fields:
# convert integers to string/datetime if name has numbers/dates
row_values = []
for index, val in enumerate(sheet.row_values(row)):
if sheet.cell_type(row, index) == xlrd.XL_CELL_NUMBER:
try:
val = str(
float(val) if (
float(val) > int(val)
) else int(val)
)
except ValueError:
pass
elif sheet.cell_type(row, index) == xlrd.XL_CELL_DATE:
val = xlrd.xldate_as_datetime(
val, workbook.datemode).isoformat()
row_values.append(val)
writer.writerow([v for v, m in zip(row_values, mask) if m])
else:
writer.writerow(
[v for v, m in zip(sheet.row_values(row), mask) if m])
return csv_file
def set_object_permissions(sender, instance=None, created=False, **kwargs):
# seems the super is not called, have to get xform from here
xform = XForm.objects.get(pk=instance.pk)
if hasattr(instance, 'has_external_choices') \
and instance.has_external_choices:
instance.xls.seek(0)
f = sheet_to_csv(instance.xls.read(), 'external_choices')
f.seek(0, os.SEEK_END)
size = f.tell()
f.seek(0)
data_file = InMemoryUploadedFile(
file=f,
field_name='data_file',
name='itemsets.csv',
content_type='text/csv',
size=size,
charset=None
)
MetaData.media_upload(xform, data_file)
post_save.connect(set_object_permissions, sender=DataDictionary,
dispatch_uid='xform_object_permissions')
| true | true |
1c459720c843885a8386143a876fd1904e17dd73 | 3,345 | py | Python | leaderboard_service/leaderboard_service/settings.py | AVatch/leaderboard-service | 9b70e24866fe862ba5d71dc3404e123303325431 | [
"Apache-2.0"
] | 1 | 2016-02-25T22:50:22.000Z | 2016-02-25T22:50:22.000Z | leaderboard_service/leaderboard_service/settings.py | AVatch/leaderboard-service | 9b70e24866fe862ba5d71dc3404e123303325431 | [
"Apache-2.0"
] | null | null | null | leaderboard_service/leaderboard_service/settings.py | AVatch/leaderboard-service | 9b70e24866fe862ba5d71dc3404e123303325431 | [
"Apache-2.0"
] | null | null | null | """
Django settings for leaderboard_service project.
Generated by 'django-admin startproject' using Django 1.9.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'y(9k(&i#f11*to()nc^qy9nnokkwg^d(7g1zk9^p8%4!@cz)td'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
CORE_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
THIRD_PARTY_APPS = ['rest_framework', 'rest_framework.authtoken']
APPS = ['leaderboards']
INSTALLED_APPS = CORE_APPS + THIRD_PARTY_APPS + APPS
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'leaderboard_service.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'leaderboard_service.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
| 26.338583 | 91 | 0.704933 |
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = 'y(9k(&i#f11*to()nc^qy9nnokkwg^d(7g1zk9^p8%4!@cz)td'
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
CORE_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
THIRD_PARTY_APPS = ['rest_framework', 'rest_framework.authtoken']
APPS = ['leaderboards']
INSTALLED_APPS = CORE_APPS + THIRD_PARTY_APPS + APPS
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'leaderboard_service.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'leaderboard_service.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
| true | true |
1c459747e39517110330c01929492f60ac06c5aa | 503 | py | Python | vqseg/wordseg_algorithms.py | janinerugayan/VectorQuantizedCPC | b4e9fe6aeebca3b792ab604a770e8c3e289a46a1 | [
"MIT"
] | null | null | null | vqseg/wordseg_algorithms.py | janinerugayan/VectorQuantizedCPC | b4e9fe6aeebca3b792ab604a770e8c3e289a46a1 | [
"MIT"
] | null | null | null | vqseg/wordseg_algorithms.py | janinerugayan/VectorQuantizedCPC | b4e9fe6aeebca3b792ab604a770e8c3e289a46a1 | [
"MIT"
] | null | null | null | """
Word segmentation algorithms.
Author: Herman Kamper
Contact: kamperh@gmail.com
Date: 2020
"""
from wordseg.algos import tp, puddle, dpseg, baseline, dibs, ag
import wordseg.algos
def ag(utterance_list, **kwargs):
return list(wordseg.algos.ag.segment(utterance_list, **kwargs))
def tp(utterance_list, **kwargs):
return list(wordseg.algos.tp.segment(utterance_list, **kwargs))
def dpseg(utterance_list, **kwargs):
return list(wordseg.algos.dpseg.segment(utterance_list, **kwargs))
| 21.869565 | 70 | 0.745527 |
from wordseg.algos import tp, puddle, dpseg, baseline, dibs, ag
import wordseg.algos
def ag(utterance_list, **kwargs):
return list(wordseg.algos.ag.segment(utterance_list, **kwargs))
def tp(utterance_list, **kwargs):
return list(wordseg.algos.tp.segment(utterance_list, **kwargs))
def dpseg(utterance_list, **kwargs):
return list(wordseg.algos.dpseg.segment(utterance_list, **kwargs))
| true | true |
1c45984c4c6ee38da52bda0420ddc998d5a7f5a2 | 2,024 | py | Python | tests/test_git.py | igorbernstein2/synthtool | 6b33cffb4301c3f05cc6976fff0022d98b47772f | [
"Apache-2.0"
] | null | null | null | tests/test_git.py | igorbernstein2/synthtool | 6b33cffb4301c3f05cc6976fff0022d98b47772f | [
"Apache-2.0"
] | null | null | null | tests/test_git.py | igorbernstein2/synthtool | 6b33cffb4301c3f05cc6976fff0022d98b47772f | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import mock
import pytest
from synthtool.sources import git
def test_make_repo_clone_url(monkeypatch):
monkeypatch.setattr(git, "USE_SSH", True)
assert (
git.make_repo_clone_url("theacodes/nox") == "git@github.com:theacodes/nox.git"
)
def test_make_repo_clone_url_https(monkeypatch):
monkeypatch.setattr(git, "USE_SSH", False)
assert (
git.make_repo_clone_url("theacodes/nox")
== "https://github.com/theacodes/nox.git"
)
@pytest.mark.parametrize(
("input, expected"),
[
("git@github.com:theacodes/nox.git", {"owner": "theacodes", "name": "nox"}),
("https://github.com/theacodes/nox.git", {"owner": "theacodes", "name": "nox"}),
("theacodes/nox", {"owner": "theacodes", "name": "nox"}),
("theacodes/nox.git", {"owner": "theacodes", "name": "nox"}),
],
)
def test_parse_repo_url(input, expected):
assert git.parse_repo_url(input) == expected
@mock.patch("subprocess.check_output", autospec=True)
def test_get_latest_commit(check_call):
check_call.return_value = b"abc123\ncommit\nmessage."
sha, message = git.get_latest_commit()
assert sha == "abc123"
assert message == "commit\nmessage."
def test_extract_commit_message_metadata():
message = """\
Hello, world!
One: Hello!
Two: 1234
"""
metadata = git.extract_commit_message_metadata(message)
assert metadata == {"One": "Hello!", "Two": "1234"}
| 28.914286 | 88 | 0.6917 |
from unittest import mock
import pytest
from synthtool.sources import git
def test_make_repo_clone_url(monkeypatch):
monkeypatch.setattr(git, "USE_SSH", True)
assert (
git.make_repo_clone_url("theacodes/nox") == "git@github.com:theacodes/nox.git"
)
def test_make_repo_clone_url_https(monkeypatch):
monkeypatch.setattr(git, "USE_SSH", False)
assert (
git.make_repo_clone_url("theacodes/nox")
== "https://github.com/theacodes/nox.git"
)
@pytest.mark.parametrize(
("input, expected"),
[
("git@github.com:theacodes/nox.git", {"owner": "theacodes", "name": "nox"}),
("https://github.com/theacodes/nox.git", {"owner": "theacodes", "name": "nox"}),
("theacodes/nox", {"owner": "theacodes", "name": "nox"}),
("theacodes/nox.git", {"owner": "theacodes", "name": "nox"}),
],
)
def test_parse_repo_url(input, expected):
assert git.parse_repo_url(input) == expected
@mock.patch("subprocess.check_output", autospec=True)
def test_get_latest_commit(check_call):
check_call.return_value = b"abc123\ncommit\nmessage."
sha, message = git.get_latest_commit()
assert sha == "abc123"
assert message == "commit\nmessage."
def test_extract_commit_message_metadata():
message = """\
Hello, world!
One: Hello!
Two: 1234
"""
metadata = git.extract_commit_message_metadata(message)
assert metadata == {"One": "Hello!", "Two": "1234"}
| true | true |
1c45988afdd14740a571c6b781a72451a6d25636 | 3,162 | py | Python | dispel4py/seismo/obspy_stream.py | AndreiFrunze/wrangler | 076a07de00fc966dcf18ca6b6a6e804be5245ed9 | [
"Apache-2.0"
] | 2 | 2017-09-07T04:33:18.000Z | 2019-01-07T13:32:15.000Z | dispel4py/seismo/obspy_stream.py | AndreiFrunze/wrangler | 076a07de00fc966dcf18ca6b6a6e804be5245ed9 | [
"Apache-2.0"
] | 2 | 2016-10-06T13:07:05.000Z | 2017-12-20T09:47:08.000Z | dispel4py/seismo/obspy_stream.py | AndreiFrunze/wrangler | 076a07de00fc966dcf18ca6b6a6e804be5245ed9 | [
"Apache-2.0"
] | 5 | 2016-09-01T08:38:20.000Z | 2018-08-28T12:08:39.000Z | from dispel4py.seismo.seismo import SeismoPE
import traceback
INPUT_NAME = 'input'
OUTPUT_NAME = 'output'
class ObspyStreamPE(SeismoPE):
'''
A SeismoPE that calls a function to process an input stream.
'''
def __init__(self):
SeismoPE.__init__(self)
def setCompute(self, compute_fn, params={}):
'''
Define the compute function that this PE uses for processing input streams, and any input parameters for the function.
The function must have at least one input, an obspy stream, and can accept more input parameters that must be provided
before the PE is executed.
'''
self.compute_fn = compute_fn, dict(params)
def setInputTypes(self, types):
self.inout_types = { OUTPUT_NAME : types[INPUT_NAME] }
def getOutputTypes(self):
# output = input
return self.inout_types
def compute(self):
'''
Calls the processing function with the given parameters and one input stream.
'''
try:
try:
func, params = self.compute_fn
except TypeError:
func = self.compute_fn
params = {}
output = func(self, self.st, **params)
self.outputstreams.append(output)
except:
self.log(traceback.format_exc())
self.error+=traceback.format_exc()
self.log("Failed to execute function '%s' with parameters %s" % (func.__name__, params))
from dispel4py.workflow_graph import WorkflowGraph
def createProcessingComposite(chain, suffix='', controlParameters={}, provRecorder=None):
'''
Creates a composite PE wrapping a pipeline that processes obspy streams.
:param chain: list of functions that process obspy streams. The function takes one input parameter, stream, and returns an output stream.
:param requestId: id of the request that the stream is associated with
:param controlParameters: environment parameters for the processing elements
:rtype: dictionary inputs and outputs of the composite PE that was created
'''
prev = None
first = None
graph = WorkflowGraph()
for fn_desc in chain:
pe = ObspyStreamPE()
try:
fn = fn_desc[0]
params = fn_desc[1]
except TypeError:
fn = fn_desc
params = {}
pe.compute_fn = fn
pe.name = 'ObspyStreamPE_' + fn.__name__ + suffix
pe.controlParameters = controlParameters
pe.appParameters = dict(params)
pe.setCompute(fn, params)
# connect the metadata output to the provenance recorder PE if there is one
if provRecorder:
graph.connect(pe, 'metadata', provRecorder, 'metadata')
if prev:
graph.connect(prev, OUTPUT_NAME, pe, INPUT_NAME)
else:
first = pe
prev = pe
# Map inputs and outputs of the wrapper to the nodes in the subgraph
graph.inputmappings = { 'input' : (first, INPUT_NAME) }
graph.outputmappings = { 'output' : (prev, OUTPUT_NAME) }
return graph
| 34.747253 | 141 | 0.624921 | from dispel4py.seismo.seismo import SeismoPE
import traceback
INPUT_NAME = 'input'
OUTPUT_NAME = 'output'
class ObspyStreamPE(SeismoPE):
def __init__(self):
SeismoPE.__init__(self)
def setCompute(self, compute_fn, params={}):
self.compute_fn = compute_fn, dict(params)
def setInputTypes(self, types):
self.inout_types = { OUTPUT_NAME : types[INPUT_NAME] }
def getOutputTypes(self):
return self.inout_types
def compute(self):
try:
try:
func, params = self.compute_fn
except TypeError:
func = self.compute_fn
params = {}
output = func(self, self.st, **params)
self.outputstreams.append(output)
except:
self.log(traceback.format_exc())
self.error+=traceback.format_exc()
self.log("Failed to execute function '%s' with parameters %s" % (func.__name__, params))
from dispel4py.workflow_graph import WorkflowGraph
def createProcessingComposite(chain, suffix='', controlParameters={}, provRecorder=None):
prev = None
first = None
graph = WorkflowGraph()
for fn_desc in chain:
pe = ObspyStreamPE()
try:
fn = fn_desc[0]
params = fn_desc[1]
except TypeError:
fn = fn_desc
params = {}
pe.compute_fn = fn
pe.name = 'ObspyStreamPE_' + fn.__name__ + suffix
pe.controlParameters = controlParameters
pe.appParameters = dict(params)
pe.setCompute(fn, params)
if provRecorder:
graph.connect(pe, 'metadata', provRecorder, 'metadata')
if prev:
graph.connect(prev, OUTPUT_NAME, pe, INPUT_NAME)
else:
first = pe
prev = pe
graph.inputmappings = { 'input' : (first, INPUT_NAME) }
graph.outputmappings = { 'output' : (prev, OUTPUT_NAME) }
return graph
| true | true |
1c4598e6f314bfee7c1a31680ad93afaa47b3067 | 4,132 | py | Python | plugins/samanage/komand_samanage/actions/list_users/action.py | lukaszlaszuk/insightconnect-plugins | 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892 | [
"MIT"
] | 46 | 2019-06-05T20:47:58.000Z | 2022-03-29T10:18:01.000Z | plugins/samanage/komand_samanage/actions/list_users/action.py | lukaszlaszuk/insightconnect-plugins | 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892 | [
"MIT"
] | 386 | 2019-06-07T20:20:39.000Z | 2022-03-30T17:35:01.000Z | plugins/samanage/komand_samanage/actions/list_users/action.py | lukaszlaszuk/insightconnect-plugins | 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892 | [
"MIT"
] | 43 | 2019-07-09T14:13:58.000Z | 2022-03-28T12:04:46.000Z | import komand
from .schema import ListUsersInput, ListUsersOutput
# Custom imports below
class ListUsers(komand.Action):
def __init__(self):
super(self.__class__, self).__init__(
name="list_users",
description="List all users",
input=ListUsersInput(),
output=ListUsersOutput(),
)
def run(self, params={}):
return {"users": self.connection.api.list_users()}
def test(self):
return {
"users": [
{
"id": 4245115,
"name": "Anon",
"disabled": False,
"email": "123@service.hmail.eu",
"created_at": "2018-11-22T08:13:00.000-05:00",
"role": {
"id": 461180,
"name": "Requester",
"description": "Requester role to view and submit service request.",
"portal": True,
"show_my_tasks": False,
},
"salt": "04f20390ecf0c97571167c6c3350782663b6a7e0",
"group_ids": [4492327],
"custom_fields_values": [],
"avatar": {"type": "initials", "color": "#dfcd00", "initials": "AN"},
"mfa_enabled": False,
},
{
"id": 4244043,
"name": "Tom",
"disabled": False,
"title": "Panic",
"email": "20180913dp@gmail.com",
"created_at": "2018-11-21T12:28:31.000-05:00",
"phone": "12345678",
"mobile_phone": "87654321",
"department": {
"id": 133361,
"name": "Information Technology",
"default_assignee_id": 4485265,
},
"role": {
"id": 461179,
"name": "Service Agent User",
"description": "Almost like an administrator but no access to setup.",
"portal": False,
"show_my_tasks": False,
},
"salt": "b3e360e65de5b592ce1ff92e1d90acedbaddbcf7",
"group_ids": [4491226],
"custom_fields_values": [],
"avatar": {"type": "initials", "color": "#dfcd00", "initials": "TO"},
"mfa_enabled": False,
"reports_to": {
"id": 4485266,
"name": "Helpdesk",
"disabled": False,
"is_user": False,
"reports_to": {"id": -1, "href": "https://api.samanage.com/groups/-1.json"},
"avatar": {"type": "group", "color": "#0bc46f"},
},
"site": {"id": 96691, "name": "Headquarters", "location": "Main Office"},
},
{
"id": 4238379,
"name": "WW WW",
"disabled": False,
"email": "wwww@service.hmail.eu",
"created_at": "2018-11-20T05:29:00.000-05:00",
"last_login": "2018-11-21T17:20:46.000-05:00",
"phone": "+37254312367",
"role": {
"id": 461178,
"name": "Administrator",
"description": "This is the all powerful administrator user!",
"portal": False,
"show_my_tasks": False,
},
"salt": "7e2c35f51cc6ccdf727f7e48bc42403adbf6534d",
"group_ids": [4485265, 4485266],
"custom_fields_values": [],
"avatar": {"type": "initials", "color": "#dfcd00", "initials": "WW"},
"mfa_enabled": False,
},
]
}
| 41.32 | 100 | 0.393272 | import komand
from .schema import ListUsersInput, ListUsersOutput
class ListUsers(komand.Action):
def __init__(self):
super(self.__class__, self).__init__(
name="list_users",
description="List all users",
input=ListUsersInput(),
output=ListUsersOutput(),
)
def run(self, params={}):
return {"users": self.connection.api.list_users()}
def test(self):
return {
"users": [
{
"id": 4245115,
"name": "Anon",
"disabled": False,
"email": "123@service.hmail.eu",
"created_at": "2018-11-22T08:13:00.000-05:00",
"role": {
"id": 461180,
"name": "Requester",
"description": "Requester role to view and submit service request.",
"portal": True,
"show_my_tasks": False,
},
"salt": "04f20390ecf0c97571167c6c3350782663b6a7e0",
"group_ids": [4492327],
"custom_fields_values": [],
"avatar": {"type": "initials", "color": "#dfcd00", "initials": "AN"},
"mfa_enabled": False,
},
{
"id": 4244043,
"name": "Tom",
"disabled": False,
"title": "Panic",
"email": "20180913dp@gmail.com",
"created_at": "2018-11-21T12:28:31.000-05:00",
"phone": "12345678",
"mobile_phone": "87654321",
"department": {
"id": 133361,
"name": "Information Technology",
"default_assignee_id": 4485265,
},
"role": {
"id": 461179,
"name": "Service Agent User",
"description": "Almost like an administrator but no access to setup.",
"portal": False,
"show_my_tasks": False,
},
"salt": "b3e360e65de5b592ce1ff92e1d90acedbaddbcf7",
"group_ids": [4491226],
"custom_fields_values": [],
"avatar": {"type": "initials", "color": "#dfcd00", "initials": "TO"},
"mfa_enabled": False,
"reports_to": {
"id": 4485266,
"name": "Helpdesk",
"disabled": False,
"is_user": False,
"reports_to": {"id": -1, "href": "https://api.samanage.com/groups/-1.json"},
"avatar": {"type": "group", "color": "#0bc46f"},
},
"site": {"id": 96691, "name": "Headquarters", "location": "Main Office"},
},
{
"id": 4238379,
"name": "WW WW",
"disabled": False,
"email": "wwww@service.hmail.eu",
"created_at": "2018-11-20T05:29:00.000-05:00",
"last_login": "2018-11-21T17:20:46.000-05:00",
"phone": "+37254312367",
"role": {
"id": 461178,
"name": "Administrator",
"description": "This is the all powerful administrator user!",
"portal": False,
"show_my_tasks": False,
},
"salt": "7e2c35f51cc6ccdf727f7e48bc42403adbf6534d",
"group_ids": [4485265, 4485266],
"custom_fields_values": [],
"avatar": {"type": "initials", "color": "#dfcd00", "initials": "WW"},
"mfa_enabled": False,
},
]
}
| true | true |
1c4598f31962fb4914c01183dfd2b5367f20731a | 136 | py | Python | al_phonebook/types.py | vtrvtr/al_phonebook | 7bcdb7fa0323c873c523036da99b4b1616c0e00e | [
"MIT"
] | null | null | null | al_phonebook/types.py | vtrvtr/al_phonebook | 7bcdb7fa0323c873c523036da99b4b1616c0e00e | [
"MIT"
] | 1 | 2022-01-17T14:45:50.000Z | 2022-01-17T14:45:51.000Z | al_phonebook/types.py | vtrvtr/al_phonebook | 7bcdb7fa0323c873c523036da99b4b1616c0e00e | [
"MIT"
] | null | null | null | from typing import Any, Union
import os
DictItem = dict[str, Any]
OptionalDictItem = DictItem | None
PathLike = Union[os.PathLike, str] | 22.666667 | 34 | 0.764706 | from typing import Any, Union
import os
DictItem = dict[str, Any]
OptionalDictItem = DictItem | None
PathLike = Union[os.PathLike, str] | true | true |
1c459b1eb973ce00d988425faa2a536d4bd861cd | 744 | py | Python | dm_control/composer/constants.py | h8907283/dm_control | fe4449606742a7b8bec81930790b98244cddc538 | [
"Apache-2.0"
] | 2,863 | 2018-01-03T01:38:52.000Z | 2022-03-30T09:49:50.000Z | dm_control/composer/constants.py | krakhit/dm_control | 4e1a35595124742015ae0c7a829e099a5aa100f5 | [
"Apache-2.0"
] | 266 | 2018-01-03T16:00:04.000Z | 2022-03-26T15:45:48.000Z | dm_control/composer/constants.py | krakhit/dm_control | 4e1a35595124742015ae0c7a829e099a5aa100f5 | [
"Apache-2.0"
] | 580 | 2018-01-03T03:17:27.000Z | 2022-03-31T19:29:32.000Z | # Copyright 2018 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Module defining constant values for Composer."""
SENSOR_SITES_GROUP = 4
| 37.2 | 78 | 0.681452 |
SENSOR_SITES_GROUP = 4
| true | true |
1c459b5a3be59498565c981523bb698670abd0ef | 255 | py | Python | manage.py | justsostephen/track | b1749f7db664d76fab0c501c23f0d0705cc95fce | [
"MIT"
] | null | null | null | manage.py | justsostephen/track | b1749f7db664d76fab0c501c23f0d0705cc95fce | [
"MIT"
] | null | null | null | manage.py | justsostephen/track | b1749f7db664d76fab0c501c23f0d0705cc95fce | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "stockcontrol.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| 23.181818 | 76 | 0.776471 |
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "stockcontrol.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| true | true |
1c459b9e9ba258120e841df624eb7641c3121e90 | 4,297 | py | Python | aiograph/utils/html.py | fakegit/aiograph | a00aacebb04c1e743055ba524b978a06027e31ed | [
"MIT"
] | 45 | 2018-05-05T12:31:43.000Z | 2022-03-23T11:20:03.000Z | aiographfix/utils/html.py | Yyonging/aiograph | 78d291f9e1157720c949e336a9aa2711ad707285 | [
"MIT"
] | 6 | 2019-03-04T11:23:49.000Z | 2022-03-30T11:25:46.000Z | aiographfix/utils/html.py | Yyonging/aiograph | 78d291f9e1157720c949e336a9aa2711ad707285 | [
"MIT"
] | 16 | 2019-02-22T19:10:19.000Z | 2021-09-15T22:12:55.000Z | from html import escape
from html.entities import name2codepoint
from html.parser import HTMLParser
from typing import List, Union
import attr
from ..types import NodeElement
ALLOWED_TAGS = [
'a', 'aside', 'b', 'blockquote', 'br', 'code', 'em', 'figcaption', 'figure',
'h3', 'h4', 'hr', 'i', 'iframe', 'img', 'li', 'ol', 'p', 'pre', 's',
'strong', 'u', 'ul', 'video'
]
VOID_ELEMENTS = {
'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen',
'link', 'menuitem', 'meta', 'param', 'source', 'track', 'wbr'
}
ALLOWED_ATTRS = ['href', 'src']
def node_to_html(node: Union[str, NodeElement, list]) -> str:
"""
Convert Nodes to HTML
:param node:
:return:
"""
if isinstance(node, str): # Text
return escape(node)
elif isinstance(node, list): # List of nodes
result = ''
for child_node in node:
result += node_to_html(child_node)
return result
elif not isinstance(node, NodeElement):
raise TypeError(f"Node must be instance of str or NodeElement, not {type(node)}")
# NodeElement
# Open
result = "<" + node.tag
if node.attrs:
result += ' ' + ' '.join(f"{k}=\"{v}\"" for k, v in node.attrs.items())
if node.tag in VOID_ELEMENTS: # Close void element
result += '/>'
else:
result += '>'
for child_node in node.children: # Container body
result += node_to_html(child_node)
result += '</' + node.tag + '>' # Close tag
return result
def html_to_nodes(html_content: str) -> List[Union[str, NodeElement]]:
"""
Convert HTML code to Nodes
:param html_content:
:return:
"""
parser = HtmlToNodesParser()
parser.feed(html_content)
return parser.get_nodes()
def _node_converter_filter(attribute, value) -> bool:
return bool(value)
def nodes_to_json(nodes: List[Union[str, NodeElement]]) -> List[Union[str, dict]]:
"""
Convert Nodes to JSON
:param nodes:
:return:
"""
result = []
for node in nodes:
if isinstance(node, str):
result.append(node)
elif isinstance(node, NodeElement):
result.append(attr.asdict(node, filter=_node_converter_filter))
return result
def html_to_json(content: str) -> List[Union[str, dict]]:
"""
Convert HTML to JSON
:param content:
:return:
"""
return nodes_to_json(html_to_nodes(content))
class HtmlToNodesParser(HTMLParser):
def __init__(self):
super(HtmlToNodesParser, self).__init__()
self.current_nodes = []
self.parent_nodes = []
def error(self, message):
raise ValueError(message)
def add_str_node(self, s):
if self.current_nodes and isinstance(self.current_nodes[-1], str):
self.current_nodes[-1] += s
else:
self.current_nodes.append(s)
def handle_starttag(self, tag, attrs_list):
if tag not in ALLOWED_TAGS:
self.error(f"{tag} tag is not allowed")
node = NodeElement(tag=tag)
if attrs_list:
for attr, value in attrs_list:
node.attrs[attr] = value
self.current_nodes.append(node)
if tag not in VOID_ELEMENTS:
self.parent_nodes.append(self.current_nodes)
self.current_nodes = node.children = []
def handle_endtag(self, tag):
if tag in VOID_ELEMENTS:
return
self.current_nodes = self.parent_nodes.pop()
last_node = self.current_nodes[-1]
if last_node.tag != tag:
self.error(f"\"{tag}\" tag closed instead of \"{last_node.tag}\"")
if not last_node.children:
last_node.children.clear()
def handle_data(self, data):
self.add_str_node(data)
def handle_entityref(self, name):
self.add_str_node(chr(name2codepoint[name]))
def handle_charref(self, name):
if name.startswith('x'):
c = chr(int(name[1:], 16))
else:
c = chr(int(name))
self.add_str_node(c)
def get_nodes(self):
if self.parent_nodes:
not_closed_tag = self.parent_nodes[-1][-1].tag
self.error(f"\"{not_closed_tag}\" tag is not closed")
return self.current_nodes
| 25.577381 | 89 | 0.594601 | from html import escape
from html.entities import name2codepoint
from html.parser import HTMLParser
from typing import List, Union
import attr
from ..types import NodeElement
ALLOWED_TAGS = [
'a', 'aside', 'b', 'blockquote', 'br', 'code', 'em', 'figcaption', 'figure',
'h3', 'h4', 'hr', 'i', 'iframe', 'img', 'li', 'ol', 'p', 'pre', 's',
'strong', 'u', 'ul', 'video'
]
VOID_ELEMENTS = {
'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen',
'link', 'menuitem', 'meta', 'param', 'source', 'track', 'wbr'
}
ALLOWED_ATTRS = ['href', 'src']
def node_to_html(node: Union[str, NodeElement, list]) -> str:
if isinstance(node, str):
return escape(node)
elif isinstance(node, list):
result = ''
for child_node in node:
result += node_to_html(child_node)
return result
elif not isinstance(node, NodeElement):
raise TypeError(f"Node must be instance of str or NodeElement, not {type(node)}")
result = "<" + node.tag
if node.attrs:
result += ' ' + ' '.join(f"{k}=\"{v}\"" for k, v in node.attrs.items())
if node.tag in VOID_ELEMENTS:
result += '/>'
else:
result += '>'
for child_node in node.children:
result += node_to_html(child_node)
result += '</' + node.tag + '>'
return result
def html_to_nodes(html_content: str) -> List[Union[str, NodeElement]]:
    """Parse an HTML fragment into a list of strings and NodeElement nodes."""
    nodes_parser = HtmlToNodesParser()
    nodes_parser.feed(html_content)
    return nodes_parser.get_nodes()
def _node_converter_filter(attribute, value) -> bool:
return bool(value)
def nodes_to_json(nodes: List[Union[str, NodeElement]]) -> List[Union[str, dict]]:
    """Convert parsed nodes into JSON-serializable form.

    Strings pass through unchanged; NodeElement instances become dicts with
    falsy fields dropped.  Items of any other type are silently skipped,
    matching the tolerant behavior of the original implementation.
    """
    json_nodes = []
    for item in nodes:
        if isinstance(item, str):
            json_nodes.append(item)
        elif isinstance(item, NodeElement):
            json_nodes.append(attr.asdict(item, filter=_node_converter_filter))
    return json_nodes
def html_to_json(content: str) -> List[Union[str, dict]]:
    """Convert an HTML fragment straight to its JSON-serializable node list."""
    parsed_nodes = html_to_nodes(content)
    return nodes_to_json(parsed_nodes)
class HtmlToNodesParser(HTMLParser):
    """Incremental parser converting an HTML fragment into a node tree.

    Feed markup via ``feed()`` and collect the resulting list of strings and
    NodeElement instances with ``get_nodes()``.  Disallowed tags and
    unbalanced markup raise ValueError.
    """

    def __init__(self):
        super().__init__()
        # Node list of the nesting level currently being filled.
        self.current_nodes = []
        # Stack of node lists belonging to still-open enclosing elements.
        self.parent_nodes = []

    def error(self, message):
        """Surface any parse problem as a ValueError."""
        raise ValueError(message)

    def add_str_node(self, s):
        """Append text, coalescing adjacent runs into one string node."""
        if self.current_nodes and isinstance(self.current_nodes[-1], str):
            self.current_nodes[-1] += s
        else:
            self.current_nodes.append(s)

    def handle_starttag(self, tag, attrs_list):
        if tag not in ALLOWED_TAGS:
            self.error(f"{tag} tag is not allowed")
        node = NodeElement(tag=tag)
        if attrs_list:
            # NOTE(review): attributes are copied verbatim; ALLOWED_ATTRS is
            # never consulted — confirm whether filtering was intended.
            # Loop variable renamed from `attr`, which shadowed the imported
            # `attr` module.
            for attr_name, value in attrs_list:
                node.attrs[attr_name] = value
        self.current_nodes.append(node)
        if tag not in VOID_ELEMENTS:
            # Descend: children of this node become the current level.
            self.parent_nodes.append(self.current_nodes)
            self.current_nodes = node.children = []

    def handle_endtag(self, tag):
        """Close the current element, validating tag balance."""
        if tag in VOID_ELEMENTS:
            return
        if not self.parent_nodes:
            # Without this guard, pop() below raises a bare IndexError.
            self.error(f"\"{tag}\" tag is missing start tag")
        self.current_nodes = self.parent_nodes.pop()
        last_node = self.current_nodes[-1]
        if last_node.tag != tag:
            self.error(f"\"{tag}\" tag closed instead of \"{last_node.tag}\"")

    def handle_data(self, data):
        self.add_str_node(data)

    def handle_entityref(self, name):
        # Named entity, e.g. "amp" -> "&".
        self.add_str_node(chr(name2codepoint[name]))

    def handle_charref(self, name):
        # Numeric character reference: hex ("x41") or decimal ("65").
        if name.startswith('x'):
            c = chr(int(name[1:], 16))
        else:
            c = chr(int(name))
        self.add_str_node(c)

    def get_nodes(self):
        """Return the parsed top-level nodes; error if any tag is unclosed."""
        if self.parent_nodes:
            not_closed_tag = self.parent_nodes[-1][-1].tag
            self.error(f"\"{not_closed_tag}\" tag is not closed")
        return self.current_nodes
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.