repo_name stringlengths 7 65 | path stringlengths 5 185 | copies stringlengths 1 4 | size stringlengths 4 6 | content stringlengths 977 990k | license stringclasses 14 values | hash stringlengths 32 32 | line_mean float64 7.18 99.4 | line_max int64 31 999 | alpha_frac float64 0.25 0.95 | ratio float64 1.5 7.84 | autogenerated bool 1 class | config_or_test bool 2 classes | has_no_keywords bool 2 classes | has_few_assignments bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
poliastro/poliastro | contrib/cr3bp_DhruvJ/example_L4_vertical_orb_fam_palc.py | 1 | 2671 | """
@author: Dhruv Jain, Multi-Body Dynamics Research Group, Purdue University
dhruvj9922@gmail.com
Obj: To compute family of L2 Vertical Orbit
Single Shooter Variable Time Setup
1. Continue in 'x' + XZ plane symmetry and X-axis symmetry use => targets Period/4 states
2. PALC+ XZ plane symmetry and X-axis symmetry use => targets Period/4 states
Initial Condition obtained from:
D. Grebow, "Generating Periodic Orbits in the Circular Restricted Three-Body Problem with Applications to Lunar South Pole Coverage," M.S., May 2006.
"""
import numpy as np
import plotly.graph_objs as go
from cr3bp_char_quant import sys_chars
from cr3bp_lib_calc import lib_pt_loc
from cr3bp_po_fam_continuation import periodic_orbit_fam_continuation
from cr3bp_po_plot_orbits import plot_orbits
sys_p1p2 = sys_chars("Earth", "Moon")
lib_loc = lib_pt_loc(sys_p1p2)
li = lib_loc[1, :] # 0 for L1 and 1 for L2
# From D. Grebow
ig = np.array([1.0842, 0, 0, 0, -0.5417, 0.8415])
tf_guess = 6.1305
orbit_results = []
free_vars = ["x", "vy", "vz", "t"]
constraints = ["y", "vx", "vz"]
orb_fam_obj = periodic_orbit_fam_continuation(sys_p1p2, ig, tf=tf_guess)
# Target Vertical orbit using Single Shooter Variable Time setup
# Exploits XZ plane symmetry and X-axis symmetry (sym_period_targ set to 1/4)
# Continue in 'x' using Natural Parameter Continuation to compute 3 family members
orb_fam_obj.npc_po_fam(
free_vars,
constraints,
sym_period_targ=1 / 4,
Nmax=20,
step_size=-1e-4,
num_fam_members=3,
param_continue="x",
line_search=True,
)
"""
PALC
"""
# Target Vertical orbit using Single Shooter Variable Time setup
# Exploits XZ plane symmetry and X-axis symmetry (sym_period_targ set to 1/4)
# Continue the family using Pseudo-Arclength Continuation (PALC) to compute 10 family members
orb_fam_obj.palc_po_fam(
free_vars,
constraints,
sym_period_targ=1 / 4,
step_size=1e-1 * 3,
num_fam_members=10,
line_search=True,
)
"""
Plot family
"""
# if targeted_po_char != None:
colourby = orb_fam_obj.targeted_po_char["jc"]
colourmap = "plasma"
cb_label = "JC"
title = "EM_L2_Vertical_family_PALC"
data_trace = []
# Add L2
data_trace.append(
go.Scatter3d(x=[li[0]], y=[0], z=[0], marker=dict(color="red", size=2))
)
# Add Earth
data_trace.append(
go.Scatter3d(
x=[-sys_p1p2.mu], y=[0], z=[0], marker=dict(color="blue", size=10)
)
)
data_trace.append(
go.Scatter3d(
x=[1 - sys_p1p2.mu], y=[0], z=[0], marker=dict(color="grey", size=7)
)
)
plot_orbits(
sys_p1p2.mu,
orb_fam_obj.targeted_po_fam,
colourby,
cb_label,
title=title,
data_trace=data_trace,
save=False,
)
| mit | 9272f87aed140d3abaf9c7fe1e004b0a | 26.255102 | 149 | 0.670535 | 2.700708 | false | false | false | false |
poliastro/poliastro | src/poliastro/core/earth_atmosphere/jacchia.py | 1 | 6526 | """Low-level calculations for the Jacchia77 atmospheric model.
Given an exospheric temperature, Jacchia77 returns model
atmospheric altitude profiles of temperature, the number
densities of N2, O2, O, Ar, He, H, the sum thereof, and the
molecular weight.
For altitudes of 90 km and above, we use the 1977 model of
Jacchia [Ja77]. H-atom densities are returned as non-zero
for altitudes of 150 km and above if the maximum altitude
requested is 500 km or more.
REFERENCES:
Ja77 L. G. Jacchia, "Thermospheric Temperature, Density
and Composition: New Models," SAO Special Report No.
375 (Smithsonian Institution Astrophysical
Observatory, Cambridge, MA, March 15, 1977).
Fortran Implementation:
https://ccmc.gsfc.nasa.gov/pub/modelweb/atmospheric/jacchia/jacchia-77/
"""
import numpy as np
from numba import njit as jit
# Following constants have been taken from the fortran implementation
pi2 = np.pi / 2
wm0 = 28.96
wmN2 = 28.0134
wmO2 = 31.9988
wmO = 15.9994
wmAr = 39.948
wmHe = 4.0026
wmH = 1.0079
qN2 = 0.78110
qO2 = 0.20955
qAr = 0.009343
qHe = 0.000005242
R0 = 6356.766
R = 8314.32 # Units: u.J / (u.kg * u.mol)
@jit
def _O_and_O2_correction(alt, Texo, Z, CN2, CO2, CO, CAr, CHe, CH, CM, WM):
    """Apply empirical O and O2 corrections, then rebuild totals in place.

    For every altitude index from 90 up to ``alt`` (exclusive), scales the
    O2 and O number densities by empirical altitude-dependent factors and
    recomputes the total number density ``CM`` and the mean molecular
    weight ``WM`` from the individual species densities.

    All arguments after ``Texo`` are parallel lists indexed by altitude;
    ``CO2``, ``CO``, ``CM`` and ``WM`` are mutated in place.  ``Texo`` is
    unused here and kept only for signature parity with the other helpers.
    """
    for iz in range(90, alt):
        # Empirical O2 depletion: tanh roll-off centred near 111 km.
        CO2[iz] = CO2[iz] * (
            10.0 ** (-0.07 * (1.0 + np.tanh(0.18 * (Z[iz] - 111.0))))
        )
        # Empirical O correction: Gaussian-shaped factor centred at 97.7 km.
        CO[iz] = CO[iz] * (
            10.0 ** (-0.24 * np.exp(-0.009 * (Z[iz] - 97.7) ** 2))
        )
        # Total number density is the sum over N2, O2, O, Ar, He and H.
        CM[iz] = CN2[iz] + CO2[iz] + CO[iz] + CAr[iz] + CHe[iz] + CH[iz]
        # Number-density-weighted mean molecular weight.
        WM[iz] = (
            wmN2 * CN2[iz]
            + wmO2 * CO2[iz]
            + wmO * CO[iz]
            + wmAr * CAr[iz]
            + wmHe * CHe[iz]
            + wmH * CH[iz]
        ) / CM[iz]
@jit
def _H_correction(alt, Texo, x, y, Z, CN2, CO2, CO, CAr, CHe, CH, CM, WM, T):
    """Add atomic-hydrogen densities for altitudes of 150 km and above.

    Port of the Jacchia 1977 H-atom treatment (see module docstring):
    starting from an exosphere-temperature-dependent reference density at
    500 km, the hydrogen profile is obtained by accumulating a diffusion
    integrand, temporarily reusing ``WM`` and ``CM`` as scratch arrays
    before rebuilding them with the H contribution included.  All list
    arguments are mutated in place; ``x`` and ``y`` are scratch variables
    kept in the signature for parity with the Fortran source.
    """
    # Reference diffusion factor and H density at 500 km, both functions
    # of the exospheric temperature Texo.
    phid00 = 10.0 ** (6.9 + 28.9 * Texo ** (-0.25)) / 2.0e20
    phid00 = phid00 * 5.24e2
    H_500 = 10.0 ** (-0.06 + 28.9 * Texo ** (-0.25))
    # Stage 1: store integrand samples in WM and scale CM (scratch usage).
    for iz in range(150, alt):
        phid0 = phid00 / np.sqrt(T[iz])
        WM[iz] = wmH * 0.5897446 * ((1.0 + Z[iz] / R0) ** (-2)) / T[iz] + phid0
        CM[iz] = CM[iz] * phid0
    # Stage 2a: cumulative sum over WM.
    # NOTE(review): unlike the CM pass below, this accumulation has no 0.5
    # trapezoid factor — confirm against the Fortran reference.
    y = WM[150]
    WM[150] = 0
    for iz in range(151, alt):
        x = WM[iz - 1] + (y + WM[iz])
        y = WM[iz]
        WM[iz] = x
    for iz in range(150, alt):
        WM[iz] = np.exp(WM[iz]) * (T[iz] / T[150]) ** 0.75
        CM[iz] = WM[iz] * CM[iz]
    # Stage 2b: trapezoidal cumulative sum over CM.
    y = CM[150]
    CM[150] = 0
    for iz in range(151, alt):
        x = CM[iz - 1] + 0.5 * (y + CM[iz])
        y = CM[iz]
        CM[iz] = x
    # Stage 3: hydrogen density referenced to the 500 km boundary value.
    for iz in range(150, alt):
        CH[iz] = (WM[500] / WM[iz]) * (H_500 - (CM[iz] - CM[500]))
    # Stage 4: rebuild totals and mean molecular weight including H.
    for iz in range(150, alt):
        CM[iz] = CN2[iz] + CO2[iz] + CO[iz] + CAr[iz] + CHe[iz] + CH[iz]
        WM[iz] = (
            wmN2 * CN2[iz]
            + wmO2 * CO2[iz]
            + wmO * CO[iz]
            + wmAr * CAr[iz]
            + wmHe * CHe[iz]
            + wmH * CH[iz]
        ) / CM[iz]
@jit
def _altitude_profile(alt, Texo, x, y, E5M, E6P):
    """Compute the Jacchia77 atmospheric profile from 90 km up to ``alt``.

    Parameters
    ----------
    alt : int
        Maximum altitude (km), must be within [90, 2500].
    Texo : float
        Exospheric temperature (K); truncated to an integer below.
    x, y : float
        Scratch variables (kept for parity with the Fortran source).
    E5M, E6P : list
        Caller-supplied scratch arrays used below 100 km: E5M holds a
        polynomial fit of the mean molecular weight (values near wm0);
        E6P accumulates a pressure-like barometric product — seeded with
        7.145e13 * T[90] at the 90 km boundary.

    Returns
    -------
    tuple
        ``(Z, T, CN2, CO2, CO, CAr, CHe, CH, CM, WM)``: altitudes,
        temperatures, species number densities (as arrays), total
        density and mean molecular weight, all indexed by altitude in km.
    """
    # Raise ValueError if alt < 90 km or alt > 2500 km.
    if alt < 90 or 2500 < alt:
        raise ValueError(
            "Jacchia77 has been implemented in range 90km - 2500km."
        )
    alt = int(
        alt + 1
    )  # in fortran the upper limits are included. in python are not.
    Texo = int(Texo)
    Z = [0.0 for _ in range(alt)]
    T = [0.0 for _ in range(alt)]
    CN2 = [0.0 for _ in range(alt)]
    CO2 = [0.0 for _ in range(alt)]
    CO = [0.0 for _ in range(alt)]
    CAr = [0.0 for _ in range(alt)]
    CHe = [0.0 for _ in range(alt)]
    CH = [0.0 for _ in range(alt)]
    CM = [0.0 for _ in range(alt)]
    WM = [0.0 for _ in range(alt)]
    for iz in range(90, alt):
        Z[iz] = iz
        CH[iz] = 0
        # Temperature profile: constant 188 K floor at the base or for
        # very cool exospheres, otherwise an arctan transition toward Texo.
        if iz <= 90:
            T[iz] = 188
        elif Texo < 188.1:
            T[iz] = 188
        else:
            x = 0.0045 * (Texo - 188.0)
            Tx = 188 + 110.5 * np.log(x + np.sqrt(x * x + 1))
            Gx = pi2 * 1.9 * (Tx - 188.0) / (125.0 - 90.0)
            if iz <= 125:
                T[iz] = Tx + ((Tx - 188.0) / pi2) * np.arctan(
                    (Gx / (Tx - 188.0))
                    * (Z[iz] - 125.0)
                    * (1.0 + 1.7 * ((Z[iz] - 125.0) / (Z[iz] - 90.0)) ** 2)
                )
            else:
                T[iz] = Tx + ((Texo - Tx) / pi2) * np.arctan(
                    (Gx / (Texo - Tx))
                    * (Z[iz] - 125.0)
                    * (1.0 + 5.5e-5 * (Z[iz] - 125.0) ** 2)
                )
        if iz <= 100:
            # Below 100 km: polynomial mean molecular weight (Horner form)
            # and a barometric recursion for the E6P product.
            x = iz - 90
            E5M[iz - 90] = 28.89122 + x * (
                -2.83071e-2
                + x
                * (
                    -6.59924e-3
                    + x * (-3.39574e-4 + x * (+6.19256e-5 + x * (-1.84796e-6)))
                )
            )
            if iz <= 90:
                E6P[0] = 7.145e13 * T[90]
            else:
                # Inverse-square gravity factors at the two grid points.
                G0 = (1 + Z[iz - 1] / R0) ** (-2)
                G1 = (1 + Z[iz] / R0) ** (-2)
                E6P[iz - 90] = E6P[iz - 91] * np.exp(
                    -0.5897446
                    * (
                        G1 * E5M[iz - 90] / T[iz]
                        + G0 * E5M[iz - 91] / T[iz - 1]
                    )
                )
            # Split the total into species via the sea-level fractions q*.
            x = E5M[iz - 90] / wm0
            y = E6P[iz - 90] / T[iz]
            CN2[iz] = qN2 * y * x
            CO[iz] = 2.0 * (1.0 - x) * y
            CO2[iz] = (x * (1.0 + qO2) - 1.0) * y
            CAr[iz] = qAr * y * x
            CHe[iz] = qHe * y * x
            CH[iz] = 0
        else:
            # Above 100 km: diffusive equilibrium, species-by-species
            # barometric recursion weighted by molecular weight.
            G0 = (1 + Z[iz - 1] / R0) ** (-2)
            G1 = (1 + Z[iz] / R0) ** (-2)
            x = 0.5897446 * (G1 / T[iz] + G0 / T[iz - 1])
            y = T[iz - 1] / T[iz]
            CN2[iz] = CN2[iz - 1] * y * np.exp(-wmN2 * x)
            CO2[iz] = CO2[iz - 1] * y * np.exp(-wmO2 * x)
            CO[iz] = CO[iz - 1] * y * np.exp(-wmO * x)
            CAr[iz] = CAr[iz - 1] * y * np.exp(-wmAr * x)
            CHe[iz] = CHe[iz - 1] * (y**0.62) * np.exp(-wmHe * x)
            CH[iz] = 0
    # Apply empirical O/O2 corrections and, for tall profiles, add H.
    _O_and_O2_correction(alt, Texo, Z, CN2, CO2, CO, CAr, CHe, CH, CM, WM)
    if 500 <= alt:
        _H_correction(
            alt, Texo, x, y, Z, CN2, CO2, CO, CAr, CHe, CH, CM, WM, T
        )
    return (
        Z,
        T,
        np.array(CN2),
        np.array(CO2),
        np.array(CO),
        np.array(CAr),
        np.array(CHe),
        np.array(CH),
        np.array(CM),
        WM,
    )
| mit | 7317bd65b901925f773506f7c501e4a4 | 28.396396 | 79 | 0.426755 | 2.523589 | false | false | false | false |
poliastro/poliastro | contrib/cr3bp_DhruvJ/example.py | 1 | 1793 | """
Created on Tue Feb 8 22:13:15 2022
@author: Dhruv Jain, Multi-Body Dynamics Research Group, Purdue University
dhruvj9922@gmail.com
This is an exmaple file to test the CR3BP functions
"""
import matplotlib.pyplot as plt
from cr3bp_char_quant import sys_chars
from cr3bp_lib_calc import lib_pt_loc
from cr3bp_model_master import cr3bp_model
sys_p1p2 = sys_chars("Earth", "Moon")
mu = sys_p1p2.mu
print("Earth-Moon mu:", mu)
print("Earth-Moon l*:", sys_p1p2.lstar, "km")
print("Earth-Moon t*:", sys_p1p2.tstar / 86400, "days")
# Calculate the 5 libration points
lib_loc = lib_pt_loc(sys_p1p2)
li = lib_loc[:, :] # 0 for L1 and 1 for L2....
print("Earth-Moon Li:", li)
# Arbitrary state
ic = [1.05903, -0.067492, -0.103524, -0.170109, 0.0960234, -0.135279]
# Propagate the arbitrary state for time = 0 to tf
tf = 10
cr3bp_obj = cr3bp_model(sys_p1p2, ic, tf)
# Compute Jacobi Constant
print("Jacobi constant:", cr3bp_obj.JC())
results = cr3bp_obj.propagate()
# Plot P1, P2, Libration points and configuration space state history of the arbirtray state
pltnum = 1
plt.figure(pltnum)
ax = plt.axes(projection="3d")
ax.set_title("CR3BP EM, trajectory, T = 10[nd], tol = 1e-12")
ax.plot3D(
results["states"][:, 0], results["states"][:, 1], results["states"][:, 2]
)
ax.scatter(
results["states"][0, 0],
results["states"][0, 1],
results["states"][0, 2],
color="black",
label="t=0",
)
ax.scatter(li[:, 0], li[:, 1], li[:, 2], color="red", label="Li")
ax.scatter(-mu, 0, 0, color="blue", label="Earth")
ax.scatter(1 - mu, 0, 0, color="grey", label="Moon")
ax.set_box_aspect(
[ub - lb for lb, ub in (getattr(ax, f"get_{a}lim")() for a in "xyz")]
)
ax.set_ylabel("y [nd]")
ax.set_xlabel("x [nd]")
ax.set_zlabel("z [nd]")
plt.legend()
pltnum = pltnum + 1
| mit | 972bd6f5f3e69a0fb085945e416d36a1 | 27.460317 | 92 | 0.655326 | 2.525352 | false | false | false | false |
poliastro/poliastro | src/poliastro/core/czml_utils.py | 1 | 2708 | import numpy as np
from numba import njit as jit
from poliastro._math.linalg import norm
@jit
def intersection_ellipsoid_line(x, y, z, u1, u2, u3, a, b, c):
    """Intersection of an ellipsoid defined by its axes a, b, c with the
    line p + λu.

    Parameters
    ----------
    x, y, z : float
        A point of the line
    u1, u2, u3 : float
        The line vector.  The direction enters only through the slopes
        k = u2/u1 and m = u3/u1, so ``u1`` must be non-zero.
    a, b, c : float
        The ellipsoidal axes

    Returns
    -------
    p0, p1: numpy.ndarray
        This returns both of the points intersecting the ellipsoid.
    """
    # Get rid of one parameter by translating the line's direction vector
    # into two slopes relative to the first component.
    k, m = u2 / u1, u3 / u1
    # t0 and t1 are the two roots of the quadratic obtained by substituting
    # the parametrized line into the ellipsoid equation (closed form).
    t0 = (
        -(a**2) * b**2 * m * z
        - a**2 * c**2 * k * y
        - b**2 * c**2 * x
        + np.sqrt(
            a**2
            * b**2
            * c**2
            * (
                a**2 * b**2 * m**2
                + a**2 * c**2 * k**2
                - a**2 * k**2 * z**2
                + 2 * a**2 * k * m * y * z
                - a**2 * m**2 * y**2
                + b**2 * c**2
                - b**2 * m**2 * x**2
                + 2 * b**2 * m * x * z
                - b**2 * z**2
                - c**2 * k**2 * x**2
                + 2 * c**2 * k * x * y
                - c**2 * y**2
            )
        )
    ) / (a**2 * b**2 * m**2 + a**2 * c**2 * k**2 + b**2 * c**2)
    t1 = (
        a**2 * b**2 * m * z
        + a**2 * c**2 * k * y
        + b**2 * c**2 * x
        + np.sqrt(
            a**2
            * b**2
            * c**2
            * (
                a**2 * b**2 * m**2
                + a**2 * c**2 * k**2
                - a**2 * k**2 * z**2
                + 2 * a**2 * k * m * y * z
                - a**2 * m**2 * y**2
                + b**2 * c**2
                - b**2 * m**2 * x**2
                + 2 * b**2 * m * x * z
                - b**2 * z**2
                - c**2 * k**2 * x**2
                + 2 * c**2 * k * x * y
                - c**2 * y**2
            )
        )
    ) / (a**2 * b**2 * m**2 + a**2 * c**2 * k**2 + b**2 * c**2)
    # Evaluate the line at the two parameter values (note the opposite
    # signs: p0 moves forward by t0, p1 moves backward by t1).
    p0, p1 = np.array([x + t0, y + k * t0, z + m * t0]), np.array(
        [x - t1, y - t1 * k, z - t1 * m]
    )
    return p0, p1
@jit
def project_point_on_ellipsoid(x, y, z, a, b, c):
    """Return the projection of a point on an ellipsoid.

    The line through the point with direction (x, y, z) is intersected
    with the ellipsoid; whichever of the two intersection points lies
    closer to the original point is returned.

    Parameters
    ----------
    x, y, z : float
        Cartesian coordinates of point
    a, b, c : float
        Semi-axes of the ellipsoid
    """
    candidate_a, candidate_b = intersection_ellipsoid_line(
        x, y, z, x, y, z, a, b, c
    )
    point = np.array([x, y, z])
    dist_a = norm(candidate_a - point)
    dist_b = norm(candidate_b - point)
    return candidate_a if dist_a <= dist_b else candidate_b
| mit | 5297822c2a5f33be54a21bd9da3cdc50 | 25.80198 | 73 | 0.349095 | 2.840504 | false | false | false | false |
poliastro/poliastro | src/poliastro/frames/equatorial.py | 1 | 4174 | import numpy as np
from astropy import units as u
from astropy.coordinates import (
GCRS as _GCRS,
HCRS as _HCRS,
ICRS as _ICRS,
AffineTransform,
BaseRADecFrame,
CartesianDifferential,
FunctionTransformWithFiniteDifference,
TimeAttribute,
UnitSphericalRepresentation,
frame_transform_graph,
get_body_barycentric,
get_body_barycentric_posvel,
)
from astropy.coordinates.builtin_frames.utils import DEFAULT_OBSTIME
from poliastro.bodies import (
Jupiter,
Mars,
Mercury,
Moon,
Neptune,
Saturn,
Uranus,
Venus,
)
__all__ = [
"ICRS",
"HCRS",
"MercuryICRS",
"VenusICRS",
"GCRS",
"MarsICRS",
"JupiterICRS",
"SaturnICRS",
"UranusICRS",
"NeptuneICRS",
]
# HACK: sphinx-autoapi variable definition
ICRS = _ICRS
HCRS = _HCRS
GCRS = _GCRS
class _PlanetaryICRS(BaseRADecFrame):
    """Base class for body-centered frames aligned with the ICRS axes.

    Subclasses only need to set a ``body`` class attribute.  Instantiating
    a subclass registers on Astropy's frame transform graph the affine
    (origin-shift) transforms to and from ICRS, plus a finite-difference
    self-transform between different ``obstime`` values.
    NOTE(review): registration happens on *every* instantiation —
    presumably later registrations overwrite the same graph entries;
    confirm against the astropy transform-graph docs.
    """
    # Epoch at which the body's barycentric position is evaluated.
    obstime = TimeAttribute(default=DEFAULT_OBSTIME)
    def __new__(cls, *args, **kwargs):
        frame_transform_graph.transform(AffineTransform, cls, ICRS)(
            cls.to_icrs
        )
        frame_transform_graph.transform(AffineTransform, ICRS, cls)(
            cls.from_icrs
        )
        frame_transform_graph.transform(
            FunctionTransformWithFiniteDifference, cls, cls
        )(cls.self_transform)
        return super().__new__(cls)
    @staticmethod
    def to_icrs(planet_coo, _):
        """Affine transform body-centered -> ICRS: returns (matrix, offset)
        with a ``None`` matrix, i.e. a pure origin translation."""
        # This is just an origin translation so without a distance it cannot go ahead
        if isinstance(planet_coo.data, UnitSphericalRepresentation):
            raise u.UnitsError(
                _NEED_ORIGIN_HINT.format(planet_coo.__class__.__name__)
            )
        if planet_coo.data.differentials:
            # Include the body's barycentric velocity so velocities transform too.
            bary_sun_pos, bary_sun_vel = get_body_barycentric_posvel(
                planet_coo.body.name, planet_coo.obstime
            )
            bary_sun_pos = bary_sun_pos.with_differentials(
                bary_sun_vel.represent_as(CartesianDifferential)
            )
        else:
            bary_sun_pos = get_body_barycentric(
                planet_coo.body.name, planet_coo.obstime
            )
            bary_sun_vel = None
        return None, bary_sun_pos
    @staticmethod
    def from_icrs(icrs_coo, planet_frame):
        """Affine transform ICRS -> body-centered: the negated origin shift."""
        # This is just an origin translation so without a distance it cannot go ahead
        if isinstance(icrs_coo.data, UnitSphericalRepresentation):
            raise u.UnitsError(
                _NEED_ORIGIN_HINT.format(icrs_coo.__class__.__name__)
            )
        if icrs_coo.data.differentials:
            bary_sun_pos, bary_sun_vel = get_body_barycentric_posvel(
                planet_frame.body.name, planet_frame.obstime
            )
            # Beware! Negation operation is not supported for differentials
            bary_sun_pos = (-bary_sun_pos).with_differentials(
                -bary_sun_vel.represent_as(CartesianDifferential)
            )
        else:
            bary_sun_pos = -get_body_barycentric(
                planet_frame.body.name, planet_frame.obstime
            )
            bary_sun_vel = None
        return None, bary_sun_pos
    @staticmethod
    def self_transform(from_coo, to_frame):
        """Transform between two instances of the same frame class."""
        if np.all(from_coo.obstime == to_frame.obstime):
            # Same epoch: the data can be reused unchanged.
            return to_frame.realize_frame(from_coo.data)
        else:
            # Like CIRS, we do this self-transform via ICRS
            return from_coo.transform_to(ICRS).transform_to(to_frame)
# Concrete body-centered, ICRS-aligned frames: each subclass only pins the
# ``body`` attribute consumed by the _PlanetaryICRS transforms.
class MercuryICRS(_PlanetaryICRS):
    body = Mercury
class VenusICRS(_PlanetaryICRS):
    body = Venus
class MarsICRS(_PlanetaryICRS):
    body = Mars
class JupiterICRS(_PlanetaryICRS):
    body = Jupiter
class SaturnICRS(_PlanetaryICRS):
    body = Saturn
class UranusICRS(_PlanetaryICRS):
    body = Uranus
class NeptuneICRS(_PlanetaryICRS):
    body = Neptune
# NOTE(review): MoonICRS is not listed in ``__all__`` above — confirm
# whether that omission is intentional.
class MoonICRS(_PlanetaryICRS):
    body = Moon
_NEED_ORIGIN_HINT = (
"The input {0} coordinates do not have length units. This "
"probably means you created coordinates with lat/lon but "
"no distance. PlanetaryICRS<->ICRS transforms cannot "
"function in this case because there is an origin shift."
)
| mit | 038425276643203365b571cb2d72d17a | 25.0875 | 85 | 0.622185 | 3.333866 | false | false | false | false |
poliastro/poliastro | src/poliastro/spacecraft/__init__.py | 1 | 1540 | from astropy import units as u
class Spacecraft:
    """Simple container describing a spacecraft's drag-related properties."""

    @u.quantity_input(A=u.km**2, C_D=u.one, m=u.kg)
    def __init__(self, A, C_D, m, **metadata):
        """Initialize a Spacecraft.

        Parameters
        ----------
        A : ~astropy.units.Quantity
            Cross-sectional area of the spacecraft (km^2).
        C_D : ~astropy.units.Quantity
            Dimensionless drag coefficient.
        m : ~astropy.units.Quantity
            Mass of the spacecraft (kg).
        **metadata : Dict[object, dict]
            Optional keyword arguments attached to the spacecraft.
        """
        self._A = A
        self._C_D = C_D
        self._m = m
        self._metadata = metadata

    @property
    def A(self):
        """Cross-sectional area of the spacecraft."""
        return self._A

    @property
    def C_D(self):
        """Drag coefficient of the spacecraft."""
        return self._C_D

    @property
    def m(self):
        """Mass of the spacecraft."""
        return self._m

    @property
    def ballistic_coefficient(self):
        r"""Ballistic coefficient, :math:`C_D A / m` (km^2/kg).

        Returns
        -------
        B: ~astropy.units.Quantity
            Ballistic coefficient (km^2/kg)

        Notes
        -----
        Some references define the ballistic coefficient as the
        reciprocal instead:

        .. math::
            \frac{m}{C_D A}
        """
        return self._C_D * (self._A / self._m)
| mit | 5cdeae95f9d4e627a2bc5d0b85ec87af | 21.647059 | 80 | 0.521429 | 3.948718 | false | false | false | false |
poliastro/poliastro | src/poliastro/plotting/gabbard.py | 1 | 2478 | from astropy import units as u
from matplotlib import pyplot as plt
from poliastro.plotting.util import generate_label
class GabbardPlotter:
    """Builds Gabbard diagrams: apogee/perigee altitude against orbital period."""

    def __init__(
        self, ax=None, dark=False, altitude_unit=u.km, period_unit=u.min
    ):
        """Constructor.

        Parameters
        ----------
        ax : ~matplotlib.axes.Axes, optional
            Axes to draw on; a new 6x6-inch figure is created when omitted.
        dark : bool, optional
            Use matplotlib's dark background style when creating the figure.
        altitude_unit : ~astropy.units.UnitBase, optional
            Unit for the apogee/perigee axis (default km).
        period_unit : ~astropy.units.UnitBase, optional
            Unit for the period axis (default minutes).
        """
        self._ax = ax
        if not self._ax:
            if dark:
                with plt.style.context("dark_background"):
                    _, self._ax = plt.subplots(figsize=(6, 6))
            else:
                _, self._ax = plt.subplots(figsize=(6, 6))
        self._frame = None
        self._altitude_unit = altitude_unit
        self._period_unit = period_unit

    def _get_orbit_property_list(self, orbits):
        """Return (apogees, perigees, periods) magnitudes in the plot units."""
        apogees = []
        perigees = []
        periods = []
        for orbit in orbits:
            perigee, apogee, period = orbit.r_p, orbit.r_a, orbit.period
            apogees.append(apogee.to_value(self._altitude_unit))
            perigees.append(perigee.to_value(self._altitude_unit))
            periods.append(period.to_value(self._period_unit))
        return apogees, perigees, periods

    def _static_gabbard_plot(self, orbits):
        """Plots a Static Gabbard Plot given a list of Orbits

        Parameters
        ----------
        orbits : ~poliastro.twobody.orbit.Orbit List
            The Orbits whose perigee and apogee will be plotted.
        """
        apogees, perigees, periods = self._get_orbit_property_list(orbits)
        # Draw on the stored axes, not via plt.scatter: the pyplot call
        # would target the *current* axes and silently ignore a
        # user-supplied ``ax`` (and the dark-style figure).
        apogee_paths = self._ax.scatter(
            periods, apogees, marker="o", color="blue", label="Apogee"
        )
        perigee_paths = self._ax.scatter(
            periods, perigees, marker="o", color="red", label="Perigee"
        )
        self._ax.set_xlabel(f"Period ({self._period_unit:s})")
        self._ax.set_ylabel(f"Altitude ({self._altitude_unit:s})")
        return apogee_paths, perigee_paths

    def plot_orbits(self, orbits, label=""):
        """Plot the orbits and attach a legend titled with the last epoch."""
        apogee_paths, perigee_paths = self._static_gabbard_plot(orbits)
        self._set_legend(orbits[-1].epoch, label)
        return apogee_paths, perigee_paths

    def _set_legend(self, epoch, label):
        """Place the legend outside the axes, widening the figure once."""
        label = generate_label(epoch, label)
        # Only widen the figure the first time a legend is added.
        if not self._ax.get_legend():
            size = self._ax.figure.get_size_inches() + [8, 0]
            self._ax.figure.set_size_inches(size)
        self._ax.legend(
            loc="upper left",
            bbox_to_anchor=(1.05, 1.015),
            title=label,
            numpoints=1,
        )
| mit | 8d2ecaea7d4b50901d00c6060deb48bc | 31.181818 | 74 | 0.573043 | 3.330645 | false | false | false | false |
poliastro/poliastro | src/poliastro/twobody/elements.py | 1 | 5997 | import numpy as np
from astropy import units as u
from poliastro.core.elements import (
circular_velocity as circular_velocity_fast,
coe2rv as coe2rv_fast,
coe2rv_many as coe2rv_many_fast,
eccentricity_vector as eccentricity_vector_fast,
)
from poliastro.core.propagation.farnocchia import (
delta_t_from_nu as delta_t_from_nu_fast,
)
u_kms = u.km / u.s
u_km3s2 = u.km**3 / u.s**2
@u.quantity_input(k=u_km3s2, a=u.km)
def circular_velocity(k, a):
    """Circular velocity for a given body (k) and semimajor axis (a)."""
    k_val = k.to_value(u_km3s2)
    a_val = a.to_value(u.km)
    return circular_velocity_fast(k_val, a_val) * u_kms
@u.quantity_input(k=u_km3s2, a=u.km)
def mean_motion(k, a):
    """Mean motion given body (k) and semimajor axis (a)."""
    # abs() makes the expression valid for hyperbolic orbits (a < 0) too.
    a_cubed = abs(a**3)
    return np.sqrt(k / a_cubed).to(1 / u.s) * u.rad
@u.quantity_input(k=u_km3s2, a=u.km)
def period(k, a):
    """Orbital period given body (k) and semimajor axis (a)."""
    return 2 * np.pi * u.rad / mean_motion(k, a)
@u.quantity_input(k=u_km3s2, r=u.km, v=u_kms)
def energy(k, r, v):
    """Specific orbital energy (kinetic plus potential)."""
    kinetic = v @ v / 2
    potential = -k / np.sqrt(r @ r)
    return kinetic + potential
@u.quantity_input(k=u_km3s2, r=u.km, v=u_kms)
def eccentricity_vector(k, r, v):
    """Eccentricity vector for the orbit defined by position r and velocity v."""
    ecc = eccentricity_vector_fast(
        k.to_value(u_km3s2), r.to_value(u.km), v.to_value(u_kms)
    )
    return ecc * u.one
@u.quantity_input(nu=u.rad, ecc=u.one, k=u_km3s2, r_p=u.km)
def t_p(nu, ecc, k, r_p):
    """Elapsed time since latest perifocal passage."""
    # TODO: Make this a propagator method
    delta_t = delta_t_from_nu_fast(
        nu.to_value(u.rad),
        ecc.value,
        k.to_value(u_km3s2),
        r_p.to_value(u.km),
    )
    return delta_t * u.s
@u.quantity_input(
    k=u_km3s2,
    R=u.km,
    J2=u.one,
    n_sunsync=1 / u.s,
    a=u.km,
    ecc=u.one,
    inc=u.rad,
)
def heliosynchronous(k, R, J2, n_sunsync, a=None, ecc=None, inc=None):
    """Solve the Sun-synchronous (J2 nodal regression) relation.

    Given the gravitational parameter ``k``, body radius ``R``, oblateness
    coefficient ``J2`` and the required nodal regression rate
    ``n_sunsync``, exactly two of ``(a, ecc, inc)`` must be supplied; the
    remaining one is solved for, and all three are returned.

    Raises
    ------
    ValueError
        If fewer than two of ``(a, ecc, inc)`` are provided.
    FloatingPointError
        If no solution exists: ``np.errstate(invalid="raise")`` promotes
        the resulting invalid sqrt/arccos to an exception.
    """
    with np.errstate(invalid="raise"):
        if a is None and (ecc is not None) and (inc is not None):
            # Semimajor axis is the unknown variable
            a = (
                -3
                * R**2
                * J2
                * np.sqrt(k)
                / (2 * n_sunsync * (1 - ecc**2) ** 2)
                * np.cos(inc)
            ) ** (2 / 7)
        elif ecc is None and (a is not None) and (inc is not None):
            # Eccentricity is the unknown variable
            ecc = np.sqrt(
                1
                - np.sqrt(
                    -3
                    * R**2
                    * J2
                    * np.sqrt(k)
                    * np.cos(inc)
                    / (2 * a ** (7 / 2) * n_sunsync)
                )
            )
        elif inc is None and (ecc is not None) and (a is not None):
            # Inclination is the unknown variable
            inc = np.arccos(
                -2
                * a ** (7 / 2)
                * n_sunsync
                * (1 - ecc**2) ** 2
                / (3 * R**2 * J2 * np.sqrt(k))
            )
        else:
            raise ValueError("Two parameters of (a, ecc, inc) are required")
    return a, ecc, inc
@u.quantity_input(ecc=u.one)
def hyp_nu_limit(ecc, r_max_ratio=np.inf):
    r"""Limit true anomaly for hyperbolic orbits.

    Parameters
    ----------
    ecc : ~astropy.units.Quantity
        Eccentricity, should be larger than 1.
    r_max_ratio : float, optional
        Value of :math:`r_{\text{max}} / p` for this angle, default to infinity.
    """
    cos_nu = -(1 - 1 / r_max_ratio) / ecc
    return np.arccos(cos_nu)
@u.quantity_input(R=u.m, J2=u.one, J3=u.one, a=u.m, inc=u.rad)
def get_eccentricity_critical_argp(R, J2, J3, a, inc):
    """Eccentricity for frozen orbits when the argument of perigee is critical.

    Parameters
    ----------
    R : ~astropy.units.Quantity
        Planet radius.
    J2 : ~astropy.units.Quantity
        Planet J2 coefficient.
    J3 : ~astropy.units.Quantity
        Planet J3 coefficient.
    a : ~astropy.units.Quantity
        Orbit's semimajor axis
    inc : ~astropy.units.Quantity
        Inclination.
    """
    ecc = -J3 * R * np.sin(inc) / 2 / J2 / a
    return ecc
@u.quantity_input(R=u.m, J2=u.one, J3=u.one, a=u.m, ecc=u.one)
def get_inclination_critical_argp(R, J2, J3, a, ecc):
    """Inclination for frozen orbits when the argument of perigee is
    critical and the eccentricity is given.

    Parameters
    ----------
    R : ~astropy.units.Quantity
        Planet radius.
    J2 : ~astropy.units.Quantity
        Planet J2 coefficient.
    J3 : ~astropy.units.Quantity
        Planet J3 coefficient.
    a : ~astropy.units.Quantity
        Semimajor axis.
    ecc : ~astropy.units.Quantity
        Eccentricity.
    """
    # Inverse of the frozen-orbit eccentricity relation, solved for inc.
    angle = np.arcsin(-ecc * a * J2 * 2 / R / J3)
    return angle * u.rad
@u.quantity_input(ecc=u.one)
def get_eccentricity_critical_inc(ecc=None):
    """Eccentricity for frozen orbits when the inclination is critical.

    When no eccentricity is given, an arbitrary default of 0.0549 (the
    Moon's eccentricity) is used, as it seems a reasonable value.

    Parameters
    ----------
    ecc : ~astropy.units.Quantity, optional
        Eccentricity, default to None.
    """
    return 0.0549 * u.one if ecc is None else ecc
def coe2rv(k, p, ecc, inc, raan, argp, nu):
    """Convert classical orbital elements to position and velocity vectors."""
    r_vec, v_vec = coe2rv_fast(
        k.to_value(u_km3s2),
        p.to_value(u.km),
        ecc.to_value(u.one),
        inc.to_value(u.rad),
        raan.to_value(u.rad),
        argp.to_value(u.rad),
        nu.to_value(u.rad),
    )
    return r_vec << u.km, v_vec << (u.km / u.s)
def coe2rv_many(k_arr, p_arr, ecc_arr, inc_arr, raan_arr, argp_arr, nu_arr):
    """Vectorized conversion of classical elements arrays to r/v arrays."""
    r_arr, v_arr = coe2rv_many_fast(
        k_arr.to_value(u_km3s2),
        p_arr.to_value(u.km),
        ecc_arr.to_value(u.one),
        inc_arr.to_value(u.rad),
        raan_arr.to_value(u.rad),
        argp_arr.to_value(u.rad),
        nu_arr.to_value(u.rad),
    )
    return r_arr << u.km, v_arr << (u.km / u.s)
| mit | ea4a3f4e7bce5a6015b4d48d53166878 | 25.187773 | 80 | 0.538102 | 2.759779 | false | false | false | false |
poliastro/poliastro | src/poliastro/twobody/propagation/farnocchia.py | 1 | 1928 | import sys
import numpy as np
from astropy import units as u
from poliastro.core.propagation.farnocchia import (
farnocchia_coe as farnocchia_coe_fast,
farnocchia_rv as farnocchia_rv_fast,
)
from poliastro.twobody.propagation.enums import PropagatorKind
from poliastro.twobody.states import ClassicalState
from ._compat import OldPropagatorModule
sys.modules[__name__].__class__ = OldPropagatorModule
class FarnocchiaPropagator:
    r"""Propagates orbit using Farnocchia's method.

    Notes
    -----
    This method takes initial :math:`\vec{r}, \vec{v}`, calculates classical orbit parameters,
    increases mean anomaly and performs inverse transformation to get final :math:`\vec{r}, \vec{v}`
    The logic is based on formulae (4), (6) and (7) from http://dx.doi.org/10.1007/s10569-013-9476-9
    """
    # Valid for every conic: elliptic, parabolic and hyperbolic orbits.
    kind = (
        PropagatorKind.ELLIPTIC
        | PropagatorKind.PARABOLIC
        | PropagatorKind.HYPERBOLIC
    )
    def propagate(self, state, tof):
        """Propagate ``state`` by a single time of flight ``tof``.

        Under two-body motion only the true anomaly changes, so the
        remaining classical elements are carried over unchanged.
        """
        state = state.to_classical()
        nu = (
            farnocchia_coe_fast(
                state.attractor.k.to_value(u.km**3 / u.s**2),
                *state.to_value(),
                tof.to_value(u.s)
            )
            << u.rad
        )
        # Rebuild the state with the propagated true anomaly.
        new_state = ClassicalState(
            state.attractor, state.to_tuple()[:5] + (nu,), state.plane
        )
        return new_state
    def propagate_many(self, state, tofs):
        """Propagate ``state`` over an array of times of flight ``tofs``.

        Returns a tuple of stacked position (km) and velocity (km/s)
        arrays, one row per time of flight.
        """
        state = state.to_vectors()
        k = state.attractor.k.to_value(u.km**3 / u.s**2)
        rv0 = state.to_value()
        # TODO: This should probably return a ClassicalStateArray instead,
        # see discussion at https://github.com/poliastro/poliastro/pull/1492
        results = np.array(
            [farnocchia_rv_fast(k, *rv0, tof) for tof in tofs.to_value(u.s)]
        )
        return (
            results[:, 0] << u.km,
            results[:, 1] << (u.km / u.s),
        )
| mit | dee71089d65246c81b061a970b062621 | 28.661538 | 100 | 0.60529 | 3.150327 | false | false | false | false |
poliastro/poliastro | src/poliastro/earth/atmosphere/coesa62.py | 1 | 12174 | """ The U.S. Standard Atmosphere 1966 depicts idealized middle-latitude
year-round mean conditions for the range of solar activity that occurs between
sunspot minimum and sunspot maximum.
+--------+---------+---------+-----------+---------------+---------------+
| Z (km) | H (km) | T (K) | p (mbar) | rho (kg / m3) | beta (K / km) |
+--------+---------+---------+-----------+---------------+---------------+
| 0.0 | 0.0 | 288.150 | 1.01325e3 | 1.2250 | -6.5 |
+--------+---------+---------+-----------+---------------+---------------+
| 11.019 | 11.0 | 216.650 | 2.2632e2 | 3.6392e-1 | 0.0 |
+--------+---------+---------+-----------+---------------+---------------+
| 20.063 | 20.0 | 216.650 | 5.4749e1 | 8.8035e-2 | 1.0 |
+--------+---------+---------+-----------+---------------+---------------+
| 32.162 | 32.0 | 228.650 | 8.68014 | 1.3225e-2 | 2.8 |
+--------+---------+---------+-----------+---------------+---------------+
| 47.350 | 47.0 | 270.650 | 1.109050 | 1.4275e-3 | 0.0 |
+--------+---------+---------+-----------+---------------+---------------+
| 51.413 | 52.0 | 270.650 | 5.90005e-1| 7.5943e-4 | -2.8 |
+--------+---------+---------+-----------+---------------+---------------+
| 61.591 | 61.0 | 252.650 | 1.82099e-1| 2.5109e-4 | -2.0 |
+--------+---------+---------+-----------+---------------+---------------+
| 79.994 | 79.0 | 180.650 | 1.0377e-2 | 2.001e-5 | 0.0 |
+--------+---------+---------+-----------+---------------+---------------+
| 90.0 | 88.743 | 180.650 | 1.6438e-3 | 3.170e-6 | 0.0 |
+--------+---------+---------+-----------+---------------+---------------+
| 100.0 | 98.451 | 210.020 | 3.0075e-4 | 4.974e-7 | 5.0 |
+--------+---------+---------+-----------+---------------+---------------+
| 110.0 | 108.129 | 257.000 | 7.3544e-5 | 9.829e-8 | 10.0 |
+--------+---------+---------+-----------+---------------+---------------+
| 120.0 | 117.776 | 349.490 | 2.5217e-5 | 2.436e-8 | 20.0 |
+--------+---------+---------+-----------+---------------+---------------+
| 150.0 | 146.541 | 892.790 | 5.0617e-6 | 1.836e-9 | 15.0 |
+--------+---------+---------+-----------+---------------+---------------+
| 160.0 | 156.071 | 1022.23 | 3.6943e-6 | 1.159e-9 | 10.0 |
+--------+---------+---------+-----------+---------------+---------------+
| 170.0 | 165.571 | 1105.51 | 2.7926e-6 | 8.036e-10 | 7.0 |
+--------+---------+---------+-----------+---------------+---------------+
| 190.0 | 184.485 | 1205.50 | 1.6852e-6 | 4.347e-10 | 5.0 |
+--------+---------+---------+-----------+---------------+---------------+
| 230.0 | 221.967 | 1321.70 | 6.9604e-7 | 1.564e-10 | 4.0 |
+--------+---------+---------+-----------+---------------+---------------+
| 300.0 | 286.476 | 1432.11 | 1.8838e-7 | 3.585e-11 | 3.3 |
+--------+---------+---------+-----------+---------------+---------------+
| 400.0 | 376.312 | 1487.38 | 4.0304e-8 | 6.498e-12 | 2.6 |
+--------+---------+---------+-----------+---------------+---------------+
| 500.0 | 463.526 | 1499.22 | 1.0957e-8 | 1.577e-12 | 1.7 |
+--------+---------+---------+-----------+---------------+---------------+
| 600.0 | 548.230 | 1506.13 | 3.4502e-9 | 4.640e-13 | 1.1 |
+--------+---------+---------+-----------+---------------+---------------+
| 700.0 | 630.530 | 1507.61 | 1.1918e-9 | 1.537e-13 | 0.0 |
+--------+---------+---------+-----------+---------------+---------------+
"""
import numpy as np
from astropy import units as u
from astropy.io import ascii
from astropy.units import imperial
from astropy.utils.data import get_pkg_data_filename
from poliastro._math.integrate import quad
from poliastro.earth.atmosphere.base import COESA
# Constants come from the original paper to achieve pure implementation
r0 = 6356.766 * u.km
p0 = 1.013250e5 * u.Pa
rho0 = 1.2250 * u.K
T0 = 288.15 * u.K
g0 = 9.80665 * u.m / u.s**2
S = 110.4 * u.K
Ti = 273.15 * u.K
beta = 1.458e-6 * u.kg / u.s / u.m / u.K ** (0.5)
_gamma = 1.4
sigma = 3.65e-10 * u.m
N = 6.02257e26 * (u.kg * u.mol) ** -1
R = 8314.32 * u.J / u.kmol / u.K
R_air = 287.053 * u.J / u.kg / u.K
alpha = 34.1632 * u.K / u.km
# Reading layer parameters file
coesa_file = get_pkg_data_filename("data/coesa62.dat")
coesa62_data = ascii.read(coesa_file)
b_levels = coesa62_data["b"].data
zb_levels = coesa62_data["Zb [km]"].data * u.km
hb_levels = coesa62_data["Hb [km]"].data * u.km
Tb_levels = coesa62_data["Tb [K]"].data * u.K
Lb_levels = coesa62_data["Lb [K/km]"].data * u.K / u.km
pb_levels = coesa62_data["pb [mbar]"].data * u.mbar
class COESA62(COESA):
"""Holds the model for U.S Standard Atmosphere 1962."""
    def __init__(self):
        """Initialize the model with the 1962 layer table read at module load."""
        super().__init__(
            b_levels, zb_levels, hb_levels, Tb_levels, Lb_levels, pb_levels
        )
    def temperature(self, alt, geometric=True):
        """Solves for temperature at given altitude.

        Parameters
        ----------
        alt : ~astropy.units.Quantity
            Geometric/Geopotential altitude.
        geometric : bool
            If `True`, assumes geometric altitude kind.

        Returns
        -------
        T: ~astropy.units.Quantity
            Kinetic temperature.
        """
        # Test if altitude is inside valid range
        z, h = self._check_altitude(alt, r0, geometric=geometric)
        # Get base parameters
        i = self._get_index(z, self.zb_levels)
        zb = self.zb_levels[i]
        Tb = self.Tb_levels[i]
        Lb = self.Lb_levels[i]
        hb = self.hb_levels[i]
        # Below 90 km the lapse rate applies to geopotential altitude;
        # above 90 km it applies to geometric altitude.
        if z <= 90 * u.km:
            T = Tb + Lb * (h - hb)
        else:
            T = Tb + Lb * (z - zb)
        return T.to(u.K)
def pressure(self, alt, geometric=True):
"""Solves pressure at given altitude.
Parameters
----------
alt : ~astropy.units.Quantity
Geometric/Geopotential altitude.
geometric : bool
If `True`, assumes geometric altitude.
Returns
-------
p: ~astropy.units.Quantity
Pressure at given altitude.
"""
# Check if valid range and convert to geopotential
z, h = self._check_altitude(alt, r0, geometric=geometric)
# Get base parameters
i = self._get_index(z, self.zb_levels)
zb = self.zb_levels[i]
hb = self.hb_levels[i]
Tb = self.Tb_levels[i]
Lb = self.Lb_levels[i]
pb = self.pb_levels[i]
# If z <= 90km then apply eqn 1.2.10-(3)
if z <= 90 * u.km:
# If Lb is zero then apply eqn 1.2.10-(4)
if Lb == 0.0:
p = pb * np.exp(-g0 * (h - hb) / Tb / R_air)
else:
T = self.temperature(z)
p = pb * (T / Tb) ** (-g0 / R_air / Lb)
# If 90 < Z < 700 km then eqn 1.2.10-(5) is applied
else:
# Converting all the units into SI unit and taking their magnitude
Lb_v = Lb.to_value(u.K / u.m)
r0_v = r0.to_value(u.m)
z_v = z.to_value(u.m)
zb_v = zb.to_value(u.m)
Tb_v = Tb.value
g0_v = g0.value
R_air_v = R_air.value
# Putting g = (g0*(r0/(r0 +z))**2) in (g * dz / z - zb + Tb/Lb)
# and integrating it.
integrand = quad(
lambda x: (g0_v * (r0_v / (r0_v + x)) ** 2)
/ (x - zb_v + Tb_v / Lb_v),
zb_v,
z_v,
)
pb = pb.to(u.Pa)
p = (pb * np.exp((-1 / R_air_v / Lb_v) * integrand[0])).to(u.mbar)
return p
def density(self, alt, geometric=True):
"""Solves density at given altitude.
Parameters
----------
alt : ~astropy.units.Quantity
Geometric/Geopotential altitude.
geometric : bool
If `True`, assumes geometric altitude.
Returns
-------
rho: ~astropy.units.Quantity
Density at given altitude.
"""
# Check if valid range and convert to geopotential
z, h = self._check_altitude(alt, r0, geometric=geometric)
# Solve temperature and pressure
T = self.temperature(z)
p = self.pressure(z)
rho = p / R_air / T
return rho.to(u.kg / u.m**3)
def properties(self, alt, geometric=True):
"""Solves density at given height.
Parameters
----------
alt : ~astropy.units.Quantity
Geometric/Geopotential height.
geometric : bool
If `True`, assumes that `alt` argument is geometric kind.
Returns
-------
T: ~astropy.units.Quantity
Temperature at given height.
p: ~astropy.units.Quantity
Pressure at given height.
rho: ~astropy.units.Quantity
Density at given height.
"""
T = self.temperature(alt, geometric=geometric)
p = self.pressure(alt, geometric=geometric)
rho = self.density(alt, geometric=geometric)
return T, p, rho
def sound_speed(self, alt, geometric=True):
"""Solves speed of sound at given height.
Parameters
----------
alt : ~astropy.units.Quantity
Geometric/Geopotential height.
geometric : bool
If `True`, assumes that `alt` argument is geometric kind.
Returns
-------
Cs: ~astropy.units.Quantity
Speed of Sound at given height.
"""
# Check if valid range and convert to geopotential
z, h = self._check_altitude(alt, r0, geometric=geometric)
if z > 90 * u.km:
raise ValueError(
"Speed of sound in COESA62 has just been implemented up to 90km."
)
T = self.temperature(alt, geometric).value
# Using eqn-1.3.7-(1)
Cs = ((_gamma * R_air.value * T) ** 0.5) * (u.m / u.s)
return Cs
def viscosity(self, alt, geometric=True):
"""Solves dynamic viscosity at given height.
Parameters
----------
alt : ~astropy.units.Quantity
Geometric/Geopotential height.
geometric : bool
If `True`, assumes that `alt` argument is geometric kind.
Returns
-------
mu: ~astropy.units.Quantity
Dynamic viscosity at given height.
"""
# Check if valid range and convert to geopotential
z, h = self._check_altitude(alt, r0, geometric=geometric)
if z > 90 * u.km:
raise ValueError(
"Dynamic Viscosity in COESA62 has just been implemented up to 90km."
)
T = self.temperature(alt, geometric).value
# Using eqn-1.3.8-(1)
mu = (beta.value * T**1.5 / (T + S.value)) * (u.kg / u.m / u.s)
return mu
def thermal_conductivity(self, alt, geometric=True):
"""Solves coefficient of thermal conductivity at given height.
Parameters
----------
alt : ~astropy.units.Quantity
Geometric/Geopotential height.
geometric : bool
If `True`, assumes that `alt` argument is geometric kind.
Returns
-------
k: ~astropy.units.Quantity
coefficient of thermal conductivity at given height.
"""
# Check if valid range and convert to geopotential
z, h = self._check_altitude(alt, r0, geometric=geometric)
if z > 90 * u.km:
raise ValueError(
"Thermal conductivity in COESA62 has just been implemented up to 90km."
)
T = self.temperature(alt, geometric=geometric).value
# Using eqn-1.3.10-(1)
k = (6.325e-7 * T**1.5 / (T + 245.4 * (10 ** (-12.0 / T)))) * (
imperial.kcal / u.m / u.s / u.K
)
return k
| mit | b345e8a2535b98d3b3fa6c10671ec08a | 35.779456 | 87 | 0.440775 | 3.308152 | false | false | false | false |
poliastro/poliastro | src/poliastro/plotting/_base.py | 1 | 16905 | import warnings
from collections import namedtuple
from typing import List
import numpy as np
from astropy import units as u
from astropy.coordinates import CartesianRepresentation
from poliastro.ephem import Ephem
from poliastro.frames import Planes
from poliastro.plotting.util import BODY_COLORS, generate_label
from poliastro.twobody.mean_elements import get_mean_elements
from poliastro.twobody.sampling import EpochBounds
from poliastro.util import norm, time_range
class Trajectory(
    namedtuple(
        "Trajectory", ["coordinates", "position", "label", "colors", "dashed"]
    )
):
    """Immutable record bundling everything needed to (re)draw one trajectory:
    the sampled coordinates, an optional current position, the display label,
    the color scheme, and whether the line is dashed.
    """
    pass
class BaseOrbitPlotter:
    """
    Base class for all the OrbitPlotter classes.
    """
    def __init__(self, num_points=150, *, plane=None):
        """Initializes the plotter with no attractor and no trajectories.

        Parameters
        ----------
        num_points : int, optional
            Number of points used when sampling orbits, default to 150.
        plane : ~poliastro.frames.Planes, optional
            Reference plane, defaults to Earth equator.
        """
        self._num_points = num_points
        self._trajectories = []  # type: List[Trajectory]
        self._attractor = None
        self._plane = plane or Planes.EARTH_EQUATOR
        # Placeholder until an attractor is drawn; min() below relies on inf
        self._attractor_radius = np.inf * u.km
    @property
    def trajectories(self):
        return self._trajectories
    @property
    def plane(self):
        return self._plane
    def _set_attractor(self, attractor):
        # The attractor can only be set once; all plotted objects share it
        if self._attractor is None:
            self._attractor = attractor
        elif attractor is not self._attractor:
            raise NotImplementedError(
                f"Attractor has already been set to {self._attractor.name}"
            )
    def set_attractor(self, attractor):
        """Sets plotting attractor.
        Parameters
        ----------
        attractor : ~poliastro.bodies.Body
            Central body.
        """
        self._set_attractor(attractor)
    # The following drawing primitives must be provided by concrete backends
    # (e.g. matplotlib- or plotly-based subclasses).
    def _clear_attractor(self):
        raise NotImplementedError
    def _redraw_attractor(self):
        # Select a sensible value for the radius: realistic for low orbits,
        # visible for high and very high orbits
        min_distance = min(
            [
                coordinates.norm().min()
                for coordinates, *_ in self._trajectories
            ]
            or [0 * u.m]
        )
        self._attractor_radius = max(
            self._attractor.R.to(u.km), min_distance.to(u.km) * 0.15
        )
        color = BODY_COLORS.get(self._attractor.name, "#999999")
        self._clear_attractor()
        self._draw_sphere(
            self._attractor_radius,
            color,
            self._attractor.name,
        )
    def _redraw(self):
        # Re-plot every stored trajectory (used after frame/attractor changes)
        for trajectory in self._trajectories:
            self.__plot_coordinates_and_position(trajectory)
    def _get_colors(self, color, trail):
        raise NotImplementedError
    def _draw_marker(self, maker, size, color, name, center=None):
        raise NotImplementedError
    def _draw_point(self, radius, color, name, center=None):
        raise NotImplementedError
    def _draw_impulse(self, color, name, center=None):
        raise NotImplementedError
    def _draw_sphere(self, radius, color, name, center=None):
        raise NotImplementedError
    def _plot_coordinates(self, coordinates, label, colors, dashed):
        raise NotImplementedError
    def _plot_position(self, position, label, colors):
        radius = min(
            self._attractor_radius * 0.5,
            (norm(position) - self._attractor.R) * 0.5,
        )  # Arbitrary thresholds
        self._draw_point(radius, colors[0], label, center=position)
    def __plot_coordinates_and_position(self, trajectory):
        # Draw the line first, then (optionally) the current-position marker
        coordinates, position, label, colors, dashed = trajectory
        trace_coordinates = self._plot_coordinates(
            coordinates, label, colors, dashed
        )
        if position is not None:
            trace_position = self._plot_position(position, label, colors)
        else:
            trace_position = None
        return trace_coordinates, trace_position
    def __add_trajectory(
        self, coordinates, position=None, *, label, colors, dashed
    ):
        # Store the trajectory so it survives attractor redraws, then plot it
        trajectory = Trajectory(coordinates, position, label, colors, dashed)
        self._trajectories.append(trajectory)
        self._redraw_attractor()
        (
            trace_coordinates,
            trace_position,
        ) = self.__plot_coordinates_and_position(trajectory)
        return trace_coordinates, trace_position
    def _plot_trajectory(
        self, coordinates, *, label=None, color=None, trail=False
    ):
        if self._attractor is None:
            raise ValueError(
                "An attractor must be set up first, please use "
                "set_attractor(Major_Body) or plot(orbit)"
            )
        colors = self._get_colors(color, trail)
        # Ensure that the coordinates are cartesian just in case,
        # to avoid weird errors later
        coordinates = coordinates.represent_as(CartesianRepresentation)
        return self.__add_trajectory(
            coordinates, None, label=str(label), colors=colors, dashed=False
        )
    def _plot_maneuver(
        self, initial_orbit, maneuver, *, label=None, color=None, trail=False
    ):
        if self._attractor is None:
            raise ValueError(
                "An attractor must be set up first, please use "
                "set_attractor(Major_Body) or plot(orbit)"
            )
        # Apply the maneuver, collect all intermediate states and allocate the
        # final coordinates list array
        *maneuver_phases, final_phase = initial_orbit.apply_maneuver(
            maneuver, intermediate=True
        )
        if len(maneuver_phases) == 0:
            # For single-impulse maneuver only draw the impulse marker
            impulse_label = f"Impulse 1 - {label}"
            impulse_lines = (
                [self._draw_impulse(color, impulse_label, final_phase.r)],
            )
            return [(impulse_label, impulse_lines)]
        else:
            # Declare for holding (label, lines) for each impulse and trajectory
            lines_list = []
            # Collect the coordinates for the different maneuver phases
            for ith_impulse, orbit_phase in enumerate(maneuver_phases):
                # Plot the impulse marker and collect its label and lines
                impulse_label = f"Impulse {ith_impulse + 1} - {label}"
                impulse_lines = (
                    [self._draw_impulse(color, impulse_label, orbit_phase.r)],
                )
                lines_list.append((impulse_label, impulse_lines))
                # HACK: if no color is provided, get the one randomly generated
                # for previous impulse lines
                color = (
                    impulse_lines[0][0].get_color() if color is None else color
                )
                # Get the propagation time required before next impulse
                time_to_next_impulse, _ = maneuver.impulses[ith_impulse + 1]
                # Collect the coordinate points for the i-th orbit phase
                # TODO: Remove `.sample()` to return Ephem and use `plot_ephem` instead?
                phase_coordinates = orbit_phase.to_ephem(
                    strategy=EpochBounds(
                        min_epoch=orbit_phase.epoch,
                        max_epoch=orbit_phase.epoch + time_to_next_impulse,
                    )
                ).sample()
                # Plot the phase trajectory and collect its label and lines
                trajectory_lines = self._plot_trajectory(
                    phase_coordinates, label=label, color=color, trail=trail
                )
                lines_list.append((label, trajectory_lines))
            # Finally, draw the impulse at the very beginning of the final phase
            impulse_label = f"Impulse {ith_impulse + 2} - {label}"
            impulse_lines = (
                [self._draw_impulse(color, impulse_label, final_phase.r)],
            )
            lines_list.append((impulse_label, impulse_lines))
            return lines_list
    def _plot(self, orbit, *, label=None, color=None, trail=False):
        colors = self._get_colors(color, trail)
        self.set_attractor(orbit.attractor)
        orbit = orbit.change_plane(self.plane)
        label = generate_label(orbit.epoch, label)
        coordinates = orbit.sample(self._num_points)
        return self.__add_trajectory(
            coordinates, orbit.r, label=label, colors=colors, dashed=True
        )
    def _plot_body_orbit(
        self,
        body,
        epoch,
        *,
        label=None,
        color=None,
        trail=False,
    ):
        if color is None:
            color = BODY_COLORS.get(body.name)
        self.set_attractor(body.parent)
        # Get approximate, mean value for the period
        period = get_mean_elements(body, epoch).period
        label = generate_label(epoch, label or str(body))
        epochs = time_range(
            epoch, num_values=self._num_points, end=epoch + period, scale="tdb"
        )
        ephem = Ephem.from_body(
            body, epochs, attractor=body.parent, plane=self.plane
        )
        return self._plot_ephem(
            ephem, epoch, label=label, color=color, trail=trail
        )
    def _plot_ephem(
        self, ephem, epoch=None, *, label=None, color=None, trail=False
    ):
        if self._attractor is None:
            raise ValueError(
                "An attractor must be set up first, please use "
                "set_attractor(Major_Body) or plot(orbit)"
            )
        if ephem.plane is not self.plane:
            raise ValueError(
                f"The ephemerides reference plane is {ephem.plane} "
                f"while the plotter is using {self.plane}, "
                "sample the ephemerides using a different plane "
                "or create a new plotter"
            )
        colors = self._get_colors(color, trail)
        coordinates = ephem.sample()
        # Only draw the position marker when a concrete epoch was requested
        if epoch is not None:
            r0 = ephem.rv(epoch)[0]
        else:
            r0 = None
        return self.__add_trajectory(
            coordinates, r0, label=str(label), colors=colors, dashed=False
        )
    def plot_trajectory(
        self, coordinates, *, label=None, color=None, trail=False
    ):
        """Plots a precomputed trajectory.
        An attractor must be set first.
        Parameters
        ----------
        coordinates : ~astropy.coordinates.CartesianRepresentation
            Trajectory to plot.
        label : str, optional
            Label of the trajectory.
        color : str, optional
            Color of the trajectory.
        trail : bool, optional
            Fade the orbit trail, default to False.
        """
        # Do not return the result of self._plot
        # This behavior might be overriden by subclasses
        self._plot_trajectory(
            coordinates, label=label, color=color, trail=trail
        )
    def plot_maneuver(
        self, initial_orbit, maneuver, label=None, color=None, trail=False
    ):
        """Plots the maneuver trajectory applied to the provided initial orbit.
        Parameters
        ----------
        initial_orbit : ~poliastro.twobody.orbit.Orbit
            The base orbit for which the maneuver will be applied.
        maneuver : ~poliastro.maneuver.Maneuver
            The maneuver to be plotted.
        label : str, optional
            Label of the trajectory.
        color : str, optional
            Color of the trajectory.
        trail : bool, optional
            Fade the orbit trail, default to False.
        """
        # Do not return the result of self._plot
        # This behavior might be overriden by subclasses
        self._plot_maneuver(
            initial_orbit, maneuver, label=label, color=color, trail=trail
        )
    def plot(self, orbit, *, label=None, color=None, trail=False):
        """Plots state and osculating orbit in their plane.
        Parameters
        ----------
        orbit : ~poliastro.twobody.orbit.Orbit
            Orbit to plot.
        label : str, optional
            Label of the orbit.
        color : str, optional
            Color of the line and the position.
        trail : bool, optional
            Fade the orbit trail, default to False.
        """
        # Do not return the result of self._plot
        # This behavior might be overriden by subclasses
        self._plot(orbit, label=label, color=color, trail=trail)
    def plot_body_orbit(
        self,
        body,
        epoch,
        *,
        label=None,
        color=None,
        trail=False,
    ):
        """Plots complete revolution of body and current position.
        Parameters
        ----------
        body : poliastro.bodies.SolarSystemPlanet
            Body.
        epoch : astropy.time.Time
            Epoch of current position.
        label : str, optional
            Label of the orbit, default to the name of the body.
        color : str, optional
            Color of the line and the position.
        trail : bool, optional
            Fade the orbit trail, default to False.
        """
        # Do not return the result of self._plot
        # This behavior might be overriden by subclasses
        self._plot_body_orbit(
            body, epoch, label=label, color=color, trail=trail
        )
    def plot_ephem(
        self, ephem, epoch=None, *, label=None, color=None, trail=False
    ):
        """Plots Ephem object over its sampling period.
        Parameters
        ----------
        ephem : ~poliastro.ephem.Ephem
            Ephemerides to plot.
        epoch : astropy.time.Time, optional
            Epoch of the current position, none will be used if not given.
        label : str, optional
            Label of the orbit, default to the name of the body.
        color : str, optional
            Color of the line and the position.
        trail : bool, optional
            Fade the orbit trail, default to False.
        """
        # Do not return the result of self._plot
        # This behavior might be overriden by subclasses
        self._plot_ephem(ephem, epoch, label=label, color=color, trail=trail)
class Mixin2D:
    """Shared frame-handling helpers for two-dimensional plotter backends."""
    _trajectories: List[Trajectory]
    def _redraw(self):
        raise NotImplementedError
    def _project(self, rr):
        # Remove the component along the frame normal (w), then express the
        # in-plane remainder in the (p, q) basis
        rr_proj = rr - (rr @ self._frame[2])[:, None] * self._frame[2]
        x = rr_proj @ self._frame[0]
        y = rr_proj @ self._frame[1]
        return x, y
    def _set_frame(self, p_vec, q_vec, w_vec):
        # Validate that (p, q, w) form a right-handed orthonormal basis
        if not np.allclose([norm(v) for v in (p_vec, q_vec, w_vec)], 1):
            raise ValueError("Vectors must be unit.")
        elif not np.allclose([p_vec @ q_vec, q_vec @ w_vec, w_vec @ p_vec], 0):
            raise ValueError("Vectors must be mutually orthogonal.")
        else:
            self._frame = p_vec, q_vec, w_vec
        # Re-plot existing trajectories so they are projected onto the new frame
        if self._trajectories:
            self._redraw()
    def set_frame(self, p_vec, q_vec, w_vec):
        """Sets perifocal frame.

        .. deprecated::
            Use :meth:`set_body_frame` or :meth:`set_orbit_frame` instead.

        Raises
        ------
        ValueError
            If the vectors are not a set of mutually orthogonal unit vectors.
        """
        warnings.warn(
            "Method set_frame is deprecated and will be removed in a future release, "
            "use `set_body_frame` or `set_orbit_frame` instead"
            "with your use case",
            DeprecationWarning,
            stacklevel=2,
        )
        self._set_frame(p_vec, q_vec, w_vec)
    def set_orbit_frame(self, orbit):
        """Sets perifocal frame based on an orbit.
        Parameters
        ----------
        orbit : ~poliastro.twobody.Orbit
            Orbit to use as frame.
        """
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", DeprecationWarning)
            self._set_frame(*orbit.pqw())
    def set_body_frame(self, body, epoch=None):
        """Sets perifocal frame based on the orbit of a body at a particular epoch if given.
        Parameters
        ----------
        body : poliastro.bodies.SolarSystemPlanet
            Body.
        epoch : astropy.time.Time, optional
            Epoch of current position.
        """
        from warnings import warn
        from astropy import time
        from poliastro.bodies import Sun
        from poliastro.twobody import Orbit
        from ..warnings import TimeScaleWarning
        # Normalize the epoch to the TDB scale, warning on implicit conversion
        if not epoch:
            epoch = time.Time.now().tdb
        elif epoch.scale != "tdb":
            epoch = epoch.tdb
            warn(
                "Input time was converted to scale='tdb' with value "
                f"{epoch.tdb.value}. Use Time(..., scale='tdb') instead.",
                TimeScaleWarning,
                stacklevel=2,
            )
        with warnings.catch_warnings():
            ephem = Ephem.from_body(body, epoch, attractor=Sun, plane=self.plane)  # type: ignore
            orbit = Orbit.from_ephem(Sun, ephem, epoch).change_plane(self.plane)  # type: ignore
        self.set_orbit_frame(orbit)
| mit | 20b73ab57fc86ecbc64ebe02965f5d53 | 31.385057 | 97 | 0.575747 | 4.123171 | false | false | false | false |
poliastro/poliastro | src/poliastro/core/thrust/change_a_inc.py | 1 | 3037 | import numpy as np
from numba import njit as jit
from numpy import cross
from poliastro._math.linalg import norm
from poliastro.core.elements import circular_velocity
@jit
def extra_quantities(k, a_0, a_f, inc_0, inc_f, f):
    """Extra quantities (total delta-V and time of flight) given by the
    Edelbaum (a, i) model.
    """
    v_start, v_end, yaw0 = compute_parameters(k, a_0, a_f, inc_0, inc_f)
    dv = delta_V(v_start, v_end, yaw0, inc_0, inc_f)
    # Constant acceleration: time of flight is delta-V over thrust magnitude
    return dv, dv / f
@jit
def beta(t, V_0, f, beta_0):
    """Yaw angle (β) at time ``t`` for the Edelbaum steering law."""
    # Decompose the initial velocity across/along the thrust direction
    v_across = V_0 * np.sin(beta_0)
    v_along = V_0 * np.cos(beta_0) - f * t
    return np.arctan2(v_across, v_along)
@jit
def beta_0(V_0, V_f, inc_0, inc_f):
    """Initial yaw angle (β) for the optimal Edelbaum transfer."""
    # Half-pi times the total inclination change appears in both terms
    half_pi_di = np.pi / 2 * abs(inc_f - inc_0)
    return np.arctan2(np.sin(half_pi_di), V_0 / V_f - np.cos(half_pi_di))
@jit
def compute_parameters(k, a_0, a_f, inc_0, inc_f):
    """Circular velocities at both semimajor axes plus the initial yaw angle."""
    v_initial = circular_velocity(k, a_0)
    v_final = circular_velocity(k, a_f)
    return v_initial, v_final, beta_0(v_initial, v_final, inc_0, inc_f)
@jit
def delta_V(V_0, V_f, beta_0, inc_0, inc_f):
    """Required velocity increment for the Edelbaum transfer."""
    di = abs(inc_f - inc_0)
    if di == 0:
        # Pure semimajor-axis change: delta-V is just the speed difference
        return abs(V_f - V_0)
    tan_arg = np.pi / 2 * di + beta_0
    return V_0 * np.cos(beta_0) - V_0 * np.sin(beta_0) / np.tan(tan_arg)
def change_a_inc(k, a_0, a_f, inc_0, inc_f, f):
    """Change semimajor axis and inclination.
    Guidance law from the Edelbaum/Kéchichian theory, optimal transfer between circular inclined orbits
    (a_0, i_0) --> (a_f, i_f), ecc = 0.
    Parameters
    ----------
    k : float
        Gravitational parameter.
    a_0 : float
        Initial semimajor axis (km).
    a_f : float
        Final semimajor axis (km).
    inc_0 : float
        Initial inclination (rad).
    inc_f : float
        Final inclination (rad).
    f : float
        Magnitude of constant acceleration (km / s**2).
    Returns
    -------
    a_d : function
        Perturbation acceleration function ``a_d(t0, u_, k)``.
    delta_V : numpy.ndarray
        Total velocity increment of the transfer.
    t_f : float
        Time of flight.
    Notes
    -----
    Edelbaum theory, reformulated by Kéchichian.
    References
    ----------
    * Edelbaum, T. N. "Propulsion Requirements for Controllable
      Satellites", 1961.
    * Kéchichian, J. A. "Reformulation of Edelbaum's Low-Thrust
      Transfer Problem Using Optimal Control Theory", 1997.
    """
    V_0, V_f, beta_0_ = compute_parameters(k, a_0, a_f, inc_0, inc_f)
    @jit
    def a_d(t0, u_, k):
        # State vector is packed as [r; v]
        r = u_[:3]
        v = u_[3:]
        # Change sign of beta with the out-of-plane velocity
        # NOTE(review): the sign actually uses the x position component
        # (r[0]) and the inclination change direction -- confirm intended
        beta_ = beta(t0, V_0, f, beta_0_) * np.sign(r[0] * (inc_f - inc_0))
        t_ = v / norm(v)  # tangential (velocity) unit vector
        w_ = cross(r, v) / norm(cross(r, v))  # orbit-normal unit vector
        # Thrust of magnitude f, yawed by beta_ out of the orbital plane
        accel_v = f * (np.cos(beta_) * t_ + np.sin(beta_) * w_)
        return accel_v
    delta_V, t_f = extra_quantities(k, a_0, a_f, inc_0, inc_f, f)
    return a_d, delta_V, t_f
| mit | 9b3844924915d0e4d6e3d8477726026e | 26.563636 | 106 | 0.565633 | 2.659649 | false | false | false | false |
poliastro/poliastro | src/poliastro/earth/plotting/groundtrack.py | 1 | 7681 | """ Holds ground-track plotter for Earth satellites """
import plotly.graph_objects as go
from astropy import units as u
from astropy.coordinates import (
GCRS,
ITRS,
CartesianDifferential,
CartesianRepresentation,
SphericalRepresentation,
)
from poliastro.bodies import Earth
from poliastro.earth.plotting.utils import EARTH_PALETTE
from poliastro.twobody.sampling import EpochsArray
class GroundtrackPlotter:
    """Generates two-dimensional ground-track"""
    def __init__(self, fig=None, color_palette=EARTH_PALETTE):
        """Initializes the ground-track
        Parameters
        ----------
        fig : ~plotly.graph_objects.Figure
            Figure instance for the canvas
        color_palette : dict
            A color palette for background map
        """
        # Generate custom figure if required
        if not fig:
            self.fig = go.Figure(go.Scattergeo())
        else:
            self.fig = fig
        # Default configuration is applied
        self.update_geos(
            showcoastlines=True,
            coastlinecolor="Black",
            showland=True,
            landcolor=color_palette["land_color"],
            showocean=True,
            oceancolor=color_palette["ocean_color"],
            showlakes=False,
            showrivers=False,
            lataxis={"showgrid": True, "gridcolor": "black"},
            lonaxis={"showgrid": True, "gridcolor": "black"},
        )
    def update_geos(self, **config):
        """Enables user to customize geo figure
        Parameters
        ----------
        **config : dict
            A collection of custom values for geo figure
        """
        self.fig.update_geos(config)
        return self.fig
    def update_layout(self, **config):
        """Enables user to customize figure layout
        Parameters
        ----------
        **config : dict
            A collection of custom values for figure layout
        """
        self.fig.update_layout(config)
        return self.fig
    def add_trace(self, trace):
        """Adds trace to custom figure"""
        self.fig.add_trace(trace)
    def _get_raw_coords(self, orb, t_deltas):
        """Generates raw orbit coordinates for given epochs
        Parameters
        ----------
        orb : ~poliastro.twobody.Orbit
            Orbit to be propagated
        t_deltas : ~astropy.time.TimeDelta
            Desired observation time
        Returns
        -------
        raw_xyz : ~astropy.coordinates.CartesianRepresentation
            A collection of raw cartesian position vectors
        raw_epochs : ~astropy.time.Time
            Associated epoch with previously raw coordinates
        """
        # Solve for raw coordinates and epochs
        ephem = orb.to_ephem(EpochsArray(orb.epoch + t_deltas))
        rr, vv = ephem.rv()
        # Attach velocities as differentials so the frame transform is exact
        raw_xyz = CartesianRepresentation(
            rr,
            xyz_axis=-1,
            differentials=CartesianDifferential(vv, xyz_axis=-1),
        )
        raw_epochs = ephem.epochs
        return raw_xyz, raw_epochs
    def _from_raw_to_ITRS(self, raw_xyz, raw_obstime):
        """Converts raw coordinates to ITRS ones
        Parameters
        ----------
        raw_xyz : numpy.ndarray
            A collection of raw position coordinates
        raw_obstime : numpy.ndarray
            Associated observation time
        Returns
        -------
        itrs_xyz: ~astropy.coordinates.ITRS
            A collection of coordinates in ITRS frame
        """
        # Build GCRS and ITRS coordinates
        gcrs_xyz = GCRS(
            raw_xyz,
            obstime=raw_obstime,
            representation_type=CartesianRepresentation,
        )
        itrs_xyz = gcrs_xyz.transform_to(ITRS(obstime=raw_obstime))
        return itrs_xyz
    def _trace_groundtrack(self, orb, t_deltas, label, line_style):
        """Generates a trace for EarthSatellite's orbit groundtrack
        Parameters
        ----------
        orb : ~poliastro.twobody.Orbit
            EarthSatellite's associated Orbit
        t_deltas : ~astropy.time.TimeDelta
            Collection of epochs
        label : string
            Name for the trace
        line_style : dict
            Dictionary for customizing groundtrack line trace
        Returns
        -------
        gnd_trace: ~plotly.graph_objects.Scattergeo
            Trace associated to groundtrack
        """
        # Compute predicted groundtrack positions
        raw_xyz, raw_obstime = self._get_raw_coords(orb, t_deltas)
        itrs_xyz = self._from_raw_to_ITRS(raw_xyz, raw_obstime)
        itrs_latlon = itrs_xyz.represent_as(SphericalRepresentation)
        # Append predicted positions to map
        gnd_trace = go.Scattergeo(
            lat=itrs_latlon.lat.to(u.deg),
            lon=itrs_latlon.lon.to(u.deg),
            mode="lines",
            name=label,
            line=line_style,
        )
        return gnd_trace
    def _trace_position(self, ss, label, marker):
        """Adds marker trace to self figure showing current position
        Parameters
        ----------
        ss : ~poliastro.twobody.Orbit
            EarthSatellite's orbit
        label : string
            Label for the orbit
        marker : dict
            Dictionary holding plotly marker configuration
        Returns
        -------
        trace: ~plotly.graph_objects.Scattergeo
            Scattergeo trace for current position
        """
        # Check if marker available
        if not marker:
            marker = {"size": 5}
        # Solve for actual position within groundtrack
        raw_pos, raw_epoch = ss.rv()[0], ss.epoch
        itrs_pos = self._from_raw_to_ITRS(raw_pos, raw_epoch)
        itrs_latlon_pos = itrs_pos.represent_as(SphericalRepresentation)
        # Append predicted positions to map
        trace = go.Scattergeo(
            lat=itrs_latlon_pos.lat.to(u.deg),
            lon=itrs_latlon_pos.lon.to(u.deg),
            name=label,
            marker=marker,
            showlegend=False,
        )
        return trace
    def plot(self, earth_orb, t_span, label, color, line_style={}, marker={}):
        """Plots desired Earth satellite orbit for a given time span.
        Parameters
        ----------
        earth_orb : ~poliastro.earth.EarthSatellite
            Desired Earth's satellite whose groundtrack will be plotted
        t_span : ~astropy.time.TimeDelta
            A collection of epochs
        label : str
            Label for the groundtrack.
        color : string
            Desired lines and traces color
        line_style : dict
            Dictionary for customizing groundtrack line trace
        marker : dict
            Dictionary for customizing groundtrack marker trace
        Returns
        -------
        fig: ~plotly.graph_objects.Figure
            Output figure
        """
        # Retrieve basic parameters and check for proper attractor
        orb = earth_orb.orbit
        if orb.attractor != Earth:
            raise ValueError(
                f"Satellite should be orbiting Earth, not {orb.attractor}."
            )
        else:
            t_deltas = t_span - orb.epoch
        # Ensure same line and marker color unless user specifies
        for style in [line_style, marker]:
            style.setdefault("color", color)
        # Generate groundtrack trace and add it to figure
        gnd_trace = self._trace_groundtrack(orb, t_deltas, label, line_style)
        self.add_trace(gnd_trace)
        # Generate position trace and add it to figure
        pos_trace = self._trace_position(orb, label, marker)
        self.add_trace(pos_trace)
        # Return figure
        return self.fig
| mit | e35629aba0f09ca6355d80f47bfc5f18 | 28.316794 | 78 | 0.577659 | 4.163144 | false | false | false | false |
python-ivi/python-ivi | ivi/tektronix/tektronixOA5000.py | 2 | 8909 | """
Python Interchangeable Virtual Instrument Library
Copyright (c) 2012-2017 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .. import ivi
class tektronixOA5000(ivi.Driver):
"Tektronix OA5000 series optical attenuator driver"
def __init__(self, *args, **kwargs):
self.__dict__.setdefault('_instrument_id', '')
super(tektronixOA5000, self).__init__(*args, **kwargs)
self._identity_description = "Tektronix OA5000 series optical attenuator driver"
self._identity_identifier = ""
self._identity_revision = ""
self._identity_vendor = ""
self._identity_instrument_manufacturer = "Tektronix"
self._identity_instrument_model = ""
self._identity_instrument_firmware_revision = ""
self._identity_specification_major_version = 0
self._identity_specification_minor_version = 0
self._identity_supported_instrument_models = ['OA5002', 'OA5012', 'OA5022', 'OA5032']
self._attenuation = 0.0
self._reference = 0.0
self._wavelength = 1300.0
self._disable = False
self._add_property('attenuation',
self._get_attenuation,
self._set_attenuation,
None,
ivi.Doc("""
Specifies the attenuation of the optical path. The units are dB.
"""))
self._add_property('reference',
self._get_reference,
self._set_reference,
None,
ivi.Doc("""
Specifies the zero dB reference level for the attenuation setting. The
units are dB.
"""))
self._add_property('wavelength',
self._get_wavelength,
self._set_wavelength,
None,
ivi.Doc("""
Specifies the wavelength of light used for accurate attenuation. The
units are meters.
"""))
self._add_property('disable',
self._get_disable,
self._set_disable,
None,
ivi.Doc("""
Controls a shutter in the optical path. Shutter is closed when disable is
set to True.
"""))
def _initialize(self, resource = None, id_query = False, reset = False, **keywargs):
"Opens an I/O session to the instrument."
super(tektronixOA5000, self)._initialize(resource, id_query, reset, **keywargs)
# interface clear
if not self._driver_operation_simulate:
self._clear()
# check ID
if id_query and not self._driver_operation_simulate:
id = self.identity.instrument_model
id_check = self._instrument_id
id_short = id[:len(id_check)]
if id_short != id_check:
raise Exception("Instrument ID mismatch, expecting %s, got %s", id_check, id_short)
# reset
if reset:
self.utility_reset()
def _load_id_string(self):
if self._driver_operation_simulate:
self._identity_instrument_manufacturer = "Not available while simulating"
self._identity_instrument_model = "Not available while simulating"
self._identity_instrument_firmware_revision = "Not available while simulating"
else:
lst = self._ask("*IDN?").split(",")
self._identity_instrument_manufacturer = lst[0]
self._identity_instrument_model = lst[1]
self._identity_instrument_firmware_revision = lst[3]
self._set_cache_valid(True, 'identity_instrument_manufacturer')
self._set_cache_valid(True, 'identity_instrument_model')
self._set_cache_valid(True, 'identity_instrument_firmware_revision')
def _get_identity_instrument_manufacturer(self):
if self._get_cache_valid():
return self._identity_instrument_manufacturer
self._load_id_string()
return self._identity_instrument_manufacturer
def _get_identity_instrument_model(self):
if self._get_cache_valid():
return self._identity_instrument_model
self._load_id_string()
return self._identity_instrument_model
def _get_identity_instrument_firmware_revision(self):
if self._get_cache_valid():
return self._identity_instrument_firmware_revision
self._load_id_string()
return self._identity_instrument_firmware_revision
def _utility_disable(self):
pass
def _utility_error_query(self):
error_code = 0
error_message = "No error"
if not self._driver_operation_simulate:
error_message = self._ask("err?").strip('"')
error_code = 1
if error_message == '0':
error_code = 0
return (error_code, error_message)
def _utility_lock_object(self):
pass
    def _utility_reset(self):
        """Reset the instrument (*RST), clear I/O, and drop all cached state."""
        if not self._driver_operation_simulate:
            self._write("*RST")
            self._clear()
        self.driver_operation.invalidate_all_attributes()
    def _utility_reset_with_defaults(self):
        # No separate factory-defaults command; a plain reset is used.
        self._utility_reset()
    def _utility_self_test(self):
        """Run the instrument self test and return (code, message)."""
        code = 0
        message = "Self test passed"
        if not self._driver_operation_simulate:
            # *TST? returns 0 on success, nonzero on failure.
            code = int(self._ask("*TST?"))
            if code != 0:
                message = "Self test failed"
        return (code, message)
    def _utility_unlock_object(self):
        # Instrument locking not implemented.
        pass
    def _get_attenuation(self):
        """Return the attenuation in dB relative to the reference (attenuation:dbr?)."""
        if not self._driver_operation_simulate and not self._get_cache_valid():
            # Reply has the form "<header> <value>"; keep the numeric field.
            resp = self._ask("attenuation:dbr?").split(' ')[1]
            self._attenuation = float(resp)
            self._set_cache_valid()
        return self._attenuation
    def _set_attenuation(self, value):
        """Program the attenuation in dB and refresh the cache."""
        value = float(value)
        if not self._driver_operation_simulate:
            self._write("attenuation:dbr %e" % (value))
        self._attenuation = value
        self._set_cache_valid()
    def _get_reference(self):
        """Return the reference level (reference? query)."""
        if not self._driver_operation_simulate and not self._get_cache_valid():
            resp = self._ask("reference?").split(' ')[1]
            self._reference = float(resp)
            self._set_cache_valid()
        return self._reference
    def _set_reference(self, value):
        """Program the reference level and refresh the cache."""
        value = float(value)
        if not self._driver_operation_simulate:
            self._write("reference %e" % (value))
        self._reference = value
        self._set_cache_valid()
        # Attenuation is expressed relative to this reference, so its cached
        # value is now stale and must be re-read.
        self._set_cache_valid(False, 'attenuation')
    def _get_wavelength(self):
        """Return the configured operating wavelength (wavelength? query)."""
        if not self._driver_operation_simulate and not self._get_cache_valid():
            # Reply has the form "<header> <value>"; keep the numeric field.
            resp = self._ask("wavelength?").split(' ')[1]
            self._wavelength = float(resp)
            self._set_cache_valid()
        return self._wavelength
    def _set_wavelength(self, value):
        """Program the operating wavelength and refresh the cache."""
        value = float(value)
        if not self._driver_operation_simulate:
            self._write("wavelength %e" % (value))
        self._wavelength = value
        self._set_cache_valid()
    def _get_disable(self):
        """Return True when the output is disabled (disable? query)."""
        if not self._driver_operation_simulate and not self._get_cache_valid():
            resp = self._ask("disable?").split(' ')[1]
            self._disable = bool(int(resp))
            self._set_cache_valid()
        return self._disable
def _set_disable(self, value):
value = bool(value)
if not self._driver_operation_simulate:
self._write("disable %d" % (int(value)))
self._disable = value
self._set_cache_valid()
| mit | d9054af31ea555c8f290e5bbe4f07093 | 36.910638 | 99 | 0.584465 | 4.347975 | false | false | false | false |
python-ivi/python-ivi | ivi/tektronix/tektronixAWG2000.py | 2 | 23133 | """
Python Interchangeable Virtual Instrument Library
Copyright (c) 2012-2017 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import time
import struct
from numpy import *
from .. import ivi
from .. import fgen
# Map IVI standard-waveform names to the instrument's FG shape mnemonics.
# ramp_down and dc have no instrument equivalent and are left unmapped.
StandardWaveformMapping = {
        'sine': 'sin',
        'square': 'squ',
        'triangle': 'tri',
        'ramp_up': 'ramp',
        #'ramp_down',
        #'dc'
        }
class tektronixAWG2000(ivi.Driver, fgen.Base, fgen.StdFunc, fgen.ArbWfm,
fgen.ArbSeq, fgen.SoftwareTrigger, fgen.Burst,
fgen.ArbChannelWfm):
"Tektronix AWG2000 series arbitrary waveform generator driver"
    def __init__(self, *args, **kwargs):
        """Set up driver defaults, capability limits, and identity metadata."""
        self.__dict__.setdefault('_instrument_id', '')
        super(tektronixAWG2000, self).__init__(*args, **kwargs)
        self._output_count = 1
        # Arbitrary-waveform capability limits for this instrument family;
        # zero means "not characterized by this driver".
        self._arbitrary_sample_rate = 0
        self._arbitrary_waveform_number_waveforms_max = 0
        self._arbitrary_waveform_size_max = 256*1024
        self._arbitrary_waveform_size_min = 64
        self._arbitrary_waveform_quantum = 8
        self._arbitrary_sequence_number_sequences_max = 0
        self._arbitrary_sequence_loop_count_max = 0
        self._arbitrary_sequence_length_max = 0
        self._arbitrary_sequence_length_min = 0
        self._catalog_names = list()
        # Counters used to generate unique waveform/sequence file names.
        self._arbitrary_waveform_n = 0
        self._arbitrary_sequence_n = 0
        self._identity_description = "Tektronix AWG2000 series arbitrary waveform generator driver"
        self._identity_identifier = ""
        self._identity_revision = ""
        self._identity_vendor = ""
        self._identity_instrument_manufacturer = "Tektronix"
        self._identity_instrument_model = ""
        self._identity_instrument_firmware_revision = ""
        self._identity_specification_major_version = 5
        self._identity_specification_minor_version = 0
        self._identity_supported_instrument_models = ['AWG2005','AWG2020','AWG2021','AWG2040','AWG2041']
        self._init_outputs()
    def _initialize(self, resource = None, id_query = False, reset = False, **keywargs):
        "Opens an I/O session to the instrument."
        super(tektronixAWG2000, self)._initialize(resource, id_query, reset, **keywargs)
        # interface clear
        if not self._driver_operation_simulate:
            self._clear()
        # check ID
        if id_query and not self._driver_operation_simulate:
            id = self.identity.instrument_model
            id_check = self._instrument_id
            # Prefix comparison so model-number suffixes still match.
            id_short = id[:len(id_check)]
            if id_short != id_check:
                raise Exception("Instrument ID mismatch, expecting %s, got %s", id_check, id_short)
        # reset
        if reset:
            self.utility_reset()
    def _load_id_string(self):
        """Populate the cached identity fields from the instrument's *IDN? reply."""
        if self._driver_operation_simulate:
            self._identity_instrument_manufacturer = "Not available while simulating"
            self._identity_instrument_model = "Not available while simulating"
            self._identity_instrument_firmware_revision = "Not available while simulating"
        else:
            # *IDN? reply is comma-separated: manufacturer, model, serial, firmware.
            lst = self._ask("*IDN?").split(",")
            self._identity_instrument_manufacturer = lst[0]
            self._identity_instrument_model = lst[1]
            self._identity_instrument_firmware_revision = lst[3]
        self._set_cache_valid(True, 'identity_instrument_manufacturer')
        self._set_cache_valid(True, 'identity_instrument_model')
        self._set_cache_valid(True, 'identity_instrument_firmware_revision')
    def _get_identity_instrument_manufacturer(self):
        """Return the manufacturer string, loading identity data on a cache miss."""
        if self._get_cache_valid():
            return self._identity_instrument_manufacturer
        self._load_id_string()
        return self._identity_instrument_manufacturer
    def _get_identity_instrument_model(self):
        """Return the model string, loading identity data on a cache miss."""
        if self._get_cache_valid():
            return self._identity_instrument_model
        self._load_id_string()
        return self._identity_instrument_model
    def _get_identity_instrument_firmware_revision(self):
        """Return the firmware revision, loading identity data on a cache miss."""
        if self._get_cache_valid():
            return self._identity_instrument_firmware_revision
        self._load_id_string()
        return self._identity_instrument_firmware_revision
    def _utility_disable(self):
        # Not supported on this instrument; stub required by the IVI API.
        pass
    def _utility_error_query(self):
        """Return (error_code, error_message) from the event queue (:evmsg?)."""
        error_code = 0
        error_message = "No error"
        if not self._driver_operation_simulate:
            error_code, error_message = self._ask(":evmsg?").split(',')
            error_code = int(error_code.split(' ', 1)[1])
            if error_code == 1:
                # NOTE(review): code 1 appears to mean a status event is
                # pending; reading *esr? and re-querying :evmsg? fetches the
                # actual event -- confirm against the AWG2000 programmer manual.
                self._ask("*esr?")
                error_code, error_message = self._ask(":evmsg?").split(',')
                error_code = int(error_code.split(' ', 1)[1])
            error_message = error_message.strip(' "')
        return (error_code, error_message)
    def _utility_lock_object(self):
        # Instrument locking not implemented.
        pass
    def _utility_reset(self):
        """Reset the instrument (*RST) and drop all cached attribute state."""
        if not self._driver_operation_simulate:
            self._write("*RST")
            self._clear()
        self.driver_operation.invalidate_all_attributes()
    def _utility_reset_with_defaults(self):
        # No separate factory-defaults command; a plain reset is used.
        self._utility_reset()
    def _utility_self_test(self):
        """Run the instrument self test; blocks for about a minute."""
        code = 0
        message = "Self test passed"
        if not self._driver_operation_simulate:
            self._write("*TST?")
            # wait for test to complete
            time.sleep(60)
            code = int(self._read())
            if code != 0:
                message = "Self test failed"
        return (code, message)
    def _utility_unlock_object(self):
        # Instrument locking not implemented.
        pass
def _init_outputs(self):
try:
super(tektronixAWG2000, self)._init_outputs()
except AttributeError:
pass
self._output_enabled = list()
for i in range(self._output_count):
self._output_enabled.append(False)
def _load_catalog(self):
self._catalog = list()
self._catalog_names = list()
if not self._driver_operation_simulate:
raw = self._ask(":memory:catalog:all?").lower()
raw = raw.split(' ', 1)[1]
l = raw.split(',')
l = [s.strip('"') for s in l]
self._catalog = [l[i:i+3] for i in range(0, len(l), 3)]
self._catalog_names = [l[0] for l in self._catalog]
    def _get_output_operation_mode(self, index):
        """Return the cached operation mode; not read back from the instrument."""
        index = ivi.get_index(self._output_name, index)
        return self._output_operation_mode[index]
def _set_output_operation_mode(self, index, value):
index = ivi.get_index(self._output_name, index)
if value not in OperationMode:
raise ivi.ValueNotSupportedException()
self._output_operation_mode[index] = value
def _get_output_enabled(self, index):
index = ivi.get_index(self._output_name, index)
if not self._driver_operation_simulate and not self._get_cache_valid(index=index):
resp = self._ask(":output:ch%d:state?" % (index+1)).split(' ', 1)[1]
self._output_enabled[index] = bool(int(resp))
self._set_cache_valid(index=index)
return self._output_enabled[index]
def _set_output_enabled(self, index, value):
index = ivi.get_index(self._output_name, index)
value = bool(value)
if not self._driver_operation_simulate:
self._write(":output:ch%d:state %d" % (index+1, value))
self._output_enabled[index] = value
self._set_cache_valid(index=index)
    def _get_output_impedance(self, index):
        """Output impedance is fixed at 50 ohms on this instrument."""
        index = ivi.get_index(self._output_name, index)
        self._output_impedance[index] = 50
        return self._output_impedance[index]
    def _set_output_impedance(self, index, value):
        """Accept any requested value but force the cached impedance to 50 ohms."""
        index = ivi.get_index(self._output_name, index)
        value = 50
        self._output_impedance[index] = value
    def _get_output_mode(self, index):
        """Return 'function' when the built-in function generator is active,
        otherwise 'arbitrary'."""
        index = ivi.get_index(self._output_name, index)
        if not self._driver_operation_simulate and not self._get_cache_valid(index=index):
            resp = self._ask(":fg:state?").split(' ', 1)[1]
            if int(resp):
                self._output_mode[index] = 'function'
            else:
                self._output_mode[index] = 'arbitrary'
            self._set_cache_valid(index=index)
        return self._output_mode[index]
    def _set_output_mode(self, index, value):
        """Switch between function-generator and arbitrary output modes."""
        index = ivi.get_index(self._output_name, index)
        if value not in fgen.OutputMode:
            raise ivi.ValueNotSupportedException()
        if not self._driver_operation_simulate:
            if value == 'function':
                self._write(":fg:state 1")
            elif value == 'arbitrary':
                self._write(":fg:state 0")
        self._output_mode[index] = value
        # The FG state is instrument-wide: invalidate every channel's cache,
        # then mark this channel as freshly set.
        for k in range(self._output_count):
            self._set_cache_valid(valid=False,index=k)
        self._set_cache_valid(index=index)
    def _get_output_reference_clock_source(self, index):
        """Return the sample clock source (lower-cased instrument reply)."""
        index = ivi.get_index(self._output_name, index)
        if not self._driver_operation_simulate and not self._get_cache_valid(index=index):
            resp = self._ask(":clock:source?").split(' ', 1)[1]
            value = resp.lower()
            self._output_reference_clock_source[index] = value
            self._set_cache_valid(index=index)
        return self._output_reference_clock_source[index]
    def _set_output_reference_clock_source(self, index, value):
        """Select the sample clock source (validated against fgen.SampleClockSource)."""
        index = ivi.get_index(self._output_name, index)
        if value not in fgen.SampleClockSource:
            raise ivi.ValueNotSupportedException()
        if not self._driver_operation_simulate:
            self._write(":clock:source %s" % value)
        self._output_reference_clock_source[index] = value
        # Clock source is instrument-wide: invalidate every channel's cache,
        # then mark this channel as freshly set.
        for k in range(self._output_count):
            self._set_cache_valid(valid=False,index=k)
        self._set_cache_valid(index=index)
    def abort_generation(self):
        # No-op; stub required by the IVI fgen API.
        pass
    def initiate_generation(self):
        # No-op; stub required by the IVI fgen API.
        pass
    def _get_output_standard_waveform_amplitude(self, index):
        """Return the FG amplitude in volts, querying on a cache miss."""
        index = ivi.get_index(self._output_name, index)
        if not self._driver_operation_simulate and not self._get_cache_valid(index=index):
            resp = self._ask(":fg:ch%d:amplitude?" % (index+1)).split(' ', 1)[1]
            self._output_standard_waveform_amplitude[index] = float(resp)
            self._set_cache_valid(index=index)
        return self._output_standard_waveform_amplitude[index]
    def _set_output_standard_waveform_amplitude(self, index, value):
        """Program the FG amplitude in volts."""
        index = ivi.get_index(self._output_name, index)
        value = float(value)
        if not self._driver_operation_simulate:
            self._write(":fg:ch%d:amplitude %e" % (index+1, value))
        self._output_standard_waveform_amplitude[index] = value
        self._set_cache_valid(index=index)
    def _get_output_standard_waveform_dc_offset(self, index):
        """Return the FG DC offset in volts, querying on a cache miss."""
        index = ivi.get_index(self._output_name, index)
        if not self._driver_operation_simulate and not self._get_cache_valid(index=index):
            resp = self._ask(":fg:ch%d:offset?" % (index+1)).split(' ', 1)[1]
            self._output_standard_waveform_dc_offset[index] = float(resp)
            self._set_cache_valid(index=index)
        return self._output_standard_waveform_dc_offset[index]
    def _set_output_standard_waveform_dc_offset(self, index, value):
        """Program the FG DC offset in volts."""
        index = ivi.get_index(self._output_name, index)
        value = float(value)
        if not self._driver_operation_simulate:
            self._write(":fg:ch%d:offset %e" % (index+1, value))
        self._output_standard_waveform_dc_offset[index] = value
        self._set_cache_valid(index=index)
    def _get_output_standard_waveform_duty_cycle_high(self, index):
        """Return the cached duty cycle; not read back from the instrument."""
        index = ivi.get_index(self._output_name, index)
        return self._output_standard_waveform_duty_cycle_high[index]
    def _set_output_standard_waveform_duty_cycle_high(self, index, value):
        """Cache the duty cycle.  NOTE(review): the value is never written to
        the instrument anywhere in this driver -- confirm this is intended."""
        index = ivi.get_index(self._output_name, index)
        value = float(value)
        self._output_standard_waveform_duty_cycle_high[index] = value
    def _get_output_standard_waveform_start_phase(self, index):
        """Return the cached start phase; not read back from the instrument."""
        index = ivi.get_index(self._output_name, index)
        return self._output_standard_waveform_start_phase[index]
    def _set_output_standard_waveform_start_phase(self, index, value):
        """Cache the start phase.  NOTE(review): the value is never written to
        the instrument anywhere in this driver -- confirm this is intended."""
        index = ivi.get_index(self._output_name, index)
        value = float(value)
        self._output_standard_waveform_start_phase[index] = value
    def _get_output_standard_waveform_frequency(self, index):
        """Return the FG frequency in hertz, querying on a cache miss."""
        index = ivi.get_index(self._output_name, index)
        if not self._driver_operation_simulate and not self._get_cache_valid(index=index):
            resp = self._ask(":fg:frequency?").split(' ', 1)[1]
            self._output_standard_waveform_frequency[index] = float(resp)
            self._set_cache_valid(index=index)
        return self._output_standard_waveform_frequency[index]
    def _set_output_standard_waveform_frequency(self, index, value):
        """Program the FG frequency in hertz."""
        index = ivi.get_index(self._output_name, index)
        value = float(value)
        if not self._driver_operation_simulate:
            self._write(":fg:frequency %e" % value)
        self._output_standard_waveform_frequency[index] = value
        # :fg:frequency has no channel qualifier, so the setting is shared:
        # invalidate every channel's cache, then mark this one freshly set.
        for k in range(self._output_count):
            self._set_cache_valid(valid=False,index=k)
        self._set_cache_valid(index=index)
    def _get_output_standard_waveform_waveform(self, index):
        """Query the FG shape and translate it back to the IVI waveform name."""
        index = ivi.get_index(self._output_name, index)
        if not self._driver_operation_simulate and not self._get_cache_valid(index=index):
            resp = self._ask(":fg:ch%d:shape?" % (index+1)).split(' ', 1)[1]
            value = resp.lower()
            # Reverse lookup in the IVI-name -> instrument-mnemonic map.
            value = [k for k,v in StandardWaveformMapping.items() if v==value][0]
            self._output_standard_waveform_waveform[index] = value
            self._set_cache_valid(index=index)
        return self._output_standard_waveform_waveform[index]
    def _set_output_standard_waveform_waveform(self, index, value):
        """Select one of the supported standard waveform shapes."""
        index = ivi.get_index(self._output_name, index)
        if value not in StandardWaveformMapping:
            raise ivi.ValueNotSupportedException()
        if not self._driver_operation_simulate:
            self._write(":fg:ch%d:shape %s" % (index+1, StandardWaveformMapping[value]))
        self._output_standard_waveform_waveform[index] = value
        self._set_cache_valid(index=index)
    def _get_output_arbitrary_gain(self, index):
        """Return the arbitrary-mode amplitude (gain), querying on a cache miss.

        Note the arbitrary path uses :ch<n>:... commands, while the function
        generator path uses :fg:ch<n>:... commands.
        """
        index = ivi.get_index(self._output_name, index)
        if not self._driver_operation_simulate and not self._get_cache_valid(index=index):
            resp = self._ask(":ch%d:amplitude?" % (index+1)).split(' ', 1)[1]
            self._output_arbitrary_gain[index] = float(resp)
            self._set_cache_valid(index=index)
        return self._output_arbitrary_gain[index]
    def _set_output_arbitrary_gain(self, index, value):
        """Program the arbitrary-mode amplitude (gain)."""
        index = ivi.get_index(self._output_name, index)
        value = float(value)
        if not self._driver_operation_simulate:
            self._write(":ch%d:amplitude %e" % (index+1, value))
        self._output_arbitrary_gain[index] = value
        self._set_cache_valid(index=index)
    def _get_output_arbitrary_offset(self, index):
        """Return the arbitrary-mode offset, querying on a cache miss."""
        index = ivi.get_index(self._output_name, index)
        if not self._driver_operation_simulate and not self._get_cache_valid(index=index):
            resp = self._ask(":ch%d:offset?" % (index+1)).split(' ', 1)[1]
            self._output_arbitrary_offset[index] = float(resp)
            self._set_cache_valid(index=index)
        return self._output_arbitrary_offset[index]
    def _set_output_arbitrary_offset(self, index, value):
        """Program the arbitrary-mode offset."""
        index = ivi.get_index(self._output_name, index)
        value = float(value)
        if not self._driver_operation_simulate:
            self._write(":ch%d:offset %e" % (index+1, value))
        self._output_arbitrary_offset[index] = value
        self._set_cache_valid(index=index)
    def _get_output_arbitrary_waveform(self, index):
        """Return the name of the waveform file selected on the channel."""
        index = ivi.get_index(self._output_name, index)
        if not self._driver_operation_simulate and not self._get_cache_valid(index=index):
            resp = self._ask(":ch%d:waveform?" % (index+1)).split(' ', 1)[1]
            self._output_arbitrary_waveform[index] = resp.strip('"').lower()
            self._set_cache_valid(index=index)
        return self._output_arbitrary_waveform[index]
    def _set_output_arbitrary_waveform(self, index, value):
        """Select a waveform file as the channel's arbitrary waveform.

        The name must end in .wfm and already exist in the instrument's
        waveform catalog, otherwise ValueNotSupportedException is raised.
        """
        index = ivi.get_index(self._output_name, index)
        value = str(value).lower()
        # extension must be wfm
        ext = value.split('.').pop()
        if ext != 'wfm':
            raise ivi.ValueNotSupportedException()
        # waveform must exist on arb
        self._load_catalog()
        if value not in self._catalog_names:
            raise ivi.ValueNotSupportedException()
        if not self._driver_operation_simulate:
            self._write(":ch%d:waveform \"%s\"" % (index+1, value))
        # NOTE(review): unlike the other setters, the cache is not marked
        # valid here -- confirm whether that is intentional.
        self._output_arbitrary_waveform[index] = value
    def _get_arbitrary_sample_rate(self):
        """Return the sample clock frequency in samples/second."""
        if not self._driver_operation_simulate and not self._get_cache_valid():
            resp = self._ask(":clock:frequency?").split(' ', 1)[1]
            self._arbitrary_sample_rate = float(resp)
            self._set_cache_valid()
        return self._arbitrary_sample_rate
    def _set_arbitrary_sample_rate(self, value):
        """Program the sample clock frequency in samples/second."""
        value = float(value)
        if not self._driver_operation_simulate:
            self._write(":clock:frequency %e" % value)
        self._arbitrary_sample_rate = value
        self._set_cache_valid()
    # The four capability getters below return static limits set in __init__.
    def _get_arbitrary_waveform_number_waveforms_max(self):
        return self._arbitrary_waveform_number_waveforms_max
    def _get_arbitrary_waveform_size_max(self):
        return self._arbitrary_waveform_size_max
    def _get_arbitrary_waveform_size_min(self):
        return self._arbitrary_waveform_size_min
    def _get_arbitrary_waveform_quantum(self):
        return self._arbitrary_waveform_quantum
    def _arbitrary_waveform_clear(self, handle):
        # Deleting waveform files from the instrument is not implemented.
        pass
    def _arbitrary_waveform_create(self, data):
        """Upload waveform *data* and return the generated .wfm file handle.

        Accepts a list of floats, a 1D ndarray, a 2D ndarray with a single
        row or column, or any object understood by ivi.get_sig.  Samples are
        clipped to [-1, 1] and scaled to the instrument's 12-bit range.
        """
        y = None
        x = None
        if type(data) == list and type(data[0]) == float:
            # list
            y = array(data)
        elif type(data) == ndarray and len(data.shape) == 1:
            # 1D array
            y = data
        elif type(data) == ndarray and len(data.shape) == 2 and data.shape[0] == 1:
            # 2D array, height 1
            y = data[0]
        elif type(data) == ndarray and len(data.shape) == 2 and data.shape[1] == 1:
            # 2D array, width 1
            y = data[:,0]
        else:
            x, y = ivi.get_sig(data)
        if x is None:
            # No time base supplied; assume 10 MS/s sample spacing -- TODO confirm.
            x = arange(0,len(y)) / 10e6
        # Record length must be a multiple of the waveform quantum (8 samples).
        if len(y) % self._arbitrary_waveform_quantum != 0:
            raise ivi.ValueNotSupportedException()
        xincr = ivi.rms(diff(x))
        # get unused handle
        self._load_catalog()
        have_handle = False
        while not have_handle:
            self._arbitrary_waveform_n += 1
            handle = "w%04d.wfm" % self._arbitrary_waveform_n
            have_handle = handle not in self._catalog_names
        # Describe the waveform preamble, then transfer the curve data.
        self._write(":data:destination \"%s\"" % handle)
        self._write(":wfmpre:bit_nr 12")
        self._write(":wfmpre:bn_fmt rp")
        self._write(":wfmpre:byt_nr 2")
        self._write(":wfmpre:byt_or msb")
        self._write(":wfmpre:encdg bin")
        self._write(":wfmpre:pt_fmt y")
        self._write(":wfmpre:yzero 0")
        self._write(":wfmpre:ymult %e" % (2/(1<<12)))
        self._write(":wfmpre:xincr %e" % xincr)
        raw_data = b''
        for f in y:
            # clip at -1 and 1
            if f > 1.0: f = 1.0
            if f < -1.0: f = -1.0
            # shift [-1, 1] into [0, 1]
            f = (f + 1) / 2
            # scale to 12 bits
            # NOTE(review): the 0x000fffff mask is wider than a 12-bit value
            # needs (0x0fff would do); harmless since the value fits in the
            # 16-bit '>H' pack below.
            i = int(f * ((1 << 12) - 2) + 0.5) & 0x000fffff
            # add to raw data, MSB first
            raw_data = raw_data + struct.pack('>H', i)
        self._write_ieee_block(raw_data, ':curve ')
        return handle
    # The four capability getters below return static limits set in __init__
    # (all zero: sequencing is not characterized by this driver).
    def _get_arbitrary_sequence_number_sequences_max(self):
        return self._arbitrary_sequence_number_sequences_max
    def _get_arbitrary_sequence_loop_count_max(self):
        return self._arbitrary_sequence_loop_count_max
    def _get_arbitrary_sequence_length_max(self):
        return self._arbitrary_sequence_length_max
    def _get_arbitrary_sequence_length_min(self):
        return self._arbitrary_sequence_length_min
    def _arbitrary_clear_memory(self):
        # Not implemented for this instrument.
        pass
    def _arbitrary_sequence_clear(self, handle):
        # Not implemented for this instrument.
        pass
    def _arbitrary_sequence_configure(self, index, handle, gain, offset):
        # Not implemented for this instrument.
        pass
    def _arbitrary_sequence_create(self, handle_list, loop_count_list):
        # Stub: sequence creation is not implemented; returns a dummy handle.
        return "handle"
    def send_software_trigger(self):
        """Issue a *TRG software trigger."""
        if not self._driver_operation_simulate:
            self._write("*TRG")
    def _get_output_burst_count(self, index):
        """Return the cached burst count; not read back from the instrument."""
        index = ivi.get_index(self._output_name, index)
        return self._output_burst_count[index]
    def _set_output_burst_count(self, index, value):
        """Cache the burst count.  NOTE(review): the value is never written to
        the instrument anywhere in this driver -- confirm this is intended."""
        index = ivi.get_index(self._output_name, index)
        value = int(value)
        self._output_burst_count[index] = value
    def _arbitrary_waveform_create_channel_waveform(self, index, data):
        """Upload *data* as a new waveform and select it on the channel."""
        handle = self._arbitrary_waveform_create(data)
        self._set_output_arbitrary_waveform(index, handle)
        return handle
| mit | 49bda99d1ef31048a55b6bfcfc464b05 | 39.655536 | 104 | 0.601176 | 3.743204 | false | false | false | false |
python-ivi/python-ivi | setup.py | 7 | 2700 |
from __future__ import with_statement
# http://docs.python.org/distutils/
# http://packages.python.org/distribute/
try:
from setuptools import setup
except:
from distutils.core import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
    """setuptools 'test' command that runs the project test suite with pytest."""
    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True
    def run_tests(self):
        # Import here because pytest may only be importable after setuptools
        # has loaded the test dependencies (eggs).
        import pytest
        errno = pytest.main(self.test_args)
        raise SystemExit(errno)
import os.path
# Read __version__ from ivi/version.py without importing the package
# (importing would pull in the package's dependencies at build time).
version_py = os.path.join(os.path.dirname(__file__), 'ivi', 'version.py')
with open(version_py, 'r') as f:
    d = dict()
    exec(f.read(), d)
    version = d['__version__']
# Package metadata and distribution configuration.
setup(
    name = 'python-ivi',
    description = 'Python Interchangeable Virtual Instrument Library',
    version = version,
    long_description = '''This package is a Python-based interpretation of the
Interchangeable Virtual Instrument standard, a software abstraction for
electronic test equipment that is remotely controllable.''',
    author = 'Alex Forencich',
    author_email = 'alex@alexforencich.com',
    url = 'http://alexforencich.com/wiki/en/python-ivi/start',
    download_url = 'http://github.com/python-ivi/python-ivi/tarball/master',
    keywords = 'IVI measurement instrument',
    license = 'MIT License',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Console',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Intended Audience :: Science/Research',
        'Topic :: Scientific/Engineering :: Interface Engine/Protocol Translator',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: System :: Hardware :: Hardware Drivers',
        'Topic :: System :: Networking',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 3'
    ],
    # One entry per sub-package; setuptools does not auto-discover these here.
    packages = ['ivi',
                'ivi.interface',
                'ivi.extra',
                'ivi.scpi',
                'ivi.agilent',
                'ivi.chroma',
                'ivi.colby',
                'ivi.dicon',
                'ivi.ics',
                'ivi.jdsu',
                'ivi.lecroy',
                'ivi.rigol',
                'ivi.tektronix',
                'ivi.testequity'],
    requires = ['numpy'],
    # Optional instrument I/O backends.
    extras_require = {
        'vxi11': ['python-vxi11'],
        'usbtmc': ['python-usbtmc'],
        'serial': ['pyserial']
    },
    tests_require = ['pytest'],
    cmdclass = {'test': PyTest}
)
| mit | 19c5b34efb1ea32053df516af2230559 | 31.926829 | 82 | 0.589259 | 3.851641 | false | true | false | false |
python-ivi/python-ivi | ivi/agilent/agilent603xA.py | 2 | 13995 | """
Python Interchangeable Virtual Instrument Library
Copyright (c) 2012-2017 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .. import ivi
from .. import dcpwr
# Output tracking types supported by this family (floating outputs only).
TrackingType = set(['floating'])
# Map IVI trigger-source names to instrument mnemonics.
# NOTE(review): neither constant is referenced in the visible portion of this
# file; presumably consumed elsewhere -- confirm before removing.
TriggerSourceMapping = {
        'immediate': 'imm',
        'bus': 'bus'}
class agilent603xA(ivi.Driver, dcpwr.Base, dcpwr.Measurement):
"Agilent 603xA series IVI DC power supply driver"
    def __init__(self, *args, **kwargs):
        """Set up output specifications and identity metadata for the 603xA family."""
        self.__dict__.setdefault('_instrument_id', '')
        super(agilent603xA, self).__init__(*args, **kwargs)
        self._output_count = 1
        # Per-output capability table: range name -> (max voltage, max current),
        # plus absolute OVP/voltage/current limits used for range checking.
        self._output_spec = [
            {
                'range': {
                    'P200V': (204.750, 17.403)
                },
                'ovp_max': 214.0,
                'voltage_max': 204.750,
                'current_max': 17.403
            }
        ]
        self._identity_description = "Agilent 603xA series DC power supply driver"
        self._identity_identifier = ""
        self._identity_revision = ""
        self._identity_vendor = ""
        self._identity_instrument_manufacturer = "Agilent Technologies"
        self._identity_instrument_model = ""
        self._identity_instrument_firmware_revision = ""
        self._identity_specification_major_version = 3
        self._identity_specification_minor_version = 0
        self._identity_supported_instrument_models = ['6030A', '6031A', '6032A', '6033A', '6035A', '6038A']
        self._init_outputs()
    def _initialize(self, resource = None, id_query = False, reset = False, **keywargs):
        "Opens an I/O session to the instrument."
        super(agilent603xA, self)._initialize(resource, id_query, reset, **keywargs)
        # interface clear
        if not self._driver_operation_simulate:
            self._clear()
        # check ID
        if id_query and not self._driver_operation_simulate:
            id = self.identity.instrument_model
            id_check = self._instrument_id
            # Prefix comparison so model-number suffixes still match.
            id_short = id[:len(id_check)]
            if id_short != id_check:
                raise Exception("Instrument ID mismatch, expecting %s, got %s", id_check, id_short)
        # reset
        if reset:
            self.utility_reset()
    def _load_id_string(self):
        """Populate cached identity fields from the instrument's ID? reply."""
        if self._driver_operation_simulate:
            self._identity_instrument_manufacturer = "Not available while simulating"
            self._identity_instrument_model = "Not available while simulating"
            self._identity_instrument_firmware_revision = "Not available while simulating"
        else:
            # Only the model is parsed (second space-separated token of the
            # first comma field).  NOTE(review): the firmware-revision cache is
            # marked valid below even though ID? yields no firmware field, so
            # that attribute keeps its __init__ default -- confirm intended.
            lst = self._ask("ID?").split(",")
            self._identity_instrument_model = lst[0].split(" ")[1]
        self._set_cache_valid(True, 'identity_instrument_manufacturer')
        self._set_cache_valid(True, 'identity_instrument_model')
        self._set_cache_valid(True, 'identity_instrument_firmware_revision')
    def _get_identity_instrument_manufacturer(self):
        """Return the manufacturer string, loading identity data on a cache miss."""
        if self._get_cache_valid():
            return self._identity_instrument_manufacturer
        self._load_id_string()
        return self._identity_instrument_manufacturer
    def _get_identity_instrument_model(self):
        """Return the model string, loading identity data on a cache miss."""
        if self._get_cache_valid():
            return self._identity_instrument_model
        self._load_id_string()
        return self._identity_instrument_model
    def _get_identity_instrument_firmware_revision(self):
        """Return the firmware revision, loading identity data on a cache miss."""
        if self._get_cache_valid():
            return self._identity_instrument_firmware_revision
        self._load_id_string()
        return self._identity_instrument_firmware_revision
    def _utility_disable(self):
        # Not supported on this instrument; stub required by the IVI API.
        pass
    def _utility_error_query(self):
        """Return (error_code, error_message) from the instrument error queue."""
        error_code = 0
        error_message = "No error"
        if not self._driver_operation_simulate:
            # NOTE(review): ":system:error?" is SCPI syntax, while the rest of
            # this driver uses the older ISET/VSET/OUT/CLR command set --
            # confirm the 603xA firmware actually accepts this query.
            error_code, error_message = self._ask(":system:error?").split(',')
            error_code = int(error_code)
            error_message = error_message.strip(' "')
        return (error_code, error_message)
    def _utility_lock_object(self):
        # Instrument locking not implemented.
        pass
    def _utility_reset(self):
        """Reset the supply (CLR) and drop all cached attribute state."""
        if not self._driver_operation_simulate:
            self._write("CLR")
        self.driver_operation.invalidate_all_attributes()
    def _utility_reset_with_defaults(self):
        # No separate factory-defaults command; a plain reset is used.
        self._utility_reset()
    def _utility_self_test(self):
        """Run the instrument self test and return (code, message)."""
        code = 0
        message = "Self test passed"
        if not self._driver_operation_simulate:
            # TEST? returns 0 on success, nonzero on failure.
            code = int(self._ask("TEST?"))
            if code != 0:
                message = "Self test failed"
        return (code, message)
    def _utility_unlock_object(self):
        # Instrument locking not implemented.
        pass
def _init_outputs(self):
try:
super(agilent603xA, self)._init_outputs()
except AttributeError:
pass
self._output_current_limit = list()
self._output_current_limit_behavior = list()
self._output_enabled = list()
self._output_ovp_enabled = list()
self._output_ovp_limit = list()
self._output_voltage_level = list()
for i in range(self._output_count):
self._output_current_limit.append(0)
self._output_current_limit_behavior.append('trip')
self._output_enabled.append(False)
self._output_ovp_enabled.append(True)
self._output_ovp_limit.append(0)
self._output_voltage_level.append(0)
    def _get_output_current_limit(self, index):
        """Return the programmed current limit in amps (ISET? query)."""
        index = ivi.get_index(self._output_name, index)
        if not self._driver_operation_simulate and not self._get_cache_valid(index=index):
            # Reply has the form "ISET <value>"; keep the numeric field.
            self._output_current_limit[index] = float(self._ask("ISET?").split(" ", 1)[1])
            self._set_cache_valid(index=index)
        return self._output_current_limit[index]
    def _set_output_current_limit(self, index, value):
        """Program the current limit in amps, validated against the spec table."""
        index = ivi.get_index(self._output_name, index)
        value = float(value)
        if value < 0 or value > self._output_spec[index]['current_max']:
            raise ivi.OutOfRangeException()
        if not self._driver_operation_simulate:
            self._write("ISET %e" % value)
        self._output_current_limit[index] = value
        self._set_cache_valid(index=index)
    def _get_output_current_limit_behavior(self, index):
        """Return 'trip' or 'regulate' based on the overcurrent-protection state.

        NOTE(review): the :instrument:nselect / :source:current:protection
        commands are SCPI syntax, unlike the ISET/VSET/OUT commands used
        elsewhere in this driver -- confirm the 603xA accepts them.
        """
        index = ivi.get_index(self._output_name, index)
        if not self._driver_operation_simulate and not self._get_cache_valid(index=index):
            self._write(":instrument:nselect %d" % (index+1))
            value = bool(int(self._ask(":source:current:protection:state?")))
            if value:
                self._output_current_limit_behavior[index] = 'trip'
            else:
                self._output_current_limit_behavior[index] = 'regulate'
            self._set_cache_valid(index=index)
        return self._output_current_limit_behavior[index]
    def _set_output_current_limit_behavior(self, index, value):
        """Select 'trip' (OCP on) or 'regulate' (OCP off) current-limit behavior."""
        index = ivi.get_index(self._output_name, index)
        if value not in dcpwr.CurrentLimitBehavior:
            raise ivi.ValueNotSupportedException()
        if not self._driver_operation_simulate:
            self._write(":instrument:nselect %d" % (index+1))
            self._write(":source:current:protection:state %d" % int(value == 'trip'))
        self._output_current_limit_behavior[index] = value
        # Invalidate every channel's cache, then mark this one freshly set.
        for k in range(self._output_count):
            self._set_cache_valid(valid=False,index=k)
        self._set_cache_valid(index=index)
    def _get_output_enabled(self, index):
        """Return whether the output is on (OUT? query)."""
        index = ivi.get_index(self._output_name, index)
        if not self._driver_operation_simulate and not self._get_cache_valid(index=index):
            self._output_enabled[index] = bool(int(self._ask("OUT?").split(" ", 1)[1]))
            self._set_cache_valid(index=index)
        return self._output_enabled[index]
    def _set_output_enabled(self, index, value):
        """Switch the output on or off (OUT command)."""
        index = ivi.get_index(self._output_name, index)
        value = bool(value)
        if not self._driver_operation_simulate:
            self._write("OUT %d" % int(value))
        self._output_enabled[index] = value
        # Invalidate every channel's cache, then mark this one freshly set.
        for k in range(self._output_count):
            self._set_cache_valid(valid=False,index=k)
        self._set_cache_valid(index=index)
    def _get_output_ovp_enabled(self, index):
        """OVP is always active on this supply and cannot be disabled."""
        index = ivi.get_index(self._output_name, index)
        # Cannot disable OVP
        self._output_ovp_enabled[index] = True
        return self._output_ovp_enabled[index]
    def _set_output_ovp_enabled(self, index, value):
        """Accept the request but ignore it; OVP cannot be disabled."""
        index = ivi.get_index(self._output_name, index)
        value = bool(value)
        # do nothing - cannot disable OVP
    def _get_output_ovp_limit(self, index):
        """Return the OVP trip level in volts (OVP? query)."""
        index = ivi.get_index(self._output_name, index)
        if not self._driver_operation_simulate and not self._get_cache_valid(index=index):
            # Reply has the form "OVP <value>"; keep the numeric field.
            self._output_ovp_limit[index] = float(self._ask("OVP?").split(" ", 1)[1])
            self._set_cache_valid(index=index)
        return self._output_ovp_limit[index]
    def _set_output_ovp_limit(self, index, value):
        """Accept the request but ignore it; the OVP trip level is set from
        the front panel on this supply."""
        index = ivi.get_index(self._output_name, index)
        value = float(value)
        # do nothing - set from front panel
    def _get_output_voltage_level(self, index):
        """Return the programmed voltage level in volts (VSET? query)."""
        index = ivi.get_index(self._output_name, index)
        if not self._driver_operation_simulate and not self._get_cache_valid(index=index):
            # Reply has the form "VSET <value>"; keep the numeric field.
            self._output_voltage_level[index] = float(self._ask("VSET?").split(" ", 1)[1])
            self._set_cache_valid(index=index)
        return self._output_voltage_level[index]
    def _set_output_voltage_level(self, index, value):
        """Program the voltage level in volts, validated against the spec table."""
        index = ivi.get_index(self._output_name, index)
        value = float(value)
        if value < 0 or value > self._output_spec[index]['voltage_max']:
            raise ivi.OutOfRangeException()
        if not self._driver_operation_simulate:
            self._write("VSET %e" % value)
        self._output_voltage_level[index] = value
        self._set_cache_valid(index=index)
def _output_configure_range(self, index, range_type, range_val):
index = ivi.get_index(self._output_name, index)
if range_type not in dcpwr.RangeType:
raise ivi.ValueNotSupportedException()
if range_type == 'voltage':
t = 0
elif range_type == 'current':
t = 1
k = dcpwr.get_range(self._output_spec[index]['range'], t, range_val)
if k < 0:
raise ivi.OutOfRangeException()
self._output_voltage_max[index] = self._output_range[index][k][0]
self._output_current_max[index] = self._output_range[index][k][1]
# do nothing - autoranging supply; no command to set range
def _output_query_current_limit_max(self, index, voltage_level):
index = ivi.get_index(self._output_name, index)
if voltage_level < 0 or voltage_level > self._output_spec[index]['voltage_max']:
raise ivi.OutOfRangeException()
return self._output_current_max[index]
def _output_query_voltage_level_max(self, index, current_limit):
index = ivi.get_index(self._output_name, index)
if current_limit < 0 or current_limit > self._output_spec[index]['current_max']:
raise ivi.OutOfRangeException()
return self._output_voltage_max[index]
def _output_query_output_state(self, index, state):
index = ivi.get_index(self._output_name, index)
if state not in dcpwr.OutputState:
raise ivi.ValueNotSupportedException()
status = 0
if not self._driver_operation_simulate:
status = int(self._ask("STS?").split(" ", 1)[1])
if state == 'constant_voltage':
return status & (1 << 0) != 0
elif state == 'constant_current':
return status & (1 << 1) != 0
elif state == 'over_voltage':
return status & (1 << 3) != 0
elif state == 'over_current':
# no overcurrent state
return False
elif state == 'unregulated':
return status & (1 << 2) != 0
return False
def _output_reset_output_protection(self, index):
if not self._driver_operation_simulate:
self._write("RST")
def _output_measure(self, index, type):
index = ivi.get_index(self._output_name, index)
if type not in dcpwr.MeasurementType:
raise ivi.ValueNotSupportedException()
if type == 'voltage':
if not self._driver_operation_simulate:
return float(self._ask("VOUT?").split(" ", 1)[1])
elif type == 'current':
if not self._driver_operation_simulate:
return float(self._ask("IOUT?").split(" ", 1)[1])
return 0
| mit | 8d3f45e93da8264e0ffa0f13c833d087 | 40.161765 | 107 | 0.60393 | 3.909218 | false | false | false | false |
python-ivi/python-ivi | ivi/agilent/agilentBaseInfiniiVision.py | 2 | 2400 | """
Python Interchangeable Virtual Instrument Library
Copyright (c) 2012-2017 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .agilentBaseScope import *
class agilentBaseInfiniiVision(agilentBaseScope):
"Agilent InfiniiVision series IVI oscilloscope driver"
def __init__(self, *args, **kwargs):
self.__dict__.setdefault('_instrument_id', '')
super(agilentBaseInfiniiVision, self).__init__(*args, **kwargs)
self._analog_channel_name = list()
self._analog_channel_count = 4
self._digital_channel_name = list()
self._digital_channel_count = 16
self._channel_count = self._analog_channel_count + self._digital_channel_count
self._bandwidth = 1e9
self._horizontal_divisions = 10
self._vertical_divisions = 8
self._identity_description = "Agilent InfiniiVision series IVI oscilloscope driver"
self._identity_supported_instrument_models = ['DSO7012A','DSO7014A','DSO7032A',
'DSO7034A','DSO7052A','DSO7054A','DSO7104A','MSO7012A','MSO7014A','MSO7032A',
'MSO7034A','MSO7052A','MSO7054A','MSO7104A','DSO7012B','DSO7014B','DSO7032B',
'DSO7034B','DSO7052B','DSO7054B','DSO7104B','MSO7012B','MSO7014B','MSO7032B',
'MSO7034B','MSO7052B','MSO7054B','MSO7104B']
self._init_channels()
| mit | 6c54698f4dc560b6108c5f5826df8fd7 | 41.857143 | 93 | 0.703333 | 3.675345 | false | false | false | false |
chapmanb/cloudbiolinux | cloudbio/deploy/vmlauncher/__init__.py | 1 | 17511 | from __future__ import print_function
import os
import time
from libcloud.compute.ssh import SSHClient
from libcloud.compute.base import NodeImage, NodeSize
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
import six
# Ubuntu 10.04 LTS (Lucid Lynx) Daily Build [20120302]
DEFAULT_AWS_IMAGE_ID = "ami-0bf6af4e"
DEFAULT_AWS_SIZE_ID = "m1.large"
DEFAULT_AWS_AVAILABILITY_ZONE = "us-west-1"
from fabric.api import local, env, sudo, put, run
class VmLauncher:
def __init__(self, driver_options_key, options):
self.driver_options_key = driver_options_key
self.options = options
self.__set_and_verify_key()
def __set_and_verify_key(self):
key_file = self.options.get('key_file', None)
if not key_file:
key_file = self._driver_options()['key_file']
# Expand tildes in path
self.key_file = os.path.expanduser(key_file)
if not os.path.exists(self.key_file):
raise Exception("Invalid or unspecified key_file option: %s" % self.key_file)
def _get_driver_options(self, driver_option_keys):
driver_options = {}
for key in driver_option_keys:
if key in self._driver_options():
driver_options[key] = self._driver_options()[key]
return driver_options
def _driver_options(self):
return self.options[self.driver_options_key]
def get_key_file(self):
return self.key_file
def boot_and_connect(self):
conn = self._connect_driver()
node = self._boot() # Subclasses should implement this, and return libcloud node like object
self.conn = conn
self.node = node
self.uuid = node.uuid
self.connect(conn)
def _connect_driver(self):
if not getattr(self, 'conn', None):
self.conn = self._get_connection()
return self.conn
def _wait_for_node_info(self, f):
initial_value = f(self.node)
if initial_value:
return self._parse_node_info(initial_value)
while True:
time.sleep(10)
refreshed_node = self._find_node()
refreshed_value = f(refreshed_node)
if refreshed_value and not refreshed_value == []:
return self._parse_node_info(refreshed_value)
def _parse_node_info(self, value):
if isinstance(value, six.string_types):
return value
else:
return value[0]
def _find_node(self):
nodes = self.conn.list_nodes()
node_uuid = self.node.uuid
for node in nodes:
if node.uuid == node_uuid:
return node
def destroy(self, node=None):
self._connect_driver()
if node == None:
node = self.node
self.conn.destroy_node(node)
def __get_ssh_client(self):
ip = self.get_ip() # Subclasses should implement this
key_file = self.get_key_file()
ssh_client = SSHClient(hostname=ip,
port=self.get_ssh_port(),
username=self.get_user(),
key=key_file)
return ssh_client
def get_user(self):
return "ubuntu"
def get_ssh_port(self):
return 22
def connect(self, conn, tries=5):
i = 0
while i < tries:
try:
ssh_client = self.__get_ssh_client()
conn._ssh_client_connect(ssh_client=ssh_client, timeout=60)
return
except:
i = i + 1
def list(self):
self._connect_driver()
return self.conn.list_nodes()
def _boot(self):
conn = self.conn
boot_new = True
last_instance_path = None
if 'use_existing_instance' in self._driver_options():
boot_new = False
instance_id = self._driver_options()['use_existing_instance']
if instance_id == "__auto__":
last_instance_path = ".vmlauncher_last_instance_%s" % self.driver_options_key
if not os.path.exists(last_instance_path):
boot_new = True
else:
instance_id = open(last_instance_path, "r").read()
if not boot_new:
nodes = conn.list_nodes()
nodes_with_id = [node for node in nodes if node.uuid == instance_id]
if not nodes_with_id:
err_msg_template = "Specified use_existing_instance with instance id %s, but no such instance found."
raise Exception(err_msg_template % instance_id)
node = nodes_with_id[0]
if boot_new:
node = self._boot_new(conn)
if last_instance_path:
open(last_instance_path, "w").write(node.uuid)
return node
def _image_from_id(self, image_id=None):
image = NodeImage(id=image_id, name="", driver="")
return image
def _get_image_id(self, image_id=None):
if not image_id:
if 'image_id' in self._driver_options():
image_id = self._driver_options()['image_id']
else:
image_id = self._default_image_id()
return image_id
def _default_image_id(self):
return None
def _get_default_size_id(self):
return None
def _get_size_id_option(self):
return "size_id"
def _size_from_id(self, size_id):
size = NodeSize(id=size_id, name="", ram=None, disk=None, bandwidth=None, price=None, driver="")
return size
def _get_size_id(self, size_id=None):
if not size_id:
size_id_option = self._get_size_id_option()
if size_id_option in self._driver_options():
size_id = self._driver_options()[size_id_option]
else:
size_id = self._get_default_size_id()
return size_id
def _boot_new(self, conn):
hostname = self.options.get("hostname", "vm_launcher_instance")
node = self.create_node(hostname)
return node
def access_id(self):
return self._driver_options()["access_id"]
def secret_key(self):
return self._driver_options()["secret_key"]
def package_image_name(self):
name = self._driver_options()["package_image_name"] or "cloudbiolinux"
return name
def package_image_description(self, default=""):
description = self._driver_options().get("package_image_description", default)
return description
class VagrantConnection:
"""'Fake' connection type to mimic libcloud's but for Vagrant"""
def _ssh_client_connect(self, ssh_client):
pass
def destroy_node(self, node=None):
local("vagrant halt")
def list_nodes(self):
return [VagrantNode()]
class VagrantNode:
def __init__(self):
self.name = "vagrant"
self.uuid = "vagrant"
class VagrantVmLauncher(VmLauncher):
"""Launches vagrant VMs."""
def _get_connection():
return VagrantConnection()
def __init__(self, driver_options_key, options):
if not 'key_file' in options:
options['key_file'] = os.path.join(os.environ["HOME"], ".vagrant.d", "insecure_private_key")
VmLauncher.__init__(self, driver_options_key, options)
self.uuid = "test"
def _boot(self):
local("vagrant up")
return VagrantNode()
def get_ip(self):
return "33.33.33.11"
def get_user(self):
return "vagrant"
def package(self, **kwds):
local("vagrant package")
class OpenstackVmLauncher(VmLauncher):
""" Wrapper around libcloud's openstack API. """
def get_ip(self):
return self._wait_for_node_info(lambda node: node.public_ips + node.private_ips)
def _get_size_id_option(self):
return "flavor_id"
def create_node(self, hostname, image_id=None, size_id=None, **kwds):
image_id = self._get_image_id()
image = self._image_from_id(image_id)
size_id = self._get_size_id()
size = self._size_from_id(size_id)
if 'ex_keyname' not in kwds:
kwds['ex_keyname'] = self._driver_options()['ex_keyname']
node = self.conn.create_node(name=hostname,
image=image,
size=size,
**kwds)
return node
def _get_connection(self):
driver = get_driver(Provider.OPENSTACK)
openstack_username = self._driver_options()['username']
openstack_api_key = self._driver_options()['password']
driver_option_keys = ['host',
'secure',
'port',
'ex_force_auth_url',
'ex_force_auth_version',
'ex_force_base_url',
'ex_tenant_name']
driver_options = self._get_driver_options(driver_option_keys)
conn = driver(openstack_username,
openstack_api_key,
**driver_options)
return conn
def package(self, **kwds):
name = kwds.get("name", self.package_image_name())
self.conn.ex_save_image(self.node, name)
def attach_public_ip(self, public_ip=None):
if not public_ip:
public_ip = self._driver_options()["public_ip"]
self.conn._node_action(self.node, "addFloatingIp", address=public_ip)
class EucalyptusVmLauncher(VmLauncher):
def get_ip(self):
return self._wait_for_node_info(lambda node: node.public_ips)
def _get_connection(self):
driver = get_driver(Provider.EUCALYPTUS)
driver_option_keys = ['secret',
'secure',
'port',
'host',
'path']
driver_options = self._get_driver_options(driver_option_keys)
ec2_access_id = self.access_id()
conn = driver(ec2_access_id, **driver_options)
return conn
def create_node(self, hostname, image_id=None, size_id=None, **kwds):
image_id = self._get_image_id()
image = self._image_from_id(image_id)
size_id = self._get_size_id()
size = self._size_from_id(size_id)
if 'ex_keyname' not in kwds:
kwds['ex_keyname'] = self._driver_options()["keypair_name"]
node = self.conn.create_node(name=hostname,
image=image,
size=size,
**kwds)
return node
class Ec2VmLauncher(VmLauncher):
def get_ip(self):
return self._wait_for_node_info(lambda node: node.extra['dns_name'])
def boto_connection(self):
"""
Establish a boto library connection (for functionality not available in libcloud).
"""
import boto.ec2
region = boto.ec2.get_region(self._availability_zone())
ec2_access_id = self.access_id()
ec2_secret_key = self.secret_key()
return region.connect(aws_access_key_id=ec2_access_id, aws_secret_access_key=ec2_secret_key)
def boto_s3_connection(self):
from boto.s3.connection import S3Connection
ec2_access_id = self.access_id()
ec2_secret_key = self.secret_key()
return S3Connection(ec2_access_id, ec2_secret_key)
def _default_image_id(self):
return DEFAULT_AWS_IMAGE_ID
def package(self, **kwds):
package_type = self._driver_options().get('package_type', 'default')
if package_type == "create_image":
self._create_image(**kwds)
else:
self._default_package(**kwds)
def _create_image(self, **kwds):
ec2_conn = self.boto_connection()
instance_id = run("curl --silent http://169.254.169.254/latest/meta-data/instance-id")
if "name" not in kwds:
name = self.package_image_name()
else:
name = kwds["name"]
if "description" not in kwds:
description = self.package_image_description(default="")
else:
description = kwds["description"]
image_id = ec2_conn.create_image(instance_id, name=name, description=description)
if self._driver_options().get("make_public", False):
ec2_conn.modify_image_attribute(image_id, attribute='launchPermission', operation='add', groups=['all'])
def _default_package(self, **kwds):
env.packaging_dir = "/mnt/packaging"
sudo("mkdir -p %s" % env.packaging_dir)
self._copy_keys()
self._install_ec2_tools()
self._install_packaging_scripts()
def _install_ec2_tools(self):
sudo("apt-add-repository ppa:awstools-dev/awstools")
sudo("apt-get update")
sudo('export DEBIAN_FRONTEND=noninteractive; sudo -E apt-get install ec2-api-tools ec2-ami-tools -y --force-yes')
def _install_packaging_scripts(self):
user_id = self._driver_options()["user_id"]
bundle_cmd = "sudo ec2-bundle-vol -k %s/ec2_key -c%s/ec2_cert -u %s" % \
(env.packaging_dir, env.packaging_dir, user_id)
self._write_script("%s/bundle_image.sh" % env.packaging_dir, bundle_cmd)
bucket = self._driver_options()["package_bucket"]
upload_cmd = "sudo ec2-upload-bundle -b %s -m /tmp/image.manifest.xml -a %s -s %s" % \
(bucket, self.access_id(), self.secret_key())
self._write_script("%s/upload_bundle.sh" % env.packaging_dir, upload_cmd)
name = self.package_image_name()
manifest = "image.manifest.xml"
register_cmd = "sudo ec2-register -K %s/ec2_key -C %s/ec2_cert %s/%s -n %s" % (env.packaging_dir, env.packaging_dir, bucket, manifest, name)
self._write_script("%s/register_bundle.sh" % env.packaging_dir, register_cmd)
def _write_script(self, path, contents):
full_contents = "#!/bin/bash\n%s" % contents
sudo("echo '%s' > %s" % (full_contents, path))
sudo("chmod +x %s" % path)
def _copy_keys(self):
ec2_key_path = self._driver_options()["x509_key"]
ec2_cert_path = self._driver_options()["x509_cert"]
put(ec2_key_path, "%s/ec2_key" % env.packaging_dir, use_sudo=True)
put(ec2_cert_path, "%s/ec2_cert" % env.packaging_dir, use_sudo=True)
def _availability_zone(self):
if "availability_zone" in self._driver_options():
availability_zone = self._driver_options()["availability_zone"]
else:
availability_zone = DEFAULT_AWS_AVAILABILITY_ZONE
return availability_zone
def _get_default_size_id(self):
return DEFAULT_AWS_SIZE_ID
def _get_location(self):
availability_zone = self._availability_zone()
locations = self.conn.list_locations()
for location in locations:
if location.availability_zone.name == availability_zone:
break
return location
def create_node(self, hostname, image_id=None, size_id=None, location=None, **kwds):
self._connect_driver()
image_id = self._get_image_id(image_id)
image = self._image_from_id(image_id)
size_id = self._get_size_id(size_id)
size = self._size_from_id(size_id)
if not location:
location = self._get_location()
if not "ex_keyname" in kwds:
keyname = self._driver_options()["keypair_name"]
kwds["ex_keyname"] = keyname
node = self.conn.create_node(name=hostname,
image=image,
size=size,
location=location,
**kwds)
return node
def attach_public_ip(self, public_ip=None):
if not public_ip:
public_ip = self._driver_options()["public_ip"]
self.conn.ex_associate_addresses(self.node, public_ip)
def _get_connection(self):
driver = get_driver(Provider.EC2)
ec2_access_id = self.access_id()
ec2_secret_key = self.secret_key()
conn = driver(ec2_access_id, ec2_secret_key)
return conn
def build_vm_launcher(options):
provider_option_key = 'vm_provider'
# HACK to maintain backward compatibity on vm_host option
if not 'vm_provider' in options and 'vm_host' in options:
print("Using deprecated 'vm_host' setting, please change this to 'vm_provider'")
provider_option_key = 'vm_host'
driver = options.get(provider_option_key, 'aws') # Will just fall back on EC2
driver_options_key = driver
if driver in options:
# Allow multiple sections or providers per driver (e.g. aws-project-1).
# Assume the driver is just the provider name unless the provider
# section sets an explict driver option. In above example,
# the aws-project-1 would have to have a "driver: 'aws'" option
# set.
provider_options = options.get(driver)
driver = provider_options.get('driver', driver)
driver_classes = {'openstack': OpenstackVmLauncher,
'vagrant': VagrantVmLauncher,
'eucalyptus': EucalyptusVmLauncher}
driver_class = driver_classes.get(driver, Ec2VmLauncher)
vm_launcher = driver_class(driver_options_key, options)
return vm_launcher
| mit | 4b65977b7e75f022b9fc8c20b7b1f0be | 34.51927 | 148 | 0.574153 | 3.745668 | false | false | false | false |
chapmanb/cloudbiolinux | cloudbio/custom/phylogeny.py | 10 | 2360 | """Install instructions for non-packaged phyologeny programs.
"""
import os
from fabric.api import *
from fabric.contrib.files import *
from cloudbio.custom.shared import _if_not_installed, _make_tmp_dir
def install_tracer(env):
"""A program for analysing results from Bayesian MCMC programs such as BEAST & MrBayes.
http://tree.bio.ed.ac.uk/software/tracer/
"""
version = "1.5"
install_dir = os.path.join(env.system_install, "bioinf")
final_exe = os.path.join(env.system_install, "bin", "tracer")
if env.safe_exists(final_exe):
return
if not env.safe_exists(final_exe):
with _make_tmp_dir() as work_dir:
with cd(work_dir):
env.safe_run("wget -O Tracer_v{0}.tgz 'http://tree.bio.ed.ac.uk/download.php?id=80&num=3'".format(
version))
env.safe_run("tar xvzf Tracer_v{0}.tgz".format(version))
env.safe_run("chmod a+x Tracer_v{0}/bin/tracer".format(version))
env.safe_sudo("mkdir -p %s" % install_dir)
env.safe_sudo("rm -rvf %s/tracer" % install_dir)
env.safe_sudo("mv -f Tracer_v%s %s/tracer" % (version, install_dir))
env.safe_sudo("ln -sf %s/tracer/bin/tracer %s" % (install_dir, final_exe))
@_if_not_installed("beast -help")
def install_beast(env):
"""BEAST: Bayesian MCMC analysis of molecular sequences.
http://beast.bio.ed.ac.uk
"""
version = "1.7.4"
install_dir = os.path.join(env.system_install, "bioinf")
final_exe = os.path.join(env.system_install, "bin", "beast")
if not env.safe_exists(final_exe):
with _make_tmp_dir() as work_dir:
with cd(work_dir):
env.safe_run("wget -c http://beast-mcmc.googlecode.com/files/BEASTv%s.tgz" % version)
env.safe_run("tar xvzf BEASTv%s.tgz" % version)
env.safe_sudo("mkdir -p %s" % install_dir)
env.safe_sudo("rm -rvf %s/beast" % install_dir)
env.safe_sudo("mv -f BEASTv%s %s/beast" % (version, install_dir))
for l in ["beast","beauti","loganalyser","logcombiner","treeannotator","treestat"]:
env.safe_sudo("ln -sf %s/beast/bin/%s %s/bin/%s" % (install_dir, l,
env.system_install, l))
| mit | edf2472bb774b9f808eb864dc33e0866 | 46.2 | 114 | 0.577542 | 3.101183 | false | false | false | false |
chapmanb/cloudbiolinux | cloudbio/deploy/vmlauncher/transfer.py | 1 | 11794 | from __future__ import print_function
import os
import gzip
from operator import itemgetter
from sys import exit
from threading import Thread
from threading import Condition
from Queue import Queue
from fabric.api import local, put, sudo, cd
from fabric.colors import red
class FileSplitter:
"""
Works like the UNIX split command break up a file into parts like:
filename_aaaaaaaaa
filename_aaaaaaaab
etc...
"""
def __init__(self, chunk_size, destination_directory, callback):
self.chunk_size = chunk_size * 1024 * 1024
self.destination_directory = destination_directory
self.chunk_callback = callback
def split_file(self, path, compress, transfer_target):
basename = os.path.basename(path)
file_size = os.path.getsize(path)
total_bytes = 0
chunk_num = 0
suffix = ''
if compress:
suffix = '.gz'
input = open(path, 'rb')
while True:
chunk_name = "%s_part%08d%s" % (basename, chunk_num, suffix)
chunk_path = os.path.join(self.destination_directory, chunk_name)
this_chunk_size = min(self.chunk_size, file_size - total_bytes)
if this_chunk_size <= 0:
break
chunk = input.read(this_chunk_size)
total_bytes += len(chunk)
if compress:
chunk_output = gzip.open(chunk_path, 'wb')
else:
chunk_output = file(chunk_path, 'wb')
chunk_output.write(chunk)
chunk_output.close()
self.chunk_callback.handle_chunk(chunk_path, transfer_target)
chunk_num += 1
class TransferTarget:
def __init__(self, file, precompressed, transfer_manager):
self.file = file
self.precompressed = precompressed
self.do_compress = transfer_manager.compress
self.do_split = transfer_manager.chunk_size > 0
self.local_temp = transfer_manager.local_temp
basename = os.path.basename(file)
if len(basename) < 1:
print(red(Exception("Invalid file specified - %s" % file)))
exit(-1)
self.basename = basename
def should_compress(self):
return not self.precompressed and self.do_compress
def split_up(self):
return self.do_split
def clean(self):
if self.should_compress():
local("rm -rf '%s'" % self.compressed_file())
def compressed_basename(self):
if not self.precompressed:
compressed_basename = "%s.gz" % self.basename
else:
compressed_basename = self.basename
return compressed_basename
def decompressed_basename(self):
basename = self.basename
if basename.endswith(".gz"):
decompressed_basename = basename[:-len(".gz")]
else:
decompressed_basename = basename
return decompressed_basename
def compressed_file(self):
compressed_file = "%s/%s.gz" % (self.local_temp, self.basename)
return compressed_file
def build_simple_chunk(self):
if self.should_compress():
compressed_file = self.compressed_file()
local("gzip -f -9 '%s' -c > '%s'" % (self.file, compressed_file))
return TransferChunk(compressed_file, self)
else:
return TransferChunk(self.file, self)
class TransferChunk:
def __init__(self, chunk_path, transfer_target):
self.chunk_path = chunk_path
self.transfer_target = transfer_target
def clean_up(self):
was_split = self.transfer_target.split_up()
was_compressed = self.transfer_target.should_compress()
if was_split or was_compressed:
local("rm '%s'" % self.chunk_path)
class FileTransferManager:
def __init__(self,
compress=True,
num_compress_threads=1,
num_transfer_threads=1,
num_decompress_threads=1,
chunk_size=0,
transfer_retries=3,
destination="/tmp",
transfer_as="root",
local_temp=None):
self.compress = compress
self.num_compress_threads = num_compress_threads
self.num_transfer_threads = num_transfer_threads
self.num_decompress_threads = num_decompress_threads
self.chunk_size = chunk_size
self.transfer_retries = transfer_retries
self.destination = destination
self.transfer_as = transfer_as
self.local_temp = local_temp
if not self.local_temp:
self.local_temp = "/tmp"
local("mkdir -p '%s'" % self.local_temp)
self.file_splitter = FileSplitter(self.chunk_size, self.local_temp, self)
def handle_chunk(self, chunk, transfer_target):
self._enqueue_chunk(TransferChunk(chunk, transfer_target))
def transfer_files(self, files=[], compressed_files=[]):
self.transfer_complete = False
self.transfer_complete_condition = Condition()
self._setup_destination_directory()
self._setup_workers()
self._enqueue_files(files, compressed_files)
self._wait_for_completion()
def _setup_workers(self):
self._setup_compress_threads()
self._setup_transfer_threads()
self._setup_decompress_threads()
def _setup_destination_directory(self):
sudo("mkdir -p %s" % self.destination)
self._chown(self.destination)
def _setup_compress_threads(self):
self.compress_queue = Queue()
self._launch_threads(self.num_compress_threads, self._compress_files)
def _setup_decompress_threads(self):
self.decompress_queue = Queue()
self._launch_threads(self.num_decompress_threads, self._decompress_files)
def _setup_transfer_threads(self):
self.transfer_queue = Queue() # For now just transfer one file at a time
self._launch_threads(self.num_transfer_threads, self._put_files)
def _launch_threads(self, num_threads, func):
for thread_index in range(num_threads):
t = Thread(target=func)
t.daemon = True
t.start()
def _enqueue_files(self, files, compressed_files):
transfer_targets = []
for file in files:
transfer_target = TransferTarget(file, False, self)
transfer_targets.append(transfer_target)
for compressed_file in compressed_files:
transfer_target = TransferTarget(compressed_file, True, self)
transfer_targets.append(transfer_target)
transfer_targets = self._sort_transfer_targets(transfer_targets)
for transfer_target in transfer_targets:
self.compress_queue.put(transfer_target)
def _sort_transfer_targets(self, transfer_targets):
for i in range(len(transfer_targets)):
transfer_target = transfer_targets[i]
transfer_targets[i] = transfer_target, os.stat(transfer_target.file).st_size
transfer_targets.sort(key=itemgetter(1), reverse=True)
return [transfer_target[0] for transfer_target in transfer_targets]
def _wait_for_completion(self):
self.compress_queue.join()
self.transfer_queue.join()
self.transfer_complete_condition.acquire()
self.transfer_complete = True
self.transfer_complete_condition.notifyAll()
self.transfer_complete_condition.release()
self.decompress_queue.join()
def _compress_files(self):
while True:
try:
transfer_target = self.compress_queue.get()
file = transfer_target.file
if self.chunk_size > 0:
should_compress = transfer_target.should_compress()
self.file_splitter.split_file(file, should_compress, transfer_target)
self.decompress_queue.put(transfer_target)
else:
simple_chunk = transfer_target.build_simple_chunk()
self._enqueue_chunk(simple_chunk)
except Exception as e:
print(red("Failed to compress a file to transfer"))
print(red(e))
finally:
self.compress_queue.task_done()
def _decompress_files(self):
if self.chunk_size > 0:
self.transfer_complete_condition.acquire()
while not self.transfer_complete:
self.transfer_complete_condition.wait()
self.transfer_complete_condition.release()
while True:
try:
transfer_target = self.decompress_queue.get()
basename = transfer_target.basename
chunked = transfer_target.split_up()
compressed = transfer_target.do_compress or transfer_target.precompressed
with cd(self.destination):
if compressed and chunked:
destination = transfer_target.decompressed_basename()
if transfer_target.precompressed:
sudo("cat '%s_part'* | gunzip -c > %s" % (basename, destination), user=self.transfer_as)
else:
sudo("zcat '%s_part'* > %s" % (basename, destination), user=self.transfer_as)
sudo("rm '%s_part'*" % (basename), user=self.transfer_as)
elif compressed:
sudo("gunzip -f '%s'" % transfer_target.compressed_basename(), user=self.transfer_as)
elif chunked:
sudo("cat '%s'_part* > '%s'" % (basename, basename), user=self.transfer_as)
sudo("rm '%s_part'*" % (basename), user=self.transfer_as)
except Exception as e:
print(red("Failed to decompress or unsplit a transfered file."))
print(red(e))
finally:
self.decompress_queue.task_done()
def _put_files(self):
while True:
try:
transfer_chunk = self.transfer_queue.get()
transfer_target = transfer_chunk.transfer_target
compressed_file = transfer_chunk.chunk_path
basename = os.path.basename(compressed_file)
self._put_as_user(compressed_file, "%s/%s" % (self.destination, basename))
if not transfer_target.split_up():
self.decompress_queue.put(transfer_target)
except Exception as e:
print(red("Failed to upload a file."))
print(red(e))
finally:
transfer_chunk.clean_up()
self.transfer_queue.task_done()
def _chown(self, destination):
sudo("chown %s:%s '%s'" % (self.transfer_as, self.transfer_as, destination))
def _put_as_user(self, source, destination):
for attempt in range(self.transfer_retries):
retry = False
try:
put(source, destination, use_sudo=True)
self._chown(destination)
except BaseException as e:
retry = True
print(red(e))
print(red("Failed to upload %s on attempt %d" % (source, attempt + 1)))
except:
# Should never get here, delete this block when more confident
retry = True
print(red("Failed to upload %s on attempt %d" % (source, attempt + 1)))
finally:
if not retry:
return
print(red("Failed to transfer file %s, exiting..." % source))
exit(-1)
def _enqueue_chunk(self, transfer_chunk):
self.transfer_queue.put(transfer_chunk)
| mit | 01e3d9ce7575ab59ed3a36d35d517e50 | 36.44127 | 116 | 0.58216 | 4.309098 | false | false | false | false |
chapmanb/cloudbiolinux | cloudbio/galaxy/applications.py | 10 | 19563 | """
This file is largely derived from a similar file in mi-deployment written Dr.
Enis Afgan.
https://bitbucket.org/afgane/mi-deployment/src/8cba95baf98f/tools_fabfile.py
Long term it will be best to install these packages for Galaxy via the Tool
Shed, however many of these tools are not yet in the tool shed and the tool
shed installation is not currently available via the Galaxy API. Until such a
time as that is available, Galaxy dependencies may be installed via these
functions.
I have taken a first crack at harmonizing this with the rest of CloudBioLinux.
Wasn't able to reuse fastx_toolkit, tophat, cufflinks.
"""
import os
from fabric.api import cd
from cloudbio.custom.shared import _make_tmp_dir, _if_not_installed, _set_default_config
from cloudbio.custom.shared import _get_install, _configure_make, _fetch_and_unpack, _get_bin_dir
@_if_not_installed(None)
def install_fastx_toolkit(env):
    """Install the FASTX-Toolkit short-read processing tools for Galaxy.

    Builds libgtextutils (a required dependency) from source first, then
    the toolkit itself, installing both under the versioned Galaxy tool
    directory: <galaxy_tools_dir>/fastx_toolkit/<version>.
    """
    fastx_version = env.tool_version
    gtext_version = "0.6.1"
    base_url = "http://hannonlab.cshl.edu/fastx_toolkit/"
    gtext_url = "%slibgtextutils-%s.tar.bz2" % (base_url, gtext_version)
    fastx_url = "%sfastx_toolkit-%s.tar.bz2" % (base_url, fastx_version)
    install_dir = os.path.join(env.galaxy_tools_dir, 'fastx_toolkit', fastx_version)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            # "make install" may need elevated rights for the target prefix.
            install_cmd = env.safe_sudo if env.use_sudo else env.safe_run

            def _fetch_and_unpack(url):
                # Download a release tarball and unpack it in the work dir.
                env.safe_run("wget %s" % url)
                env.safe_run("tar -xjvpf %s" % os.path.basename(url))

            # Build the libgtextutils dependency first.
            _fetch_and_unpack(gtext_url)
            with cd("libgtextutils-%s" % gtext_version):
                env.safe_run("./configure --prefix=%s" % (install_dir))
                env.safe_run("make")
                install_cmd("make install")
            # Then build the toolkit against the freshly installed library,
            # pointing pkg-config at the private install prefix.
            _fetch_and_unpack(fastx_url)
            with cd("fastx_toolkit-%s" % fastx_version):
                env.safe_run("export PKG_CONFIG_PATH=%s/lib/pkgconfig; ./configure --prefix=%s" % (install_dir, install_dir))
                env.safe_run("make")
                install_cmd("make install")
## TODO: Rework to use more of custom enhancements
@_if_not_installed("maq")
def install_maq(env):
    """Install the maq short-read aligner from its SourceForge tarball.

    Uses attribute access (env.tool_version) for consistency with every
    other installer in this module; env is a fabric attribute-dict so the
    behavior is identical to the previous env["tool_version"] lookup.
    """
    version = env.tool_version
    url = "http://downloads.sourceforge.net/project/maq/maq/%s/maq-%s.tar.bz2" \
          % (version, version)
    _get_install(url, env, _configure_make)
@_if_not_installed("macs14")
def install_macs(env):
    """Install MACS for ChIP-seq peak calling via the CloudBioLinux custom install.

    Also writes an env.sh exporting PATH and PYTHONPATH for Galaxy to source.
    """
    from cloudbio.custom.bio_nextgen import install_macs as cbl_install_macs
    install_dir = env.system_install
    cbl_install_macs(env)
    env.safe_sudo("echo 'PATH=%s/bin:$PATH' > %s/env.sh" % (install_dir, install_dir))
    # Fix: format arguments were previously swapped (python_version came first),
    # producing a broken line like 'PYTHONPATH=<python_version>/lib/python<install_dir>/...'
    env.safe_sudo("echo 'PYTHONPATH=%s/lib/python%s/site-packages:$PYTHONPATH' >> %s/env.sh" % (install_dir, env.python_version, install_dir))
    _update_default(env, install_dir)
@_if_not_installed("megablast")
def install_megablast(env):
    """Install legacy NCBI BLAST binaries (includes megablast) from the FTP release area.

    Downloads the prebuilt x64 Linux tarball and moves the binaries into
    env.system_install.
    """
    version = env.tool_version
    url = 'ftp://ftp.ncbi.nlm.nih.gov/blast/executables/release/%s/blast-%s-x64-linux.tar.gz' % (version, version)
    install_dir = env.system_install
    install_cmd = env.safe_sudo if env.use_sudo else env.safe_run
    if not env.safe_exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            env.safe_run("wget %s" % url)
            env.safe_run("tar -xvzf %s" % os.path.split(url)[-1])
            with cd('blast-%s/bin' % version):
                install_cmd("mv * %s" % install_dir)
@_if_not_installed("blastn")
def install_blast(env):
    """Install NCBI BLAST+ binaries from the prebuilt x64 Linux tarball.

    The FTP directory uses the version with the last character removed
    (presumably stripping a trailing '+' from env.tool_version — TODO confirm).
    """
    version = env.tool_version
    url = 'ftp://ftp.ncbi.nlm.nih.gov/blast/executables/blast+/%s/ncbi-blast-%s-x64-linux.tar.gz' % (version[:-1], version)
    install_dir = env.system_install
    install_cmd = env.safe_sudo if env.use_sudo else env.safe_run
    if not env.safe_exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            env.safe_run("wget %s" % url)
            env.safe_run("tar -xvzf %s" % os.path.split(url)[-1])
            with cd('ncbi-blast-%s/bin' % version):
                bin_dir = _get_bin_dir(env)
                install_cmd("mv * '%s'" % bin_dir)
@_if_not_installed("sputnik")
def install_sputnik(env):
    """Install the sputnik microsatellite finder as a prebuilt Linux binary."""
    version = env.tool_version
    url = 'http://bitbucket.org/natefoo/sputnik-mononucleotide/downloads/sputnik_%s_linux2.6_x86_64' % version
    install_dir = env.system_install
    install_cmd = env.safe_sudo if env.use_sudo else env.safe_run
    if not env.safe_exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            # Single binary download; rename to the plain 'sputnik' name
            env.safe_run("wget -O sputnik %s" % url)
            install_cmd("mv sputnik %s" % install_dir)
@_if_not_installed("taxonomy2tree")
def install_taxonomy(env):
    """Install the taxonomy tool suite (taxonomy2tree etc.) from prebuilt binaries."""
    version = env.tool_version
    url = 'http://bitbucket.org/natefoo/taxonomy/downloads/taxonomy_%s_linux2.6_x86_64.tar.gz' % version
    install_dir = env.system_install
    install_cmd = env.safe_sudo if env.use_sudo else env.safe_run
    if not env.safe_exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            env.safe_run("wget %s" % url)
            env.safe_run("tar -xvzf %s" % os.path.split(url)[-1])
            # Unpacked directory name is the tarball name without .tar.gz
            with cd(os.path.split(url)[-1].split('.tar.gz')[0]):
                install_cmd("mv * %s" % install_dir)
@_if_not_installed("add_scores")
def install_add_scores(env):
    """Install the add_scores tool as a prebuilt Linux binary."""
    version = env.tool_version
    url = 'http://bitbucket.org/natefoo/add_scores/downloads/add_scores_%s_linux2.6_x86_64' % version
    install_dir = env.system_install
    install_cmd = env.safe_sudo if env.use_sudo else env.safe_run
    if not env.safe_exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            # Single binary download; rename to the plain 'add_scores' name
            env.safe_run("wget -O add_scores %s" % url)
            install_cmd("mv add_scores %s" % install_dir)
@_if_not_installed("HYPHY")
def install_hyphy(env):
    """Build the HYPHY phylogenetics tool from an SVN checkout.

    Note: env.tool_version is used as the SVN *revision* to check out,
    not a release number.
    """
    version = env.tool_version
    url = 'http://www.datam0nk3y.org/svn/hyphy'
    install_dir = env.system_install
    install_cmd = env.safe_sudo if env.use_sudo else env.safe_run
    if not env.safe_exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            env.safe_run("svn co -r %s %s src" % (version, url))
            # Assemble a flat build tree from the SVN layout expected by build.sh
            env.safe_run("mkdir -p build/Source/Link")
            env.safe_run("mkdir build/Source/SQLite")
            env.safe_run("cp src/trunk/Core/*.{h,cp,cpp} build/Source")
            env.safe_run("cp src/trunk/HeadlessLink/*.{h,cpp} build/Source/SQLite")
            env.safe_run("cp src/trunk/NewerFunctionality/*.{h,cpp} build/Source/")
            env.safe_run("cp src/SQLite/trunk/*.{c,h} build/Source/SQLite/")
            env.safe_run("cp src/trunk/Scripts/*.sh build/")
            env.safe_run("cp src/trunk/Mains/main-unix.cpp build/Source/main-unix.cxx")
            env.safe_run("cp src/trunk/Mains/hyphyunixutils.cpp build/Source/hyphyunixutils.cpp")
            env.safe_run("cp -R src/trunk/{ChartAddIns,DatapanelAddIns,GeneticCodes,Help,SubstitutionClasses,SubstitutionModels,TemplateBatchFiles,TopologyInference,TreeAddIns,UserAddins} build")
            # preferences.cpp is GUI-only and breaks the headless build
            env.safe_run("rm build/Source/preferences.cpp")
            with cd("build"):
                # 'SP' builds the single-processor variant
                env.safe_run("bash build.sh SP")
            install_cmd("mv build/* %s" % install_dir)
    _update_default(env, install_dir)
@_if_not_installed(None)
def install_gatk(env):
    """Install GATK (Genome Analysis Toolkit) jars into Galaxy's tool directory.

    Also creates a 'gatk' wrapper shell script, an env.sh for PATH setup, and
    links the jars into Galaxy's jar directory owned by the Galaxy user.
    """
    version = env.tool_version
    url = 'ftp://ftp.broadinstitute.org/pub/gsa/GenomeAnalysisTK/GenomeAnalysisTK-%s.tar.bz2' % version
    pkg_name = 'gatk'
    install_dir = os.path.join(env.galaxy_tools_dir, pkg_name, version)
    install_cmd = env.safe_sudo if env.use_sudo else env.safe_run
    if not env.safe_exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
        install_cmd("mkdir -p %s/bin" % install_dir)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            env.safe_run("wget -O gatk.tar.bz2 %s" % url)
            env.safe_run("tar -xjf gatk.tar.bz2")
            install_cmd("cp GenomeAnalysisTK-%s/*.jar %s/bin" % (version, install_dir))
    # Create shell script to wrap jar
    env.safe_sudo("echo '#!/bin/sh' > %s/bin/gatk" % (install_dir))
    env.safe_sudo("echo 'java -jar %s/bin/GenomeAnalysisTK.jar $@' >> %s/bin/gatk" % (install_dir, install_dir))
    env.safe_sudo("chmod +x %s/bin/gatk" % install_dir)
    # env file
    env.safe_sudo("echo 'PATH=%s/bin:$PATH' > %s/env.sh" % (install_dir, install_dir))
    _update_default(env, install_dir)
    # Link jar to Galaxy's jar dir
    jar_dir = os.path.join(env.galaxy_jars_dir, pkg_name)
    if not env.safe_exists(jar_dir):
        install_cmd("mkdir -p %s" % jar_dir)
    tool_dir = os.path.join(env.galaxy_tools_dir, pkg_name, 'default', 'bin')
    install_cmd('ln --force --symbolic %s/*.jar %s/.' % (tool_dir, jar_dir))
    install_cmd('chown --recursive %s:%s %s' % (env.galaxy_user, env.galaxy_user, jar_dir))
@_if_not_installed("srma.jar")
def install_srma(env):
    """Install the SRMA realignment jar and an unversioned srma.jar symlink."""
    version = env.tool_version
    mirror_info = "?use_mirror=voxel"
    # SourceForge directory uses the major.minor prefix (version[:3])
    url = 'http://downloads.sourceforge.net/project/srma/srma/%s/srma-%s.jar' \
            % (version[:3], version)
    install_dir = env.system_install
    install_cmd = env.safe_sudo if env.use_sudo else env.safe_run
    if not env.safe_exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            env.safe_run("wget %s%s -O %s" % (url, mirror_info, os.path.split(url)[-1]))
            install_cmd("mv srma-%s.jar %s" % (version, install_dir))
            install_cmd("ln -f -s srma-%s.jar %s/srma.jar" % (version, install_dir))
    env.safe_sudo("touch %s/env.sh" % install_dir)
    _update_default(env, install_dir)
@_if_not_installed("BEAM2")
def install_beam(env):
    """Install the BEAM2 epistasis mapping tool from a prebuilt tarball."""
    url = 'http://www.stat.psu.edu/~yuzhang/software/beam2.tar'
    install_dir = env.system_install
    install_cmd = env.safe_sudo if env.use_sudo else env.safe_run
    if not env.safe_exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            env.safe_run("wget %s -O %s" % (url, os.path.split(url)[-1]))
            env.safe_run("tar xf %s" % (os.path.split(url)[-1]))
            install_cmd("mv BEAM2 %s" % install_dir)
    env.safe_sudo("echo 'PATH=%s:$PATH' > %s/env.sh" % (install_dir, install_dir))
    _update_default(env, install_dir)
@_if_not_installed("pass2")
def install_pass(env):
    """Install the PASS2 association analysis tool from a prebuilt tarball."""
    url = 'http://www.stat.psu.edu/~yuzhang/software/pass2.tar'
    install_dir = env.system_install
    install_cmd = env.safe_sudo if env.use_sudo else env.safe_run
    if not env.safe_exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            env.safe_run("wget %s -O %s" % (url, os.path.split(url)[-1]))
            env.safe_run("tar xf %s" % (os.path.split(url)[-1]))
            install_cmd("mv pass2 %s" % install_dir)
    env.safe_sudo("echo 'PATH=%s:$PATH' > %s/env.sh" % (install_dir, install_dir))
    _update_default(env, install_dir)
@_if_not_installed("lps_tool")
def install_lps_tool(env):
    """Install the LPS tool, including its bundled MATLAB Compiler Runtime (MCR)."""
    version = env.tool_version
    url = 'http://www.bx.psu.edu/miller_lab/dist/lps_tool.%s.tar.gz' % version
    install_dir = env.system_install
    install_cmd = env.safe_sudo if env.use_sudo else env.safe_run
    if not env.safe_exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            env.safe_run("wget %s -O %s" % (url, os.path.split(url)[-1]))
            env.safe_run("tar zxf %s" % (os.path.split(url)[-1]))
            # Silent install of the MATLAB runtime the binary depends on
            install_cmd("./lps_tool.%s/MCRInstaller.bin -P bean421.installLocation=\"%s/MCR\" -silent" % (version, install_dir))
            install_cmd("mv lps_tool.%s/lps_tool %s" % (version, install_dir))
    env.safe_sudo("echo 'PATH=%s:$PATH' > %s/env.sh" % (install_dir, install_dir))
    # MCRROOT must point at the runtime for lps_tool to start
    env.safe_sudo("echo 'MCRROOT=%s/MCR/v711; export MCRROOT' >> %s/env.sh" % (install_dir, install_dir))
    _update_default(env, install_dir)
@_if_not_installed("plink")
def install_plink(env):
    """Install the PLINK whole-genome association toolkit from a prebuilt zip."""
    version = env.tool_version
    url = 'http://pngu.mgh.harvard.edu/~purcell/plink/dist/plink-%s-x86_64.zip' % version
    install_dir = env.system_install
    install_cmd = env.safe_sudo if env.use_sudo else env.safe_run
    if not env.safe_exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            env.safe_run("wget %s -O %s" % (url, os.path.split(url)[-1]))
            env.safe_run("unzip %s" % (os.path.split(url)[-1]))
            install_cmd("mv plink-%s-x86_64/plink %s" % (version, install_dir))
    env.safe_sudo("echo 'PATH=%s:$PATH' > %s/env.sh" % (install_dir, install_dir))
    _update_default(env, install_dir)
@_if_not_installed(None)
def install_fbat(env):
    """Install the FBAT family-based association test tool from a prebuilt tarball.

    The download URL uses the version with dots removed (e.g. '2.0.4' -> '204').
    """
    version = env.tool_version
    url = 'http://www.biostat.harvard.edu/~fbat/software/fbat%s_linux64.tar.gz' % version.replace('.', '')
    install_dir = env.system_install
    install_cmd = env.safe_sudo if env.use_sudo else env.safe_run
    if not env.safe_exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            env.safe_run("wget %s -O %s" % (url, os.path.split(url)[-1]))
            env.safe_run("tar zxf %s" % (os.path.split(url)[-1]))
            install_cmd("mv fbat %s" % install_dir)
    env.safe_sudo("echo 'PATH=%s:$PATH' > %s/env.sh" % (install_dir, install_dir))
    _update_default(env, install_dir)
@_if_not_installed("Haploview_beta.jar")
def install_haploview(env):
    """Install the Haploview jar and a lowercase 'haploview.jar' symlink."""
    url = 'http://www.broadinstitute.org/ftp/pub/mpg/haploview/Haploview_beta.jar'
    install_dir = env.system_install
    install_cmd = env.safe_sudo if env.use_sudo else env.safe_run
    if not env.safe_exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            env.safe_run("wget %s -O %s" % (url, os.path.split(url)[-1]))
            install_cmd("mv %s %s" % (os.path.split(url)[-1], install_dir))
            install_cmd("ln -s %s %s/haploview.jar" % (os.path.split(url)[-1], install_dir))
    _update_default(env, install_dir)
@_if_not_installed("eigenstrat")
def install_eigenstrat(env):
    """Install the EIGENSTRAT/EIGENSOFT population stratification tools."""
    version = env.tool_version
    url = 'http://www.hsph.harvard.edu/faculty/alkes-price/files/EIG%s.tar.gz' % version
    install_dir = env.system_install
    install_cmd = env.safe_sudo if env.use_sudo else env.safe_run
    if not env.safe_exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            env.safe_run("wget %s -O %s" % (url, os.path.split(url)[-1]))
            env.safe_run("tar zxf %s" % (os.path.split(url)[-1]))
            # Tarball unpacks a top-level bin/ directory with the executables
            install_cmd("mv bin %s" % install_dir)
    env.safe_sudo("echo 'PATH=%s/bin:$PATH' > %s/env.sh" % (install_dir, install_dir))
    _update_default(env, install_dir)
@_if_not_installed("augustus")
def install_augustus(env):
    """Install the AUGUSTUS gene prediction tool from prebuilt binaries.

    Falls back to a default version when env does not specify tool_version.
    """
    default_version = "2.7"
    version = env.get('tool_version', default_version)
    url = "http://bioinf.uni-greifswald.de/augustus/binaries/augustus.%s.tar.gz" % version
    install_dir = env.system_install
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            _fetch_and_unpack(url, need_dir=False)
            env.safe_sudo("mkdir -p '%s'" % install_dir)
            env.safe_sudo("mv augustus.%s/* '%s'" % (version, install_dir))
@_if_not_installed("SortSam.jar")
def install_picard(env):
    """Install Picard command line tool jars and link them into Galaxy's jar dir."""
    version = env.tool_version
    mirror_info = "?use_mirror=voxel"
    url = 'http://downloads.sourceforge.net/project/picard/picard-tools/%s/picard-tools-%s.zip' % (version, version)
    pkg_name = 'picard'
    install_dir = env.system_install
    install_cmd = env.safe_sudo if env.use_sudo else env.safe_run
    if not env.safe_exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            env.safe_run("wget %s%s -O %s" % (url, mirror_info, os.path.split(url)[-1]))
            env.safe_run("unzip %s" % (os.path.split(url)[-1]))
            install_cmd("mv picard-tools-%s/*.jar %s" % (version, install_dir))
    _update_default(env, install_dir)
    # set up the jars directory
    jar_dir = os.path.join(env.galaxy_jars_dir, 'picard')
    if not env.safe_exists(jar_dir):
        install_cmd("mkdir -p %s" % jar_dir)
    tool_dir = os.path.join(env.galaxy_tools_dir, pkg_name, 'default')
    install_cmd('ln --force --symbolic %s/*.jar %s/.' % (tool_dir, jar_dir))
    install_cmd('chown --recursive %s:%s %s' % (env.galaxy_user, env.galaxy_user, jar_dir))
@_if_not_installed("fastqc")
def install_fastqc(env):
    """Install FastQC into Galaxy's jars directory.

    Unzips the release directly in the jars dir, marks the wrapper script
    executable, and hands ownership to the Galaxy user.
    """
    version = env.tool_version
    url = 'http://www.bioinformatics.bbsrc.ac.uk/projects/fastqc/fastqc_v%s.zip' % version
    pkg_name = 'FastQC'
    # os.path.join with a single argument was a no-op; use the directory directly
    install_dir = env.galaxy_jars_dir
    install_cmd = env.safe_sudo if env.use_sudo else env.safe_run
    if not env.safe_exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
    with cd(install_dir):
        install_cmd("wget %s -O %s" % (url, os.path.split(url)[-1]))
        install_cmd("unzip -u %s" % (os.path.split(url)[-1]))
        install_cmd("rm %s" % (os.path.split(url)[-1]))
        with cd(pkg_name):
            install_cmd('chmod 755 fastqc')
        install_cmd('chown --recursive %s:%s %s' % (env.galaxy_user, env.galaxy_user, pkg_name))
def _update_default(env, install_dir):
    """Ensure an executable env.sh exists in install_dir and register the default version."""
    env_script = "%s/env.sh" % install_dir
    env.safe_sudo("touch %s" % env_script)
    env.safe_sudo("chmod +x %s" % env_script)
    _set_default_config(env, install_dir)
#@if_tool_not_found()
#def install_emboss(env):
# version = env.tool_version
# url = 'ftp://emboss.open-bio.org/pub/EMBOSS/old/%s/EMBOSS-%s.tar.gz' % (version, version)
# pkg_name = 'emboss'
# install_dir = os.path.join(env.galaxy_tools_dir, pkg_name, version)
# install_cmd = env.safe_sudo if env.use_sudo else env.safe_run
# if not env.safe_exists(install_dir):
# install_cmd("mkdir -p %s" % install_dir)
# with _make_tmp_dir() as work_dir:
# with cd(work_dir):
# env.safe_run("wget %s" % url)
# env.safe_run("tar -xvzf %s" % os.path.split(url)[-1])
# with cd(os.path.split(url)[-1].split('.tar.gz')[0]):
# env.safe_run("./configure --prefix=%s" % install_dir)
# env.safe_run("make")
# install_cmd("make install")
# phylip_version = '3.6b'
# url = 'ftp://emboss.open-bio.org/pub/EMBOSS/old/%s/PHYLIP-%s.tar.gz' % (version, phylip_version)
# with _make_tmp_dir() as work_dir:
# with cd(work_dir):
# env.safe_run("wget %s" % url)
# env.safe_run("tar -xvzf %s" % os.path.split(url)[-1])
# with cd(os.path.split(url)[-1].split('.tar.gz')[0]):
# env.safe_run("./configure --prefix=%s" % install_dir)
# env.safe_run("make")
# install_cmd("make install")
| mit | edccad2c302ca1f3f7b25a995a2c502c | 44.389791 | 195 | 0.61785 | 2.882422 | false | false | false | false |
chapmanb/cloudbiolinux | cloudbio/utils.py | 10 | 7086 | """Utilities for logging and progress tracking.
"""
import logging
import os
import sys
from fabric.main import load_settings
from fabric.colors import yellow, red, green, magenta
from fabric.api import settings, hide, cd, run
from fabric.contrib.files import exists
from cloudbio.distribution import _setup_distribution_environment
from cloudbio.flavor import Flavor
from cloudbio.flavor.config import get_config_file
class ColorFormatter(logging.Formatter):
    """Format log message based on the message level
    http://stackoverflow.com/questions/1343227/can-pythons-logging-format-be-modified-depending-on-the-message-log-level
    """
    # Setup formatters for each of the levels
    err_fmt = red("ERR [%(filename)s(%(lineno)d)] %(msg)s")
    warn_fmt = magenta("WARN [%(filename)s(%(lineno)d)]: %(msg)s")
    dbg_fmt = yellow("DBG [%(filename)s]: %(msg)s")
    info_fmt = green("INFO: %(msg)s")

    def __init__(self, fmt="%(name)s %(levelname)s: %(msg)s"):
        logging.Formatter.__init__(self, fmt)

    def format(self, record):
        """Temporarily swap in the level-specific format string, then restore it."""
        # Save the original format configured by the user
        # when the logger formatter was instantiated
        format_orig = self._fmt
        # Replace the original format with one customized by logging level;
        # use the named logging constants rather than the magic numbers 10/20/30/40
        if record.levelno == logging.DEBUG:
            self._fmt = ColorFormatter.dbg_fmt
        elif record.levelno == logging.INFO:
            self._fmt = ColorFormatter.info_fmt
        elif record.levelno == logging.WARNING:
            self._fmt = ColorFormatter.warn_fmt
        elif record.levelno == logging.ERROR:
            self._fmt = ColorFormatter.err_fmt
        # Call the original formatter class to do the grunt work
        result = logging.Formatter.format(self, record)
        # Restore the original format configured by the user
        self._fmt = format_orig
        return result
def _setup_logging(env):
    """Attach a DEBUG-level, colorized stream logger to env as env.logger."""
    logger = logging.getLogger("cloudbiolinux")
    logger.setLevel(logging.DEBUG)
    handler = logging.StreamHandler()
    handler.setLevel(logging.DEBUG)
    # Use custom formatter
    handler.setFormatter(ColorFormatter())
    logger.addHandler(handler)
    env.logger = logger
def _update_biolinux_log(env, target, flavor):
    """Updates the VM so it contains information on the latest BioLinux
    update in /var/log/biolinux.log.
    The latest information is appended to the file and can be used to see if
    an installation/update has completed (see also ./test/test_vagrant).
    """
    # NOTE(review): a caller-supplied target/flavor is concatenated directly
    # into the log line below, so it is assumed to already be a plain string;
    # only values pulled from env here have their .name attribute taken.
    if not target:
        target = env.get("target", None)
        if not target:
            target = "unknown"
        else:
            target = target.name
    if not flavor:
        flavor = env.get("flavor", None)
        if not flavor:
            flavor = "unknown"
        else:
            flavor = flavor.name
    logfn = "/var/log/biolinux.log"
    info = "Target="+target+"; Flavor="+flavor
    env.logger.info(info)
    # Only root can append to /var/log; skip logging when sudo is unavailable
    if env.use_sudo:
        env.safe_sudo("date +\"%D %T - Updated "+info+"\" >> "+logfn)
def _configure_fabric_environment(env, flavor=None, fabricrc_loader=None,
                                  ignore_distcheck=False):
    """Prepare the fabric env: flavor hooks, fabricrc defaults, distro setup and paths."""
    loader = fabricrc_loader or _parse_fabricrc
    _setup_flavor(env, flavor)
    loader(env)
    # get parameters for distro, packages etc.
    _setup_distribution_environment(ignore_distcheck=ignore_distcheck)
    _create_local_paths(env)
def _setup_flavor(env, flavor):
    """Setup a flavor, providing customization hooks to modify CloudBioLinux installs.
    Specify flavor as a name, in which case we look it up in the standard
    flavor directory (contrib/flavor/your_flavor), or as an absolute path to a
    flavor directory outside of cloudbiolinux.
    """
    env.flavor = Flavor(env)
    env.flavor_dir = None
    if flavor:
        # set the directory for flavor customizations
        if os.path.isabs(flavor) or os.path.exists(flavor):
            flavor_dir = flavor
        else:
            flavor_dir = os.path.join(os.path.dirname(__file__), '..', 'contrib', 'flavor', flavor)
        assert os.path.exists(flavor_dir), \
            "Did not find directory {0} for flavor {1}".format(flavor_dir, flavor)
        env.flavor_dir = flavor_dir
        flavor_name = os.path.split(flavor_dir)[-1]
        # Reinstantiate class if custom defined
        import cloudbio.flavor
        try:
            env.flavor = getattr(cloudbio.flavor, flavor_name.capitalize())(env)
        except AttributeError:
            pass
        env.flavor.name = flavor_name
        # Load python customizations to base configuration if present
        for ext in ["", "flavor"]:
            py_flavor = os.path.split(os.path.realpath(flavor_dir))[1] + ext
            flavor_custom_py = os.path.join(flavor_dir, "{0}.py".format(py_flavor))
            if os.path.exists(flavor_custom_py):
                sys.path.append(flavor_dir)
                # Imported for module-level side effects only (flavor hooks);
                # the previously unused `mod` binding has been dropped.
                __import__(py_flavor, fromlist=[py_flavor])
    env.logger.info(env.flavor)
    env.logger.info("This is a %s flavor" % env.flavor.name)
def _parse_fabricrc(env):
    """Defaults from fabricrc.txt file; loaded if not specified at commandline.
    """
    env.config_dir = os.path.join(os.path.dirname(__file__), "..", "config")
    env.tool_data_table_conf_file = os.path.join(env.config_dir, "..",
                                                 "installed_files",
                                                 "tool_data_table_conf.xml")
    # `in` replaces the deprecated dict.has_key, matching the membership
    # checks used for shell_config/shell just below
    if "distribution" not in env and "system_install" not in env:
        env.logger.info("Reading default fabricrc.txt")
        env.update(load_settings(get_config_file(env, "fabricrc.txt").base))
    if "shell_config" not in env:
        env.shell_config = "~/.bashrc"
    if "shell" not in env:
        env.shell = "/bin/bash -i -c"
def _create_local_paths(env):
    """Expand any paths defined in terms of shell shortcuts (like ~).

    Also performs the first remote command as an explicit connectivity check
    so failures are reported rather than silently stopping.
    """
    with settings(hide('warnings', 'running', 'stdout', 'stderr'),
                  warn_only=True):
        # This is the first point we call into a remote host - make sure
        # it does not fail silently by calling a dummy run
        env.logger.info("Now, testing connection to host...")
        test = env.safe_run("pwd")
        # If there is a connection failure, the rest of the code is (sometimes) not
        # reached - for example with Vagrant the program just stops after above run
        # command.
        # `is not None` is the idiomatic None comparison (was `!= None`)
        if test is not None:
            env.logger.info("Connection to host appears to work!")
        else:
            raise NotImplementedError("Connection to host failed")
        env.logger.debug("Expand paths")
        if "local_install" in env:
            if not env.safe_exists(env.local_install):
                env.safe_sudo("mkdir -p %s" % env.local_install)
                user = env.safe_run_output("echo $USER")
                env.safe_sudo("chown -R %s %s" % (user, env.local_install))
            with cd(env.local_install):
                result = env.safe_run_output("pwd")
                env.local_install = result
| mit | ecc80c9fc01f7ae339abbbfdba509793 | 40.438596 | 124 | 0.620519 | 3.880613 | false | true | false | false |
chapmanb/cloudbiolinux | cloudbio/custom/python.py | 10 | 1897 | """Install instructions for python libraries not ready for easy_install.
"""
import os
from fabric.api import *
from fabric.contrib.files import *
from shared import (_if_not_python_lib, _get_install, _python_make, _pip_cmd,
_is_anaconda)
@_if_not_python_lib("bx")
def install_bx_python(env):
    """Tools for manipulating biological data, particularly multiple sequence alignments
    https://bitbucket.org/james_taylor/bx-python/wiki/Home

    Installs directly from the bitbucket tip tarball via pip; the unused
    `version` local has been removed.
    """
    url = "https://bitbucket.org/james_taylor/bx-python/get/tip.tar.bz2"
    cmd = env.safe_run if _is_anaconda(env) else env.safe_sudo
    if not _is_anaconda(env):
        # distribute must be current for pip to build bx-python outside conda
        cmd("%s install --upgrade distribute" % _pip_cmd(env))
    cmd("%s install --upgrade %s" % (_pip_cmd(env), url))
@_if_not_python_lib("rpy")
def install_rpy(env):
    """RPy is a very simple, yet robust, Python interface to the R Programming Language.
    http://rpy.sourceforge.net/

    Skips installation quietly when R is not available on the target.
    """
    version = "1.0.3"
    ext = "a"
    url = "http://downloads.sourceforge.net/project/rpy/rpy/" \
          "%s/rpy-%s%s.zip" % (version, version, ext)
    def _fix_libraries(env):
        # Drop the Rlapack link requirement, which fails on R installs
        # that do not ship the library
        env.safe_run("""sed -i.bak -r -e "s/,'Rlapack'//g" setup.py""")
    with settings(hide('warnings', 'running', 'stdout', 'stderr'),
                  warn_only=True):
        result = env.safe_run("R --version")
        if result.failed:
            return
    _get_install(url, env, _python_make, post_unpack_fn=_fix_libraries)
@_if_not_python_lib("netsa")
def install_netsa_python(env):
    """A suite of open source tools for monitoring large-scale networks using flow data.
    http://tools.netsa.cert.org/index.html
    """
    url = "http://tools.netsa.cert.org/releases/netsa-python-%s.tar.gz" % "1.3"
    # Anaconda installs run unprivileged; system installs require sudo
    if _is_anaconda(env):
        installer = env.safe_run
    else:
        installer = env.safe_sudo
    installer("%s install %s" % (_pip_cmd(env), url))
| mit | 92e1e036babe728721a70507eb3c6252 | 37.714286 | 88 | 0.635213 | 3.172241 | false | false | false | false |
chapmanb/cloudbiolinux | cloudbio/package/brew.py | 1 | 18385 | """Install packages via the MacOSX Homebrew and Linux Linuxbrew package manager.
https://github.com/mxcl/homebrew
https://github.com/Homebrew/linuxbrew
"""
from __future__ import print_function
import contextlib
from distutils.version import LooseVersion
import os
import sys
import yaml
from cloudbio.custom import system, shared
from cloudbio.flavor.config import get_config_file
from cloudbio.fabutils import quiet, find_cmd
from cloudbio.package import cpan
from cloudbio.package.shared import _yaml_to_packages
from fabric.api import cd, settings
BOTTLE_URL = "https://s3.amazonaws.com/cloudbiolinux/brew_bottles/{pkg}-{version}.x86_64-linux.bottle.tar.gz"
BOTTLE_SUPPORTED = set(["isaac-aligner", "isaac-variant-caller", "cmake"])
def install_packages(env, to_install=None, packages=None):
    """Install packages using the home brew package manager.
    Handles upgrading brew, tapping required repositories and installing or upgrading
    packages as appropriate.
    `to_install` is a CloudBioLinux compatible set of top level items to add,
    alternatively `packages` is a list of raw package names.
    """
    config_file = get_config_file(env, "packages-homebrew.yaml")
    if to_install:
        (packages, _) = _yaml_to_packages(config_file.base, to_install, config_file.dist)
    # if we have no packages to install, do not try to install or update brew
    if len(packages) == 0:
        _remove_old(env, config_file.base)
        return
    system.install_homebrew(env)
    brew_cmd = _brew_cmd(env)
    formula_repos = ["homebrew/science", "chapmanb/cbl", "homebrew/dupes"]
    current_taps = set([x.strip() for x in env.safe_run_output("%s tap" % brew_cmd).split()])
    _safe_update(env, brew_cmd, formula_repos, current_taps)
    # re-read taps after the update since it may have changed them
    current_taps = set([x.strip() for x in env.safe_run_output("%s tap" % brew_cmd).split()])
    for repo in formula_repos:
        if repo not in current_taps:
            env.safe_run("%s tap %s" % (brew_cmd, repo))
    env.safe_run("%s tap --repair" % brew_cmd)
    ipkgs = {"outdated": set([x.strip() for x in env.safe_run_output("%s outdated" % brew_cmd).split()]),
             "current": _get_current_pkgs(env, brew_cmd)}
    _install_brew_baseline(env, brew_cmd, ipkgs, packages)
    # refresh package state after the baseline installs
    ipkgs = {"outdated": set([x.strip() for x in env.safe_run_output("%s outdated" % brew_cmd).split()]),
             "current": _get_current_pkgs(env, brew_cmd)}
    for pkg_str in packages:
        _install_pkg(env, pkg_str, brew_cmd, ipkgs)
    # unlink packages that conflict with system versions on Linux
    for pkg_str in ["pkg-config", "openssl", "cmake", "unzip"]:
        _safe_unlink_pkg(env, pkg_str, brew_cmd)
    with open(config_file.base) as in_handle:
        to_remove = yaml.safe_load(in_handle).get("to_remove", [])
    # remove packages brew pulls in even when unlinked
    for pkg_str in ["curl"] + to_remove:
        _safe_uninstall_pkg(env, pkg_str, brew_cmd)
def _remove_old(env, config_file):
    """Temporary approach to remove an old brew install migrated to conda packages.

    Only acts when a brew binary exists under env.system_install; removes the
    baseline build tools plus anything listed under `to_remove` in config_file.
    """
    brew_cmd = os.path.join(env.system_install, "bin", "brew")
    if env.safe_exists(brew_cmd):
        baseline = ["pkg-config", "openssl", "cmake", "unzip", "curl"]
        with open(config_file) as in_handle:
            to_remove = yaml.safe_load(in_handle).get("to_remove", [])
        for pkg_str in baseline + to_remove:
            _safe_uninstall_pkg(env, pkg_str, brew_cmd)
def _safe_update(env, brew_cmd, formula_repos, cur_taps):
    """Revert any taps if we fail to update due to local changes.

    Runs `brew update` inside a git stash; on failure untaps the formula
    repositories and retries once before reporting the problem.
    """
    with _git_stash(env, brew_cmd):
        with quiet():
            with settings(warn_only=True):
                out = env.safe_run("%s update" % brew_cmd)
                if out.failed:
                    # untap and retry once; tapped repos with local edits are
                    # the usual cause of update failures
                    for repo in formula_repos:
                        if repo in cur_taps:
                            env.safe_run("%s untap %s" % (brew_cmd, repo))
                    with settings(warn_only=True):
                        out = env.safe_run("%s update" % brew_cmd)
        if out.failed:
            print("\n\nHomebrew update failed.")
            print("You might need to upgrade git by installing inside bcbio with:")
            print("'brew install git --env=inherit --ignore-dependences'\n\n")
@contextlib.contextmanager
def _git_stash(env, brew_cmd):
    """Perform a safe git stash around an update.
    This circumvents brews internal stash approach which doesn't work on older versions
    of git and is sensitive to missing config.emails.

    Only stashes (and later pops) when git is older than 1.7 and the brew
    checkout has uncommitted changes; otherwise yields without touching git.
    """
    brew_prefix = env.safe_run_output("{brew_cmd} --prefix".format(**locals()))
    with cd(brew_prefix):
        with quiet():
            with settings(warn_only=True):
                # a user.email must exist for `git stash` to work at all
                env.safe_run("git config user.email 'stash@brew.sh'")
                # non-zero return code => working tree has local changes
                check_diff = env.safe_run("git diff --quiet")
                git_version = env.safe_run_output("git --version").strip().split()[-1]
    if git_version and LooseVersion(git_version) < LooseVersion("1.7"):
        if check_diff.return_code > 0:
            with cd(brew_prefix):
                with settings(warn_only=True):
                    env.safe_run("git stash --quiet")
        try:
            yield None
        finally:
            if check_diff.return_code > 0:
                with cd(brew_prefix):
                    with settings(warn_only=True):
                        env.safe_run("git stash pop --quiet")
    else:
        yield None
def _get_current_pkgs(env, brew_cmd):
    """Map installed brew package names to versions via `brew list --versions`."""
    pkgs = {}
    with quiet():
        list_out = env.safe_run_output("{brew_cmd} list --versions".format(**locals()))
    for line in list_out.split("\n"):
        fields = line.rstrip().split()
        # only plain "<name> <version>" lines are of interest
        if len(fields) == 2:
            name, version = fields
            if name.endswith(":"):
                name = name[:-1]
            pkgs[name] = version
    return pkgs
def _safe_unlink_pkg(env, pkg_str, brew_cmd):
    """Unlink packages which can cause issues with a Linux system."""
    # failures are expected when the package is not installed/linked
    with settings(warn_only=True):
        with quiet():
            env.safe_run("%s unlink %s" % (brew_cmd, pkg_str))
def _safe_link_pkg(env, pkg_str, brew_cmd):
    """Link packages required for builds, but not necessarily installed."""
    # failures are expected when the package is absent
    with settings(warn_only=True):
        with quiet():
            env.safe_run("%s link --overwrite %s" % (brew_cmd, pkg_str))
def _safe_uninstall_pkg(env, pkg_str, brew_cmd):
    """Uninstall packages which get pulled in even when unlinked by brew."""
    # failures are expected when the package was never installed
    with settings(warn_only=True):
        with quiet():
            env.safe_run("%s uninstall %s" % (brew_cmd, pkg_str))
def _install_pkg(env, pkg_str, brew_cmd, ipkgs):
    """Install a specific brew package, handling versioning and existing packages.

    Tries, in order: an explicit pinned version, a pre-built Linux bottle for
    supported packages, and finally the latest formula version.
    """
    pkg, version, args = _get_pkg_version_args(pkg_str)
    installed = False
    if version:
        _install_pkg_version(env, pkg, args, version, brew_cmd, ipkgs)
        installed = True
    # pre-built bottles only apply to a supported subset, and only on Linux
    if pkg in BOTTLE_SUPPORTED and not env.distribution == "macosx":
        installed = _install_bottle(env, brew_cmd, pkg, ipkgs)
    if not installed:
        _install_pkg_latest(env, pkg, args, brew_cmd, ipkgs)
def _install_pkg_version(env, pkg, args, version, brew_cmd, ipkgs):
    """Install a specific version of a package by retrieving from git history.
    https://gist.github.com/gcatlin/1847248
    Handles both global packages and those installed via specific taps.

    Currently only the "HEAD" pseudo-version is supported; any other version
    raises ValueError.
    """
    if ipkgs["current"].get(pkg.split("/")[-1]) == version:
        return
    if version == "HEAD":
        args = " ".join(args)
        brew_install = _get_brew_install_cmd(brew_cmd, env, pkg)
        env.safe_run("{brew_install} {args} --HEAD {pkg}".format(**locals()))
    else:
        raise ValueError("Cannot currently handle installing brew packages by version.")
        # NOTE: everything below is unreachable after the raise above; kept as
        # a reference implementation for git-history based version checkouts.
        with _git_pkg_version(env, brew_cmd, pkg, version):
            if pkg.split("/")[-1] in ipkgs["current"]:
                with settings(warn_only=True):
                    env.safe_run("{brew_cmd} unlink {pkg}".format(
                        brew_cmd=brew_cmd, pkg=pkg.split("/")[-1]))
            # if we have a more recent version, uninstall that first
            cur_version_parts = env.safe_run_output("{brew_cmd} list --versions {pkg}".format(
                brew_cmd=brew_cmd, pkg=pkg.split("/")[-1])).strip().split()
            if len(cur_version_parts) > 1 and LooseVersion(cur_version_parts[1]) > LooseVersion(version):
                with settings(warn_only=True):
                    env.safe_run("{brew_cmd} uninstall {pkg}".format(**locals()))
            env.safe_run("{brew_cmd} install {pkg}".format(**locals()))
            with settings(warn_only=True):
                env.safe_run("{brew_cmd} switch {pkg} {version}".format(**locals()))
            env.safe_run("%s link --overwrite %s" % (brew_cmd, pkg))
@contextlib.contextmanager
def _git_pkg_version(env, brew_cmd, pkg, version):
    """Convert homebrew Git to previous revision to install a specific package version.

    Checks out the formula file at the revision reported by `brew versions`,
    yields, then resets the formula back to the latest revision. The "HEAD"
    pseudo-version skips all git operations.
    """
    git_cmd = _git_cmd_for_pkg_version(env, brew_cmd, pkg, version)
    # last token of the git command is the formula file path
    git_fname = git_cmd.split()[-1]
    brew_prefix = env.safe_run_output("{brew_cmd} --prefix".format(**locals()))
    # tapped formulas live in their own git repo under Library/Taps
    if git_fname.startswith("{brew_prefix}/Library/Taps/".format(**locals())):
        brew_prefix = os.path.dirname(git_fname)
    try:
        with cd(brew_prefix):
            if version != "HEAD":
                env.safe_run(git_cmd)
        yield
    finally:
        # reset Git back to latest
        with cd(brew_prefix):
            if version != "HEAD":
                cmd_parts = git_cmd.split()
                env.safe_run("%s reset HEAD %s" % (cmd_parts[0], cmd_parts[-1]))
                # turn 'git checkout <sha> <file>' into 'git checkout -- <file>'
                # to restore the file from the freshly reset index
                cmd_parts[2] = "--"
                env.safe_run(" ".join(cmd_parts))
def _git_cmd_for_pkg_version(env, brew_cmd, pkg, version):
"""Retrieve git command to check out a specific version from homebrew.
"""
git_cmd = None
for git_line in env.safe_run_output("{brew_cmd} versions {pkg}".format(**locals())).split("\n"):
if git_line.startswith(version):
git_cmd = " ".join(git_line.rstrip().split()[1:])
break
if git_cmd is None:
raise ValueError("Did not find version %s for %s" % (version, pkg))
return git_cmd
def _latest_pkg_version(env, brew_cmd, pkg, devel=False):
    """Retrieve the latest available version of a package and if it is linked.

    Parses the text output of ``brew info <pkg>``: the first non-empty line
    carries the available versions, and the Cellar line (containing
    "Cellar/<pkg>" and " files,") ends with "*" when the keg is linked.

    :param devel: when True, report the "devel" version instead of stable.
    :returns: tuple ``(version, is_linked)``; *version* stays ``None`` when
        no version line could be parsed.
    """
    i = 0
    version, is_linked = None, False
    # warn_only: "brew info" on an unknown package must not abort the run
    with settings(warn_only=True):
        info_str = env.safe_run_output("{brew_cmd} info {pkg}".format(**locals()))
    for i, git_line in enumerate(info_str.split("\n")):
        if git_line.strip():
            if i == 0:
                # first line looks like "pkg: stable 1.2 (bottled), devel 1.3"
                # -- TODO confirm this format holds for current brew versions
                _, version_str = git_line.split(":")
                versions = version_str.split(",")
                if devel:
                    dev_strs = [x for x in versions if x.strip().startswith("devel")]
                    version = dev_strs[0].split()[-1].strip()
                else:
                    version = versions[0].replace("(bottled)", "").split()[-1].strip()
            elif git_line.find("Cellar/%s" % pkg) > 0 and git_line.find(" files,") > 0:
                # trailing "*" on the Cellar line marks the linked keg
                is_linked = git_line.strip().split()[-1] == "*"
    return version, is_linked
def _get_brew_install_cmd(brew_cmd, env, pkg):
    """Build the shell prefix used to install *pkg* with homebrew/linuxbrew.

    Chains environment setup (compiler, shell and Perl library path) in
    front of ``brew install`` plus per-package extra flags. The package
    name itself is appended by the caller.
    """
    env_setup = [
        # prefer an explicit gcc/g++ toolchain unless CC/CXX already set
        "export CC=${CC:-`which gcc`} && export CXX=${CXX:-`which g++`}",
        "export SHELL=${SHELL:-/bin/bash}",
        "export PERL5LIB=%s/lib/perl5:${PERL5LIB}" % env.system_install,
    ]
    extra_args = ""
    if pkg == "cmake":
        extra_args += " --without-docs"
    if pkg in ("lumpy-sv", "bamtools", "freebayes", "git"):
        extra_args += " --ignore-dependencies"
    return "%s && %s install --env=inherit %s" % (" && ".join(env_setup),
                                                  brew_cmd, extra_args)
def _install_pkg_latest(env, pkg, args, brew_cmd, ipkgs):
    """Install the latest version of the given package.

    Decides between a fresh install, an upgrade (remove old + install),
    or only re-linking an already-current but unlinked package, based on
    the installed-package inventory *ipkgs* ("current" and "outdated").

    :param args: extra command-line flags passed through to brew install.
    :param ipkgs: dict with "current" (name -> version) and "outdated"
        collections of installed packages -- assumed, confirm with callers.
    """
    # tap-qualified names like "chapmanb/cbl/pkg" are tracked by short name
    short_pkg = pkg.split("/")[-1]
    do_install = True
    is_linked = True
    remove_old = False
    if pkg in ipkgs["outdated"] or short_pkg in ipkgs["outdated"]:
        remove_old = True
    elif pkg in ipkgs["current"] or short_pkg in ipkgs["current"]:
        do_install = False
        pkg_version, is_linked = _latest_pkg_version(env, brew_cmd, pkg, devel="--devel" in args)
        cur_version = ipkgs["current"].get(pkg, ipkgs["current"][short_pkg])
        # also compare with any "_<revision>" suffix stripped, so a brew
        # revision bump alone does not force a reinstall
        if cur_version != pkg_version and cur_version.split("_")[0] != pkg_version:
            remove_old = True
            do_install = True
    if do_install:
        if remove_old:
            env.safe_run("{brew_cmd} remove --force {short_pkg}".format(**locals()))
        flags = " ".join(args)
        # warn_only: inspect the result ourselves instead of aborting
        with settings(warn_only=True):
            cmd = "%s %s %s" % (_get_brew_install_cmd(brew_cmd, env, pkg), flags, pkg)
            with _custom_unlink(env, brew_cmd, pkg):
                result = env.safe_run_output(cmd)
        # "Could not symlink" failures are recovered by the forced link below
        if result.failed and not result.find("Could not symlink") > 0:
            sys.tracebacklimit = 1
            raise ValueError("Failed to install brew formula: %s\n" % pkg +
                             "To debug, please try re-running the install command with verbose output:\n" +
                             cmd.replace("brew install", "brew install -v"))
        env.safe_run("%s link --overwrite %s" % (brew_cmd, pkg))
    # installed but not linked
    elif not is_linked:
        env.safe_run("%s link --overwrite %s" % (brew_cmd, pkg))
@contextlib.contextmanager
def _custom_unlink(env, brew_cmd, pkg):
    """Handle custom unlinking of packages that can break builds of others.

    Packages known to conflict with *pkg* are unlinked before the wrapped
    block runs and relinked (best effort, quietly) afterwards -- even when
    the build inside the block fails.
    """
    unlinks = {"lumpy-sv": ["bamtools"]}
    conflicting = unlinks.get(pkg, [])
    for upkg in conflicting:
        _safe_unlink_pkg(env, upkg, brew_cmd)
    try:
        yield None
    finally:
        for upkg in conflicting:
            with settings(warn_only=True):
                with quiet():
                    env.safe_run("%s link --overwrite %s" % (brew_cmd, upkg))
def _get_pkg_version_args(pkg_str):
    """Split a package specification into ``(name, version, args)``.

    Uses Python style ``package==0.1`` version pins, with optional extra
    installer arguments appended after ``;`` separators, e.g.
    ``samtools==1.2;--with-foo``. The version is ``None`` when unpinned.
    """
    pieces = pkg_str.split(";")
    spec, args = pieces[0], pieces[1:]
    parts = spec.split("==")
    if len(parts) == 1:
        return parts[0], None, args
    assert len(parts) == 2
    name, version = parts
    return name, version, args
def _install_bottle(env, brew_cmd, pkg, ipkgs):
    """Install Linux bottles for brew packages that can be tricky to build.

    Downloads a pre-built bottle tarball (from BOTTLE_URL) into the brew
    cache, unpacks it directly into the Cellar and links it.

    :returns: True when the package is up to date or a bottle was installed,
        False when running on Mac OS X or no bottle could be fetched
        (caller should fall back to building from source).
    """
    if env.distribution == "macosx":  # Only Linux bottles, build away on Mac
        return False
    pkg_version, is_linked = _latest_pkg_version(env, brew_cmd, pkg)
    install_version = ipkgs["current"].get(pkg)
    if pkg_version == install_version:  # Up to date
        if not is_linked:
            env.safe_run("%s link --overwrite %s" % (brew_cmd, pkg))
        return True
    # outdated install present: remove it before unpacking the new bottle
    elif install_version or pkg in ipkgs["outdated"]:
        env.safe_run("{brew_cmd} remove --force {pkg}".format(**locals()))
    url = BOTTLE_URL.format(pkg=pkg, version=pkg_version)
    brew_cachedir = env.safe_run_output("%s --cache" % brew_cmd)
    brew_cellar = os.path.join(env.safe_run_output("%s --prefix" % brew_cmd), "Cellar")
    with quiet():
        env.safe_run("mkdir -p %s" % brew_cellar)
    out_file = os.path.join(brew_cachedir, os.path.basename(url))
    # remove any stale cached download so the fetch is fresh
    if env.safe_exists(out_file):
        env.safe_run("rm -f %s" % out_file)
    # allow_fail: a missing bottle is not fatal, we return False instead
    bottle_file = shared._remote_fetch(env, url, out_file=out_file,
                                       allow_fail=True, samedir=True)
    if bottle_file:
        with cd(brew_cellar):
            env.safe_run("tar -xf %s" % bottle_file)
        env.safe_run("%s link --overwrite %s" % (brew_cmd, pkg))
        return True
    else:
        return False
def _install_brew_baseline(env, brew_cmd, ipkgs, packages):
    """Install baseline brew components not handled by dependency system.

    - Installation of required Perl libraries.
    - Upgrades any package dependencies
    """
    # ensure base libraries are linked/installed before anything else
    for dep in ["openssl"]:
        _safe_link_pkg(env, dep, brew_cmd)
    for dep in ["expat", "pkg-config", "xz", "unzip"]:
        _install_pkg(env, dep, brew_cmd, ipkgs)
    # check if we have an older git and need to install it from brew
    git_version = None
    with quiet():
        with settings(warn_only=True):
            git_version = env.safe_run_output("git --version").strip().split()[-1]
    if git_version and LooseVersion(git_version) < LooseVersion("1.7"):
        _install_pkg(env, "git", brew_cmd, ipkgs)
    for dep in ["sambamba"]:  # Avoid conflict with homebrew-science sambamba
        env.safe_run("{brew_cmd} remove --force {dep}".format(**locals()))
    # force-refresh htslib when outdated (also under its tap-qualified name)
    for dependency in ["htslib"]:
        if dependency in packages:
            if (dependency in ipkgs["outdated"] or "chapmanb/cbl/%s" % dependency in ipkgs["outdated"]
                    or dependency not in ipkgs["current"]):
                _install_pkg_latest(env, dependency, [], brew_cmd, ipkgs)
    if "cpanminus" in packages:
        _install_pkg_latest(env, "cpanminus", [], brew_cmd, ipkgs)
        _install_pkg_latest(env, "samtools-library-0.1", [], brew_cmd, ipkgs)
        cpan.install_packages(env)
    # Ensure paths we may have missed on install are accessible to regular user
    if env.use_sudo:
        paths = ["share", "share/java"]
        for path in paths:
            with quiet():
                # test -O: fails when the path is not owned by the current user
                test_access = env.safe_run("test -d %s/%s && test -O %s/%s" % (env.system_install, path,
                                                                               env.system_install, path))
            if test_access.failed and env.safe_exists("%s/%s" % (env.system_install, path)):
                env.safe_sudo("chown %s %s/%s" % (env.user, env.system_install, path))
def _brew_cmd(env):
    """Locate a working ``brew`` executable for homebrew/linuxbrew installs.

    :raises ValueError: when no functional brew command can be found.
    """
    cmd = find_cmd(env, "brew", "--version")
    if cmd is not None:
        return cmd
    raise ValueError("Did not find working installation of Linuxbrew/Homebrew. "
                     "Please check if you have ruby available.")
| mit | 6586424a1f26b20a2f54b13b229eb525 | 43.623786 | 111 | 0.591569 | 3.541025 | false | false | false | false |
crossbario/autobahn-python | autobahn/websocket/util.py | 2 | 6422 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from autobahn.util import public
from urllib import parse as urlparse
# The Python urlparse module currently does not contain the ws/wss
# schemes, so we add those dynamically (which is a hack of course).
#
# Registering them in the urllib.parse "uses_*" lists makes relative
# resolution, netloc, params, query and fragment handling apply to
# ws:// and wss:// URLs just like http:// ones.
#
# Important: if you change this stuff (you shouldn't), make sure
# _all_ our unit tests for WS URLs succeed
#
wsschemes = ["ws", "wss"]
urlparse.uses_relative.extend(wsschemes)
urlparse.uses_netloc.extend(wsschemes)
urlparse.uses_params.extend(wsschemes)
urlparse.uses_query.extend(wsschemes)
urlparse.uses_fragment.extend(wsschemes)
__all__ = (
    "create_url",
    "parse_url",
)
@public
def create_url(hostname, port=None, isSecure=False, path=None, params=None):
    """
    Create a WebSocket URL from components.

    :param hostname: WebSocket server hostname (for TCP/IP sockets) or
        filesystem path (for Unix domain sockets).
    :type hostname: str

    :param port: For TCP/IP sockets, WebSocket service port or ``None`` (to select default
        ports ``80`` or ``443`` depending on ``isSecure``). When ``hostname=="unix"``,
        this defines the path to the Unix domain socket instead of a TCP/IP network socket.
    :type port: int or str

    :param isSecure: Set ``True`` for secure WebSocket (``wss`` scheme).
    :type isSecure: bool

    :param path: WebSocket URL path of addressed resource (will be
        properly URL escaped). Ignored for RawSocket.
    :type path: str

    :param params: A dictionary of key-values to construct the query
        component of the addressed WebSocket resource (will be properly URL
        escaped). Ignored for RawSocket.
    :type params: dict

    :returns: Constructed WebSocket URL.
    :rtype: str
    """
    # note: hostname is deliberately not type-checked so hostname-like
    # values continue to work (see the original commented-out assert)
    assert isinstance(isSecure, bool)

    if hostname == 'unix':
        # Unix domain socket: "port" carries the socket filesystem path
        netloc = "unix:%s" % port
    else:
        # FIX: previously range(0, 65535), which silently rejected the
        # valid TCP port 65535 (range upper bound is exclusive)
        assert port is None or (isinstance(port, int) and port in range(0, 65536))
        if port is not None:
            netloc = "%s:%d" % (hostname, port)
        else:
            # no explicit port: fill in the scheme's default port
            if isSecure:
                netloc = "%s:443" % hostname
            else:
                netloc = "%s:80" % hostname

    scheme = "wss" if isSecure else "ws"

    # URL-escape the resource path; default to the root resource
    ppath = urlparse.quote(path) if path is not None else "/"

    # URL-encode query parameters, if any
    query = urlparse.urlencode(params) if params is not None else None

    return urlparse.urlunparse((scheme, netloc, ppath, None, query, None))
@public
def parse_url(url):
    """
    Parses a WebSocket URL into its components and returns a tuple:

     - ``isSecure`` is a flag which is ``True`` for ``wss`` URLs.
     - ``host`` is the hostname or IP from the URL.

    and for TCP/IP sockets:

     - ``tcp_port`` is the port from the URL or standard port derived from
       scheme (``ws`` => ``80``, ``wss`` => ``443``).

    or for Unix domain sockets:

     - ``uds_path`` is the path on the local host filesystem.

    :param url: A valid WebSocket URL, i.e. ``ws://localhost:9000`` for TCP/IP sockets or
        ``ws://unix:/tmp/file.sock`` for Unix domain sockets (UDS).
    :type url: str

    :returns: A 6-tuple ``(isSecure, host, tcp_port, resource, path, params)`` (TCP/IP) or
        ``(isSecure, host, uds_path, resource, path, params)`` (UDS).
    :rtype: tuple

    :raises ValueError: for non-WebSocket schemes, missing hostname,
        non-empty fragments or out-of-range ports.
    """
    parsed = urlparse.urlparse(url)

    if parsed.scheme not in ["ws", "wss"]:
        raise ValueError("invalid WebSocket URL: protocol scheme '{}' is not for WebSocket".format(parsed.scheme))

    if not parsed.hostname or parsed.hostname == "":
        raise ValueError("invalid WebSocket URL: missing hostname")

    # WebSocket URLs must not carry a fragment (RFC 6455, Section 3)
    # FIX: error message previously lacked the closing quote
    if parsed.fragment is not None and parsed.fragment != "":
        raise ValueError("invalid WebSocket URL: non-empty fragment '%s'" % parsed.fragment)

    if parsed.path is not None and parsed.path != "":
        ppath = parsed.path
        path = urlparse.unquote(ppath)
    else:
        ppath = "/"
        path = ppath

    if parsed.query is not None and parsed.query != "":
        resource = ppath + "?" + parsed.query
        params = urlparse.parse_qs(parsed.query)
    else:
        resource = ppath
        params = {}

    if parsed.hostname == "unix":
        # Unix domain sockets
        # ws://unix:/tmp/file.sock => unix:/tmp/file.sock => /tmp/file.sock
        fp = parsed.netloc + parsed.path
        uds_path = fp.split(':')[1]

        # note: we don't interpret "path" in any further way: it needs to be
        # a path on the local host with a listening Unix domain sockets at the other end ..
        return parsed.scheme == "wss", parsed.hostname, uds_path, resource, path, params
    else:
        # TCP/IP sockets
        if parsed.port is None or parsed.port == "":
            tcp_port = 80 if parsed.scheme == "ws" else 443
        else:
            tcp_port = int(parsed.port)

        if tcp_port < 1 or tcp_port > 65535:
            raise ValueError("invalid port {}".format(tcp_port))

        return parsed.scheme == "wss", parsed.hostname, tcp_port, resource, path, params
| mit | b37334417f612be8cbf2a1a6a4c5afb5 | 33.342246 | 114 | 0.627686 | 4.072289 | false | false | false | false |
crossbario/autobahn-python | autobahn/wamp/test/test_wamp_serializer.py | 2 | 23177 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import os
import unittest
import random
import decimal
from decimal import Decimal
from autobahn.wamp import message
from autobahn.wamp import role
from autobahn.wamp import serializer
def generate_test_messages():
    """
    List of WAMP test messages used for serializers, as tuples
    ``(contains_binary, message)`` with ``contains_binary == False``.
    Expand this if you add more options or messages.

    This list of WAMP messages does not contain any binary app payloads!
    (the ``some_bytes`` values below end up as WAMP binary values in the
    serialized payload, not as opaque binary app payloads)
    """
    # shared payload values: raw bytes, non-ASCII unicode, plain and
    # unicode URIs, plus nested positional/keyword argument structures
    some_bytes = os.urandom(32)
    some_unicode = '\u3053\u3093\u306b\u3061\u306f\u4e16\u754c'
    some_uri = 'com.myapp.foobar'
    some_unicode_uri = 'com.myapp.\u4f60\u597d\u4e16\u754c.baz'
    some_args = [1, 2, 3, 'hello', some_bytes, some_unicode, {'foo': 23, 'bar': 'hello', 'baz': some_bytes, 'moo': some_unicode}]
    some_kwargs = {'foo': 23, 'bar': 'hello', 'baz': some_bytes, 'moo': some_unicode, 'arr': some_args}
    # one instance of (almost) every WAMP message type, with varying options
    msgs = [
        message.Hello("realm1", {'subscriber': role.RoleSubscriberFeatures()}),
        message.Hello("realm1", {'publisher': role.RolePublisherFeatures()}),
        message.Hello("realm1", {'caller': role.RoleCallerFeatures()}),
        message.Hello("realm1", {'callee': role.RoleCalleeFeatures()}),
        message.Hello("realm1", {
            'subscriber': role.RoleSubscriberFeatures(),
            'publisher': role.RolePublisherFeatures(),
            'caller': role.RoleCallerFeatures(),
            'callee': role.RoleCalleeFeatures(),
        }),
        message.Goodbye(),
        message.Yield(123456),
        message.Yield(123456, args=some_args),
        message.Yield(123456, args=[], kwargs=some_kwargs),
        message.Yield(123456, args=some_args, kwargs=some_kwargs),
        message.Yield(123456, progress=True),
        message.Interrupt(123456),
        message.Interrupt(123456, mode=message.Interrupt.KILL),
        message.Invocation(123456, 789123),
        message.Invocation(123456, 789123, args=some_args),
        message.Invocation(123456, 789123, args=[], kwargs=some_kwargs),
        message.Invocation(123456, 789123, args=some_args, kwargs=some_kwargs),
        message.Invocation(123456, 789123, timeout=10000),
        message.Result(123456),
        message.Result(123456, args=some_args),
        message.Result(123456, args=[], kwargs=some_kwargs),
        message.Result(123456, args=some_args, kwargs=some_kwargs),
        message.Result(123456, progress=True),
        message.Cancel(123456),
        message.Cancel(123456, mode=message.Cancel.KILL),
        message.Call(123456, some_uri),
        message.Call(123456, some_uri, args=some_args),
        message.Call(123456, some_uri, args=[], kwargs=some_kwargs),
        message.Call(123456, some_uri, args=some_args, kwargs=some_kwargs),
        message.Call(123456, some_uri, timeout=10000),
        message.Call(123456, some_unicode_uri),
        message.Call(123456, some_unicode_uri, args=some_args),
        message.Call(123456, some_unicode_uri, args=[], kwargs=some_kwargs),
        message.Call(123456, some_unicode_uri, args=some_args, kwargs=some_kwargs),
        message.Call(123456, some_unicode_uri, timeout=10000),
        message.Unregistered(123456),
        message.Unregister(123456, 789123),
        message.Registered(123456, 789123),
        message.Register(123456, some_uri),
        message.Register(123456, some_uri, match='prefix'),
        message.Register(123456, some_uri, invoke='roundrobin'),
        message.Register(123456, some_unicode_uri),
        message.Register(123456, some_unicode_uri, match='prefix'),
        message.Register(123456, some_unicode_uri, invoke='roundrobin'),
        message.Event(123456, 789123),
        message.Event(123456, 789123, args=some_args),
        message.Event(123456, 789123, args=[], kwargs=some_kwargs),
        message.Event(123456, 789123, args=some_args, kwargs=some_kwargs),
        message.Event(123456, 789123, publisher=300),
        message.Published(123456, 789123),
        message.Publish(123456, some_uri),
        message.Publish(123456, some_uri, args=some_args),
        message.Publish(123456, some_uri, args=[], kwargs=some_kwargs),
        message.Publish(123456, some_uri, args=some_args, kwargs=some_kwargs),
        message.Publish(123456, some_uri, exclude_me=False, exclude=[300], eligible=[100, 200, 300]),
        message.Publish(123456, some_unicode_uri),
        message.Publish(123456, some_unicode_uri, args=some_args),
        message.Publish(123456, some_unicode_uri, args=[], kwargs=some_kwargs),
        message.Publish(123456, some_unicode_uri, args=some_args, kwargs=some_kwargs),
        message.Publish(123456, some_unicode_uri, exclude_me=False, exclude=[300], eligible=[100, 200, 300]),
        message.Unsubscribed(123456),
        message.Unsubscribe(123456, 789123),
        message.Subscribed(123456, 789123),
        message.Subscribe(123456, some_uri),
        message.Subscribe(123456, some_uri, match=message.Subscribe.MATCH_PREFIX),
        message.Subscribe(123456, some_unicode_uri),
        message.Subscribe(123456, some_unicode_uri, match=message.Subscribe.MATCH_PREFIX),
        message.Error(message.Call.MESSAGE_TYPE, 123456, some_uri),
        message.Error(message.Call.MESSAGE_TYPE, 123456, some_uri, args=some_args),
        message.Error(message.Call.MESSAGE_TYPE, 123456, some_uri, args=[], kwargs=some_kwargs),
        message.Error(message.Call.MESSAGE_TYPE, 123456, some_uri, args=some_args, kwargs=some_kwargs),
        message.Error(message.Call.MESSAGE_TYPE, 123456, some_unicode_uri),
        message.Error(message.Call.MESSAGE_TYPE, 123456, some_unicode_uri, args=some_args),
        message.Error(message.Call.MESSAGE_TYPE, 123456, some_unicode_uri, args=[], kwargs=some_kwargs),
        message.Error(message.Call.MESSAGE_TYPE, 123456, some_unicode_uri, args=some_args, kwargs=some_kwargs),
        message.Result(123456),
        message.Result(123456, args=some_args),
        message.Result(123456, args=some_args, kwargs=some_kwargs),
    ]
    return [(False, msg) for msg in msgs]
def generate_test_messages_binary():
    """
    Generate WAMP test messages which contain binary app payloads, as tuples
    ``(contains_binary, message)`` with ``contains_binary == True``.

    With the JSON serializer, this currently only works on Python 3 (both CPython3 and PyPy3),
    because even on Python 3, we need to patch the stdlib JSON, and on Python 2, the patching
    would be even hackier.
    """
    # binary payloads: empty, single bytes, and random blobs of growing size
    payloads = [b'', b'\x00', b'\30']
    payloads.extend(os.urandom(size) for size in (4, 16, 128, 256, 512, 1024))
    msgs = []
    for blob in payloads:
        # once as positional arg, once additionally as keyword arg
        msgs.append(message.Event(123456, 789123, args=[blob]))
        msgs.append(message.Event(123456, 789123, args=[blob], kwargs={'foo': blob}))
    return [(True, m) for m in msgs]
def create_serializers(decimal_support=False):
    """
    Instantiate the WAMP serializers under test, each in plain and
    batched mode.

    :param decimal_support: when True, enable decimal-from-string support
        on the JSON serializers and skip serializers that cannot handle
        the big-number test payloads.
    """
    _serializers = [
        serializer.JsonSerializer(use_decimal_from_str=decimal_support),
        serializer.JsonSerializer(batched=True, use_decimal_from_str=decimal_support),
        serializer.CBORSerializer(),
        serializer.CBORSerializer(batched=True),
    ]
    if not decimal_support:
        # builtins.OverflowError: Integer value out of range
        _serializers.append(serializer.MsgPackSerializer())
        _serializers.append(serializer.MsgPackSerializer(batched=True))
        # roundtrip error
        _serializers.append(serializer.UBJSONSerializer())
        _serializers.append(serializer.UBJSONSerializer(batched=True))
    # FIXME: implement full FlatBuffers serializer for WAMP
    # WAMP-FlatBuffers currently only supports Python 3
    # _serializers.append(serializer.FlatBuffersSerializer())
    # _serializers.append(serializer.FlatBuffersSerializer(batched=True))
    return _serializers
class TestFlatBuffersSerializer(unittest.TestCase):
    """
    Roundtrip tests for the (partial) WAMP-FlatBuffers serializer.
    """
    def test_basic(self):
        # Event and Publish are the message types the FlatBuffers
        # serializer currently covers in these tests
        messages = [
            message.Event(123456,
                          789123,
                          args=[1, 2, 3],
                          kwargs={'foo': 23, 'bar': 'hello'},
                          publisher=666,
                          retained=True),
            message.Publish(123456,
                            'com.example.topic1',
                            args=[1, 2, 3],
                            kwargs={'foo': 23, 'bar': 'hello'},
                            retain=True)
        ]
        ser = serializer.FlatBuffersSerializer()
        # from pprint import pprint
        for msg in messages:
            # serialize message
            payload, binary = ser.serialize(msg)
            # unserialize message again
            msg2 = ser.unserialize(payload, binary)[0]
            # pprint(msg.marshal())
            # pprint(msg2.marshal())
            # must be equal: message roundtrips via the serializer
            self.assertEqual(msg, msg2)
            # self.assertEqual(msg.subscription, msg2.subscription)
            # self.assertEqual(msg.publication, msg2.publication)
class TestDecimalSerializer(unittest.TestCase):
    """
    Roundtrip/crosstrip tests for serializers with arbitrary-precision
    ``decimal.Decimal`` support enabled.

    Background on number representations:

    binary fixed-point
    binary floating-point: float (float32), double (float64)
    decimal floating-point: decimal128, decimal256
    decimal fixed-point: NUMERIC(precision, scale)
    decimal arbitrary precision: NUMERIC, decimal.Decimal

    https://developer.nvidia.com/blog/implementing-high-precision-decimal-arithmetic-with-cuda-int128/
    https://github.com/johnmcfarlane/cnl
    """
    def setUp(self) -> None:
        # serializers under test, with decimal support switched on
        self._test_serializers = create_serializers(decimal_support=True)
        # enough for decimal256 precision arithmetic (76 significand decimal digits)
        decimal.getcontext().prec = 76
        # payloads without Decimal values (floats and big ints only)
        self._test_messages_no_dec = [
            (True,
             {
                 'a': random.random(),
                 'b': random.randint(0, 2 ** 53),
                 'c': random.randint(0, 2 ** 64),
                 'd': random.randint(0, 2 ** 128),
                 'e': random.randint(0, 2 ** 256),
                 # float64: 52 binary digits, precision of 15-17 significant decimal digits
                 'f': 0.12345678901234567,
                 'g': 0.8765432109876545,
                 'y': os.urandom(8),
                 'z': [
                     -1,
                     0,
                     1,
                     True,
                     None,
                     0.12345678901234567,
                     0.8765432109876545,
                     os.urandom(8)
                 ]
             })
        ]
        # payloads additionally containing Decimal values at decimal128
        # and decimal256 precision
        self._test_messages_dec = [
            (True,
             {
                 'a': random.random(),
                 'b': random.randint(0, 2 ** 53),
                 'c': random.randint(0, 2 ** 64),
                 'd': random.randint(0, 2 ** 128),
                 'e': random.randint(0, 2 ** 256),
                 # float64: 52 binary digits, precision of 15-17 significant decimal digits
                 'f': 0.12345678901234567,
                 'g': 0.8765432109876545,
                 # decimal128: precision of 38 significant decimal digits
                 'h': Decimal('0.1234567890123456789012345678901234567'),
                 'i': Decimal('0.8765432109876543210987654321098765434'),
                 # decimal256: precision of 76 significant decimal digits
                 'j': Decimal('0.123456789012345678901234567890123456701234567890123456789012345678901234567'),
                 'k': Decimal('0.876543210987654321098765432109876543298765432109876543210987654321098765434'),
                 'y': os.urandom(8),
                 'z': [
                     -1,
                     0,
                     1,
                     True,
                     None,
                     0.12345678901234567,
                     0.8765432109876545,
                     Decimal('0.1234567890123456789012345678901234567'),
                     Decimal('0.8765432109876543210987654321098765434'),
                     Decimal('0.123456789012345678901234567890123456701234567890123456789012345678901234567'),
                     Decimal('0.876543210987654321098765432109876543298765432109876543210987654321098765434'),
                     os.urandom(8)
                 ]
             })
        ]
    def test_json_no_decimal(self):
        """
        Test without ``use_decimal_from_str`` feature of JSON object serializer.
        """
        ser = serializer.JsonObjectSerializer(use_decimal_from_str=False)
        for contains_binary, obj in self._test_messages_no_dec:
            _obj = ser.unserialize(ser.serialize(obj))[0]
            self.assertEqual(obj, _obj)
            # float64 arithmetic: sum carries the usual binary rounding error
            self.assertEqual(1.0000000000000002, _obj['f'] + _obj['g'])
    def test_json_decimal(self):
        """
        Test ``use_decimal_from_str`` feature of JSON object serializer.
        """
        ser = serializer.JsonObjectSerializer(use_decimal_from_str=True)
        for contains_binary, obj in self._test_messages_dec:
            _obj = ser.unserialize(ser.serialize(obj))[0]
            self.assertEqual(obj, _obj)
            # floats still roundtrip as floats ...
            self.assertEqual(1.0000000000000002, _obj['f'] + _obj['g'])
            # ... while Decimals keep full decimal128/decimal256 precision
            self.assertEqual(Decimal('1.0000000000000000000000000000000000001'), _obj['h'] + _obj['i'])
            self.assertEqual(Decimal('1.000000000000000000000000000000000000000000000000000000000000000000000000001'), _obj['j'] + _obj['k'])
    def test_roundtrip_msg(self):
        # every payload must roundtrip unchanged through every serializer
        for wamp_ser in self._test_serializers:
            ser = wamp_ser._serializer
            for contains_binary, msg in self._test_messages_no_dec + self._test_messages_dec:
                payload = ser.serialize(msg)
                msg2 = ser.unserialize(payload)
                self.assertEqual(msg, msg2[0])
    def test_crosstrip_msg(self):
        # serialize with one serializer, re-serialize the result with
        # another: payload must survive every serializer pairing
        for wamp_ser1 in self._test_serializers:
            ser1 = wamp_ser1._serializer
            for contains_binary, msg in self._test_messages_no_dec + self._test_messages_dec:
                payload1 = ser1.serialize(msg)
                msg1 = ser1.unserialize(payload1)
                msg1 = msg1[0]
                for wamp_ser2 in self._test_serializers:
                    ser2 = wamp_ser2._serializer
                    payload2 = ser2.serialize(msg1)
                    msg2 = ser2.unserialize(payload2)
                    msg2 = msg2[0]
                    self.assertEqual(msg, msg2)
                    # print(ser1, len(payload1), ser2, len(payload2))
class TestSerializer(unittest.TestCase):
    """
    Roundtrip/crosstrip and statistics tests run over every WAMP serializer
    and every generated WAMP test message (with and without binary payloads).
    """
    def setUp(self):
        self._test_messages = generate_test_messages() + generate_test_messages_binary()
        self._test_serializers = create_serializers()
        # print('Testing WAMP serializers {} with {} WAMP test messages'.format([ser.SERIALIZER_ID for ser in self._test_serializers], len(self._test_messages)))
    def test_deep_equal_msg(self):
        """
        Test deep object equality assert (because I am paranoid).
        """
        v = os.urandom(10)
        o1 = [1, 2, {'foo': 'bar', 'bar': v, 'baz': [9, 3, 2], 'goo': {'moo': [1, 2, 3]}}, v]
        o2 = [1, 2, {'goo': {'moo': [1, 2, 3]}, 'bar': v, 'baz': [9, 3, 2], 'foo': 'bar'}, v]
        self.assertEqual(o1, o2)
    def test_roundtrip_msg(self):
        """
        Test round-tripping over each serializer.
        """
        for ser in self._test_serializers:
            for contains_binary, msg in self._test_messages:
                # serialize message
                payload, binary = ser.serialize(msg)
                # unserialize message again
                msg2 = ser.unserialize(payload, binary)
                # must be equal: message roundtrips via the serializer
                self.assertEqual([msg], msg2)
    def test_crosstrip_msg(self):
        """
        Test cross-tripping over 2 serializers (as is done by WAMP routers).
        """
        for ser1 in self._test_serializers:
            for contains_binary, msg in self._test_messages:
                # serialize message
                payload, binary = ser1.serialize(msg)
                # unserialize message again
                msg1 = ser1.unserialize(payload, binary)
                msg1 = msg1[0]
                for ser2 in self._test_serializers:
                    # serialize message
                    payload, binary = ser2.serialize(msg1)
                    # unserialize message again
                    msg2 = ser2.unserialize(payload, binary)
                    # must be equal: message crosstrips via
                    # the serializers ser1 -> ser2
                    self.assertEqual([msg], msg2)
    def test_cache_msg(self):
        """
        Test message serialization caching.
        """
        for contains_binary, msg in self._test_messages:
            # message serialization cache is initially empty
            self.assertEqual(msg._serialized, {})
            for ser in self._test_serializers:
                # verify message serialization is not yet cached
                self.assertFalse(ser._serializer in msg._serialized)
                payload, binary = ser.serialize(msg)
                # now the message serialization must be cached
                self.assertTrue(ser._serializer in msg._serialized)
                self.assertEqual(msg._serialized[ser._serializer], payload)
                # and after resetting the serialization cache, message
                # serialization is gone
                msg.uncache()
                self.assertFalse(ser._serializer in msg._serialized)
    def test_initial_stats(self):
        """
        Test initial serializer stats are indeed empty.
        """
        for ser in self._test_serializers:
            stats = ser.stats(details=True)
            self.assertEqual(stats['serialized']['bytes'], 0)
            self.assertEqual(stats['serialized']['messages'], 0)
            self.assertEqual(stats['serialized']['rated_messages'], 0)
            self.assertEqual(stats['unserialized']['bytes'], 0)
            self.assertEqual(stats['unserialized']['messages'], 0)
            self.assertEqual(stats['unserialized']['rated_messages'], 0)
    def test_serialize_stats(self):
        """
        Test serializer stats are non-empty after serializing/unserializing messages.
        """
        for ser in self._test_serializers:
            for contains_binary, msg in self._test_messages:
                # serialize message
                payload, binary = ser.serialize(msg)
                # unserialize message again
                ser.unserialize(payload, binary)
            stats = ser.stats(details=False)
            self.assertTrue(stats['bytes'] > 0)
            self.assertTrue(stats['messages'] > 0)
            self.assertTrue(stats['rated_messages'] > 0)
    def test_serialize_stats_with_details(self):
        """
        Test serializer stats - with details - are non-empty after serializing/unserializing messages.
        """
        for ser in self._test_serializers:
            for contains_binary, msg in self._test_messages:
                # serialize message
                payload, binary = ser.serialize(msg)
                # unserialize message again
                ser.unserialize(payload, binary)
            stats = ser.stats(details=True)
            # {'serialized': {'bytes': 7923, 'messages': 59, 'rated_messages': 69}, 'unserialized': {'bytes': 7923, 'messages': 59, 'rated_messages': 69}}
            # print(stats)
            self.assertTrue(stats['serialized']['bytes'] > 0)
            self.assertTrue(stats['serialized']['messages'] > 0)
            self.assertTrue(stats['serialized']['rated_messages'] > 0)
            self.assertTrue(stats['unserialized']['bytes'] > 0)
            self.assertTrue(stats['unserialized']['messages'] > 0)
            self.assertTrue(stats['unserialized']['rated_messages'] > 0)
            # serialize/unserialize of the same messages must tally up equal
            self.assertEqual(stats['serialized']['bytes'], stats['unserialized']['bytes'])
            self.assertEqual(stats['serialized']['messages'], stats['unserialized']['messages'])
            self.assertEqual(stats['serialized']['rated_messages'], stats['unserialized']['rated_messages'])
    def test_reset_stats(self):
        """
        Test serializer stats are reset after fetching stats - depending on option.
        """
        for ser in self._test_serializers:
            for contains_binary, msg in self._test_messages:
                # serialize message
                payload, binary = ser.serialize(msg)
                # unserialize message again
                ser.unserialize(payload, binary)
            # fetching stats (with default options) resets the counters ...
            ser.stats()
            # ... so a second fetch must observe empty stats
            stats = ser.stats(details=True)
            self.assertEqual(stats['serialized']['bytes'], 0)
            self.assertEqual(stats['serialized']['messages'], 0)
            self.assertEqual(stats['serialized']['rated_messages'], 0)
            self.assertEqual(stats['unserialized']['bytes'], 0)
            self.assertEqual(stats['unserialized']['messages'], 0)
            self.assertEqual(stats['unserialized']['rated_messages'], 0)
    def test_auto_stats(self):
        """
        Test serializer stats are non-empty after serializing/unserializing messages.
        """
        for ser in self._test_serializers:
            def on_stats(stats):
                self.assertTrue(stats['bytes'] > 0)
                self.assertTrue(stats['messages'] > 0)
                self.assertTrue(stats['rated_messages'] > 0)
            # auto-trigger the stats callback every 10 rated messages
            ser.set_stats_autoreset(10, 0, on_stats)
            for contains_binary, msg in self._test_messages:
                # serialize message
                payload, binary = ser.serialize(msg)
                # unserialize message again
                ser.unserialize(payload, binary)
| mit | d25a8df378e47f5dd83fd97aec990a3d | 41.448718 | 161 | 0.595332 | 4.225524 | false | true | false | false |
crossbario/autobahn-python | examples/asyncio/wamp/pubsub/options/frontend.py | 1 | 2341 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from os import environ
import asyncio
from autobahn.wamp.types import PublishOptions, EventDetails, SubscribeOptions
from autobahn.asyncio.wamp import ApplicationSession, ApplicationRunner
class Component(ApplicationSession):
    """
    Subscriber component: listens on a topic, prints each event together
    with its publication details, and leaves the realm once more than
    five events have been delivered.
    """

    async def onJoin(self, details):
        # Number of events delivered so far in this session.
        self.received = 0

        def on_event(i, details=None):
            print("Got event, publication ID {}, publisher {}: {}".format(
                details.publication, details.publisher, i))
            self.received += 1
            if self.received <= 5:
                return
            self.leave()

        # 'details_arg' tells the router-side dispatch to pass event
        # details into the handler via the 'details' keyword argument.
        sub_options = SubscribeOptions(details_arg='details')
        await self.subscribe(on_event, 'com.myapp.topic1', options=sub_options)

    def onDisconnect(self):
        # Tear down the asyncio loop so the process exits.
        asyncio.get_event_loop().stop()
if __name__ == '__main__':
    # Router URL may be overridden through the environment; defaults to
    # a local Crossbar demo router.
    router_url = environ.get("AUTOBAHN_DEMO_ROUTER", "ws://127.0.0.1:8080/ws")
    ApplicationRunner(router_url, "crossbardemo").run(Component)
| mit | b4d2f629b2915d4c3bc366770e6b79c7 | 38.677966 | 117 | 0.673217 | 4.467557 | false | false | false | false |
crossbario/autobahn-python | examples/asyncio/wamp/rpc/slowsquare/backend.py | 3 | 2060 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import asyncio
from os import environ
from autobahn.asyncio.wamp import ApplicationSession, ApplicationRunner
class Component(ApplicationSession):
    """
    Math service backend: registers an immediate 'square' procedure and
    an artificially delayed 'slowsquare' variant.
    """

    async def onJoin(self, details):
        def square(x):
            # Plain synchronous procedure.
            return x * x

        async def slowsquare(x, delay=1):
            # Simulate a long-running computation before answering.
            await asyncio.sleep(delay)
            return x * x

        # Register in the same order as before: square first.
        await self.register(square, 'com.math.square')
        await self.register(slowsquare, 'com.math.slowsquare')
        print("Registered com.math.slowsquare")
if __name__ == '__main__':
    # Router URL may be overridden through the environment; defaults to
    # a local Crossbar demo router.
    router_url = environ.get("AUTOBAHN_DEMO_ROUTER", "ws://127.0.0.1:8080/ws")
    ApplicationRunner(router_url, "crossbardemo").run(Component)
| mit | bd26d7ae713cbf43eb5d80b63a808441 | 35.785714 | 79 | 0.664563 | 4.497817 | false | false | false | false |
crossbario/autobahn-python | examples/asyncio/wamp/rpc/progress/backend.py | 3 | 2190 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import asyncio
from os import environ
from autobahn.wamp.types import CallOptions, RegisterOptions
from autobahn.asyncio.wamp import ApplicationSession, ApplicationRunner
class Component(ApplicationSession):
    """
    Backend component registering a long-running procedure that emits
    progressive results when the caller asks for them.
    """

    async def onJoin(self, details):
        async def longop(n, details=None):
            # Guard clause: caller did not request progressive results,
            # so just block for the whole duration and answer once.
            if not details.progress:
                await asyncio.sleep(1 * n)
                return n
            # Progressive mode: emit one partial result per second.
            for step in range(n):
                details.progress(step)
                await asyncio.sleep(1)
            return n

        # 'details_arg' makes the router pass call details (including the
        # progress callback) via the 'details' keyword argument.
        await self.register(longop, 'com.myapp.longop', RegisterOptions(details_arg='details'))
if __name__ == '__main__':
    # Router URL may be overridden through the environment; defaults to
    # a local Crossbar demo router.
    router_url = environ.get("AUTOBAHN_DEMO_ROUTER", "ws://127.0.0.1:8080/ws")
    ApplicationRunner(router_url, "crossbardemo").run(Component)
| mit | 93d39dceb1a17ae3118d5d2584e58fc6 | 38.107143 | 95 | 0.658904 | 4.553015 | false | false | false | false |
crossbario/autobahn-python | examples/asyncio/wamp/component/backend.py | 1 | 1145 |
from autobahn.asyncio.component import Component, run
from autobahn.wamp.types import RegisterOptions
import asyncio
import ssl
# TLS client context that trusts the demo router's self-signed server
# certificate (path is relative to this example's directory layout).
context = ssl.create_default_context(
    purpose=ssl.Purpose.SERVER_AUTH,
    cafile='../../../router/.crossbar/server.crt',
)

# WAMP component connecting over secure WebSocket to a local router on
# port 8083, joining the 'crossbardemo' realm.
component = Component(
    transports=[
        {
            "type": "websocket",
            "url": "wss://localhost:8083/ws",
            "endpoint": {
                "type": "tcp",
                "host": "localhost",
                "port": 8083,
                # hand the pre-built SSLContext to the transport
                "tls": context,
            },
            # allow a generous 100s for the WebSocket opening handshake
            "options": {
                "open_handshake_timeout": 100,
            }
        },
    ],
    realm="crossbardemo",
)
@component.on_join
def join(session, details):
    """Log every successful realm join with its session details."""
    joined_msg = "joined {}".format(details)
    print(joined_msg)
@component.register(
    "example.foo",
    options=RegisterOptions(details_arg='details'),
)
async def foo(*args, **kw):
    """Log the incoming call, count down five seconds, then return 42."""
    print("foo({}, {})".format(args, kw))
    countdown = 5
    while countdown > 0:
        print(" returning in {}".format(countdown))
        await asyncio.sleep(1)
        countdown -= 1
    print("returning '42'")
    return 42
if __name__ == "__main__":
run([component])
| mit | 5005a17082005168a26d8f4b4c50641d | 20.603774 | 53 | 0.537118 | 3.778878 | false | false | false | false |
crossbario/autobahn-python | autobahn/twisted/util.py | 2 | 13640 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import os
import hashlib
import threading
from typing import Optional, Union, Dict, Any
from twisted.internet.defer import Deferred
from twisted.internet.address import IPv4Address, UNIXAddress
from twisted.internet.interfaces import ITransport, IProcessTransport
from autobahn.wamp.types import TransportDetails
try:
from twisted.internet.stdio import PipeAddress
except ImportError:
# stdio.PipeAddress is only avail on Twisted 13.0+
PipeAddress = type(None)
try:
from twisted.internet.address import IPv6Address
_HAS_IPV6 = True
except ImportError:
_HAS_IPV6 = False
IPv6Address = type(None)
try:
from twisted.internet.interfaces import ISSLTransport
from twisted.protocols.tls import TLSMemoryBIOProtocol
from OpenSSL.SSL import Connection
_HAS_TLS = True
except ImportError:
_HAS_TLS = False
# Public API of this module.
# NOTE: this previously assigned the tuple to ``__all`` (missing the
# trailing underscores), which Python treats as an ordinary module
# attribute -- ``from autobahn.twisted.util import *`` silently ignored
# it. Renamed to the conventional ``__all__`` so the export list is
# actually honored; all listed names are defined in this module.
__all__ = (
    'sleep',
    'peer2str',
    'transport_channel_id',
    'extract_peer_certificate',
    'create_transport_details',
)
def sleep(delay, reactor=None):
    """
    Inline sleep for use in co-routines (Twisted ``inlineCallbacks``
    decorated functions): returns a Deferred that fires after *delay*
    seconds.

    .. seealso::
       * `twisted.internet.defer.inlineCallbacks <http://twistedmatrix.com/documents/current/api/twisted.internet.defer.html#inlineCallbacks>`__
       * `twisted.internet.interfaces.IReactorTime <http://twistedmatrix.com/documents/current/api/twisted.internet.interfaces.IReactorTime.html>`__

    :param delay: Time to sleep in seconds.
    :type delay: float
    :param reactor: The Twisted reactor to use, or ``None`` to fall back
        to the global reactor.
    :type reactor: None or provider of ``IReactorTime``.
    """
    if not reactor:
        # Lazily fall back to the global Twisted reactor.
        from twisted.internet import reactor
    deferred = Deferred()
    reactor.callLater(delay, deferred.callback, None)
    return deferred
def peer2str(transport: Union[ITransport, IProcessTransport]) -> str:
    """
    Return a *peer descriptor* given a Twisted transport, for example:

    * ``tcp4:127.0.0.1:52914``: a TCPv4 socket
    * ``unix:/tmp/server.sock``: a Unix domain socket
    * ``process:142092``: a Pipe originating from a spawning (parent) process
    * ``pipe``: a Pipe terminating in a spawned (child) process

    :returns: Returns a string representation of the peer of the Twisted transport.
    """
    # IMPORTANT: IProcessTransport must be tested first, since such
    # transports may also satisfy ITransport.
    if IProcessTransport.providedBy(transport):
        # note the PID of the forked process in the peer descriptor
        return "process:{}".format(transport.pid)

    if not ITransport.providedBy(transport):
        # gracefully fall back if we can't map the peer's transport
        return "unknown"

    addr = transport.getPeer()
    if isinstance(addr, IPv4Address):
        return "tcp4:{0}:{1}".format(addr.host, addr.port)
    if _HAS_IPV6 and isinstance(addr, IPv6Address):
        return "tcp6:{0}:{1}".format(addr.host, addr.port)
    if isinstance(addr, UNIXAddress):
        return "unix:{0}".format(addr.name) if addr.name else "unix"
    if isinstance(addr, PipeAddress):
        # sadly, we don't have a way to get at the PID of the other side
        # of the pipe, so report our parent process id instead
        return "process:{0}".format(os.getppid())

    # gracefully fall back if we can't map the peer's address
    return "unknown"
if not _HAS_TLS:
    # Fallback definition when pyOpenSSL/Twisted TLS support is not
    # importable: only the "no channel binding" case can be served.
    def transport_channel_id(transport: object, is_server: bool, channel_id_type: Optional[str] = None) -> Optional[bytes]:
        if channel_id_type is None:
            # No binding requested: return 32 NUL bytes as a placeholder.
            return b'\x00' * 32
        else:
            raise RuntimeError('cannot determine TLS channel ID of type "{}" when TLS is not available on this system'.format(channel_id_type))
else:
    def transport_channel_id(transport: object, is_server: bool, channel_id_type: Optional[str] = None) -> Optional[bytes]:
        """
        Return TLS channel ID of WAMP transport of the given TLS channel ID type.

        Application-layer user authentication protocols are vulnerable to generic credential forwarding attacks,
        where an authentication credential sent by a client C to a server M may then be used by M to impersonate C at
        another server S.

        To prevent such credential forwarding attacks, modern authentication protocols rely on channel bindings.
        For example, WAMP-cryptosign can use the tls-unique channel identifier provided by the TLS layer to strongly
        bind authentication credentials to the underlying channel, so that a credential received on one TLS channel
        cannot be forwarded on another.

        :param transport: The Twisted TLS transport to extract the TLS channel ID from. If the transport isn't
            TLS based, and non-empty ``channel_id_type`` is requested, ``None`` will be returned. If the transport
            is indeed TLS based, an empty ``channel_id_type`` of ``None`` is requested, 32 NUL bytes will be returned.
        :param is_server: Flag indicating that the transport is a server transport.
        :param channel_id_type: TLS channel ID type, if set currently only ``"tls-unique"`` is supported.

        :returns: The TLS channel ID (32 bytes).
        """
        if channel_id_type is None:
            # No binding requested: return 32 NUL bytes as a placeholder.
            return b'\x00' * 32

        if channel_id_type not in ['tls-unique']:
            raise RuntimeError('invalid TLS channel ID type "{}" requested'.format(channel_id_type))

        if not isinstance(transport, TLSMemoryBIOProtocol):
            raise RuntimeError(
                'cannot determine TLS channel ID of type "{}" when TLS is not available on this transport {}'.format(
                    channel_id_type, type(transport)))

        # get access to the OpenSSL connection underlying the Twisted protocol
        # https://twistedmatrix.com/documents/current/api/twisted.protocols.tls.TLSMemoryBIOProtocol.html#getHandle
        connection: Connection = transport.getHandle()
        assert connection and isinstance(connection, Connection)

        # Obtain latest TLS Finished message that we expected from peer, or None if handshake is not completed.
        # http://www.pyopenssl.org/en/stable/api/ssl.html#OpenSSL.SSL.Connection.get_peer_finished

        # NOTE(review): session resumption is hard-coded as "not resumed"
        # here; a resumed TLS session would need the opposite Finished
        # message -- confirm if resumption is ever enabled.
        is_not_resumed = True

        if channel_id_type == 'tls-unique':
            # see also: https://bugs.python.org/file22646/tls_channel_binding.patch
            if is_server != is_not_resumed:
                # for routers (=servers) XOR new sessions, the channel ID is based on the TLS Finished message we
                # expected to receive from the client: contents of the message or None if the TLS handshake has
                # not yet completed.
                tls_finished_msg = connection.get_peer_finished()
            else:
                # for clients XOR resumed sessions, the channel ID is based on the TLS Finished message we sent
                # to the router (=server): contents of the message or None if the TLS handshake has not yet completed.
                tls_finished_msg = connection.get_finished()

            if tls_finished_msg is None:
                # this can occur when:
                # 1. we made a successful connection (in a TCP sense) but something failed with
                #    the TLS handshake (e.g. invalid certificate)
                # 2. the TLS handshake has not yet completed
                return b'\x00' * 32
            else:
                # the channel ID is the SHA-256 over the raw Finished message
                m = hashlib.sha256()
                m.update(tls_finished_msg)
                return m.digest()
        else:
            raise NotImplementedError('should not arrive here (unhandled channel_id_type "{}")'.format(channel_id_type))
if not _HAS_TLS:
    # Fallback definition when pyOpenSSL/Twisted TLS support is not
    # importable.
    def extract_peer_certificate(transport: object) -> Optional[Dict[str, Any]]:
        """
        Dummy when no TLS is available.

        :param transport: Ignored.
        :return: Always return ``None``.
        """
        return None
else:
    def extract_peer_certificate(transport: TLSMemoryBIOProtocol) -> Optional[Dict[str, Any]]:
        """
        Extract TLS x509 client certificate information from a Twisted stream transport, and
        return a dict with x509 TLS client certificate information (if the client provided a
        TLS client certificate).

        Returns ``None`` when the transport is not TLS based or when the
        peer did not present a certificate.

        :param transport: The secure transport from which to extract the peer certificate (if present).
        :returns: If the peer provided a certificate, the parsed certificate information set.
        """
        # check if the Twisted transport is a TLSMemoryBIOProtocol
        if not (ISSLTransport.providedBy(transport) and hasattr(transport, 'getPeerCertificate')):
            return None

        cert = transport.getPeerCertificate()
        if cert:
            # extract x509 name components from an OpenSSL X509Name object
            def maybe_bytes(_value):
                # pyOpenSSL returns some fields as bytes, others as str;
                # normalize everything to str.
                if isinstance(_value, bytes):
                    return _value.decode('utf8')
                else:
                    return _value

            # digest/fingerprint values are upper-cased hex strings
            result = {
                'md5': '{}'.format(maybe_bytes(cert.digest('md5'))).upper(),
                'sha1': '{}'.format(maybe_bytes(cert.digest('sha1'))).upper(),
                'sha256': '{}'.format(maybe_bytes(cert.digest('sha256'))).upper(),
                'expired': bool(cert.has_expired()),
                'hash': maybe_bytes(cert.subject_name_hash()),
                'serial': int(cert.get_serial_number()),
                'signature_algorithm': maybe_bytes(cert.get_signature_algorithm()),
                'version': int(cert.get_version()),
                'not_before': maybe_bytes(cert.get_notBefore()),
                'not_after': maybe_bytes(cert.get_notAfter()),
                'extensions': []
            }

            # collect all x509v3 extensions with name/value/critical flag
            for i in range(cert.get_extension_count()):
                ext = cert.get_extension(i)
                ext_info = {
                    'name': '{}'.format(maybe_bytes(ext.get_short_name())),
                    'value': '{}'.format(maybe_bytes(ext)),
                    'critical': ext.get_critical() != 0
                }
                result['extensions'].append(ext_info)

            # flatten subject and issuer distinguished names into dicts
            # keyed by the lower-cased component name (e.g. 'cn', 'o')
            for entity, name in [('subject', cert.get_subject()), ('issuer', cert.get_issuer())]:
                result[entity] = {}
                for key, value in name.get_components():
                    key = maybe_bytes(key)
                    value = maybe_bytes(value)
                    result[entity]['{}'.format(key).lower()] = '{}'.format(value)

            return result
        # no peer certificate: fall through, implicitly returning None
def create_transport_details(transport: Union[ITransport, IProcessTransport], is_server: bool) -> TransportDetails:
    """
    Create transport details from Twisted transport.

    Collects peer descriptor, local process/thread identifiers and - for
    TLS transports - channel type, channel ID and peer certificate into
    a single :class:`TransportDetails` object.

    :param transport: The Twisted transport to extract information from.
    :param is_server: Flag indicating whether this transport side is a "server" (as in TCP server).
    :return: Transport details object filled with information from the Twisted transport.
    """
    peer = peer2str(transport)

    own_pid = os.getpid()
    if hasattr(threading, 'get_native_id'):
        # New in Python 3.8
        # https://docs.python.org/3/library/threading.html?highlight=get_native_id#threading.get_native_id
        own_tid = threading.get_native_id()
    else:
        # fall back to the Python-level thread identifier
        own_tid = threading.get_ident()
    # file descriptor of the transport is not determined here
    own_fd = -1

    if _HAS_TLS and ISSLTransport.providedBy(transport):
        channel_id = {
            # this will only be filled when the TLS opening handshake is complete (!)
            'tls-unique': transport_channel_id(transport, is_server, 'tls-unique'),
        }
        channel_type = TransportDetails.CHANNEL_TYPE_TLS
        peer_cert = extract_peer_certificate(transport)
        is_secure = True
    else:
        # plain TCP: no channel binding, no peer certificate
        channel_id = {}
        channel_type = TransportDetails.CHANNEL_TYPE_TCP
        peer_cert = None
        is_secure = False

    # FIXME: really set a default (websocket)?
    channel_framing = TransportDetails.CHANNEL_FRAMING_WEBSOCKET

    td = TransportDetails(channel_type=channel_type, channel_framing=channel_framing, peer=peer,
                          is_server=is_server, own_pid=own_pid, own_tid=own_tid, own_fd=own_fd,
                          is_secure=is_secure, channel_id=channel_id, peer_cert=peer_cert)
    return td
| mit | 329e13696984a12292c61caa247b3ece | 43.868421 | 148 | 0.637243 | 4.331534 | false | false | false | false |
crossbario/autobahn-python | autobahn/xbr/test/test_xbr_frealm.py | 2 | 7549 | import os
import sys
from unittest import skipIf
from unittest.mock import MagicMock
from twisted.trial.unittest import TestCase
from twisted.internet.defer import inlineCallbacks
from autobahn.xbr import HAS_XBR
from autobahn.wamp.cryptosign import HAS_CRYPTOSIGN
if HAS_XBR and HAS_CRYPTOSIGN:
from autobahn.xbr._frealm import Seeder, FederatedRealm
from autobahn.xbr._secmod import SecurityModuleMemory, EthereumKey
from autobahn.wamp.cryptosign import CryptosignKey
# https://web3py.readthedocs.io/en/stable/providers.html#infura-mainnet
HAS_INFURA = 'WEB3_INFURA_PROJECT_ID' in os.environ and len(os.environ['WEB3_INFURA_PROJECT_ID']) > 0
# TypeError: As of 3.10, the *loop* parameter was removed from Lock() since it is no longer necessary
IS_CPY_310 = sys.version_info.minor == 10
@skipIf(not os.environ.get('USE_TWISTED', False), 'only for Twisted')
@skipIf(not HAS_INFURA, 'env var WEB3_INFURA_PROJECT_ID not defined')
@skipIf(not (HAS_XBR and HAS_CRYPTOSIGN), 'package autobahn[encryption,xbr] not installed')
class TestFederatedRealm(TestCase):
    """
    Tests for :class:`FederatedRealm`: construction, on-chain (ENS)
    initialization via an Infura gateway, seeder handling and
    authextra creation from a seed-phrase security module.
    """

    # Infura mainnet gateway configuration, keyed off the environment.
    gw_config = {
        'type': 'infura',
        'key': os.environ.get('WEB3_INFURA_PROJECT_ID', ''),
        'network': 'mainnet',
    }

    # "builtins.TypeError: As of 3.10, the *loop* parameter was removed from Lock() since
    # it is no longer necessary"
    #
    # solved via websockets>=10.3, but web3==5.29.0 requires websockets<10
    #
    @skipIf(IS_CPY_310, 'Web3 v5.29.0 (web3.auto.infura) raises TypeError on Python 3.10')
    def test_frealm_ctor_auto(self):
        # construction with auto-detected gateway (no explicit config)
        name = 'wamp-proto.eth'
        fr = FederatedRealm(name)

        self.assertEqual(fr.status, 'STOPPED')
        self.assertEqual(fr.name_or_address, name)
        self.assertEqual(fr.gateway_config, None)
        self.assertEqual(fr.name_category, 'ens')

    def test_frealm_ctor_gw(self):
        # construction with an explicit gateway configuration
        name = 'wamp-proto.eth'
        fr = FederatedRealm(name, self.gw_config)

        self.assertEqual(fr.status, 'STOPPED')
        self.assertEqual(fr.name_or_address, name)
        self.assertEqual(fr.gateway_config, self.gw_config)
        self.assertEqual(fr.name_category, 'ens')

    @inlineCallbacks
    def test_frealm_initialize(self):
        # initialize() resolves the ENS name on-chain and transitions
        # the realm to RUNNING with the resolved address
        name = 'wamp-proto.eth'
        fr1 = FederatedRealm(name, self.gw_config)
        self.assertEqual(fr1.status, 'STOPPED')

        yield fr1.initialize()
        self.assertEqual(fr1.status, 'RUNNING')

        self.assertEqual(fr1.address, '0x66267d0b1114cFae80C37942177a846d666b114a')

    def test_frealm_seeders(self):
        # seeder list handling, using a mocked realm (no chain access)
        fr1 = MagicMock()
        fr1.name_or_address = 'wamp-proto.eth'
        fr1.address = '0x66267d0b1114cFae80C37942177a846d666b114a'
        fr1.status = 'RUNNING'
        fr1.seeders = [
            Seeder(frealm=fr1,
                   endpoint='wss://frealm1.example.com/ws',
                   label='Example Inc.',
                   operator='0xf5fb56886f033855C1a36F651E927551749361bC',
                   country='US'),
            Seeder(frealm=fr1,
                   endpoint='wss://fr1.foobar.org/ws',
                   label='Foobar Foundation',
                   operator='0xe59C7418403CF1D973485B36660728a5f4A8fF9c',
                   country='DE'),
            Seeder(frealm=fr1,
                   endpoint='wss://public-frealm1.pierre.fr:443',
                   label='Pierre PP',
                   operator='0x254dffcd3277C0b1660F6d42EFbB754edaBAbC2B',
                   country='FR'),
        ]
        self.assertEqual(len(fr1.seeders), 3)

        transports = [s.endpoint for s in fr1.seeders]
        self.assertEqual(transports, ['wss://frealm1.example.com/ws', 'wss://fr1.foobar.org/ws',
                                      'wss://public-frealm1.pierre.fr:443'])

    @inlineCallbacks
    def test_frealm_secmod(self):
        # end-to-end: derive keys from a (well-known test) seed phrase,
        # initialize the realm and create signed authextra via a seeder
        name = 'wamp-proto.eth'
        seedphrase = "myth like bonus scare over problem client lizard pioneer submit female collect"

        sm = SecurityModuleMemory.from_seedphrase(seedphrase)
        yield sm.open()

        # the security module derives one Ethereum key and one
        # WAMP-cryptosign key from the seed phrase
        self.assertEqual(len(sm), 2)
        self.assertTrue(isinstance(sm[0], EthereumKey), 'unexpected type {} at index 0'.format(type(sm[0])))
        self.assertTrue(isinstance(sm[1], CryptosignKey), 'unexpected type {} at index 1'.format(type(sm[1])))

        fr = FederatedRealm(name, self.gw_config)

        # FIXME
        fr._seeders = [
            Seeder(frealm=fr,
                   endpoint='wss://frealm1.example.com/ws',
                   label='Example Inc.',
                   operator='0xf5fb56886f033855C1a36F651E927551749361bC',
                   country='US'),
            Seeder(frealm=fr,
                   endpoint='wss://fr1.foobar.org/ws',
                   label='Foobar Foundation',
                   operator='0xe59C7418403CF1D973485B36660728a5f4A8fF9c',
                   country='DE'),
            Seeder(frealm=fr,
                   endpoint='wss://public-frealm1.pierre.fr:443',
                   label='Pierre PP',
                   operator='0x254dffcd3277C0b1660F6d42EFbB754edaBAbC2B',
                   country='FR'),
        ]

        yield fr.initialize()
        self.assertEqual(fr.status, 'RUNNING')
        self.assertEqual(fr.address, '0x66267d0b1114cFae80C37942177a846d666b114a')
        self.assertEqual(len(fr.seeders), 3)

        delegate_key = sm[0]
        client_key = sm[1]

        authextra = yield fr.seeders[0].create_authextra(client_key=client_key,
                                                         delegate_key=delegate_key,
                                                         bandwidth_requested=512,
                                                         channel_id=None,
                                                         channel_binding=None)

        self.assertEqual(authextra.get('pubkey', None), client_key.public_key(binary=False))
        # print(authextra)
        self.assertTrue('signature' in authextra)
        self.assertTrue(type(authextra['signature']) == str)
        # 65-byte (r||s||v) signature, hex-encoded => 130 characters
        self.assertEqual(len(authextra['signature']), 65 * 2)
# @skipIf(not os.environ.get('WAMP_ROUTER_URLS', None), 'WAMP_ROUTER_URLS not defined')
# @skipIf(not os.environ.get('USE_TWISTED', False), 'only for Twisted')
# @skipIf(not HAS_XBR, 'package autobahn[xbr] not installed')
# class TestFederatedRealmNetworked(TestCase):
#
# def test_seeders_multi_reconnect(self):
# from autobahn.twisted.component import Component, run
#
# # WAMP_ROUTER_URLS=ws://localhost:8080/ws,ws://localhost:8081/ws,ws://localhost:8082/ws
# # crossbar start --cbdir=./autobahn/xbr/test/.crossbar --config=config1.json
# transports = os.environ.get('WAMP_ROUTER_URLS', '').split(',')
# realm = 'realm1'
# authentication = {
# 'cryptosign': {
# 'privkey': '20e8c05d0ede9506462bb049c4843032b18e8e75b314583d0c8d8a4942f9be40',
# }
# }
#
# component = Component(transports=transports, realm=realm, authentication=authentication)
# # component.start()
#
# # @inlineCallbacks
# # def main(reactor, session):
# # print("Client session={}".format(session))
# # res = yield session.call('user.add2', 23, 666)
# # print(res)
# # session.leave()
# #
# # from autobahn.wamp.component import _run
# # from twisted.internet import reactor
# # d = _run(reactor, [component])
# # #d = run([component], log_level='info', stop_at_close=True)
# # res = yield d
| mit | 195006c4a0b79a89e71b512fbfa7a93b | 40.251366 | 110 | 0.602199 | 3.362584 | false | true | false | false |
crossbario/autobahn-python | examples/twisted/wamp/pubsub/unsubscribe/frontend.py | 3 | 2743 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from os import environ
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks
from autobahn.twisted.wamp import ApplicationSession, ApplicationRunner
class Component(ApplicationSession):
    """
    An application component that subscribes and receives events.
    After receiving 5 events, it unsubscribes, sleeps and then
    resubscribes for another run. Then it stops.
    """

    @inlineCallbacks
    def test(self):
        # start a fresh run: reset the per-run event counter and subscribe
        self.received = 0
        self.sub = yield self.subscribe(self.on_event, 'com.myapp.topic1')
        print("Subscribed with subscription ID {}".format(self.sub.id))

    @inlineCallbacks
    def on_event(self, i):
        print("Got event: {}".format(i))
        self.received += 1
        # after more than 5 events, the current run is finished
        if self.received > 5:
            self.runs += 1
            if self.runs > 1:
                # second run done: leave the realm entirely
                self.leave()
            else:
                # first run done: unsubscribe and schedule a new run
                # (which re-subscribes) after 5 seconds
                yield self.sub.unsubscribe()
                print("Unsubscribed .. continue in 5s ..")
                reactor.callLater(5, self.test)

    @inlineCallbacks
    def onJoin(self, details):
        print("session attached")
        # number of completed subscribe/unsubscribe runs
        self.runs = 0
        yield self.test()

    def onDisconnect(self):
        print("disconnected")
        # stop the Twisted reactor so the process exits
        reactor.stop()
if __name__ == '__main__':
    # Router URL may be overridden through the environment; defaults to
    # a local Crossbar demo router.
    router_url = environ.get("AUTOBAHN_DEMO_ROUTER", "ws://127.0.0.1:8080/ws")
    ApplicationRunner(router_url, "crossbardemo").run(Component)
| mit | fbcb1300f76b7cb8302ac6ddeed48300 | 35.573333 | 79 | 0.647831 | 4.460163 | false | false | false | false |
crossbario/autobahn-python | examples/xbr/test_typed.py | 3 | 3160 | import sys
import argparse
import os
from binascii import b2a_hex, a2b_hex
import web3
from autobahn import xbr
import eth_keys
from eth_account import Account
from cfxdb import pack_uint256, unpack_uint256, pack_uint128, unpack_uint128
def main(accounts):
    """
    Demonstrate EIP-712 typed-data signing and recovery using a fixed
    private key, and cross-check two signing code paths.

    :param accounts: Ethereum accounts from the Web3 gateway (unused here).
    """
    from py_eth_sig_utils import signing, utils
    from autobahn.xbr import _util

    # fixed addresses used to build the EIP-712 typed data structure
    verifying_adr = a2b_hex('0x254dffcd3277C0b1660F6d42EFbB754edaBAbC2B'[2:])
    channel_adr = a2b_hex('0x254dffcd3277C0b1660F6d42EFbB754edaBAbC2B'[2:])
    # NOTE(review): the meaning of the literal arguments (39, 2700, False)
    # is defined by _util._create_eip712_data and not visible here --
    # confirm against that helper's signature.
    data = _util._create_eip712_data(
        verifying_adr,
        channel_adr,
        39,
        2700,
        False
    )

    # use fixed or generate a new raw random private key
    if True:
        # maker_key
        pkey_raw = a2b_hex('a4985a2ed93107886e9a1f12c7b8e2e351cc1d26c42f3aab7f220f3a7d08fda6')
    else:
        pkey_raw = os.urandom(32)
    print('Using private key: {}'.format(b2a_hex(pkey_raw).decode()))

    # make a private key object from the raw private key bytes
    pkey = eth_keys.keys.PrivateKey(pkey_raw)

    # make a private account from the private key
    acct = Account.privateKeyToAccount(pkey)

    # get the public key of the account
    addr = pkey.public_key.to_canonical_address()
    print('Account address: {}'.format(b2a_hex(addr).decode()))

    # get the canonical address of the account
    caddr = web3.Web3.toChecksumAddress(addr)
    print('Account canonical address: {}'.format(caddr))

    # step-wise computation of signature
    msg_hash = signing.encode_typed_data(data)
    print('Ok, MSG_HASH = 0x{}'.format(b2a_hex(msg_hash).decode()))
    sig_vrs = utils.ecsign(msg_hash, pkey_raw)
    sig = signing.v_r_s_to_signature(*sig_vrs)

    # one-shot signing path; signature is 65 bytes: r (32) + s (32) + v (1)
    signature = signing.v_r_s_to_signature(*signing.sign_typed_data(data, pkey_raw))
    assert len(signature) == 32 + 32 + 1
    #assert signature == sig
    print('Ok, signed typed data (using key {}):\nSIGNATURE = 0x{}'.format(caddr, b2a_hex(signature).decode()))

    # recover the signer address from the typed data + signature and
    # verify it round-trips to our own account address
    signer_address = signing.recover_typed_data(data, *signing.signature_to_v_r_s(signature))
    assert signer_address == caddr
    print('Ok, verified signature was signed by {}'.format(signer_address))
if __name__ == '__main__':
    print('using web3.py v{}'.format(web3.__version__))

    parser = argparse.ArgumentParser()

    parser.add_argument('--gateway',
                        dest='gateway',
                        type=str,
                        default=None,
                        help='Ethereum HTTP gateway URL or None for auto-select (default: -, means let web3 auto-select).')

    args = parser.parse_args()

    if args.gateway:
        # explicit HTTP gateway URL given on the command line
        w3 = web3.Web3(web3.Web3.HTTPProvider(args.gateway))
    else:
        # using automatic provider detection:
        from web3.auto import w3

    # check we are connected, and check network ID
    if not w3.isConnected():
        print('could not connect to Web3/Ethereum at "{}"'.format(args.gateway or 'auto'))
        sys.exit(1)
    else:
        print('connected via provider "{}"'.format(args.gateway or 'auto'))

    # set new provider on XBR library
    xbr.setProvider(w3)

    # now enter main ..
    main(w3.eth.accounts)
| mit | 2f245e957e46661ec069a62d366bfee2 | 31.57732 | 123 | 0.653165 | 3.336853 | false | false | false | false |
crossbario/autobahn-python | examples/asyncio/wamp/rpc/errors/frontend.py | 3 | 3096 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from os import environ
import math
import asyncio
from autobahn import wamp
from autobahn.wamp.exception import ApplicationError
from autobahn.asyncio.wamp import ApplicationSession, ApplicationRunner
@wamp.error("com.myapp.error1")
class AppError1(Exception):
"""
An application specific exception that is decorated with a WAMP URI,
and hence can be automapped by Autobahn.
"""
class Component(ApplicationSession):
    """
    Frontend component demonstrating three ways of catching errors from
    remote calls: plain exceptions, WAMP ApplicationErrors, and custom
    exception classes automapped to WAMP error URIs.
    """

    async def onJoin(self, details):
        # catching standard exceptions
        ##
        for value in [2, 0, -2]:
            try:
                result = await self.call('com.myapp.sqrt', value)
            except Exception as err:
                print("Error: {} {}".format(err, err.args))
            else:
                print("Result: {}".format(result))

        # catching WAMP application exceptions
        ##
        test_names = ['foo', 'a', '*' * 11, 'Hello']
        for candidate in test_names:
            try:
                result = await self.call('com.myapp.checkname', candidate)
            except ApplicationError as err:
                print("Error: {} {} {} {}".format(err, err.error, err.args, err.kwargs))
            else:
                print("Result: {}".format(result))

        # defining and automapping WAMP application exceptions
        ##
        self.define(AppError1)
        try:
            await self.call('com.myapp.compare', 3, 17)
        except AppError1 as err:
            print("Compare Error: {}".format(err))

        await self.leave()

    def onDisconnect(self):
        # Tear down the asyncio loop so the process exits.
        asyncio.get_event_loop().stop()
if __name__ == '__main__':
    # Router URL may be overridden via the AUTOBAHN_DEMO_ROUTER env var.
    runner = ApplicationRunner(
        environ.get("AUTOBAHN_DEMO_ROUTER", "ws://127.0.0.1:8080/ws"),
        "crossbardemo",
    )
    runner.run(Component)
| mit | 2129728c81621d03660437a2c73cf37d | 33.4 | 80 | 0.621447 | 4.342216 | false | false | false | false |
crossbario/autobahn-python | examples/twisted/wamp/work/newapi/test_newapi2.py | 3 | 1046 | from twisted.internet.task import react
from twisted.internet.defer import inlineCallbacks as coroutine
from autobahn.twisted.wamp import Session
from autobahn.twisted.connection import Connection
class MySession(Session):
    """
    Demo session: registers a procedure, calls it once, then leaves.
    """

    @coroutine
    def on_join(self, details):
        print("on_join: {}".format(details))

        def add2(a, b):
            return a + b

        # register the local helper under a WAMP URI ..
        yield self.register(add2, 'com.example.add2')

        # .. and invoke it via a round-trip through the router
        try:
            total = yield self.call('com.example.add2', 2, 3)
            print("result: {}".format(total))
        except Exception as err:
            print("error: {}".format(err))
        finally:
            print('leaving ..')
            #self.leave()

    def on_leave(self, details):
        print('on_leave xx: {}'.format(details))
        self.disconnect()

    def on_disconnect(self):
        print('on_disconnect')
if __name__ == '__main__':
    # connect to a local router and drive the connection via twisted's react()
    connection = Connection(transports='ws://localhost:8080/ws')
    connection.session = MySession
    react(connection.start)
| mit | a47d99d0af8844977377811d45842860 | 23.904762 | 63 | 0.608031 | 3.932331 | false | false | false | false |
crossbario/autobahn-python | examples/asyncio/wamp/rpc/timeservice/frontend.py | 3 | 2079 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from os import environ
import datetime
import asyncio
from autobahn.asyncio.wamp import ApplicationSession, ApplicationRunner
class Component(ApplicationSession):
    """
    An application component that queries the remote time service once
    and then leaves the realm.
    """

    async def onJoin(self, details):
        try:
            current_time = await self.call('com.timeservice.now')
        except Exception as err:
            print("Error: {}".format(err))
        else:
            print("Current time from time service: {}".format(current_time))

        self.leave()

    def onDisconnect(self):
        # stop the event loop so the program exits after leaving
        asyncio.get_event_loop().stop()
if __name__ == '__main__':
    # Router URL may be overridden via the AUTOBAHN_DEMO_ROUTER env var.
    runner = ApplicationRunner(
        environ.get("AUTOBAHN_DEMO_ROUTER", "ws://127.0.0.1:8080/ws"),
        "crossbardemo",
    )
    runner.run(Component)
| mit | 3b87d8d3578add04af6d6346973b126a | 35.473684 | 79 | 0.660414 | 4.529412 | false | false | false | false |
crossbario/autobahn-python | examples/twisted/wamp/pubsub/retained/backend.py | 3 | 2351 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from os import environ
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks
from autobahn.twisted.wamp import Session, ApplicationRunner
from autobahn.twisted.util import sleep
from autobahn.wamp.types import PublishOptions
class Component(Session):
    """
    An application component publishing acknowledged, retained events on
    two example topics.
    """

    @inlineCallbacks
    def onJoin(self, details):
        print("session attached {}".format(details))

        for topic in ["com.example.history", "com.example.no_history_here"]:
            print("publishing '{}' as retained event".format(topic))
            # retain=True asks the router to keep the event for late subscribers;
            # acknowledge=True makes publish() return a Deferred with the publication
            acknowledgement = yield self.publish(
                topic, "some data, topic was '{}'".format(topic),
                options=PublishOptions(retain=True, acknowledge=True),
            )
            print("published: {}".format(acknowledgement))
if __name__ == '__main__':
    url = environ.get("AUTOBAHN_DEMO_ROUTER", "ws://127.0.0.1:8080/auth_ws")
    runner = ApplicationRunner(url, "crossbardemo")
    runner.run(Component)
| mit | d9a6c2b975360841e85591fa8e5fc73f | 38.847458 | 79 | 0.667376 | 4.556202 | false | false | false | false |
crossbario/autobahn-python | examples/twisted/wamp/app/crochet/example2/server.py | 3 | 3148 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from flask import Flask, request

from crochet import setup, run_in_reactor, wait_for

# this MUST be called _before_ any Autobahn or Twisted imports!
setup()

from twisted.internet.defer import inlineCallbacks, returnValue  # noqa

from autobahn.twisted.util import sleep  # noqa
from autobahn.twisted.wamp import Application  # noqa
# our WAMP app
#
wapp = Application()


@wapp.register('com.example.square')
def square(x):
    """Return the square of *x* (registered as 'com.example.square')."""
    print("square() called with {}".format(x))
    result = x * x
    return result
@wapp.register('com.example.slowsquare')
@inlineCallbacks
def slowsquare(x):
    """
    Return the square of *x* after a 2 second (non-blocking) delay.

    Bug fix: this is a generator-based Twisted coroutine (it uses
    ``yield sleep(2)`` and ``returnValue``), so it must be decorated with
    ``@inlineCallbacks``. Without it, calling the registered procedure would
    merely produce a generator object instead of a Deferred - the delay would
    never run and no result would ever be delivered over WAMP.
    """
    print("slowsquare() called with {}".format(x))
    yield sleep(2)
    returnValue(x * x)
# the following are synchronous wrappers around the asynchronous WAMP code
#
@wait_for(timeout=1)
def call_square(x):
    """Synchronously call 'com.example.square' from a non-reactor thread."""
    d = wapp.session.call('com.example.square', x)
    return d
@wait_for(timeout=5)
def call_slowsquare(x):
    """Synchronously call 'com.example.slowsquare' (blocks up to 5s)."""
    d = wapp.session.call('com.example.slowsquare', x)
    return d
# our Flask app
#
app = Flask(__name__)


@app.route('/square/submit', methods=['POST'])
def square_submit():
    """Handle the form POST: square the submitted number via WAMP."""
    x = int(request.form.get('x', 0))
    return "{} squared is {}".format(x, call_square(x))
@app.route('/slowsquare/submit', methods=['POST'])
def slowsquare_submit():
    """Handle the form POST: slow-square the submitted number via WAMP."""
    value = int(request.form.get('x', 0))
    result = call_slowsquare(value)
    return "{} squared is {}".format(value, result)
if __name__ == '__main__':
    import logging
    import sys

    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)

    # Start the WAMP app on a background (reactor) thread; the main thread
    # then runs the blocking Flask WSGI dev server.
    @run_in_reactor
    def start_wamp():
        wapp.run("ws://127.0.0.1:9000", "realm1", standalone=True, start_reactor=False)

    start_wamp()

    # now start the Flask dev server (which is a regular blocking WSGI server)
    app.run(port=8080)
| mit | 69d655f30abde1ba6fdf72d36abb9ad9 | 29.563107 | 87 | 0.676302 | 3.853121 | false | false | false | false |
crossbario/autobahn-python | autobahn/nvx/_utf8validator.py | 2 | 2659 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import os
from cffi import FFI
ffi = FFI()

# C declarations of the native UTF8 validator API (implemented in _utf8validator.c)
ffi.cdef("""
void* nvx_utf8vld_new ();
void nvx_utf8vld_reset (void* utf8vld);
int nvx_utf8vld_validate (void* utf8vld, const uint8_t* data, size_t length);
void nvx_utf8vld_free (void* utf8vld);
int nvx_utf8vld_set_impl(void* utf8vld, int impl);
int nvx_utf8vld_get_impl(void* utf8vld);
""")

# the native extension is optional by default (a pure-Python fallback exists);
# setting AUTOBAHN_USE_NVX=1/true makes a failed native build a hard error
optional = True
if 'AUTOBAHN_USE_NVX' in os.environ and os.environ['AUTOBAHN_USE_NVX'] in ['1', 'true']:
    optional = False

# read the C implementation source shipped alongside this module
with open(os.path.join(os.path.dirname(__file__), '_utf8validator.c')) as fd:
    c_source = fd.read()

ffi.set_source(
    "_nvx_utf8validator",
    c_source,
    libraries=[],
    # NOTE(review): -march=native makes built wheels non-portable across CPUs - confirm intended
    extra_compile_args=['-std=c99', '-Wall', '-Wno-strict-prototypes', '-O3', '-march=native'],
    optional=optional
)
class Utf8Validator:
    """
    Incremental UTF-8 validator backed by the native (CFFI) implementation.
    Mirrors the interface of the pure-Python Utf8Validator.
    """

    def __init__(self):
        self.ffi = ffi
        from _nvx_utf8validator import lib
        self.lib = lib
        # tie the native validator's lifetime to this object: free the
        # C-side state automatically when the wrapper is garbage collected
        self._vld = self.ffi.gc(self.lib.nvx_utf8vld_new(), self.lib.nvx_utf8vld_free)
        # print(self.lib.nvx_utf8vld_get_impl(self._vld))

    def reset(self):
        # reset the validator state for validating a new string
        self.lib.nvx_utf8vld_reset(self._vld)

    def validate(self, ba):
        """
        Incrementally validate the given bytes.

        :return: 4-tuple ``(is_valid_so_far, ends_on_codepoint, None, None)``;
            the last two slots (error index/position in the pure-Python
            implementation) are not provided by the native validator.
        """
        # native return value: < 0 means invalid, 0 means valid and ending
        # exactly on a codepoint boundary, > 0 means valid but mid-codepoint
        res = self.lib.nvx_utf8vld_validate(self._vld, ba, len(ba))
        return (res >= 0, res == 0, None, None)
if __name__ == "__main__":
    # build the CFFI extension module when this file is run directly
    ffi.compile()
| mit | 66d0bf85da517b15c8620b6698bbaaaf | 31.426829 | 99 | 0.639714 | 3.507916 | false | false | false | false |
crossbario/autobahn-python | autobahn/xbr/_buyer.py | 2 | 27926 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import uuid
import binascii
from pprint import pformat
import os
import cbor2
import nacl.secret
import nacl.utils
import nacl.exceptions
import nacl.public
import txaio
from autobahn.wamp.exception import ApplicationError
from autobahn.wamp.protocol import ApplicationSession
from ._util import unpack_uint256, pack_uint256
import eth_keys
from ..util import hl, hlval
from ._eip712_channel_close import sign_eip712_channel_close, recover_eip712_channel_close
class Transaction(object):
    """
    Local record of one off-chain XBR key-buy transaction within a
    payment channel (one phase: either buyer- or market-maker-signed).
    """

    # attribute names, in constructor order - drives marshal()
    _FIELDS = ('channel', 'delegate', 'pubkey', 'key_id',
               'channel_seq', 'amount', 'balance', 'signature')

    def __init__(self, channel, delegate, pubkey, key_id, channel_seq, amount, balance, signature):
        self.channel = channel
        self.delegate = delegate
        self.pubkey = pubkey
        self.key_id = key_id
        self.channel_seq = channel_seq
        self.amount = amount
        self.balance = balance
        self.signature = signature

    def marshal(self):
        """Return a plain-dict representation of this transaction."""
        return {name: getattr(self, name) for name in self._FIELDS}

    def __str__(self):
        return pformat(self.marshal())
class SimpleBuyer(object):
    """
    Simple XBR buyer component. This component can be used by a XBR buyer delegate to
    handle the automated buying of data encryption keys from the XBR market maker.
    """

    # txaio logger instance, created in __init__
    log = None
    def __init__(self, market_maker_adr: bytes, buyer_key: bytes, max_price: int):
        """
        :param market_maker_adr: Market maker Ethereum address (20 bytes).
        :type market_maker_adr: bytes

        :param buyer_key: Consumer delegate (buyer) private Ethereum key.
        :type buyer_key: bytes

        :param max_price: Maximum price we are willing to buy per key.
        :type max_price: int
        """
        assert type(market_maker_adr) == bytes and len(market_maker_adr) == 20, 'market_maker_adr must be bytes[20], but got "{}"'.format(market_maker_adr)
        assert type(buyer_key) == bytes and len(buyer_key) == 32, 'buyer delegate must be bytes[32], but got "{}"'.format(buyer_key)
        assert type(max_price) == int and max_price > 0

        self.log = txaio.make_logger()

        # market maker address
        self._market_maker_adr = market_maker_adr
        # market maker config, fetched in start()
        self._xbrmm_config = None

        # buyer delegate raw ethereum private key (32 bytes)
        self._pkey_raw = buyer_key

        # buyer delegate ethereum private key object
        self._pkey = eth_keys.keys.PrivateKey(buyer_key)

        # buyer delegate ethereum private account from raw private key
        # FIXME
        # self._acct = Account.privateKeyToAccount(self._pkey)
        self._acct = None

        # buyer delegate ethereum account canonical address
        self._addr = self._pkey.public_key.to_canonical_address()

        # buyer delegate ethereum account canonical checksummed address
        # FIXME
        # self._caddr = web3.Web3.toChecksumAddress(self._addr)
        self._caddr = None

        # ephemeral data consumer key (used to receive sealed data encryption keys)
        self._receive_key = nacl.public.PrivateKey.generate()

        # maximum price per key we are willing to pay
        self._max_price = max_price

        # will be filled with on-chain payment channel contract, once started
        self._channel = None

        # channel current (off-chain) balance
        self._balance = 0

        # channel sequence number
        self._seq = 0

        # this holds the keys we bought (map: key_id => nacl.secret.SecretBox)
        self._keys = {}
        self._session = None
        self._running = False

        # automatically initiate a close of the payment channel when running into
        # a transaction failing because of insufficient balance remaining in the channel
        self._auto_close_channel = True

        # FIXME: poor mans local transaction store
        self._transaction_idx = {}
        self._transactions = []
    async def start(self, session, consumer_id):
        """
        Start buying keys to decrypt XBR data by calling ``unwrap()``.

        Fetches the market maker configuration, resolves the delegate's
        currently active payment channel and its off-chain balance.

        :param session: WAMP session over which to communicate with the XBR market maker.
        :type session: :class:`autobahn.wamp.protocol.ApplicationSession`

        :param consumer_id: XBR consumer ID.
        :type consumer_id: str

        :return: Current remaining balance in payment channel.
        :rtype: int
        """
        assert isinstance(session, ApplicationSession)
        assert type(consumer_id) == str
        assert not self._running

        self._session = session
        self._running = True

        self.log.debug('Start buying from consumer delegate address {address} (public key 0x{public_key}..)',
                       address=hl(self._caddr),
                       public_key=binascii.b2a_hex(self._pkey.public_key[:10]).decode())

        try:
            self._xbrmm_config = await session.call('xbr.marketmaker.get_config')

            # get the currently active (if any) payment channel for the delegate
            assert type(self._addr) == bytes and len(self._addr) == 20
            self._channel = await session.call('xbr.marketmaker.get_active_payment_channel', self._addr)
            if not self._channel:
                raise Exception('no active payment channel found')
            channel_oid = self._channel['channel_oid']
            assert type(channel_oid) == bytes and len(channel_oid) == 16
            self._channel_oid = uuid.UUID(bytes=channel_oid)

            # get the current (off-chain) balance of the payment channel
            payment_balance = await session.call('xbr.marketmaker.get_payment_channel_balance', self._channel_oid.bytes)
        # NOTE(review): bare except also catches BaseException (e.g. CancelledError);
        # it leaves the session and re-raises, so nothing is swallowed
        except:
            session.leave()
            raise

        # FIXME
        # the market maker may return the remaining balance packed as uint256 bytes
        if type(payment_balance['remaining']) == bytes:
            payment_balance['remaining'] = unpack_uint256(payment_balance['remaining'])
        if not payment_balance['remaining'] > 0:
            raise Exception('no off-chain balance remaining on payment channel')

        self._balance = payment_balance['remaining']
        self._seq = payment_balance['seq']

        self.log.info('Ok, buyer delegate started [active payment channel {channel_oid} with remaining balance {remaining} at sequence {seq}]',
                      channel_oid=hl(self._channel_oid), remaining=hlval(self._balance), seq=hlval(self._seq))

        return self._balance
    async def stop(self):
        """
        Stop buying keys.

        Must only be called after a successful :meth:`start`.
        """
        assert self._running

        self._running = False

        self.log.info('Ok, buyer delegate stopped.')
    async def balance(self):
        """
        Return current balance of payment channel:

        * ``amount``: The initial amount with which the payment channel was opened.
        * ``remaining``: The remaining amount of XBR in the payment channel that can be spent.
        * ``inflight``: The amount of XBR allocated to buy transactions that are currently processed.

        :return: Current payment balance.
        :rtype: dict
        """
        assert self._session and self._session.is_attached()

        # the market maker is authoritative for off-chain channel balances
        payment_balance = await self._session.call('xbr.marketmaker.get_payment_channel_balance', self._channel['channel_oid'])

        return payment_balance
async def open_channel(self, buyer_addr, amount, details=None):
"""
:param amount:
:type amount:
:param details:
:type details:
:return:
:rtype:
"""
assert self._session and self._session.is_attached()
# FIXME
signature = os.urandom(64)
payment_channel = await self._session.call('xbr.marketmaker.open_payment_channel',
buyer_addr,
self._addr,
amount,
signature)
balance = {
'amount': payment_channel['amount'],
'remaining': payment_channel['remaining'],
'inflight': payment_channel['inflight'],
}
return balance
    async def close_channel(self, details=None):
        """
        Requests to close the currently active payment channel.

        :return:
        """
        # FIXME: not implemented yet - currently a no-op stub (channel close
        # is only triggered automatically from unwrap() when balance runs out)
    async def unwrap(self, key_id, serializer, ciphertext):
        """
        Decrypt XBR data. This functions will potentially make the buyer call the
        XBR market maker to buy data encryption keys from the XBR provider.

        :param key_id: ID of the data encryption used for decryption
            of application payload.
        :type key_id: bytes

        :param serializer: Application payload serializer.
        :type serializer: str

        :param ciphertext: Ciphertext of encrypted application payload to
            decrypt.
        :type ciphertext: bytes

        :return: Decrypted application payload.
        :rtype: object
        """
        assert type(key_id) == bytes and len(key_id) == 16
        # FIXME: support more app payload serializers
        assert type(serializer) == str and serializer in ['cbor']
        assert type(ciphertext) == bytes

        market_oid = self._channel['market_oid']
        channel_oid = self._channel['channel_oid']

        # FIXME
        current_block_number = 1

        verifying_chain_id = self._xbrmm_config['verifying_chain_id']
        verifying_contract_adr = binascii.a2b_hex(self._xbrmm_config['verifying_contract_adr'][2:])

        # if we don't have the key, buy it!
        if key_id in self._keys:
            self.log.debug('Key {key_id} already in key store (or currently being bought).',
                           key_id=hl(uuid.UUID(bytes=key_id)))
        else:
            self.log.debug('Key {key_id} not yet in key store - buying key ..', key_id=hl(uuid.UUID(bytes=key_id)))

            # mark the key as currently being bought already (the location of code here is multi-entrant)
            self._keys[key_id] = False

            # get (current) price for key we want to buy
            quote = await self._session.call('xbr.marketmaker.get_quote', key_id)

            # set price we pay set to the (current) quoted price
            amount = unpack_uint256(quote['price'])
            self.log.debug('Key {key_id} has current price quote {amount}',
                           key_id=hl(uuid.UUID(bytes=key_id)), amount=hl(int(amount / 10**18)))

            if amount > self._max_price:
                raise ApplicationError('xbr.error.max_price_exceeded',
                                       '{}.unwrap() - key {} needed cannot be bought: price {} exceeds maximum price of {}'.format(self.__class__.__name__, uuid.UUID(bytes=key_id), int(amount / 10 ** 18), int(self._max_price / 10 ** 18)))

            # check (locally) we have enough balance left in the payment channel to buy the key
            balance = self._balance - amount
            if balance < 0:
                if self._auto_close_channel:
                    # FIXME: sign last transaction (from persisted local history)
                    last_tx = None
                    txns = self.past_transactions()
                    if txns:
                        last_tx = txns[0]

                    if last_tx:
                        # tx1 is the delegate portion, and tx2 is the market maker portion:
                        # tx1, tx2 = last_tx
                        # close_adr = tx1.channel
                        # close_seq = tx1.channel_seq
                        # close_balance = tx1.balance
                        # close_is_final = True
                        close_seq = self._seq
                        close_balance = self._balance
                        close_is_final = True

                        # sign the channel close request (EIP712 typed data)
                        signature = sign_eip712_channel_close(self._pkey_raw,
                                                              verifying_chain_id,
                                                              verifying_contract_adr,
                                                              current_block_number,
                                                              market_oid,
                                                              channel_oid,
                                                              close_seq,
                                                              close_balance,
                                                              close_is_final)

                        self.log.debug('auto-closing payment channel {channel_oid} [close_seq={close_seq}, close_balance={close_balance}, close_is_final={close_is_final}]',
                                       channel_oid=uuid.UUID(bytes=channel_oid),
                                       close_seq=close_seq,
                                       close_balance=int(close_balance / 10**18),
                                       close_is_final=close_is_final)

                        # call market maker to initiate closing of payment channel
                        await self._session.call('xbr.marketmaker.close_channel',
                                                 channel_oid,
                                                 verifying_chain_id,
                                                 current_block_number,
                                                 verifying_contract_adr,
                                                 pack_uint256(close_balance),
                                                 close_seq,
                                                 close_is_final,
                                                 signature)

                        # FIXME: wait for and acquire new payment channel instead of bailing out ..
                        raise ApplicationError('xbr.error.channel_closed',
                                               '{}.unwrap() - key {} cannot be bought: payment channel {} ran empty and we initiated close at remaining balance of {}'.format(self.__class__.__name__,
                                                                                                                                                                              uuid.UUID(bytes=key_id),
                                                                                                                                                                              channel_oid,
                                                                                                                                                                              int(close_balance / 10 ** 18)))

                raise ApplicationError('xbr.error.insufficient_balance',
                                       '{}.unwrap() - key {} cannot be bought: insufficient balance {} in payment channel for amount {}'.format(self.__class__.__name__,
                                                                                                                                               uuid.UUID(bytes=key_id),
                                                                                                                                               int(self._balance / 10 ** 18),
                                                                                                                                               int(amount / 10 ** 18)))

            # public part of our ephemeral receive key - the market maker seals
            # the data encryption key to this key
            buyer_pubkey = self._receive_key.public_key.encode(encoder=nacl.encoding.RawEncoder)
            channel_seq = self._seq + 1
            is_final = False

            # XBRSIG[1/8]: compute EIP712 typed data signature
            signature = sign_eip712_channel_close(self._pkey_raw, verifying_chain_id, verifying_contract_adr,
                                                  current_block_number, market_oid, channel_oid, channel_seq,
                                                  balance, is_final)

            # persist 1st phase of the transaction locally
            self._save_transaction_phase1(channel_oid, self._addr, buyer_pubkey, key_id, channel_seq, amount, balance, signature)

            # call the market maker to buy the key
            try:
                receipt = await self._session.call('xbr.marketmaker.buy_key',
                                                   self._addr,
                                                   buyer_pubkey,
                                                   key_id,
                                                   channel_oid,
                                                   channel_seq,
                                                   pack_uint256(amount),
                                                   pack_uint256(balance),
                                                   signature)
            except ApplicationError as e:
                if e.error == 'xbr.error.channel_closed':
                    # NOTE(review): stop() is a coroutine but is not awaited here,
                    # so it never actually runs - confirm and add an await
                    self.stop()
                raise e
            except Exception as e:
                self.log.error('Encountered error while calling market maker to buy key!')
                self.log.failure()
                self._keys[key_id] = e
                raise e

            # XBRSIG[8/8]: check market maker signature
            marketmaker_signature = receipt['signature']
            marketmaker_channel_seq = receipt['channel_seq']
            marketmaker_amount_paid = unpack_uint256(receipt['amount_paid'])
            marketmaker_remaining = unpack_uint256(receipt['remaining'])
            marketmaker_inflight = unpack_uint256(receipt['inflight'])

            signer_address = recover_eip712_channel_close(verifying_chain_id, verifying_contract_adr,
                                                          current_block_number, market_oid, channel_oid,
                                                          marketmaker_channel_seq, marketmaker_remaining,
                                                          False, marketmaker_signature)
            if signer_address != self._market_maker_adr:
                self.log.warn('{klass}.unwrap()::XBRSIG[8/8] - EIP712 signature invalid: signer_address={signer_address}, delegate_adr={delegate_adr}',
                              klass=self.__class__.__name__,
                              signer_address=hl(binascii.b2a_hex(signer_address).decode()),
                              delegate_adr=hl(binascii.b2a_hex(self._market_maker_adr).decode()))
                raise ApplicationError('xbr.error.invalid_signature',
                                       '{}.unwrap()::XBRSIG[8/8] - EIP712 signature invalid or not signed by market maker'.format(self.__class__.__name__))

            # cross-check the market maker's view of the channel against ours
            if self._seq + 1 != marketmaker_channel_seq:
                raise ApplicationError('xbr.error.invalid_transaction',
                                       '{}.buy_key(): invalid transaction (channel sequence number mismatch - expected {}, but got {})'.format(self.__class__.__name__, self._seq, receipt['channel_seq']))

            if self._balance - amount != marketmaker_remaining:
                raise ApplicationError('xbr.error.invalid_transaction',
                                       '{}.buy_key(): invalid transaction (channel remaining amount mismatch - expected {}, but got {})'.format(self.__class__.__name__, self._balance - amount, receipt['remaining']))

            self._seq = marketmaker_channel_seq
            self._balance = marketmaker_remaining

            # persist 2nd phase of the transaction locally
            self._save_transaction_phase2(channel_oid, self._market_maker_adr, buyer_pubkey, key_id, marketmaker_channel_seq,
                                          marketmaker_amount_paid, marketmaker_remaining, marketmaker_signature)

            # unseal the data encryption key
            sealed_key = receipt['sealed_key']
            unseal_box = nacl.public.SealedBox(self._receive_key)
            try:
                key = unseal_box.decrypt(sealed_key)
            except nacl.exceptions.CryptoError as e:
                self._keys[key_id] = e
                raise ApplicationError('xbr.error.decryption_failed', '{}.unwrap() - could not unseal data encryption key: {}'.format(self.__class__.__name__, e))

            # remember the key, so we can use it to actually decrypt application payload data
            self._keys[key_id] = nacl.secret.SecretBox(key)

            transactions_count = self.count_transactions()

            self.log.info(
                '{klass}.unwrap() - {tx_type} key {key_id} bought for {amount_paid} [payment_channel={payment_channel}, remaining={remaining}, inflight={inflight}, buyer_pubkey={buyer_pubkey}, transactions={transactions}]',
                klass=self.__class__.__name__,
                tx_type=hl('XBR BUY ', color='magenta'),
                key_id=hl(uuid.UUID(bytes=key_id)),
                amount_paid=hl(str(int(marketmaker_amount_paid / 10 ** 18)) + ' XBR', color='magenta'),
                payment_channel=hl(binascii.b2a_hex(receipt['payment_channel']).decode()),
                remaining=hl(int(marketmaker_remaining / 10 ** 18)),
                inflight=hl(int(marketmaker_inflight / 10 ** 18)),
                buyer_pubkey=hl(binascii.b2a_hex(buyer_pubkey).decode()),
                transactions=transactions_count)

        # if the key is already being bought, wait until the one buying path of execution has succeeded and done
        log_counter = 0
        while self._keys[key_id] is False:
            # NOTE(review): condition looks inverted - '% 100' is truthy on 99 of
            # 100 iterations, so this logs almost every loop; likely '== 0' intended
            if log_counter % 100:
                self.log.debug('{klass}.unwrap() - waiting for key "{key_id}" currently being bought ..',
                               klass=self.__class__.__name__, key_id=hl(uuid.UUID(bytes=key_id)))
            log_counter += 1
            await txaio.sleep(.2)

        # check if the key buying failed and fail the unwrapping in turn
        if isinstance(self._keys[key_id], Exception):
            e = self._keys[key_id]
            raise e

        # now that we have the data encryption key, decrypt the application payload
        # the decryption key here is an instance of nacl.secret.SecretBox
        try:
            message = self._keys[key_id].decrypt(ciphertext)
        except nacl.exceptions.CryptoError as e:
            # Decryption failed. Ciphertext failed verification
            raise ApplicationError('xbr.error.decryption_failed', '{}.unwrap() - failed to unwrap encrypted data: {}'.format(self.__class__.__name__, e))

        # deserialize the application payload
        # FIXME: support more app payload serializers
        try:
            payload = cbor2.loads(message)
        except cbor2.decoder.CBORDecodeError as e:
            # premature end of stream (expected to read 4187 bytes, got 27 instead)
            raise ApplicationError('xbr.error.deserialization_failed', '{}.unwrap() - failed to deserialize application payload: {}'.format(self.__class__.__name__, e))

        return payload
def _save_transaction_phase1(self, channel_oid, delegate_adr, buyer_pubkey, key_id, channel_seq, amount, balance, signature):
"""
:param channel_oid:
:param delegate_adr:
:param buyer_pubkey:
:param key_id:
:param channel_seq:
:param amount:
:param balance:
:param signature:
:return:
"""
if key_id in self._transaction_idx:
raise RuntimeError('save_transaction_phase1: duplicate transaction for key 0x{}'.format(binascii.b2a_hex(key_id)))
tx1 = Transaction(channel_oid, delegate_adr, buyer_pubkey, key_id, channel_seq, amount, balance, signature)
key_idx = len(self._transactions)
self._transactions.append([tx1, None])
self._transaction_idx[key_id] = key_idx
def _save_transaction_phase2(self, channel_oid, delegate_adr, buyer_pubkey, key_id, channel_seq, amount, balance, signature):
"""
:param channel_oid:
:param delegate_adr:
:param buyer_pubkey:
:param key_id:
:param channel_seq:
:param amount:
:param balance:
:param signature:
:return:
"""
if key_id not in self._transaction_idx:
raise RuntimeError('save_transaction_phase2: transaction for key 0x{} not found'.format(binascii.b2a_hex(key_id)))
key_idx = self._transaction_idx[key_id]
if self._transactions[key_idx][1]:
raise RuntimeError(
'save_transaction_phase2: duplicate transaction for key 0x{}'.format(binascii.b2a_hex(key_id)))
tx1 = self._transactions[key_idx][0]
tx2 = Transaction(channel_oid, delegate_adr, buyer_pubkey, key_id, channel_seq, amount, balance, signature)
assert tx1.channel == tx2.channel
# assert tx1.delegate == tx2.delegate
assert tx1.pubkey == tx2.pubkey
assert tx1.key_id == tx2.key_id
assert tx1.channel_seq == tx2.channel_seq
assert tx1.amount == tx2.amount
assert tx1.balance == tx2.balance
# note: signatures will differ (obviously)!
assert tx1.signature != tx2.signature
self._transactions[key_idx][1] = tx2
def past_transactions(self, filter_complete=True, limit=1):
    """
    Return up to ``limit`` of the most recent transactions, newest first.

    :param filter_complete: If ``True``, only include transactions that
        have both phase 1 and phase 2 recorded.
    :type filter_complete: bool

    :param limit: Maximum number of (most recent) transactions to scan.
    :type limit: int

    :return: List of ``[tx1, tx2]`` transaction pairs, newest first.
    :rtype: list
    """
    assert type(filter_complete) == bool
    assert type(limit) == int and limit > 0

    n = 0
    res = []
    while n < limit:
        if len(self._transactions) > n:
            # FIX: was ``self._transactions[-n]`` -- for n == 0 that is
            # index 0, i.e. the *oldest* entry instead of the newest one
            tx = self._transactions[-1 - n]
            if not filter_complete or (tx[0] and tx[1]):
                res.append(tx)
            n += 1
        else:
            break
    return res
def count_transactions(self):
    """
    Count recorded transactions, split into complete and pending ones.

    A transaction pair counts as complete when both phase 1 and phase 2
    records are present.

    :return: Dict with integer counts under keys ``"complete"`` and
        ``"pending"``.
    :rtype: dict
    """
    complete = sum(1 for tx1, tx2 in self._transactions if tx1 and tx2)
    return {
        'complete': complete,
        'pending': len(self._transactions) - complete,
    }
def get_transaction(self, key_id):
    """
    Return the ``[tx1, tx2]`` transaction pair stored for ``key_id``, or
    ``None`` if no transaction is known for that key.

    :param key_id: Data encryption key ID to look up.
    :return: The ``[tx1, tx2]`` pair (``tx2`` may be ``None`` if phase 2
        is still pending), or ``None`` if unknown.
    """
    idx = self._transaction_idx.get(key_id, None)
    # FIX: was ``if idx:`` -- index 0 (the very first transaction) is
    # falsy and was wrongly treated as "not found"
    if idx is not None:
        return self._transactions[idx]
def is_complete(self, key_id):
    """
    Check whether both phases of the transaction for ``key_id`` have been
    recorded.

    :param key_id: Data encryption key ID to check.
    :return: ``True`` if both phase 1 and phase 2 records exist, else
        ``False`` (including when ``key_id`` is unknown).
    :rtype: bool
    """
    idx = self._transaction_idx.get(key_id, None)
    # FIX: was ``if idx:`` -- index 0 is falsy and was wrongly treated
    # as "not found", always returning False for the first transaction
    if idx is not None:
        tx1, tx2 = self._transactions[idx]
        return bool(tx1 and tx2)
    return False
| mit | 412f01fbb013f71a78d68013ffa968e0 | 42.908805 | 238 | 0.535236 | 4.62045 | false | false | false | false |
crossbario/autobahn-python | examples/asyncio/wamp/pubsub/complex/frontend.py | 3 | 2456 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import random
from os import environ
import asyncio
from autobahn.wamp.types import SubscribeOptions
from autobahn.asyncio.wamp import ApplicationSession, ApplicationRunner
class Component(ApplicationSession):
    """
    An application component that subscribes and receives events of no
    payload and of complex payload, and stops after 5 seconds.
    """

    async def onJoin(self, details):
        self.received = 0

        # handler for the heartbeat topic; receives publication details
        # because we subscribe with ``details_arg``
        def got_heartbeat(details=None):
            print("Got heartbeat (publication ID {})".format(details.publication))

        # handler for a topic carrying positional + keyword payload
        def got_topic2(a, b, c=None, d=None):
            print("Got event: {} {} {} {}".format(a, b, c, d))

        await self.subscribe(got_heartbeat, 'com.myapp.heartbeat',
                             options=SubscribeOptions(details_arg='details'))
        await self.subscribe(got_topic2, 'com.myapp.topic2')

        # leave the realm (and hence stop) after 5 seconds
        asyncio.get_event_loop().call_later(5, self.leave)

    def onDisconnect(self):
        asyncio.get_event_loop().stop()
if __name__ == '__main__':
    # router URL is taken from the environment, with a local default
    runner = ApplicationRunner(
        environ.get("AUTOBAHN_DEMO_ROUTER", "ws://127.0.0.1:8080/ws"),
        "crossbardemo",
    )
    runner.run(Component)
| mit | 988779202bc8d817b5ea6af82ff18d44 | 37.984127 | 114 | 0.67386 | 4.278746 | false | false | false | false |
crossbario/autobahn-python | examples/twisted/wamp/rpc/arguments/backend.py | 3 | 2604 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from os import environ
from twisted.internet.defer import inlineCallbacks
from autobahn.twisted.wamp import ApplicationSession, ApplicationRunner
class Component(ApplicationSession):
    """
    An application component providing procedures with different kinds
    of arguments.
    """

    @inlineCallbacks
    def onJoin(self, details):
        print("session attached")

        # no arguments, no result
        def ping():
            return

        # two positional arguments
        def add2(a, b):
            return a + b

        # keyword arguments with defaults
        def stars(nick="somebody", stars=0):
            return "{} starred {}x".format(nick, stars)

        # positional + keyword argument
        def orders(product, limit=5):
            return ["Product {}".format(i) for i in range(50)][:limit]

        # fully variadic
        def arglen(*args, **kwargs):
            return [len(args), len(kwargs)]

        # register every procedure under its URI
        for proc, uri in [
            (ping, 'com.arguments.ping'),
            (add2, 'com.arguments.add2'),
            (stars, 'com.arguments.stars'),
            (orders, 'com.arguments.orders'),
            (arglen, 'com.arguments.arglen'),
        ]:
            yield self.register(proc, uri)

        print("Procedures registered; ready for frontend.")
if __name__ == '__main__':
    # router URL is taken from the environment, with a local default
    runner = ApplicationRunner(
        environ.get("AUTOBAHN_DEMO_ROUTER", "ws://127.0.0.1:8080/ws"),
        "crossbardemo",
    )
    runner.run(Component)
| mit | 6ed1bd52f6465354f56df61c0c3aba03 | 36.2 | 79 | 0.655146 | 4.369128 | false | false | false | false |
crossbario/autobahn-python | autobahn/asyncio/component.py | 1 | 15953 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import asyncio
import ssl
import signal
from functools import wraps
import txaio
from autobahn.asyncio.websocket import WampWebSocketClientFactory
from autobahn.asyncio.rawsocket import WampRawSocketClientFactory
from autobahn.wamp import component
from autobahn.wamp.exception import TransportLost
from autobahn.asyncio.wamp import Session
from autobahn.wamp.serializer import create_transport_serializers, create_transport_serializer
__all__ = ('Component', 'run')
def _unique_list(seq):
"""
Return a list with unique elements from sequence, preserving order.
"""
seen = set()
return [x for x in seq if x not in seen and not seen.add(x)]
def _camel_case_from_snake_case(snake):
parts = snake.split('_')
return parts[0] + ''.join(s.capitalize() for s in parts[1:])
def _create_transport_factory(loop, transport, session_factory):
    """
    Create a WAMP-over-XXX transport factory.

    :param loop: The asyncio event loop (unused here, kept for call symmetry).
    :param transport: Transport description object; ``transport.type`` must
        be ``'websocket'`` or ``'rawsocket'``.
    :param session_factory: Factory callable producing WAMP sessions.
    :return: A configured WebSocket or RawSocket client factory.
    :raises ValueError: If a transport option is unknown under both its
        snake_case and camelCase spelling.
    """
    if transport.type == 'websocket':
        serializers = create_transport_serializers(transport)
        factory = WampWebSocketClientFactory(
            session_factory,
            url=transport.url,
            serializers=serializers,
            proxy=transport.proxy,  # either None or a dict with host, port
        )
    elif transport.type == 'rawsocket':
        # rawsocket supports exactly one serializer per connection
        serializer = create_transport_serializer(transport.serializers[0])
        factory = WampRawSocketClientFactory(session_factory, serializer=serializer)
    else:
        assert(False), 'should not arrive here'

    # set the options one at a time so we can give user better feedback
    for k, v in transport.options.items():
        try:
            factory.setProtocolOptions(**{k: v})
        except (TypeError, KeyError):
            # this allows us to document options as snake_case
            # until everything internally is upgraded from
            # camelCase
            try:
                factory.setProtocolOptions(
                    **{_camel_case_from_snake_case(k): v}
                )
            except (TypeError, KeyError):
                raise ValueError(
                    "Unknown {} transport option: {}={}".format(transport.type, k, v)
                )
    return factory
class Component(component.Component):
    """
    A component establishes a transport and attached a session
    to a realm using the transport for communication.

    The transports a component tries to use can be configured,
    as well as the auto-reconnect strategy.
    """

    log = txaio.make_logger()

    session_factory = Session
    """
    The factory of the session we will instantiate.
    """

    def _is_ssl_error(self, e):
        """
        Internal helper.

        Return ``True`` if ``e`` is an asyncio/ssl TLS error.
        """
        return isinstance(e, ssl.SSLError)

    def _check_native_endpoint(self, endpoint):
        """
        Internal helper: validate an endpoint configuration dict.

        Only the 'tls' entry is checked here; it must be a dict, a bool
        or an ``ssl.SSLContext``. Raises ``ValueError`` on bad config.
        """
        if isinstance(endpoint, dict):
            if 'tls' in endpoint:
                tls = endpoint['tls']
                if isinstance(tls, (dict, bool)):
                    pass
                elif isinstance(tls, ssl.SSLContext):
                    pass
                else:
                    raise ValueError(
                        "'tls' configuration must be a dict, bool or "
                        "SSLContext instance"
                    )
        else:
            raise ValueError(
                "'endpoint' configuration must be a dict or IStreamClientEndpoint"
                " provider"
            )

    # async function
    def _connect_transport(self, loop, transport, session_factory, done):
        """
        Create and connect a WAMP-over-XXX transport.

        :param loop: The asyncio event loop to connect on.
        :param transport: Transport description (type, endpoint, proxy, ...).
        :param session_factory: Factory producing the WAMP session.
        :param done: Future resolved/rejected when the connection attempt
            concludes (see :meth:`_wrap_connection_future`).
        :return: Future for the (transport, protocol) pair, wrapped with
            timeout and completion handling.
        """
        factory = _create_transport_factory(loop, transport, session_factory)

        # XXX the rest of this should probably be factored into its
        # own method (or three!)...

        if transport.proxy:
            timeout = transport.endpoint.get('timeout', 10)  # in seconds
            if type(timeout) != int:
                raise ValueError('invalid type {} for timeout in client endpoint configuration'.format(type(timeout)))
            # do we support HTTPS proxies?

            # connect to the proxy; the factory tunnels WAMP through it
            f = loop.create_connection(
                protocol_factory=factory,
                host=transport.proxy['host'],
                port=transport.proxy['port'],
            )
            time_f = asyncio.ensure_future(asyncio.wait_for(f, timeout=timeout))
            return self._wrap_connection_future(transport, done, time_f)

        elif transport.endpoint['type'] == 'tcp':
            version = transport.endpoint.get('version', 4)
            if version not in [4, 6]:
                raise ValueError('invalid IP version {} in client endpoint configuration'.format(version))

            host = transport.endpoint['host']
            if type(host) != str:
                raise ValueError('invalid type {} for host in client endpoint configuration'.format(type(host)))

            port = transport.endpoint['port']
            if type(port) != int:
                raise ValueError('invalid type {} for port in client endpoint configuration'.format(type(port)))

            timeout = transport.endpoint.get('timeout', 10)  # in seconds
            if type(timeout) != int:
                raise ValueError('invalid type {} for timeout in client endpoint configuration'.format(type(timeout)))

            tls = transport.endpoint.get('tls', None)
            tls_hostname = None

            # create a TLS enabled connecting TCP socket
            if tls:
                if isinstance(tls, dict):
                    for k in tls.keys():
                        if k not in ["hostname", "trust_root"]:
                            raise ValueError("Invalid key '{}' in 'tls' config".format(k))
                    hostname = tls.get('hostname', host)
                    if type(hostname) != str:
                        raise ValueError('invalid type {} for hostname in TLS client endpoint configuration'.format(hostname))
                    cert_fname = tls.get('trust_root', None)

                    tls_hostname = hostname
                    tls = True
                    # with a custom trust root, build an SSLContext that
                    # only trusts that CA file
                    if cert_fname is not None:
                        tls = ssl.create_default_context(
                            purpose=ssl.Purpose.SERVER_AUTH,
                            cafile=cert_fname,
                        )

                elif isinstance(tls, ssl.SSLContext):
                    # tls=<an SSLContext> is valid
                    tls_hostname = host

                elif tls in [False, True]:
                    if tls:
                        tls_hostname = host

                else:
                    raise RuntimeError('unknown type {} for "tls" configuration in transport'.format(type(tls)))

            f = loop.create_connection(
                protocol_factory=factory,
                host=host,
                port=port,
                ssl=tls,
                server_hostname=tls_hostname,
            )
            time_f = asyncio.ensure_future(asyncio.wait_for(f, timeout=timeout))
            return self._wrap_connection_future(transport, done, time_f)

        elif transport.endpoint['type'] == 'unix':
            path = transport.endpoint['path']
            timeout = int(transport.endpoint.get('timeout', 10))  # in seconds

            f = loop.create_unix_connection(
                protocol_factory=factory,
                path=path,
            )
            time_f = asyncio.ensure_future(asyncio.wait_for(f, timeout=timeout))
            return self._wrap_connection_future(transport, done, time_f)

        else:
            assert(False), 'should not arrive here'

    def _wrap_connection_future(self, transport, done, conn_f):
        """
        Internal helper: attach success/failure handling to a pending
        connection future, so that ``done`` is rejected on connection
        failure or on a post-connect loss during setup (e.g. a failed
        TLS handshake).
        """
        def on_connect_success(result):
            # async connect call returns a 2-tuple
            transport, proto = result

            # in the case where we .abort() the transport / connection
            # during setup, we still get on_connect_success but our
            # transport is already closed (this will happen if
            # e.g. there's an "open handshake timeout") -- I don't
            # know if there's a "better" way to detect this? #python
            # doesn't know of one, anyway
            if transport.is_closing():
                if not txaio.is_called(done):
                    reason = getattr(proto, "_onclose_reason", "Connection already closed")
                    txaio.reject(done, TransportLost(reason))
                return

            # if e.g. an SSL handshake fails, we will have
            # successfully connected (i.e. get here) but need to
            # 'listen' for the "connection_lost" from the underlying
            # protocol in case of handshake failure .. so we wrap
            # it. Also, we don't increment transport.success_count
            # here on purpose (because we might not succeed).
            # XXX double-check that asyncio behavior on TLS handshake
            # failures is in fact as described above
            orig = proto.connection_lost

            @wraps(orig)
            def lost(fail):
                rtn = orig(fail)
                if not txaio.is_called(done):
                    # asyncio will call connection_lost(None) in case of
                    # a transport failure, in which case we create an
                    # appropriate exception
                    if fail is None:
                        fail = TransportLost("failed to complete connection")
                    txaio.reject(done, fail)
                return rtn
            proto.connection_lost = lost

        def on_connect_failure(err):
            transport.connect_failures += 1
            # failed to establish a connection in the first place
            txaio.reject(done, err)

        txaio.add_callbacks(conn_f, on_connect_success, None)
        # the errback is added as a second step so it gets called if
        # there as an error in on_connect_success itself.
        txaio.add_callbacks(conn_f, None, on_connect_failure)
        return conn_f

    # async function
    def start(self, loop=None):
        """
        This starts the Component, which means it will start connecting
        (and re-connecting) to its configured transports. A Component
        runs until it is "done", which means one of:

        - There was a "main" function defined, and it completed successfully;
        - Something called ``.leave()`` on our session, and we left successfully;
        - ``.stop()`` was called, and completed successfully;
        - none of our transports were able to connect successfully (failure);

        :returns: a Future which will resolve (to ``None``) when we are
            "done" or with an error if something went wrong.
        """
        if loop is None:
            self.log.warn("Using default loop")
            loop = asyncio.get_event_loop()

        return self._start(loop=loop)
def run(components, start_loop=True, log_level='info'):
    """
    High-level API to run a series of components.

    This will only return once all the components have stopped
    (including, possibly, after all re-connections have failed if you
    have re-connections enabled). Under the hood, this calls

    XXX fixme for asyncio

    -- if you wish to manage the loop yourself, use the
    :meth:`autobahn.asyncio.component.Component.start` method to start
    each component yourself.

    :param components: the Component(s) you wish to run
    :type components: instance or list of :class:`autobahn.asyncio.component.Component`

    :param start_loop: When ``True`` (the default) this method
        start a new asyncio loop.
    :type start_loop: bool

    :param log_level: a valid log-level (or None to avoid calling start_logging)
    :type log_level: string
    """
    # actually, should we even let people "not start" the logging? I'm
    # not sure that's wise... (double-check: if they already called
    # txaio.start_logging() what happens if we call it again?)
    if log_level is not None:
        txaio.start_logging(level=log_level)
    loop = asyncio.get_event_loop()
    if loop.is_closed():
        # a previous run() may have closed the loop; make a fresh one
        asyncio.set_event_loop(asyncio.new_event_loop())
        loop = asyncio.get_event_loop()
    txaio.config.loop = loop
    log = txaio.make_logger()

    # see https://github.com/python/asyncio/issues/341 asyncio has
    # "odd" handling of KeyboardInterrupt when using Tasks (as
    # run_until_complete does). Another option is to just resture
    # default SIGINT handling, which is to exit:
    #   import signal
    #   signal.signal(signal.SIGINT, signal.SIG_DFL)

    async def nicely_exit(signal):
        # cancel all outstanding tasks, then stop the loop once the
        # cancellations have been processed
        log.info("Shutting down due to {signal}", signal=signal)

        try:
            tasks = asyncio.Task.all_tasks()
        except AttributeError:
            # this changed with python >= 3.7
            tasks = asyncio.all_tasks()

        for task in tasks:
            # Do not cancel the current task.
            try:
                current_task = asyncio.Task.current_task()
            except AttributeError:
                current_task = asyncio.current_task()
            if task is not current_task:
                task.cancel()

        def cancel_all_callback(fut):
            try:
                fut.result()
            except asyncio.CancelledError:
                log.debug("All task cancelled")
            except Exception as e:
                log.error("Error while shutting down: {exception}", exception=e)
            finally:
                loop.stop()

        fut = asyncio.gather(*tasks)
        fut.add_done_callback(cancel_all_callback)

    try:
        loop.add_signal_handler(signal.SIGINT, lambda: asyncio.ensure_future(nicely_exit("SIGINT")))
        loop.add_signal_handler(signal.SIGTERM, lambda: asyncio.ensure_future(nicely_exit("SIGTERM")))
    except NotImplementedError:
        # signals are not available on Windows
        pass

    def done_callback(loop, arg):
        loop.stop()

    # returns a future; could run_until_complete() but see below
    component._run(loop, components, done_callback)

    if start_loop:
        try:
            loop.run_forever()
            # this is probably more-correct, but then you always get
            # "Event loop stopped before Future completed":
            # loop.run_until_complete(f)
        except asyncio.CancelledError:
            pass
        # finally:
        #     signal.signal(signal.SIGINT, signal.SIG_DFL)
        #     signal.signal(signal.SIGTERM, signal.SIG_DFL)

        # Close the event loop at the end, otherwise an exception is
        # thrown. https://bugs.python.org/issue23548
        loop.close()
| mit | 35bb41c3e38b43d8db2fc8e6ab507bf5 | 37.256595 | 126 | 0.592052 | 4.578932 | false | false | false | false |
crossbario/autobahn-python | autobahn/wamp/serializer.py | 2 | 36432 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import os
import re
import struct
import platform
import math
import decimal
from binascii import b2a_hex, a2b_hex
from typing import Optional, List, Tuple
from txaio import time_ns
from autobahn.wamp.interfaces import IObjectSerializer, ISerializer, IMessage
from autobahn.wamp.exception import ProtocolError
from autobahn.wamp import message
# note: __all__ must be a list here, since we dynamically
# extend it depending on availability of more serializers
__all__ = ['Serializer',
'JsonObjectSerializer',
'JsonSerializer']
SERID_TO_OBJSER = {}
SERID_TO_SER = {}
class Serializer(object):
    """
    Base class for WAMP serializers. A WAMP serializer is the core glue between
    parsed WAMP message objects and the bytes on wire (the transport).
    """

    RATED_MESSAGE_SIZE = 512
    """
    Serialized WAMP message payload size per rated WAMP message.
    """

    # WAMP defines the following 24 message types
    MESSAGE_TYPE_MAP = {
        message.Hello.MESSAGE_TYPE: message.Hello,
        message.Welcome.MESSAGE_TYPE: message.Welcome,
        message.Abort.MESSAGE_TYPE: message.Abort,
        message.Challenge.MESSAGE_TYPE: message.Challenge,
        message.Authenticate.MESSAGE_TYPE: message.Authenticate,
        message.Goodbye.MESSAGE_TYPE: message.Goodbye,
        message.Error.MESSAGE_TYPE: message.Error,

        message.Publish.MESSAGE_TYPE: message.Publish,
        message.Published.MESSAGE_TYPE: message.Published,

        message.Subscribe.MESSAGE_TYPE: message.Subscribe,
        message.Subscribed.MESSAGE_TYPE: message.Subscribed,
        message.Unsubscribe.MESSAGE_TYPE: message.Unsubscribe,
        message.Unsubscribed.MESSAGE_TYPE: message.Unsubscribed,
        message.Event.MESSAGE_TYPE: message.Event,

        message.Call.MESSAGE_TYPE: message.Call,
        message.Cancel.MESSAGE_TYPE: message.Cancel,
        message.Result.MESSAGE_TYPE: message.Result,

        message.Register.MESSAGE_TYPE: message.Register,
        message.Registered.MESSAGE_TYPE: message.Registered,
        message.Unregister.MESSAGE_TYPE: message.Unregister,
        message.Unregistered.MESSAGE_TYPE: message.Unregistered,
        message.Invocation.MESSAGE_TYPE: message.Invocation,
        message.Interrupt.MESSAGE_TYPE: message.Interrupt,
        message.Yield.MESSAGE_TYPE: message.Yield
    }
    """
    Mapping of WAMP message type codes to WAMP message classes.
    """

    def __init__(self, serializer):
        """
        :param serializer: The object serializer to use for WAMP wire-level serialization.
        :type serializer: An object that implements :class:`autobahn.interfaces.IObjectSerializer`.
        """
        self._serializer = serializer

        # statistics accumulators; "rated" messages count payload in units
        # of RATED_MESSAGE_SIZE bytes (rounded up)
        self._stats_reset = time_ns()
        self._stats_cycle = 0

        self._serialized_bytes = 0
        self._serialized_messages = 0
        self._serialized_rated_messages = 0

        self._unserialized_bytes = 0
        self._unserialized_messages = 0
        self._unserialized_rated_messages = 0

        # auto-reset configuration (see set_stats_autoreset)
        self._autoreset_rated_messages = None
        self._autoreset_duration = None
        self._autoreset_callback = None

    def stats_reset(self):
        """
        Get serializer statistics: timestamp when statistics were last reset.

        :return: Last reset time of statistics (UTC, ns since Unix epoch)
        :rtype: int
        """
        return self._stats_reset

    def stats_bytes(self):
        """
        Get serializer statistics: bytes (serialized + unserialized).

        :return: Number of bytes.
        :rtype: int
        """
        return self._serialized_bytes + self._unserialized_bytes

    def stats_messages(self):
        """
        Get serializer statistics: messages (serialized + unserialized).

        :return: Number of messages.
        :rtype: int
        """
        return self._serialized_messages + self._unserialized_messages

    def stats_rated_messages(self):
        """
        Get serializer statistics: rated messages (serialized + unserialized).

        :return: Number of rated messages.
        :rtype: int
        """
        return self._serialized_rated_messages + self._unserialized_rated_messages

    def set_stats_autoreset(self, rated_messages, duration, callback, reset_now=False):
        """
        Configure a user callback invoked when accumulated stats hit specified threshold.

        When the specified number of rated messages have been processed or the specified duration
        has passed, statistics are automatically reset, and the last statistics is provided to
        the user callback.

        :param rated_messages: Number of rated messages that should trigger an auto-reset.
        :type rated_messages: int

        :param duration: Duration in ns that when passed will trigger an auto-reset.
        :type duration: int

        :param callback: User callback to be invoked when statistics are auto-reset. The function
            will be invoked with a single positional argument: the accumulated statistics before the reset.
        :type callback: callable

        :param reset_now: If ``True``, reset statistics immediately and invoke
            the callback with the statistics accumulated so far.
        :type reset_now: bool
        """
        assert(rated_messages is None or type(rated_messages) == int)
        assert(duration is None or type(duration) == int)
        assert(rated_messages or duration)
        assert(callable(callback))

        self._autoreset_rated_messages = rated_messages
        self._autoreset_duration = duration
        self._autoreset_callback = callback

        # maybe auto-reset and trigger user callback ..
        if self._autoreset_callback and reset_now:
            stats = self.stats(reset=True)
            self._autoreset_callback(stats)
            return stats

    def stats(self, reset=True, details=False):
        """
        Get (and reset) serializer statistics.

        :param reset: If ``True``, reset the serializer statistics.
        :type reset: bool

        :param details: If ``True``, return detailed statistics split up by serialization/unserialization.
        :type details: bool

        :return: Serializer statistics, eg:

            .. code-block:: json

                {
                    "timestamp": 1574156576688704693,
                    "duration": 34000000000,
                    "bytes": 0,
                    "messages": 0,
                    "rated_messages": 0
                }

        :rtype: dict
        """
        assert(type(reset) == bool)
        assert(type(details) == bool)

        self._stats_cycle += 1

        if details:
            data = {
                'cycle': self._stats_cycle,
                'serializer': self.SERIALIZER_ID,
                'timestamp': self._stats_reset,
                'duration': time_ns() - self._stats_reset,
                'serialized': {
                    'bytes': self._serialized_bytes,
                    'messages': self._serialized_messages,
                    'rated_messages': self._serialized_rated_messages,
                },
                'unserialized': {
                    'bytes': self._unserialized_bytes,
                    'messages': self._unserialized_messages,
                    'rated_messages': self._unserialized_rated_messages,
                }
            }
        else:
            data = {
                'cycle': self._stats_cycle,
                'serializer': self.SERIALIZER_ID,
                'timestamp': self._stats_reset,
                'duration': time_ns() - self._stats_reset,
                'bytes': self._serialized_bytes + self._unserialized_bytes,
                'messages': self._serialized_messages + self._unserialized_messages,
                'rated_messages': self._serialized_rated_messages + self._unserialized_rated_messages,
            }
        if reset:
            self._serialized_bytes = 0
            self._serialized_messages = 0
            self._serialized_rated_messages = 0
            self._unserialized_bytes = 0
            self._unserialized_messages = 0
            self._unserialized_rated_messages = 0
            self._stats_reset = time_ns()
        return data

    def serialize(self, msg: IMessage) -> Tuple[bytes, bool]:
        """
        Implements :func:`autobahn.wamp.interfaces.ISerializer.serialize`
        """
        data, is_binary = msg.serialize(self._serializer), self._serializer.BINARY

        # maintain statistics for serialized WAMP message data
        self._serialized_bytes += len(data)
        self._serialized_messages += 1
        self._serialized_rated_messages += int(math.ceil(float(len(data)) / self.RATED_MESSAGE_SIZE))

        # maybe auto-reset and trigger user callback ..
        if self._autoreset_callback and ((self._autoreset_duration and (time_ns() - self._stats_reset) >= self._autoreset_duration) or (self._autoreset_rated_messages and self.stats_rated_messages() >= self._autoreset_rated_messages)):
            stats = self.stats(reset=True)
            self._autoreset_callback(stats)

        return data, is_binary

    def unserialize(self, payload: bytes, isBinary: Optional[bool] = None) -> List[IMessage]:
        """
        Implements :func:`autobahn.wamp.interfaces.ISerializer.unserialize`
        """
        if isBinary is not None:
            if isBinary != self._serializer.BINARY:
                raise ProtocolError(
                    "invalid serialization of WAMP message (binary {0}, but expected {1})".format(isBinary,
                                                                                                  self._serializer.BINARY))
        try:
            raw_msgs = self._serializer.unserialize(payload)
        except Exception as e:
            raise ProtocolError("invalid serialization of WAMP message: {0} {1}".format(type(e).__name__, e))

        if self._serializer.NAME == 'flatbuffers':
            # flatbuffers serializer already returns parsed message objects
            msgs = raw_msgs
        else:
            msgs = []
            for raw_msg in raw_msgs:
                # each raw message must be a non-empty list whose first
                # element is the integer WAMP message type code
                if type(raw_msg) != list:
                    raise ProtocolError("invalid type {0} for WAMP message".format(type(raw_msg)))

                if len(raw_msg) == 0:
                    raise ProtocolError("missing message type in WAMP message")

                message_type = raw_msg[0]

                if type(message_type) != int:
                    raise ProtocolError("invalid type {0} for WAMP message type".format(type(message_type)))

                Klass = self.MESSAGE_TYPE_MAP.get(message_type)

                if Klass is None:
                    raise ProtocolError("invalid WAMP message type {0}".format(message_type))

                # this might again raise `ProtocolError` ..
                msg = Klass.parse(raw_msg)

                msgs.append(msg)

        # maintain statistics for unserialized WAMP message data
        self._unserialized_bytes += len(payload)
        self._unserialized_messages += len(msgs)
        self._unserialized_rated_messages += int(math.ceil(float(len(payload)) / self.RATED_MESSAGE_SIZE))

        # maybe auto-reset and trigger user callback ..
        if self._autoreset_callback and ((self._autoreset_duration and (time_ns() - self._stats_reset) >= self._autoreset_duration) or (self._autoreset_rated_messages and self.stats_rated_messages() >= self._autoreset_rated_messages)):
            stats = self.stats(reset=True)
            self._autoreset_callback(stats)

        return msgs
# JSON serialization is always supported
_USE_UJSON = 'AUTOBAHN_USE_UJSON' in os.environ
if _USE_UJSON:
try:
import ujson
_USE_UJSON = True
except ImportError:
import json
_USE_UJSON = False
else:
import json
if _USE_UJSON:
# ujson doesn't support plugging into the JSON string parsing machinery ..
print('WARNING: Autobahn is using ujson accelerated JSON module - will run faster,\nbut only on CPython and will loose ability to transport binary payload transparently!')
_loads = ujson.loads
_dumps = ujson.dumps
_json = ujson
else:
# print('Notice: Autobahn is using json built-in standard library module for JSON serialization')
import base64
class _WAMPJsonEncoder(json.JSONEncoder):
def __init__(self, *args, **kwargs):
if 'use_binary_hex_encoding' in kwargs:
self._use_binary_hex_encoding = kwargs['use_binary_hex_encoding']
del kwargs['use_binary_hex_encoding']
else:
self._use_binary_hex_encoding = False
json.JSONEncoder.__init__(self, *args, **kwargs)
def default(self, obj):
if isinstance(obj, bytes):
if self._use_binary_hex_encoding:
return '0x' + b2a_hex(obj).decode('ascii')
else:
return '\x00' + base64.b64encode(obj).decode('ascii')
elif isinstance(obj, decimal.Decimal):
return str(obj)
else:
return json.JSONEncoder.default(self, obj)
#
# the following is a hack. see http://bugs.python.org/issue29992
#
from json import scanner
from json.decoder import scanstring
_DEC_MATCH = re.compile(r'^[\+\-E\.0-9]+$')
class _WAMPJsonDecoder(json.JSONDecoder):
def __init__(self, *args, **kwargs):
if 'use_binary_hex_encoding' in kwargs:
self._use_binary_hex_encoding = kwargs['use_binary_hex_encoding']
del kwargs['use_binary_hex_encoding']
else:
self._use_binary_hex_encoding = False
if 'use_decimal_from_str' in kwargs:
self._use_decimal_from_str = kwargs['use_decimal_from_str']
del kwargs['use_decimal_from_str']
else:
self._use_decimal_from_str = False
if 'use_decimal_from_float' in kwargs:
self._use_decimal_from_float = kwargs['use_decimal_from_float']
del kwargs['use_decimal_from_float']
if self._use_decimal_from_float:
kwargs['parse_float'] = decimal.Decimal
else:
self._use_decimal_from_str = False
json.JSONDecoder.__init__(self, *args, **kwargs)
def _parse_string(*args, **kwargs):
s, idx = scanstring(*args, **kwargs)
if self._use_binary_hex_encoding:
if s and s[0:2] == '0x':
s = a2b_hex(s[2:])
return s, idx
else:
if s and s[0] == '\x00':
s = base64.b64decode(s[1:])
return s, idx
if self._use_decimal_from_str and _DEC_MATCH.match(s):
try:
s = decimal.Decimal(s)
return s, idx
except decimal.InvalidOperation:
pass
return s, idx
self.parse_string = _parse_string
# we need to recreate the internal scan function ..
self.scan_once = scanner.py_make_scanner(self)
# .. and we have to explicitly use the Py version,
# not the C version, as the latter won't work
# self.scan_once = scanner.make_scanner(self)
def _loads(s, use_binary_hex_encoding=False, use_decimal_from_str=False, use_decimal_from_float=False):
    """
    Deserialize a JSON string using the WAMP-aware decoder, forwarding
    all binary/decimal decoding options.
    """
    return json.loads(
        s,
        cls=_WAMPJsonDecoder,
        use_binary_hex_encoding=use_binary_hex_encoding,
        use_decimal_from_str=use_decimal_from_str,
        use_decimal_from_float=use_decimal_from_float,
    )
def _dumps(obj, use_binary_hex_encoding=False):
    """
    Serialize an object to a compact JSON string (no whitespace, keys in
    insertion order, non-ASCII kept verbatim) using the WAMP-aware encoder.
    """
    return json.dumps(
        obj,
        cls=_WAMPJsonEncoder,
        use_binary_hex_encoding=use_binary_hex_encoding,
        separators=(',', ':'),
        ensure_ascii=False,
        sort_keys=False,
    )
# keep a reference to the JSON implementation module in use under a private alias
_json = json
class JsonObjectSerializer(object):

    JSON_MODULE = _json
    """
    The JSON module used (now only stdlib).
    """

    # serializer ID (used for lookup in the serializer registry)
    NAME = 'json'

    # JSON is a text (non-binary) serialization format
    BINARY = False

    def __init__(self, batched=False, use_binary_hex_encoding=False, use_decimal_from_str=False, use_decimal_from_float=False):
        """
        :param batched: Flag that controls whether serializer operates in batched mode.
        :type batched: bool

        :param use_binary_hex_encoding: Flag to enable HEX encoding prefixed with ``"0x"``,
            otherwise prefix binaries with a ``\\0`` byte.
        :type use_binary_hex_encoding: bool

        :param use_decimal_from_str: Flag to automatically encode Decimals as strings, and
            to try to parse strings as Decimals.
        :type use_decimal_from_str: bool

        :param use_decimal_from_float: Flag to parse JSON floats into Decimals.
        :type use_decimal_from_float: bool
        """
        self._batched = batched
        self._use_binary_hex_encoding = use_binary_hex_encoding
        self._use_decimal_from_str = use_decimal_from_str
        self._use_decimal_from_float = use_decimal_from_float

    def serialize(self, obj):
        """
        Implements :func:`autobahn.wamp.interfaces.IObjectSerializer.serialize`
        """
        s = _dumps(obj, use_binary_hex_encoding=self._use_binary_hex_encoding)
        if isinstance(s, str):
            s = s.encode('utf8')
        if self._batched:
            # batched mode: terminate each serialized message with a 0x18 byte
            return s + b'\30'
        else:
            return s

    def unserialize(self, payload):
        """
        Implements :func:`autobahn.wamp.interfaces.IObjectSerializer.unserialize`
        """
        if self._batched:
            # split on the 0x18 separator; drop the trailing empty chunk
            chunks = payload.split(b'\30')[:-1]
        else:
            chunks = [payload]
        if len(chunks) == 0:
            raise Exception("batch format error")
        return [_loads(data.decode('utf8'),
                       use_binary_hex_encoding=self._use_binary_hex_encoding,
                       use_decimal_from_str=self._use_decimal_from_str,
                       use_decimal_from_float=self._use_decimal_from_float) for data in chunks]
# register as implementation of the object serializer interface, and by name
IObjectSerializer.register(JsonObjectSerializer)

SERID_TO_OBJSER[JsonObjectSerializer.NAME] = JsonObjectSerializer
class JsonSerializer(Serializer):
    """
    WAMP serializer for JSON (UTF-8 encoded), wrapping :class:`JsonObjectSerializer`.
    """

    SERIALIZER_ID = "json"
    """
    ID used as part of the WebSocket subprotocol name to identify the
    serializer with WAMP-over-WebSocket.
    """

    RAWSOCKET_SERIALIZER_ID = 1
    """
    ID used in lower four bits of second octet in RawSocket opening
    handshake identify the serializer with WAMP-over-RawSocket.
    """

    MIME_TYPE = "application/json"
    """
    MIME type announced in HTTP request/response headers when running
    WAMP-over-Longpoll HTTP fallback.
    """

    def __init__(self, batched=False, use_binary_hex_encoding=False, use_decimal_from_str=False,
                 use_decimal_from_float=False):
        """
        Ctor.

        :param batched: Flag to control whether to put this serialized into batched mode.
        :type batched: bool

        :param use_binary_hex_encoding: Forwarded to :class:`JsonObjectSerializer`.
        :type use_binary_hex_encoding: bool

        :param use_decimal_from_str: Forwarded to :class:`JsonObjectSerializer`.
        :type use_decimal_from_str: bool

        :param use_decimal_from_float: Forwarded to :class:`JsonObjectSerializer`.
            FIX: previously this option could not be enabled through this wrapper
            at all, although the underlying object serializer supports it.
        :type use_decimal_from_float: bool
        """
        Serializer.__init__(self, JsonObjectSerializer(batched=batched,
                                                       use_binary_hex_encoding=use_binary_hex_encoding,
                                                       use_decimal_from_str=use_decimal_from_str,
                                                       use_decimal_from_float=use_decimal_from_float))
        if batched:
            self.SERIALIZER_ID = "json.batched"
# register as implementation of the serializer interface, and by serializer ID
ISerializer.register(JsonSerializer)

SERID_TO_SER[JsonSerializer.SERIALIZER_ID] = JsonSerializer
_HAS_MSGPACK = False

# prefer the pure-Python u-msgpack implementation on PyPy, or when forced
# via the AUTOBAHN_USE_UMSGPACK environment variable
_USE_UMSGPACK = platform.python_implementation() == 'PyPy' or 'AUTOBAHN_USE_UMSGPACK' in os.environ

if not _USE_UMSGPACK:
    try:
        # on CPython, use an impl. with native extension:
        # https://pypi.org/project/msgpack/
        # https://github.com/msgpack/msgpack-python
        import msgpack
    except ImportError:
        pass
    else:
        _HAS_MSGPACK = True
        # use_bin_type=True / raw=False: keep the str-vs-bytes distinction on the wire
        _packb = lambda obj: msgpack.packb(obj, use_bin_type=True)  # noqa
        _unpackb = lambda data: msgpack.unpackb(data, raw=False)  # noqa
        _msgpack = msgpack
        # print('Notice: Autobahn is using msgpack library (with native extension, best on CPython) for MessagePack serialization')
else:
    try:
        # on PyPy in particular, use a pure python impl.:
        # https://pypi.python.org/pypi/u-msgpack-python
        # https://github.com/vsergeev/u-msgpack-python
        import umsgpack
    except ImportError:
        pass
    else:
        _HAS_MSGPACK = True
        _packb = umsgpack.packb
        _unpackb = umsgpack.unpackb
        _msgpack = umsgpack
        # print('Notice: Autobahn is using umsgpack library (pure Python, best on PyPy) for MessagePack serialization')
if _HAS_MSGPACK:

    class MsgPackObjectSerializer(object):
        """
        Serializes objects to/from MessagePack; in batched mode, each
        message is prefixed with its length as a big-endian uint32.
        """

        NAME = 'msgpack'

        # the actual MessagePack implementation module in use (msgpack or umsgpack)
        MSGPACK_MODULE = _msgpack

        BINARY = True
        """
        Flag that indicates whether this serializer needs a binary clean transport.
        """

        def __init__(self, batched=False):
            """
            :param batched: Flag that controls whether serializer operates in batched mode.
            :type batched: bool
            """
            self._batched = batched

        def serialize(self, obj):
            """
            Implements :func:`autobahn.wamp.interfaces.IObjectSerializer.serialize`
            """
            data = _packb(obj)
            if self._batched:
                # prefix the message with its length ("!L" = big-endian uint32)
                return struct.pack("!L", len(data)) + data
            else:
                return data

        def unserialize(self, payload):
            """
            Implements :func:`autobahn.wamp.interfaces.IObjectSerializer.unserialize`
            """
            if self._batched:
                msgs = []
                N = len(payload)
                i = 0
                while i < N:
                    # read message length prefix
                    if i + 4 > N:
                        raise Exception("batch format error [1]")
                    l = struct.unpack("!L", payload[i:i + 4])[0]
                    # read message data
                    if i + 4 + l > N:
                        raise Exception("batch format error [2]")
                    data = payload[i + 4:i + 4 + l]
                    # append parsed raw message
                    msgs.append(_unpackb(data))
                    # advance until everything consumed
                    i = i + 4 + l
                if i != N:
                    raise Exception("batch format error [3]")
                return msgs
            else:
                unpacked = _unpackb(payload)
                return [unpacked]

    # register as implementation of the object serializer interface, and by name
    IObjectSerializer.register(MsgPackObjectSerializer)

    __all__.append('MsgPackObjectSerializer')

    SERID_TO_OBJSER[MsgPackObjectSerializer.NAME] = MsgPackObjectSerializer

    class MsgPackSerializer(Serializer):
        """
        WAMP serializer for MessagePack, wrapping :class:`MsgPackObjectSerializer`.
        """

        SERIALIZER_ID = "msgpack"
        """
        ID used as part of the WebSocket subprotocol name to identify the
        serializer with WAMP-over-WebSocket.
        """

        RAWSOCKET_SERIALIZER_ID = 2
        """
        ID used in lower four bits of second octet in RawSocket opening
        handshake identify the serializer with WAMP-over-RawSocket.
        """

        MIME_TYPE = "application/x-msgpack"
        """
        MIME type announced in HTTP request/response headers when running
        WAMP-over-Longpoll HTTP fallback.
        """

        def __init__(self, batched=False):
            """
            Ctor.

            :param batched: Flag to control whether to put this serialized into batched mode.
            :type batched: bool
            """
            Serializer.__init__(self, MsgPackObjectSerializer(batched=batched))
            if batched:
                self.SERIALIZER_ID = "msgpack.batched"

    # register as implementation of the serializer interface, and by serializer ID
    ISerializer.register(MsgPackSerializer)

    SERID_TO_SER[MsgPackSerializer.SERIALIZER_ID] = MsgPackSerializer

    __all__.append('MsgPackSerializer')
# CBOR serialization depends on the (optional) `cbor2` package being available
_HAS_CBOR = False
try:
    import cbor2
except ImportError:
    pass
else:
    _HAS_CBOR = True
    _cbor_loads = cbor2.loads
    _cbor_dumps = cbor2.dumps
    _cbor = cbor2
if _HAS_CBOR:

    class CBORObjectSerializer(object):
        """
        CBOR serializer based on `cbor2 <https://github.com/agronholm/cbor2>`_.

        This CBOR serializer has proper support for arbitrary precision decimals,
        via tagged decimal fraction encoding, as described in
        `RFC7049 section 2.4.3 <https://datatracker.ietf.org/doc/html/rfc7049#section-2.4.3>`_.
        """

        NAME = 'cbor'

        CBOR_MODULE = _cbor

        BINARY = True
        """
        Flag that indicates whether this serializer needs a binary clean transport.
        """

        def __init__(self, batched=False):
            """
            :param batched: Flag that controls whether serializer operates in batched mode.
            :type batched: bool
            """
            self._batched = batched

        def serialize(self, obj):
            """
            Implements :func:`autobahn.wamp.interfaces.IObjectSerializer.serialize`
            """
            data = _cbor_dumps(obj)
            if not self._batched:
                return data
            # batched mode: prefix the message with its length ("!L" = big-endian uint32)
            return struct.pack("!L", len(data)) + data

        def unserialize(self, payload):
            """
            Implements :func:`autobahn.wamp.interfaces.IObjectSerializer.unserialize`
            """
            if not self._batched:
                return [_cbor_loads(payload)]

            msgs = []
            total = len(payload)
            offset = 0
            while offset < total:
                # each frame: 4 bytes big-endian uint32 length prefix ..
                if offset + 4 > total:
                    raise Exception("batch format error [1]")
                frame_len = struct.unpack("!L", payload[offset:offset + 4])[0]
                # .. followed by the CBOR-encoded message itself
                if offset + 4 + frame_len > total:
                    raise Exception("batch format error [2]")
                msgs.append(_cbor_loads(payload[offset + 4:offset + 4 + frame_len]))
                # advance to the next length-prefixed frame
                offset += 4 + frame_len
            if offset != total:
                raise Exception("batch format error [3]")
            return msgs

    IObjectSerializer.register(CBORObjectSerializer)

    SERID_TO_OBJSER[CBORObjectSerializer.NAME] = CBORObjectSerializer

    __all__.append('CBORObjectSerializer')

    class CBORSerializer(Serializer):
        """
        WAMP serializer for CBOR, wrapping :class:`CBORObjectSerializer`.
        """

        SERIALIZER_ID = "cbor"
        """
        ID used as part of the WebSocket subprotocol name to identify the
        serializer with WAMP-over-WebSocket.
        """

        RAWSOCKET_SERIALIZER_ID = 3
        """
        ID used in lower four bits of second octet in RawSocket opening
        handshake identify the serializer with WAMP-over-RawSocket.
        """

        MIME_TYPE = "application/cbor"
        """
        MIME type announced in HTTP request/response headers when running
        WAMP-over-Longpoll HTTP fallback.
        """

        def __init__(self, batched=False):
            """
            Ctor.

            :param batched: Flag to control whether to put this serialized into batched mode.
            :type batched: bool
            """
            Serializer.__init__(self, CBORObjectSerializer(batched=batched))
            if batched:
                self.SERIALIZER_ID = "cbor.batched"

    ISerializer.register(CBORSerializer)

    SERID_TO_SER[CBORSerializer.SERIALIZER_ID] = CBORSerializer

    __all__.append('CBORSerializer')
# UBJSON serialization depends on the `py-ubjson` package being available
# https://pypi.python.org/pypi/py-ubjson
# https://github.com/Iotic-Labs/py-ubjson
try:
    import ubjson
except ImportError:
    pass
else:
    # print('Notice: Autobahn is using ubjson module for UBJSON serialization')

    class UBJSONObjectSerializer(object):
        """
        Serializes objects to/from UBJSON; in batched mode, each message
        is prefixed with its length as a big-endian uint32.
        """

        NAME = 'ubjson'

        # the UBJSON implementation module in use
        UBJSON_MODULE = ubjson

        BINARY = True
        """
        Flag that indicates whether this serializer needs a binary clean transport.
        """

        def __init__(self, batched=False):
            """
            :param batched: Flag that controls whether serializer operates in batched mode.
            :type batched: bool
            """
            self._batched = batched

        def serialize(self, obj):
            """
            Implements :func:`autobahn.wamp.interfaces.IObjectSerializer.serialize`
            """
            data = ubjson.dumpb(obj)
            if self._batched:
                # prefix the message with its length ("!L" = big-endian uint32)
                return struct.pack("!L", len(data)) + data
            else:
                return data

        def unserialize(self, payload):
            """
            Implements :func:`autobahn.wamp.interfaces.IObjectSerializer.unserialize`
            """
            if self._batched:
                msgs = []
                N = len(payload)
                i = 0
                while i < N:
                    # read message length prefix
                    if i + 4 > N:
                        raise Exception("batch format error [1]")
                    l = struct.unpack("!L", payload[i:i + 4])[0]
                    # read message data
                    if i + 4 + l > N:
                        raise Exception("batch format error [2]")
                    data = payload[i + 4:i + 4 + l]
                    # append parsed raw message
                    msgs.append(ubjson.loadb(data))
                    # advance until everything consumed
                    i = i + 4 + l
                if i != N:
                    raise Exception("batch format error [3]")
                return msgs
            else:
                unpacked = ubjson.loadb(payload)
                return [unpacked]

    # register as implementation of the object serializer interface, and by name
    IObjectSerializer.register(UBJSONObjectSerializer)

    SERID_TO_OBJSER[UBJSONObjectSerializer.NAME] = UBJSONObjectSerializer

    __all__.append('UBJSONObjectSerializer')

    class UBJSONSerializer(Serializer):
        """
        WAMP serializer for UBJSON, wrapping :class:`UBJSONObjectSerializer`.
        """

        SERIALIZER_ID = "ubjson"
        """
        ID used as part of the WebSocket subprotocol name to identify the
        serializer with WAMP-over-WebSocket.
        """

        RAWSOCKET_SERIALIZER_ID = 4
        """
        ID used in lower four bits of second octet in RawSocket opening
        handshake identify the serializer with WAMP-over-RawSocket.
        """

        MIME_TYPE = "application/ubjson"
        """
        MIME type announced in HTTP request/response headers when running
        WAMP-over-Longpoll HTTP fallback.
        """

        def __init__(self, batched=False):
            """
            Ctor.

            :param batched: Flag to control whether to put this serialized into batched mode.
            :type batched: bool
            """
            Serializer.__init__(self, UBJSONObjectSerializer(batched=batched))
            if batched:
                self.SERIALIZER_ID = "ubjson.batched"

    # register as implementation of the serializer interface, and by serializer ID
    ISerializer.register(UBJSONSerializer)

    SERID_TO_SER[UBJSONSerializer.SERIALIZER_ID] = UBJSONSerializer

    __all__.append('UBJSONSerializer')
# FlatBuffers serialization depends on both the `flatbuffers` package and
# the generated WAMP message schema bindings being importable
_HAS_FLATBUFFERS = False
try:
    import flatbuffers  # noqa
    from autobahn.wamp import message_fbs
except ImportError:
    pass
else:
    _HAS_FLATBUFFERS = True
if _HAS_FLATBUFFERS:

    class FlatBuffersObjectSerializer(object):
        """
        (Partial) WAMP-FlatBuffers object serializer: currently only
        unserialization of EVENT and PUBLISH messages is implemented.
        """

        NAME = 'flatbuffers'

        FLATBUFFERS_MODULE = flatbuffers

        BINARY = True
        """
        Flag that indicates whether this serializer needs a binary clean transport.
        """

        # maps FlatBuffers message type => (FlatBuffers class, WAMP message class)
        MESSAGE_TYPE_MAP = {
            message_fbs.MessageType.EVENT: (message_fbs.Event, message.Event),
            message_fbs.MessageType.PUBLISH: (message_fbs.Publish, message.Publish),
        }

        def __init__(self, batched=False):
            """
            :param batched: Flag that controls whether serializer operates in batched mode.
            :type batched: bool
            """
            # batching is rejected here - the FlatBuffers codec has no batch framing yet
            assert not batched, 'WAMP-FlatBuffers serialization does not support message batching currently'
            self._batched = batched

        def serialize(self, obj):
            """
            Implements :func:`autobahn.wamp.interfaces.IObjectSerializer.serialize`
            """
            raise NotImplementedError()

        def unserialize(self, payload):
            """
            Implements :func:`autobahn.wamp.interfaces.IObjectSerializer.unserialize`
            """
            union_msg = message_fbs.Message.Message.GetRootAsMessage(payload, 0)
            msg_type = union_msg.MsgType()
            if msg_type in self.MESSAGE_TYPE_MAP:
                fbs_klass, wamp_klass = self.MESSAGE_TYPE_MAP[msg_type]
                fbs_msg = fbs_klass()
                # re-root the union member table and wrap it in the WAMP message class
                _tab = union_msg.Msg()
                fbs_msg.Init(_tab.Bytes, _tab.Pos)
                msg = wamp_klass(from_fbs=fbs_msg)
                return [msg]
            else:
                raise NotImplementedError('message type {} not yet implemented for WAMP-FlatBuffers'.format(msg_type))

    # register as implementation of the object serializer interface, and by name
    IObjectSerializer.register(FlatBuffersObjectSerializer)

    __all__.append('FlatBuffersObjectSerializer')

    SERID_TO_OBJSER[FlatBuffersObjectSerializer.NAME] = FlatBuffersObjectSerializer

    class FlatBuffersSerializer(Serializer):
        """
        WAMP serializer for FlatBuffers, wrapping :class:`FlatBuffersObjectSerializer`.
        """

        SERIALIZER_ID = "flatbuffers"
        """
        ID used as part of the WebSocket subprotocol name to identify the
        serializer with WAMP-over-WebSocket.
        """

        RAWSOCKET_SERIALIZER_ID = 5
        """
        ID used in lower four bits of second octet in RawSocket opening
        handshake identify the serializer with WAMP-over-RawSocket.
        """

        MIME_TYPE = "application/x-flatbuffers"
        """
        MIME type announced in HTTP request/response headers when running
        WAMP-over-Longpoll HTTP fallback.
        """

        def __init__(self, batched=False):
            """
            :param batched: Flag to control whether to put this serialized into batched mode.
            :type batched: bool
            """
            Serializer.__init__(self, FlatBuffersObjectSerializer(batched=batched))
            if batched:
                self.SERIALIZER_ID = "flatbuffers.batched"

    # register as implementation of the serializer interface, and by serializer ID
    ISerializer.register(FlatBuffersSerializer)

    SERID_TO_SER[FlatBuffersSerializer.SERIALIZER_ID] = FlatBuffersSerializer

    __all__.append('FlatBuffersSerializer')
def create_transport_serializer(serializer_id):
    """
    Instantiate a WAMP serializer for the given serializer ID.

    The ID may carry a ``".batched"`` suffix (e.g. ``"json.batched"``),
    in which case the serializer is created in batched mode.
    """
    batched = False
    if '.' in serializer_id:
        parts = serializer_id.split('.')
        serializer_id = parts[0]
        if len(parts) > 1 and parts[1] == 'batched':
            batched = True

    if serializer_id not in SERID_TO_SER:
        raise RuntimeError('could not create serializer for "{}" (available: {})'.format(serializer_id, sorted(SERID_TO_SER.keys())))
    return SERID_TO_SER[serializer_id](batched=batched)
def create_transport_serializers(transport):
    """
    Create a list of serializers to use with a WAMP protocol factory.
    """
    return [create_transport_serializer(serializer_id)
            for serializer_id in transport.serializers]
| mit | acf26b2c42565b242b08f1e29c6a539c | 33.664129 | 235 | 0.582208 | 4.335079 | false | false | false | false |
crossbario/autobahn-python | autobahn/websocket/compress.py | 3 | 4566 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from autobahn.websocket.compress_base import \
PerMessageCompressOffer, \
PerMessageCompressOfferAccept, \
PerMessageCompressResponse, \
PerMessageCompressResponseAccept, \
PerMessageCompress
from autobahn.websocket.compress_deflate import \
PerMessageDeflateMixin, \
PerMessageDeflateOffer, \
PerMessageDeflateOfferAccept, \
PerMessageDeflateResponse, \
PerMessageDeflateResponseAccept, \
PerMessageDeflate
# this must be a list (not tuple), since we dynamically
# extend it .. (the optional bzip2/snappy sections below append their names)
__all__ = [
    'PerMessageCompressOffer',
    'PerMessageCompressOfferAccept',
    'PerMessageCompressResponse',
    'PerMessageCompressResponseAccept',
    'PerMessageCompress',
    'PerMessageDeflateOffer',
    'PerMessageDeflateOfferAccept',
    'PerMessageDeflateResponse',
    'PerMessageDeflateResponseAccept',
    'PerMessageDeflate',
    'PERMESSAGE_COMPRESSION_EXTENSION'
]
# map of available compression extensions:
# extension name => dict with the Offer/OfferAccept/Response/ResponseAccept/PMCE classes
PERMESSAGE_COMPRESSION_EXTENSION = {

    # class for 'permessage-deflate' is always available
    PerMessageDeflateMixin.EXTENSION_NAME: {
        'Offer': PerMessageDeflateOffer,
        'OfferAccept': PerMessageDeflateOfferAccept,
        'Response': PerMessageDeflateResponse,
        'ResponseAccept': PerMessageDeflateResponseAccept,
        'PMCE': PerMessageDeflate
    }
}
# include 'permessage-bzip2' classes if bzip2 is available
try:
    import bz2
except ImportError:
    # rebind to None so availability can be tested via the module attribute
    bz2 = None
else:
    from autobahn.websocket.compress_bzip2 import \
        PerMessageBzip2Mixin, \
        PerMessageBzip2Offer, \
        PerMessageBzip2OfferAccept, \
        PerMessageBzip2Response, \
        PerMessageBzip2ResponseAccept, \
        PerMessageBzip2

    # wire up the 'permessage-bzip2' classes in the extension registry
    PMCE = {
        'Offer': PerMessageBzip2Offer,
        'OfferAccept': PerMessageBzip2OfferAccept,
        'Response': PerMessageBzip2Response,
        'ResponseAccept': PerMessageBzip2ResponseAccept,
        'PMCE': PerMessageBzip2
    }
    PERMESSAGE_COMPRESSION_EXTENSION[PerMessageBzip2Mixin.EXTENSION_NAME] = PMCE

    __all__.extend(['PerMessageBzip2Offer',
                    'PerMessageBzip2OfferAccept',
                    'PerMessageBzip2Response',
                    'PerMessageBzip2ResponseAccept',
                    'PerMessageBzip2'])
# include 'permessage-snappy' classes if Snappy is available
try:
    # noinspection PyPackageRequirements
    import snappy
except ImportError:
    # rebind to None so availability can be tested via the module attribute
    snappy = None
else:
    from autobahn.websocket.compress_snappy import \
        PerMessageSnappyMixin, \
        PerMessageSnappyOffer, \
        PerMessageSnappyOfferAccept, \
        PerMessageSnappyResponse, \
        PerMessageSnappyResponseAccept, \
        PerMessageSnappy

    # wire up the 'permessage-snappy' classes in the extension registry
    PMCE = {
        'Offer': PerMessageSnappyOffer,
        'OfferAccept': PerMessageSnappyOfferAccept,
        'Response': PerMessageSnappyResponse,
        'ResponseAccept': PerMessageSnappyResponseAccept,
        'PMCE': PerMessageSnappy
    }
    PERMESSAGE_COMPRESSION_EXTENSION[PerMessageSnappyMixin.EXTENSION_NAME] = PMCE

    __all__.extend(['PerMessageSnappyOffer',
                    'PerMessageSnappyOfferAccept',
                    'PerMessageSnappyResponse',
                    'PerMessageSnappyResponseAccept',
                    'PerMessageSnappy'])
| mit | f615ec600dc15ce4d25d1f0753e05e54 | 34.395349 | 81 | 0.689225 | 4.287324 | false | false | false | false |
crossbario/autobahn-python | examples/twisted/wamp/pubsub/basic/backend.py | 3 | 2100 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from os import environ
from twisted.internet.defer import inlineCallbacks
from autobahn.twisted.util import sleep
from autobahn.twisted.wamp import ApplicationSession, ApplicationRunner
class Component(ApplicationSession):
    """
    An application component that publishes an event every second.
    """

    @inlineCallbacks
    def onJoin(self, details):
        # session has joined the realm: start the endless publish loop
        print("session attached")
        n = 0
        while True:
            print('backend publishing com.myapp.topic1', n)
            self.publish('com.myapp.topic1', n)
            n += 1
            # non-blocking one second pause between publications
            yield sleep(1)
if __name__ == '__main__':
    # router URL can be overridden via the AUTOBAHN_DEMO_ROUTER environment variable
    url = environ.get("AUTOBAHN_DEMO_ROUTER", "ws://127.0.0.1:8080/ws")
    realm = "crossbardemo"
    runner = ApplicationRunner(url, realm)
    runner.run(Component)
| mit | 4c6eaad83517d66793ec05cc977345ca | 37.181818 | 79 | 0.672381 | 4.555315 | false | false | false | false |
crossbario/autobahn-python | autobahn/xbr/_eip712_authority_certificate.py | 2 | 19333 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import os.path
import pprint
from binascii import a2b_hex
from typing import Dict, Any, Optional, List
import web3
import cbor2
from py_eth_sig_utils.eip712 import encode_typed_data
from autobahn.wamp.message import _URI_PAT_REALM_NAME_ETH
from autobahn.xbr._secmod import EthereumKey
from ._eip712_base import sign, recover, is_chain_id, is_address, is_block_number, is_signature, is_eth_privkey
from ._eip712_certificate import EIP712Certificate
def create_eip712_authority_certificate(chainId: int,
                                        verifyingContract: bytes,
                                        validFrom: int,
                                        issuer: bytes,
                                        subject: bytes,
                                        realm: bytes,
                                        capabilities: int,
                                        meta: str) -> dict:
    """
    Authority certificate: long-lived, on-chain L2.

    Create the EIP712 typed-data structure for an authority certificate
    from the given certificate fields.

    :param chainId: Chain ID the certificate is valid on.
    :param verifyingContract: Address (20 bytes) of the verifying contract.
    :param validFrom: Block number from which the certificate is valid.
    :param issuer: Address (20 bytes) of the certificate issuer.
    :param subject: Address (20 bytes) of the certificate subject.
    :param realm: Address (20 bytes) of the realm the certificate applies to.
    :param capabilities: Capabilities bitmask (0 .. 2**53).
    :param meta: Optional free-form metadata string (``None`` maps to ``""``).
    :return: The certificate as EIP712 typed-data (``types``, ``primaryType``,
        ``domain`` and ``message``).
    """
    assert is_chain_id(chainId)
    assert is_address(verifyingContract)
    assert is_block_number(validFrom)
    assert is_address(issuer)
    assert is_address(subject)
    assert is_address(realm)
    # note: typed as uint64 on the wire, but restricted here to 2**53
    assert type(capabilities) == int and 0 <= capabilities <= 2 ** 53
    assert meta is None or type(meta) == str

    data = {
        'types': {
            'EIP712Domain': [
                {
                    'name': 'name',
                    'type': 'string'
                },
                {
                    'name': 'version',
                    'type': 'string'
                },
            ],
            'EIP712AuthorityCertificate': [
                {
                    'name': 'chainId',
                    'type': 'uint256'
                },
                {
                    'name': 'verifyingContract',
                    'type': 'address'
                },
                {
                    'name': 'validFrom',
                    'type': 'uint256'
                },
                {
                    'name': 'issuer',
                    'type': 'address'
                },
                {
                    'name': 'subject',
                    'type': 'address'
                },
                {
                    'name': 'realm',
                    'type': 'address'
                },
                {
                    'name': 'capabilities',
                    'type': 'uint64'
                },
                {
                    'name': 'meta',
                    'type': 'string'
                }
            ]
        },
        'primaryType': 'EIP712AuthorityCertificate',
        'domain': {
            'name': 'WMP',
            'version': '1',
        },
        'message': {
            'chainId': chainId,
            'verifyingContract': verifyingContract,
            'validFrom': validFrom,
            'issuer': issuer,
            'subject': subject,
            'realm': realm,
            'capabilities': capabilities,
            'meta': meta or '',
        }
    }

    return data
def sign_eip712_authority_certificate(eth_privkey: bytes,
                                      chainId: int,
                                      verifyingContract: bytes,
                                      validFrom: int,
                                      issuer: bytes,
                                      subject: bytes,
                                      realm: bytes,
                                      capabilities: int,
                                      meta: str) -> bytes:
    """
    Sign an EIP712 authority certificate, given by its individual fields,
    with the provided Ethereum private key.

    :return: The raw signature.
    """
    assert is_eth_privkey(eth_privkey)

    typed_data = create_eip712_authority_certificate(chainId, verifyingContract, validFrom,
                                                     issuer, subject, realm, capabilities, meta)
    return sign(eth_privkey, typed_data)
def recover_eip712_authority_certificate(chainId: int,
                                         verifyingContract: bytes,
                                         validFrom: int,
                                         issuer: bytes,
                                         subject: bytes,
                                         realm: bytes,
                                         capabilities: int,
                                         meta: str,
                                         signature: bytes) -> bytes:
    """
    Recover the signer address of an EIP712 authority certificate signature,
    given the certificate fields and the signature.

    :return: The (computed) signer address the signature was signed with.
    """
    assert is_signature(signature)

    typed_data = create_eip712_authority_certificate(chainId, verifyingContract, validFrom,
                                                     issuer, subject, realm, capabilities, meta)
    return recover(typed_data, signature)
class EIP712AuthorityCertificate(EIP712Certificate):
    """
    EIP712-typed authority certificate (long-lived, on-chain L2).

    Holds the certificate fields, the EIP712 typed-data hash computed over
    them, and (optionally) a list of signatures; provides signing, signer
    recovery, (un)marshalling and CBOR file persistence.
    """

    # capability bit flags (combined into the ``capabilities`` bitmask)
    CAPABILITY_ROOT_CA = 1
    CAPABILITY_INTERMEDIATE_CA = 2
    CAPABILITY_PUBLIC_RELAY = 4
    CAPABILITY_PRIVATE_RELAY = 8
    CAPABILITY_PROVIDER = 16
    CAPABILITY_CONSUMER = 32

    __slots__ = (
        # EIP712 attributes
        'chainId',
        'verifyingContract',
        'validFrom',
        'issuer',
        'subject',
        'realm',
        'capabilities',
        'meta',

        # additional attributes
        'signatures',
        'hash',
    )

    def __init__(self, chainId: int, verifyingContract: bytes, validFrom: int, issuer: bytes, subject: bytes,
                 realm: bytes, capabilities: int, meta: str,
                 signatures: Optional[List[bytes]] = None):
        """
        :param chainId: Chain ID the certificate is valid on.
        :param verifyingContract: Address (20 bytes) of the verifying contract.
        :param validFrom: Block number from which the certificate is valid.
        :param issuer: Address (20 bytes) of the certificate issuer.
        :param subject: Address (20 bytes) of the certificate subject.
        :param realm: Address (20 bytes) of the realm the certificate applies to.
        :param capabilities: Bitmask of ``CAPABILITY_*`` flags.
        :param meta: Optional free-form metadata string.
        :param signatures: Optional list of raw signatures over the certificate.
        """
        super().__init__(chainId, verifyingContract, validFrom)
        self.issuer = issuer
        self.subject = subject
        self.realm = realm
        self.capabilities = capabilities
        self.meta = meta
        self.signatures = signatures
        eip712 = create_eip712_authority_certificate(chainId,
                                                     verifyingContract,
                                                     validFrom,
                                                     issuer,
                                                     subject,
                                                     realm,
                                                     capabilities,
                                                     meta)
        # EIP712 typed-data hash over all certificate fields
        self.hash = encode_typed_data(eip712)

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, self.__class__):
            return False
        # FIX: this method previously called ``EIP712AuthorityCertificate.__eq__``
        # on itself, which recursed infinitely; all fields (including the base
        # class fields chainId/verifyingContract/validFrom) are compared directly.
        return (other.chainId == self.chainId and
                other.verifyingContract == self.verifyingContract and
                other.validFrom == self.validFrom and
                other.issuer == self.issuer and
                other.subject == self.subject and
                other.realm == self.realm and
                other.capabilities == self.capabilities and
                other.meta == self.meta and
                other.signatures == self.signatures and
                other.hash == self.hash)

    def __ne__(self, other: Any) -> bool:
        return not self.__eq__(other)

    def __str__(self) -> str:
        return pprint.pformat(self.marshal())

    def sign(self, key: EthereumKey, binary: bool = False) -> bytes:
        """
        Sign this certificate with the given key.

        :param key: The key to sign the certificate's EIP712 typed-data with.
        :param binary: If true, return the raw binary signature.
        :return: The signature.
        """
        eip712 = create_eip712_authority_certificate(self.chainId,
                                                     self.verifyingContract,
                                                     self.validFrom,
                                                     self.issuer,
                                                     self.subject,
                                                     self.realm,
                                                     self.capabilities,
                                                     self.meta)
        return key.sign_typed_data(eip712, binary=binary)

    def recover(self, signature: bytes) -> bytes:
        """
        Recover the signer address the given signature was signed with.

        :param signature: The signature over this certificate.
        :return: The (computed) signer address.
        """
        return recover_eip712_authority_certificate(self.chainId,
                                                    self.verifyingContract,
                                                    self.validFrom,
                                                    self.issuer,
                                                    self.subject,
                                                    self.realm,
                                                    self.capabilities,
                                                    self.meta,
                                                    signature)

    def marshal(self, binary: bool = False) -> Dict[str, Any]:
        """
        Marshal this certificate into EIP712 typed-data.

        :param binary: If true, keep addresses as raw bytes; otherwise
            convert them to checksummed hex strings.
        :return: The certificate as EIP712 typed-data.
        """
        obj = create_eip712_authority_certificate(chainId=self.chainId,
                                                  verifyingContract=self.verifyingContract,
                                                  validFrom=self.validFrom,
                                                  issuer=self.issuer,
                                                  subject=self.subject,
                                                  realm=self.realm,
                                                  capabilities=self.capabilities,
                                                  meta=self.meta)
        if not binary:
            message = obj['message']
            for field in ('verifyingContract', 'issuer', 'subject', 'realm'):
                message[field] = web3.Web3.toChecksumAddress(message[field]) if message[field] else None
        return obj

    @staticmethod
    def _parse_address(value: Any, field: str, binary: bool) -> bytes:
        """
        Validate one address attribute and normalize it to 20 raw bytes.

        :param value: The attribute value: raw bytes (when ``binary``), else
            a ``0x``-prefixed hex string.
        :param field: The attribute name (used in error messages).
        :param binary: Whether ``value`` is expected as raw bytes.
        :return: The address as 20 raw bytes.
        """
        if value is None:
            raise ValueError('missing {} in EIP712AuthorityCertificate'.format(field))
        if binary:
            if type(value) != bytes:
                raise ValueError('invalid type {} for {} in EIP712AuthorityCertificate'.format(type(value), field))
            if len(value) != 20:
                # FIX: the original reported "verifyingContract" in this error
                # even when the field being validated was "subject"
                raise ValueError('invalid value length {} of {}'.format(len(value), field))
            return value
        else:
            if type(value) != str:
                raise ValueError('invalid type {} for {} in EIP712AuthorityCertificate'.format(type(value), field))
            if not _URI_PAT_REALM_NAME_ETH.match(value):
                raise ValueError('invalid value "{}" for {} in EIP712AuthorityCertificate'.format(value, field))
            return a2b_hex(value[2:])

    @staticmethod
    def parse(obj, binary: bool = False) -> 'EIP712AuthorityCertificate':
        """
        Parse a certificate from EIP712 typed-data (see :meth:`marshal`).

        :param obj: The EIP712 typed-data to parse.
        :param binary: Whether addresses in the data are raw bytes
            (otherwise hex strings).
        :return: The parsed certificate.
        """
        if type(obj) != dict:
            raise ValueError('invalid type {} for object in EIP712AuthorityCertificate.parse'.format(type(obj)))
        primaryType = obj.get('primaryType', None)
        if primaryType != 'EIP712AuthorityCertificate':
            raise ValueError('invalid primaryType "{}" - expected "EIP712AuthorityCertificate"'.format(primaryType))
        # FIXME: check EIP712 types, domain

        data = obj.get('message', None)
        if type(data) != dict:
            raise ValueError('invalid type {} for EIP712AuthorityCertificate'.format(type(data)))
        for k in data:
            if k not in ['type', 'chainId', 'verifyingContract', 'validFrom', 'issuer', 'subject',
                         'realm', 'capabilities', 'meta']:
                raise ValueError('invalid attribute "{}" in EIP712AuthorityCertificate'.format(k))

        _type = data.get('type', None)
        if _type and _type != 'EIP712AuthorityCertificate':
            raise ValueError('unexpected type "{}" in EIP712AuthorityCertificate'.format(_type))

        chainId = data.get('chainId', None)
        if chainId is None:
            raise ValueError('missing chainId in EIP712AuthorityCertificate')
        if type(chainId) != int:
            raise ValueError('invalid type {} for chainId in EIP712AuthorityCertificate'.format(type(chainId)))

        verifyingContract = EIP712AuthorityCertificate._parse_address(
            data.get('verifyingContract', None), 'verifyingContract', binary)

        validFrom = data.get('validFrom', None)
        if validFrom is None:
            raise ValueError('missing validFrom in EIP712AuthorityCertificate')
        if type(validFrom) != int:
            raise ValueError('invalid type {} for validFrom in EIP712AuthorityCertificate'.format(type(validFrom)))

        issuer = EIP712AuthorityCertificate._parse_address(data.get('issuer', None), 'issuer', binary)
        subject = EIP712AuthorityCertificate._parse_address(data.get('subject', None), 'subject', binary)
        realm = EIP712AuthorityCertificate._parse_address(data.get('realm', None), 'realm', binary)

        capabilities = data.get('capabilities', None)
        if capabilities is None:
            raise ValueError('missing capabilities in EIP712AuthorityCertificate')
        if type(capabilities) != int:
            raise ValueError('invalid type {} for capabilities in EIP712AuthorityCertificate'.format(type(capabilities)))

        meta = data.get('meta', None)
        if meta is None:
            raise ValueError('missing meta in EIP712AuthorityCertificate')
        if type(meta) != str:
            raise ValueError('invalid type {} for meta in EIP712AuthorityCertificate'.format(type(meta)))

        obj = EIP712AuthorityCertificate(chainId=chainId, verifyingContract=verifyingContract, validFrom=validFrom,
                                         issuer=issuer, subject=subject, realm=realm, capabilities=capabilities,
                                         meta=meta)
        return obj

    def save(self, filename: str) -> int:
        """
        Save certificate to file. File format (serialized as CBOR):

            [cert_hash: bytes, cert_eip712: Dict[str, Any], cert_signatures: List[bytes]]

        :param filename: The file to save the certificate to.
        :return: Number of bytes written.
        """
        cert_obj = [self.hash, self.marshal(binary=True), self.signatures or []]
        with open(filename, 'wb') as f:
            data = cbor2.dumps(cert_obj)
            f.write(data)
        return len(data)

    @staticmethod
    def load(filename) -> 'EIP712AuthorityCertificate':
        """
        Load certificate from file (see :meth:`save` for the file format).

        :param filename: The file to load the certificate from.
        :return: The loaded certificate.
        """
        if not os.path.isfile(filename):
            raise RuntimeError('cannot create EIP712AuthorityCertificate from filename "{}": not a file'.format(filename))
        with open(filename, 'rb') as f:
            cert_hash, cert_eip712, cert_signatures = cbor2.loads(f.read())
        cert = EIP712AuthorityCertificate.parse(cert_eip712, binary=True)
        # FIX: was a bare ``assert`` (stripped under ``python -O``); validate
        # untrusted file contents with an explicit check instead
        if cert_hash != cert.hash:
            raise ValueError('invalid certificate hash in file "{}"'.format(filename))
        cert.signatures = cert_signatures
        return cert
| mit | 131994a363a909411a1b8717347b77dd | 40.84632 | 163 | 0.525216 | 4.857538 | false | false | false | false |
crossbario/autobahn-python | examples/twisted/wamp/work/newapi/test_newapi_pubreg_decorator.py | 3 | 2606 | import txaio
from twisted.internet.defer import inlineCallbacks
from twisted.internet.endpoints import TCP4ClientEndpoint
from twisted.internet.endpoints import SSL4ClientEndpoint
from twisted.internet.endpoints import UNIXClientEndpoint
from twisted.internet.ssl import optionsForClientTLS, trustRootFromCertificates, Certificate, CertificateOptions
from twisted.internet import reactor
from autobahn.twisted.component import Component, run
from autobahn.twisted.util import sleep
from autobahn.twisted.wamp import Session
from autobahn.wamp.types import PublishOptions, SubscribeOptions
from autobahn.wamp.types import RegisterOptions, CallOptions
component = Component(
transports='ws://localhost:8080/ws',
realm='crossbardemo',
)
# @component.subscribe(
# "com.example.",
# options=SubscribeOptions(match="prefix"),
# )
# def catch_all(*args, **kw):
# print("catch_all({}, {})".format(args, kw))
@component.subscribe(
"com.example.",
options=SubscribeOptions(match="prefix", details_arg='details'),
)
def an_event(details=None):
print("topic '{}'".format(details.topic))
@component.register(
"com.example.progressive",
options=RegisterOptions(details_arg='details'),
)
@inlineCallbacks
def progressive_callee(details=None):
print("progressive", details)
if details.progress is None:
raise RuntimeError(
"You can only call be with an on_progress handler"
)
for x in ['here are', 'some progressive', 'results']:
details.progress(x)
yield sleep(.5)
return None
@component.on_join
def join(session, details):
print("Session {} joined: {}".format(details.session, details))
def pub(topic):
print("publishing '{}'".format(topic))
return session.publish(
topic,
options=PublishOptions(exclude_me=False),
)
def call_progress(topic):
print("calling '{}' progressively".format(topic))
def on_progress(some_data):
print("received: '{}'".format(some_data))
return session.call(
topic,
options=CallOptions(on_progress=on_progress)
)
reactor.callLater(1, pub, 'com.example.foo')
reactor.callLater(2, pub, 'com.non_matching')
reactor.callLater(3, pub, 'com.example.some.other.uri')
reactor.callLater(4, call_progress, 'com.example.progressive')
reactor.callLater(7, session.leave)
@component.on_leave
def leave(session, details):
print("Session leaving: {}: {}".format(details.reason, details.message))
if __name__ == '__main__':
run(component)
| mit | 6374a0d3ad5ce6ef8934d99044a258b6 | 29.302326 | 112 | 0.692249 | 3.798834 | false | false | false | false |
crossbario/autobahn-python | autobahn/twisted/test/test_tx_websocket_agent.py | 3 | 2078 | from twisted.trial import unittest
try:
from autobahn.twisted.testing import create_memory_agent, MemoryReactorClockResolver, create_pumper
HAVE_TESTING = True
except ImportError:
HAVE_TESTING = False
from twisted.internet.defer import inlineCallbacks
from autobahn.twisted.websocket import WebSocketServerProtocol
class TestAgent(unittest.TestCase):
skip = not HAVE_TESTING
def setUp(self):
self.pumper = create_pumper()
self.reactor = MemoryReactorClockResolver()
return self.pumper.start()
def tearDown(self):
return self.pumper.stop()
@inlineCallbacks
def test_echo_server(self):
class EchoServer(WebSocketServerProtocol):
def onMessage(self, msg, is_binary):
self.sendMessage(msg)
agent = create_memory_agent(self.reactor, self.pumper, EchoServer)
proto = yield agent.open("ws://localhost:1234/ws", dict())
messages = []
def got(msg, is_binary):
messages.append(msg)
proto.on("message", got)
proto.sendMessage(b"hello")
if True:
# clean close
proto.sendClose()
else:
# unclean close
proto.transport.loseConnection()
yield proto.is_closed
self.assertEqual([b"hello"], messages)
@inlineCallbacks
def test_secure_echo_server(self):
class EchoServer(WebSocketServerProtocol):
def onMessage(self, msg, is_binary):
self.sendMessage(msg)
agent = create_memory_agent(self.reactor, self.pumper, EchoServer)
proto = yield agent.open("wss://localhost:1234/ws", dict())
messages = []
def got(msg, is_binary):
messages.append(msg)
proto.on("message", got)
proto.sendMessage(b"hello")
if True:
# clean close
proto.sendClose()
else:
# unclean close
proto.transport.loseConnection()
yield proto.is_closed
self.assertEqual([b"hello"], messages)
| mit | eed9924967007317219a65207ac44c53 | 25.987013 | 103 | 0.615977 | 4.258197 | false | true | false | false |
crossbario/autobahn-python | examples/twisted/wamp/rpc/progress/frontend.py | 3 | 2239 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from os import environ
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks
from autobahn.wamp.types import CallOptions
from autobahn.twisted.wamp import ApplicationSession, ApplicationRunner
class Component(ApplicationSession):
"""
Application component that consumes progressive results.
"""
@inlineCallbacks
def onJoin(self, details):
print("session attached")
def on_progress(i):
print("Progress: {}".format(i))
res = yield self.call('com.myapp.longop', 3, options=CallOptions(on_progress=on_progress))
print("Final: {}".format(res))
self.leave()
def onDisconnect(self):
print("disconnected")
reactor.stop()
if __name__ == '__main__':
url = environ.get("AUTOBAHN_DEMO_ROUTER", "ws://127.0.0.1:8080/ws")
realm = "crossbardemo"
runner = ApplicationRunner(url, realm)
runner.run(Component)
| mit | 59ec9a48052fe38702b25803434a894c | 35.112903 | 98 | 0.675301 | 4.486974 | false | false | false | false |
crossbario/autobahn-python | examples/twisted/websocket/wrapping/server_endpoint.py | 3 | 2044 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from twisted.internet.protocol import Protocol
class HelloServerProtocol(Protocol):
def connectionMade(self):
print("connectionMade")
self.transport.write('how are you?')
def dataReceived(self, data):
print("dataReceived: {}".format(data))
if __name__ == '__main__':
import sys
from twisted.python import log
from twisted.internet import reactor
from twisted.internet.protocol import Factory
from twisted.internet.endpoints import serverFromString
log.startLogging(sys.stdout)
wrappedFactory = Factory.forProtocol(HelloServerProtocol)
endpoint = serverFromString(reactor, "autobahn:tcp\:9000:url=ws\://127.0.0.1\:9000")
endpoint.listen(wrappedFactory)
reactor.run()
| mit | 579fcbfacc22ff234aa351f65cf2998b | 35.5 | 88 | 0.689335 | 4.656036 | false | false | false | false |
crossbario/autobahn-python | examples/asyncio/wamp/pubsub/complex/backend.py | 3 | 2264 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import random
from os import environ
import asyncio
from autobahn.wamp.types import SubscribeOptions
from autobahn.asyncio.wamp import ApplicationSession, ApplicationRunner
class Component(ApplicationSession):
"""
An application component that publishes events with no payload and
with complex payloads every second.
"""
async def onJoin(self, details):
counter = 0
while True:
print("publish: com.myapp.heartbeat")
self.publish('com.myapp.heartbeat')
obj = {'counter': counter, 'foo': [1, 2, 3]}
print("publish: com.myapp.topic2")
self.publish('com.myapp.topic2', random.randint(0, 100), 23, c="Hello", d=obj)
counter += 1
await asyncio.sleep(1)
if __name__ == '__main__':
url = environ.get("AUTOBAHN_DEMO_ROUTER", "ws://127.0.0.1:8080/ws")
realm = "crossbardemo"
runner = ApplicationRunner(url, realm)
runner.run(Component)
| mit | fff3dd52ee796de3b048f62e66fbc15d | 37.372881 | 90 | 0.661219 | 4.387597 | false | false | false | false |
crossbario/autobahn-python | examples/twisted/wamp/meta/backend.py | 3 | 2700 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from os import environ
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks
from autobahn.twisted.wamp import Session, ApplicationRunner
from autobahn.twisted.util import sleep
class Component(Session):
"""
An application component calling the different backend procedures.
"""
@inlineCallbacks
def onJoin(self, details):
print("session attached {}".format(details))
counter = 0
while True:
counter += 1
pub = yield self.publish('event.foo.{}'.format(counter), "some data")
print("published {}".format(pub))
yield sleep(1)
sub = yield self.subscribe(lambda: None, 'event.sub_{}'.format(counter))
print("subscribed {sub.id}".format(sub=sub))
yield sleep(1)
reg = yield self.register(lambda: 42, 'event.rpc.test_{}'.format(counter))
print("registered {reg.id}".format(reg=reg))
yield sleep(1)
call = yield self.call('event.rpc.test_{}'.format(counter))
print("called {}".format(call))
yield sleep(1)
yield reg.unregister()
yield sub.unsubscribe()
if __name__ == '__main__':
runner = ApplicationRunner(
environ.get("AUTOBAHN_DEMO_ROUTER", "ws://127.0.0.1:8080/auth_ws"),
"crossbardemo",
)
runner.run(Component)
| mit | 038799f18c5afd28e146d3947c80225f | 36.5 | 86 | 0.639259 | 4.51505 | false | false | false | false |
crossbario/autobahn-python | examples/twisted/websocket/wrapping/client_endpoint.py | 3 | 2049 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from twisted.internet.protocol import Protocol
class HelloClientProtocol(Protocol):
def connectionMade(self):
print("connectionMade")
self.transport.write('hello')
def dataReceived(self, data):
print("dataReceived: {}".format(data))
if __name__ == '__main__':
import sys
from twisted.python import log
from twisted.internet import reactor
from twisted.internet.protocol import Factory
from twisted.internet.endpoints import clientFromString
log.startLogging(sys.stdout)
wrappedFactory = Factory.forProtocol(HelloClientProtocol)
endpoint = clientFromString(reactor, "autobahn:tcp\:127.0.0.1\:9000:url=ws\://localhost\:9000")
endpoint.connect(wrappedFactory)
reactor.run()
| mit | 78edeed73ea3a1c8d06888babcfc345e | 35.589286 | 99 | 0.690581 | 4.656818 | false | false | false | false |
crossbario/autobahn-python | examples/twisted/websocket/wxpython/server.py | 3 | 2828 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from autobahn.twisted.websocket import WebSocketServerFactory, \
WebSocketServerProtocol
class BroadcastServerProtocol(WebSocketServerProtocol):
def onOpen(self):
self.factory.register(self)
def onMessage(self, payload, isBinary):
self.factory.broadcast(payload, isBinary)
def onClose(self, wasClean, code, reason):
self.factory.unregister(self)
class BroadcastServerFactory(WebSocketServerFactory):
"""
Simple broadcast server broadcasting any message it receives to all
currently connected clients.
"""
protocol = BroadcastServerProtocol
def __init__(self, url):
WebSocketServerFactory.__init__(self, url)
self.clients = []
def register(self, client):
if client not in self.clients:
self.clients.append(client)
print("registered client {}".format(client.peer))
def unregister(self, client):
if client in self.clients:
self.clients.remove(client)
print("unregistered client {}".format(client.peer))
def broadcast(self, payload, isBinary):
for c in self.clients:
c.sendMessage(payload, isBinary)
print("broadcasted message to {} clients".format(len(self.clients)))
if __name__ == '__main__':
import sys
from twisted.python import log
from twisted.internet import reactor
log.startLogging(sys.stdout)
factory = BroadcastServerFactory("ws://127.0.0.1:9000")
reactor.listenTCP(9000, factory)
reactor.run()
| mit | 0f13867c619073ec2735f81f6532a7f3 | 33.072289 | 79 | 0.673621 | 4.576052 | false | false | false | false |
demisto/content | Packs/FeedReversingLabsRansomwareAndRelatedToolsApp/Integrations/ReversingLabsRansomwareAndRelatedToolsFeed/ReversingLabsRansomwareAndRelatedToolsFeed.py | 2 | 10316 | from CommonServerPython import *
VERSION = "v1.0.0"
USER_AGENT = f"ReversingLabs XSOAR Ransomware Feed {VERSION}"
MAX_HOURS_HISTORICAL = 4
ALLOWED_INDICATOR_TYPES = ("ipv4", "domain", "hash", "uri")
INDICATOR_TYPE_MAP = {
"ipv4": FeedIndicatorType.IP,
"domain": FeedIndicatorType.Domain,
"hash": FeedIndicatorType.File,
"uri": FeedIndicatorType.URL
}
class Client(BaseClient):
RANSOMWARE_INDICATORS_ENDPOINT = "/api/public/v1/ransomware/indicators?hours={hours}&" \
"indicatorTypes={indicator_types}&tagFormat=dict"
def __init__(self, base_url, auth, headers, verify):
super(Client, self).__init__(base_url=base_url, auth=auth, headers=headers, verify=verify)
def query_indicators(self, hours, indicator_types, timeout, retries):
endpoint = self.RANSOMWARE_INDICATORS_ENDPOINT.format(
hours=hours,
indicator_types=indicator_types,
)
try:
response = self._http_request(
method="GET",
url_suffix=endpoint,
timeout=timeout,
auth=self._auth,
retries=retries,
resp_type="json"
)
except Exception as e:
return_error(f"Request towards the defined endpoint {endpoint} did not succeed. {str(e)}")
return response
def confidence_to_score(confidence):
if confidence >= 70:
return 3
elif 69 >= confidence >= 2:
return 2
else:
return None
def calculate_hours_historical(hours_param):
last_run = get_feed_last_run()
if not last_run:
return hours_param
try:
time_delta = datetime.now() - datetime.strptime(last_run.get("last_run"), "%Y-%m-%dT%H:%M:%S.%f")
time_delta_hours_rounded = round((time_delta.seconds / 3600) + 1)
return time_delta_hours_rounded
except Exception:
return 2
def return_validated_params(params):
hours_param = params.get("hours")
try:
hours_param = int(hours_param)
except ValueError:
return_error("The First fetch time parameter must be integer.")
hours_historical = calculate_hours_historical(hours_param)
if hours_historical > MAX_HOURS_HISTORICAL:
hours_historical = MAX_HOURS_HISTORICAL
indicator_types_param = params.get("indicatorTypes")
for indicator_type in indicator_types_param:
if indicator_type not in ALLOWED_INDICATOR_TYPES:
return_error(f"Selected indicator type '{indicator_type}' is not supported.")
indicator_types_param = ",".join(indicator_types_param)
return hours_historical, indicator_types_param
def fetch_indicators_command(client, params):
hours_historical, indicator_types_param = return_validated_params(params)
new_last_run = datetime.now().isoformat()
response = client.query_indicators(
hours=hours_historical,
indicator_types=indicator_types_param,
timeout=(30, 300),
retries=3
)
tlp_color_param = params.get("tlp_color", None)
user_tag_list = []
user_tags_param = params.get("feedTags", None)
if user_tags_param:
user_tags_param = user_tags_param.split(",")
for user_tag in user_tags_param:
user_tag_list.append(user_tag.strip())
data = response.get("data", [])
indicators = []
for rl_indicator in data:
indicator = create_indicator_object(rl_indicator, user_tag_list, tlp_color_param)
indicators.append(indicator)
return indicators, new_last_run
def map_file_info(indicator, tag_list, file_info):
if file_info:
if isinstance(file_info, list):
tag_list.extend(file_info)
elif isinstance(file_info, dict):
file_name = file_info.get("fileName")
file_info_fields = assign_params(
size=file_info.get("fileSize"),
filetype=file_info.get("fileType"),
associatedfilenames=[file_name]
)
indicator["fields"].update(file_info_fields)
if file_name and isinstance(file_name, str):
file_name_parts = file_name.split(".")
if len(file_name_parts) > 1:
file_extension = file_name_parts[-1]
indicator["fields"]["fileextension"] = file_extension
def create_indicator_object(rl_indicator, user_tag_list, tlp_color_param):
last_update = rl_indicator.get("lastUpdate", None)
last_seen = datetime.strptime(last_update, "%Y-%m-%dT%H:%M:%SZ") if last_update else datetime.now()
last_seen = last_seen.strftime("%Y-%m-%dT%H:%M:%S+00:00")
indicator_type = rl_indicator.get("indicatorType").lower()
indicator = {
"value": rl_indicator.get("indicatorValue"),
"type": INDICATOR_TYPE_MAP.get(indicator_type),
"rawJSON": rl_indicator,
"fields": {
"lastseenbysource": last_seen
},
"score": confidence_to_score(rl_indicator.get("confidence", 0)),
}
indicator_tags = rl_indicator.get("indicatorTags")
if not indicator_tags:
return indicator
tag_list = []
mitre = indicator_tags.get("mitre")
if mitre:
tag_list.extend(mitre)
lifecycle_stage = indicator_tags.get("lifecycleStage")
if lifecycle_stage:
tag_list.append(lifecycle_stage)
source = indicator_tags.get("source")
if source:
tag_list.append(source)
additional_fields = assign_params(
malwaretypes=indicator_tags.get("malwareType"),
malwarefamily=indicator_tags.get("malwareFamilyName"),
trafficlightprotocol=tlp_color_param
)
indicator["fields"].update(additional_fields)
if indicator_type == "hash":
hashes = rl_indicator.get("hash")
if hashes:
hash_fields = assign_params(
sha1=hashes.get("sha1"),
sha256=hashes.get("sha256"),
md5=hashes.get("md5")
)
indicator["fields"].update(hash_fields)
map_file_info(indicator, tag_list, indicator_tags.get("fileInfo"))
elif indicator_type in ("ipv4", "uri", "domain"):
port = indicator_tags.get("port")
if port:
indicator["fields"]["port"] = port
protocol = indicator_tags.get("Protocol")
if protocol:
tag_list.extend(protocol)
if indicator_type == "ipv4":
asn = indicator_tags.get("asn")
if asn:
indicator["fields"]["asn"] = asn
tag_list.extend(user_tag_list)
if len(tag_list) > 0:
indicator["fields"]["tags"] = tag_list
return indicator
def get_indicators_command(client):
hours_arg = demisto.args().get("hours_back", 2)
try:
hours_arg = int(hours_arg)
except ValueError:
return_error("The hours_back argument must be a whole number.")
if hours_arg > MAX_HOURS_HISTORICAL:
hours_arg = MAX_HOURS_HISTORICAL
indicator_types_arg = demisto.args().get("indicator_types", "ipv4,domain,hash,uri").replace(" ", "")
for indicator_type in indicator_types_arg.split(","):
if indicator_type not in ALLOWED_INDICATOR_TYPES:
return_error(f"Selected indicator type '{indicator_type}' is not supported.")
limit = int(demisto.args().get("limit", 50))
response = client.query_indicators(
hours=hours_arg,
indicator_types=indicator_types_arg,
timeout=(30, 300),
retries=3
)
indicator_list = response.get("data", [])[:limit]
readable_output = format_readable_output(response, indicator_list)
command_result = CommandResults(
readable_output=readable_output,
raw_response=response,
outputs_prefix='ReversingLabs',
outputs={"indicators": indicator_list}
)
return command_result
def format_readable_output(response, indicator_list):
indicator_types = response.get("request").get("indicatorTypes", [])
hours = response.get("request").get("hours", "")
markdown = f"""## ReversingLabs Ransomware and Related Tools Feed\n **Indicator types**: {', '.join(indicator_types)}
**Hours**: {hours}
"""
indicator_table = tableToMarkdown(
name="Indicators",
t=indicator_list,
headers=["indicatorValue", "indicatorType", "daysValid", "confidence",
"rating", "indicatorTags", "lastUpdate", "deleted", "hash"],
headerTransform=pascalToSpace
)
markdown = f"{markdown}\n{indicator_table}"
return markdown
def test_module_command(client, params):
hours_param, indicator_types_param = return_validated_params(params)
client.query_indicators(
hours=hours_param,
indicator_types=indicator_types_param,
timeout=(30, 300),
retries=1
)
return "ok"
def main():
params = demisto.params()
host = params.get("host")
username = params.get("credentials", {}).get("identifier")
password = params.get("credentials", {}).get("password")
verify = params.get("insecure")
command = demisto.command()
demisto.debug(f"Command being called is {command}")
try:
client = Client(
base_url=host,
verify=verify,
auth=(username, password),
headers={"User-Agent": USER_AGENT}
)
if command == "test-module":
result = test_module_command(client, params)
return_results(result)
elif command == "reversinglabs-get-indicators":
command_result = get_indicators_command(client)
return_results(command_result)
elif command == "fetch-indicators":
indicators, new_last_run = fetch_indicators_command(client, params)
for indicator_batch in batch(indicators, 200):
demisto.createIndicators(indicator_batch)
set_feed_last_run({"last_run": new_last_run})
else:
raise NotImplementedError(f"Command {command} is not implemented.")
except Exception as e:
demisto.error(traceback.format_exc())
return_error(f"Failed to execute {command} command.\nError:\n{str(e)}")
if __name__ in ["__main__", "builtin", "builtins"]:
main()
| mit | aebc0d88e122296850ccdbc5263a6543 | 28.059155 | 121 | 0.61235 | 3.849254 | false | false | false | false |
demisto/content | Packs/OpenLDAP/Integrations/OpenLDAP/OpenLDAP.py | 2 | 32938 | import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
''' IMPORTS '''
import ssl
from ldap3 import Server, Connection, Tls, BASE, AUTO_BIND_TLS_BEFORE_BIND, AUTO_BIND_NO_TLS
from ldap3.utils.dn import parse_dn
from ldap3.core.exceptions import LDAPBindError, LDAPInvalidDnError, LDAPSocketOpenError, LDAPInvalidPortError
from typing import Tuple, List
''' LDAP Authentication CLIENT '''
class LdapClient:
    """
    Base client for LDAP authentication against an OpenLDAP or Active Directory server.

    :type kwargs: ``dict``
    :param kwargs: Initialization params for the ldap client (the integration's configuration parameters).
    """
    # Supported LDAP server vendors (values of the 'ldap_server_vendor' parameter).
    OPENLDAP = 'OpenLDAP'
    ACTIVE_DIRECTORY = 'Active Directory'
    # Attribute names used for group lookups - presumably Active Directory schema attributes;
    # verify against their usage elsewhere in this file.
    GROUPS_TOKEN = 'primaryGroupToken'
    GROUPS_MEMBER = 'memberOf'
    GROUPS_PRIMARY_ID = 'primaryGroupID'
    TIMEOUT = 120  # timeout (seconds) for ssl/tls socket
    DEV_BUILD_NUMBER = 'REPLACE_THIS_WITH_CI_BUILD_NUM'  # is used only in dev mode
    SUPPORTED_BUILD_NUMBER = 57352  # required server build number
    CIPHERS_STRING = '@SECLEVEL=1:ECDHE+AESGCM:ECDHE+CHACHA20:DHE+AESGCM:DHE+CHACHA20:ECDH+AESGCM:DH+AESGCM:' \
                     'ECDH+AES:DH+AES:RSA+ANESGCM:RSA+AES:!aNULL:!eNULL:!MD5:!DSS'  # Allowed ciphers for SSL/TLS
    # NOTE(review): 'RSA+ANESGCM' in the cipher string above looks like a typo for 'RSA+AESGCM'
    # (OpenSSL ignores unrecognized cipher tokens, so the RSA+AES-GCM ciphers may never be enabled) -
    # confirm before changing, since this string affects TLS negotiation at runtime.
def __init__(self, kwargs):
self._ldap_server_vendor = kwargs.get('ldap_server_vendor', self.OPENLDAP) # OpenLDAP or Active Directory
self._host = kwargs.get('host')
self._port = int(kwargs.get('port')) if kwargs.get('port') else None
self._username = kwargs.get('credentials', {}).get('identifier', '')
self._password = kwargs.get('credentials', {}).get('password', '')
self._base_dn = kwargs.get('base_dn', '').strip()
self._connection_type = kwargs.get('connection_type', 'none').lower()
self._fetch_groups = kwargs.get('fetch_groups', True)
self._verify = not kwargs.get('insecure', False)
self._ldap_server = self._initialize_ldap_server()
self._page_size = int(kwargs.get('page_size', 500))
# OpenLDAP only fields:
self._groups_filter_class = kwargs.get('group_filter_class', 'posixGroup').strip()
self._group_identifier_attribute = kwargs.get('group_identifier_attribute', 'gidNumber').strip()
self._member_identifier_attribute = kwargs.get('member_identifier_attribute', 'memberUid').strip()
self._user_filter_class = kwargs.get('user_filter_class', 'posixAccount')
self._user_identifier_attribute = kwargs.get('user_identifier_attribute', 'uid')
self._custom_attributes = kwargs.get('custom_attributes', '')
@property
def GROUPS_OBJECT_CLASS(self):
"""
:rtype: ``str``
:return: Group's base class object name.
"""
return self._groups_filter_class
@property
def GROUPS_IDENTIFIER_ATTRIBUTE(self):
"""
:rtype: ``str``
:return: Groups identifier attribute.
"""
return self._group_identifier_attribute
@property
def GROUPS_MEMBERSHIP_IDENTIFIER_ATTRIBUTE(self):
"""
:rtype: ``str``
:return: Groups membership attribute.
"""
return self._member_identifier_attribute
@property
def USER_OBJECT_CLASS(self):
"""
:rtype: ``str``
:return: User's base class object name.
"""
return self._user_filter_class
@property
def USER_IDENTIFIER_ATTRIBUTE(self):
"""
rtype: ``str``
:return: Users identifier attribute.
"""
return self._user_identifier_attribute
@property
def CUSTOM_ATTRIBUTE(self):
"""
rtype: ``str``
:return: User defined attributes.
"""
return self._custom_attributes
def _get_tls_object(self):
"""
Returns a TLS object according to the user's selection of the 'Trust any certificate' checkbox.
"""
if self._verify: # Trust any certificate is unchecked
# Trust any certificate = False means that the LDAP server's certificate must be valid -
# i.e if the server's certificate is not valid the connection will fail.
tls = Tls(validate=ssl.CERT_REQUIRED, ca_certs_file=os.environ.get('SSL_CERT_FILE'),
version=ssl.PROTOCOL_TLS)
else: # Trust any certificate is checked
# Trust any certificate = True means that we do not require validation of the LDAP server's certificate,
# and allow the use of all possible ciphers.
tls = Tls(validate=ssl.CERT_NONE, ca_certs_file=None, version=ssl.PROTOCOL_TLS,
ciphers=self.CIPHERS_STRING)
# By setting the version to ssl.PROTOCOL_TLS we select the highest protocol version that both client
# and server support (can be SSL or TLS versions).
return tls
def _initialize_ldap_server(self):
"""
Initializes ldap server object with given parameters. Supports both encrypted and non encrypted connection.
:rtype: ldap3.Server
:return: Initialized ldap server object.
"""
if self._connection_type == 'ssl': # Secure connection (SSL\TLS)
demisto.info(f"Initializing LDAP sever with SSL/TLS (unsecure: {not self._verify})."
f" port: {self._port or 'default(636)'}")
tls = self._get_tls_object()
return Server(host=self._host, port=self._port, use_ssl=True, tls=tls, connect_timeout=LdapClient.TIMEOUT)
elif self._connection_type == 'start tls': # Secure connection (STARTTLS)
demisto.info(f"Initializing LDAP sever without a secure connection - Start TLS operation will be executed"
f" during bind. (unsecure: {not self._verify}). port: {self._port or 'default(389)'}")
tls = self._get_tls_object()
return Server(host=self._host, port=self._port, use_ssl=False, tls=tls, connect_timeout=LdapClient.TIMEOUT)
else: # Unsecure (non encrypted connection initialized) - connection type is None
demisto.info(f"Initializing LDAP sever without a secure connection. port: {self._port or 'default(389)'}")
return Server(host=self._host, port=self._port, connect_timeout=LdapClient.TIMEOUT)
@staticmethod
def _parse_ldap_group_entries(ldap_group_entries: List[dict], groups_identifier_attribute: str) -> List[dict]:
"""
Returns parsed ldap groups entries.
"""
return [{'DN': ldap_group.get('dn'), 'Attributes': [{'Name': LdapClient.GROUPS_TOKEN,
'Values': [str(ldap_group.get('attributes', {}).get(
groups_identifier_attribute))]}]}
for ldap_group in ldap_group_entries]
@staticmethod
def _parse_ldap_group_entries_and_referrals(ldap_group_entries: List[dict]) -> Tuple[List[str], List[dict]]:
"""
Returns parsed ldap groups entries and referrals.
"""
referrals: List[str] = []
entries: List[dict] = []
for ldap_group in ldap_group_entries:
if ldap_group_type := ldap_group.get('type'):
if ldap_group_type == 'searchResRef': # a referral
referrals.extend(ldap_group.get('uri') or [])
elif ldap_group_type == 'searchResEntry': # an entry
entries.append(
{'DN': ldap_group.get('dn'),
'Attributes': [{'Name': LdapClient.GROUPS_TOKEN,
'Values': [str(ldap_group.get('attributes', {}).get(LdapClient.GROUPS_TOKEN))]}
]
})
return referrals, entries
    def _parse_and_authenticate_ldap_group_entries_and_referrals(self, ldap_group_entries: List[dict],
                                                                 password: str) -> Tuple[List[str], List[dict]]:
        """
        Returns parsed ldap groups entries and referrals.
        Authenticate - performs simple bind operation on the ldap server with the given user and password.

        :param ldap_group_entries: raw paged-search results (mix of 'searchResRef' referrals
            and 'searchResEntry' entries).
        :param password: the end user's password, used to bind as each found entry's DN.
        :rtype: ``Tuple[List[str], List[dict]]``
        :return: (referral URIs, parsed entries with DN and attributes).
        :raises Exception: from authenticate_ldap_user if the bind fails.
        """
        referrals: List[str] = []
        entries: List[dict] = []

        for entry in ldap_group_entries:
            if entry_type := entry.get('type'):
                if entry_type == 'searchResRef':  # a referral
                    referrals.extend(entry.get('uri') or [])

                elif entry_type == 'searchResEntry':  # an entry
                    # (should be only one searchResEntry to authenticate)
                    entry_dn = entry.get('dn', '')
                    entry_attributes = entry.get('attributes', {})

                    relevant_entry_attributes = []
                    for attr in entry_attributes:
                        if attr_value := entry_attributes.get(attr, []):
                            if not isinstance(attr_value, list):
                                attr_value = [str(attr_value)]  # handle numerical values
                            relevant_entry_attributes.append({'Name': attr, 'Values': attr_value})

                    entries.append({'DN': entry_dn, 'Attributes': relevant_entry_attributes})
                    # Verify the supplied password by binding as the found user's DN.
                    self.authenticate_ldap_user(entry_dn, password)

        return referrals, entries
@staticmethod
def _parse_ldap_users_groups_entries(ldap_group_entries: List[dict]) -> List[Optional[Any]]:
"""
Returns parsed user's group entries.
"""
return [ldap_group.get('dn') for ldap_group in ldap_group_entries]
@staticmethod
def _build_entry_for_user(user_groups: str, user_data: dict,
mail_attribute: str, name_attribute: str, phone_attribute: str) -> dict:
"""
Returns entry for specific ldap user.
"""
parsed_ldap_groups = {'Name': LdapClient.GROUPS_MEMBER, 'Values': user_groups}
parsed_group_id = {'Name': LdapClient.GROUPS_PRIMARY_ID, 'Values': user_data['gid_number']}
attributes = [parsed_ldap_groups, parsed_group_id]
if 'name' in user_data:
attributes.append({'Name': name_attribute, 'Values': [user_data['name']]})
if 'email' in user_data:
attributes.append({'Name': mail_attribute, 'Values': [user_data['email']]})
if 'mobile' in user_data:
attributes.append({'Name': phone_attribute, 'Values': [user_data['mobile']]})
return {
'DN': user_data['dn'],
'Attributes': attributes
}
@staticmethod
def _is_valid_dn(dn: str, user_identifier_attribute: str) -> Tuple[bool, str]:
"""
Validates whether given input is valid ldap DN. Returns flag indicator and user's identifier value from DN.
"""
try:
parsed_dn = parse_dn(dn, strip=False)
for attribute_and_value in parsed_dn:
if attribute_and_value[0].lower() == user_identifier_attribute.lower():
return True, attribute_and_value[1]
raise Exception(f'OpenLDAP {user_identifier_attribute} attribute was not found in user DN : {dn}')
except LDAPInvalidDnError as e:
demisto.debug(f'OpenLDAP failed parsing DN with error: {str(e)}. Fallback for unique id activated')
return False, dn
except Exception:
raise
    def _fetch_all_groups(self):
        """
        Fetches all ldap groups under given base DN.

        :rtype: ``dict``
        :return: dict with 'Controls', 'Referrals' and 'Entries' keys, shaped
            for the ad-groups command output.
        """
        auto_bind = self._get_auto_bind_value()
        with Connection(self._ldap_server, self._username, self._password, auto_bind=auto_bind) as ldap_conn:
            demisto.info(f'LDAP Connection Details: {ldap_conn}')
            if self._ldap_server_vendor == self.ACTIVE_DIRECTORY:
                # AD: select objects that are groups by both class and category.
                search_filter = '(&(objectClass=group)(objectCategory=group))'
                referrals, entries = self._get_ldap_groups_entries_and_referrals_ad(ldap_conn=ldap_conn,
                                                                                    search_filter=search_filter)
                return {
                    'Controls': None,
                    'Referrals': referrals,
                    'Entries': entries
                }
            else:  # ldap server is OpenLDAP
                search_filter = f'(objectClass={self.GROUPS_OBJECT_CLASS})'
                # Paged search so large directories are fetched in chunks of _page_size.
                ldap_group_entries = ldap_conn.extend.standard.paged_search(search_base=self._base_dn,
                                                                            search_filter=search_filter,
                                                                            attributes=[
                                                                                self.GROUPS_IDENTIFIER_ATTRIBUTE],
                                                                            paged_size=self._page_size)
                return {
                    'Controls': None,
                    'Referrals': ldap_conn.result.get('referrals'),
                    'Entries': LdapClient._parse_ldap_group_entries(ldap_group_entries,
                                                                    self.GROUPS_IDENTIFIER_ATTRIBUTE)
                }
def _get_formatted_custom_attributes(self) -> str:
"""
:return: custom attributes parsed to the form (att_name1=value1)(attname2=value2)
"""
if not self.CUSTOM_ATTRIBUTE:
return ''
formatted_attributes = ''
for att in self.CUSTOM_ATTRIBUTE.split(','):
if len(att.split('=')) != 2:
raise Exception(f'User defined attributes must be of the form'
f' \"attrA=valA,attrB=valB,...\", but got: {self.CUSTOM_ATTRIBUTE}')
formatted_attributes = formatted_attributes + f'({att})'
return formatted_attributes
    def _get_ldap_groups_entries_and_referrals_ad(self, ldap_conn: Connection,
                                                  search_filter: str) -> Tuple[List[str], List[dict]]:
        """
        Returns parsed ldap groups entries and referrals (Active Directory only).

        :param ldap_conn: an already-bound ldap3 Connection.
        :param search_filter: LDAP filter selecting the group objects.
        """
        # generator=False so the fully materialized result list is returned.
        ldap_group_entries = ldap_conn.extend.standard.paged_search(search_base=self._base_dn,
                                                                    search_filter=search_filter,
                                                                    attributes=[LdapClient.GROUPS_TOKEN],
                                                                    paged_size=self._page_size,
                                                                    generator=False)
        referrals, entries = LdapClient._parse_ldap_group_entries_and_referrals(ldap_group_entries)
        return referrals, entries
def _create_search_filter(self, filter_prefix: str) -> str:
return filter_prefix + self._get_formatted_custom_attributes()
    def _fetch_specific_groups(self, specific_groups: str) -> dict:
        """
        Fetches specific ldap groups under given base DN.

        :param specific_groups: '#'-separated list of group DNs.
        :rtype: ``dict``
        :return: dict with 'Controls', 'Referrals' and 'Entries' keys.
        """
        auto_bind = self._get_auto_bind_value()
        dn_list = [group.strip() for group in argToList(specific_groups, separator="#")]
        with Connection(self._ldap_server, self._username, self._password, auto_bind=auto_bind) as ldap_conn:
            demisto.info(f'LDAP Connection Details: {ldap_conn}')
            if self._ldap_server_vendor == self.ACTIVE_DIRECTORY:
                # AD: one query matching any of the requested DNs.
                dns_filter = ''
                for dn in dn_list:
                    dns_filter += f'(distinguishedName={dn})'
                search_filter = f'(&(objectClass=group)(objectCategory=group)(|{dns_filter}))'
                referrals, entries = self._get_ldap_groups_entries_and_referrals_ad(ldap_conn=ldap_conn,
                                                                                    search_filter=search_filter)
                return {
                    'Controls': None,
                    'Referrals': referrals,
                    'Entries': entries
                }
            else:  # ldap server is OpenLDAP
                # OpenLDAP: one BASE-scope search per DN (the DN itself is the search base).
                parsed_ldap_entries = []
                for dn in dn_list:
                    search_filter = f'(objectClass={self.GROUPS_OBJECT_CLASS})'
                    ldap_group_entries = ldap_conn.extend.standard.paged_search(search_base=dn,
                                                                                search_filter=search_filter,
                                                                                attributes=[
                                                                                    self.GROUPS_IDENTIFIER_ATTRIBUTE],
                                                                                paged_size=self._page_size,
                                                                                search_scope=BASE)
                    # NOTE(review): each append adds a *list* of parsed entries, so
                    # 'Entries' is a list of lists here, unlike _fetch_all_groups -
                    # confirm downstream consumers expect this shape.
                    parsed_ldap_entries.append(
                        self._parse_ldap_group_entries(ldap_group_entries, self.GROUPS_IDENTIFIER_ATTRIBUTE))
                return {
                    'Controls': None,
                    'Referrals': ldap_conn.result.get('referrals'),
                    'Entries': parsed_ldap_entries
                }
@staticmethod
def _get_ad_username(username: str) -> str:
"""
Gets a user logon name (the username that is used for log in to XSOAR)
and returns the Active Directory username.
"""
x_username = username
if '\\' in username:
x_username = username.split('\\')[1]
elif '@' in username:
x_username = username.split('@')[0]
return x_username
def _get_auto_bind_value(self) -> str:
"""
Returns the proper auto bind value according to the desirable connection type.
The 'TLS' in the auto_bind parameter refers to the STARTTLS LDAP operation, that can be performed only on a
cleartext connection (unsecure connection - port 389).
If the Client's connection type is Start TLS - the secure level will be upgraded to TLS during the
connection bind itself and thus we use the AUTO_BIND_TLS_BEFORE_BIND constant.
If the Client's connection type is SSL - the connection is already secured (server was initialized with
use_ssl=True and port 636) and therefore we use the AUTO_BIND_NO_TLS constant.
Otherwise, the Client's connection type is None - the connection is unsecured and should stay unsecured,
thus we use the AUTO_BIND_NO_TLS constant here as well.
"""
if self._connection_type == 'start tls':
auto_bind = AUTO_BIND_TLS_BEFORE_BIND
else:
auto_bind = AUTO_BIND_NO_TLS
return auto_bind
    def get_ldap_groups(self, specific_group: str = '') -> dict:
        """
        Implements ldap groups command.

        :param specific_group: optional '#'-separated group DNs; when empty and
            group fetching is enabled, all groups are returned.
        :rtype: ``dict``
        :return: dict with 'Controls', 'Referrals' and 'Entries' keys.
        """
        instance_name = demisto.integrationInstance()

        if not self._fetch_groups and not specific_group:
            # Nothing to do: the instance opted out of group fetching.
            demisto.info(f'Instance [{instance_name}] configured not to fetch groups')
            sys.exit()

        searched_results = self._fetch_specific_groups(
            specific_group) if not self._fetch_groups else self._fetch_all_groups()
        demisto.info(f'Retrieved {len(searched_results["Entries"])} groups from LDAP Authentication {instance_name}')
        return searched_results
    def authenticate_ldap_user(self, username: str, password: str) -> str:
        """
        Performs simple bind operation on ldap server.

        :param username: bind DN / username.
        :param password: bind password.
        :rtype: ``str``
        :return: "Done" when the bind succeeded.
        :raises Exception: when the connection did not bind.
        """
        auto_bind = self._get_auto_bind_value()
        ldap_conn = Connection(server=self._ldap_server, user=username, password=password, auto_bind=auto_bind)
        demisto.info(f'LDAP Connection Details: {ldap_conn}')

        if ldap_conn.bound:
            # Bind was only needed to validate the credentials - release it.
            ldap_conn.unbind()
            return "Done"
        else:
            raise Exception(f"LDAP Authentication - authentication connection failed,"
                            f" server type is: {self._ldap_server_vendor}")
    def get_user_data(self, username: str, pull_name: bool, pull_mail: bool, pull_phone: bool,
                      name_attribute: str, mail_attribute: str, phone_attribute: str,
                      search_user_by_dn: bool = False) -> dict:
        """
        Returns data for given ldap user (OpenLDAP flow).

        :param username: either a user identifier or a full DN
            (see search_user_by_dn).
        :param pull_name/pull_mail/pull_phone: whether to request the
            corresponding optional attributes.
        :param search_user_by_dn: when True, *username* is treated as a DN and
            a BASE-scope search is performed on it.
        :rtype: ``dict``
        :return: dict with 'dn', 'gid_number', 'referrals' and optionally
            'name'/'email'/'mobile' keys.
        :raises Exception: when the user or its group identifier is not found.
        """
        auto_bind = self._get_auto_bind_value()

        with Connection(self._ldap_server, self._username, self._password, auto_bind=auto_bind) as ldap_conn:
            demisto.info(f'LDAP Connection Details: {ldap_conn}')

            attributes = [self.GROUPS_IDENTIFIER_ATTRIBUTE]
            if pull_name:
                attributes.append(name_attribute)
            if pull_mail:
                attributes.append(mail_attribute)
            if pull_phone:
                attributes.append(phone_attribute)

            if search_user_by_dn:
                # BASE scope: the given DN itself is the single searched object.
                search_filter = f'(&(objectClass={self.USER_OBJECT_CLASS})' +\
                                self._get_formatted_custom_attributes() + ')'
                ldap_conn.search(search_base=username, search_filter=search_filter, size_limit=1,
                                 attributes=attributes, search_scope=BASE)
            else:
                custom_attributes = self._get_formatted_custom_attributes()
                search_filter = (f'(&(objectClass={self.USER_OBJECT_CLASS})'
                                 f'({self.USER_IDENTIFIER_ATTRIBUTE}={username}){custom_attributes})')
                ldap_conn.search(search_base=self._base_dn, search_filter=search_filter, size_limit=1,
                                 attributes=attributes)

            if not ldap_conn.entries:
                raise Exception("LDAP Authentication - LDAP user not found")
            entry = ldap_conn.entries[0]

            # The group identifier is mandatory - it drives group membership lookup.
            if self.GROUPS_IDENTIFIER_ATTRIBUTE not in entry \
                    or not entry[self.GROUPS_IDENTIFIER_ATTRIBUTE].value:
                raise Exception(f"LDAP Authentication - OpenLDAP user's {self.GROUPS_IDENTIFIER_ATTRIBUTE} not found")

            user_data = {'dn': entry.entry_dn, 'gid_number': [str(entry[self.GROUPS_IDENTIFIER_ATTRIBUTE].value)],
                         'referrals': ldap_conn.result.get('referrals')}

            # Optional attributes are included only when present and non-empty.
            if name_attribute in entry and entry[name_attribute].value:
                user_data['name'] = ldap_conn.entries[0][name_attribute].value
            if mail_attribute in entry and entry[mail_attribute].value:
                user_data['email'] = ldap_conn.entries[0][mail_attribute].value
            if phone_attribute in entry and entry[phone_attribute].value:
                user_data['mobile'] = ldap_conn.entries[0][phone_attribute].value

            return user_data
    def get_user_groups(self, user_identifier: str):
        """
        Returns user's group.

        :param user_identifier: the value matched against the group membership
            attribute (e.g. memberUid).
        :return: list of the DNs of the groups the user belongs to.
        """
        auto_bind = self._get_auto_bind_value()
        with Connection(self._ldap_server, self._username, self._password, auto_bind=auto_bind) as ldap_conn:
            demisto.info(f'LDAP Connection Details: {ldap_conn}')
            search_filter = (f'(&(objectClass={self.GROUPS_OBJECT_CLASS})'
                             f'({self.GROUPS_MEMBERSHIP_IDENTIFIER_ATTRIBUTE}={user_identifier}))')
            ldap_group_entries = ldap_conn.extend.standard.paged_search(search_base=self._base_dn,
                                                                        search_filter=search_filter,
                                                                        attributes=[
                                                                            self.GROUPS_IDENTIFIER_ATTRIBUTE],
                                                                        paged_size=self._page_size)
            return LdapClient._parse_ldap_users_groups_entries(ldap_group_entries)
    def authenticate_and_roles_openldap(self, username: str, password: str, pull_name: bool = True,
                                        pull_mail: bool = True, pull_phone: bool = False, mail_attribute: str = 'mail',
                                        name_attribute: str = 'name', phone_attribute: str = 'mobile') -> dict:
        """
        Implements authenticate and roles command for OpenLDAP:
        locate the user, bind as the user's DN to verify the password,
        then collect the user's groups.

        :rtype: ``dict``
        :return: dict with 'Controls', 'Referrals' and 'Entries' keys.
        """
        # Accept either a full DN or a plain identifier as the username.
        search_user_by_dn, user_identifier = LdapClient._is_valid_dn(username, self.USER_IDENTIFIER_ATTRIBUTE)

        user_data = self.get_user_data(username=username, search_user_by_dn=search_user_by_dn, pull_name=pull_name,
                                       pull_mail=pull_mail, pull_phone=pull_phone, mail_attribute=mail_attribute,
                                       name_attribute=name_attribute, phone_attribute=phone_attribute)

        # Verify the supplied password by binding as the resolved DN.
        self.authenticate_ldap_user(user_data['dn'], password)
        user_groups = self.get_user_groups(user_identifier)

        return {
            'Controls': None,
            'Referrals': user_data['referrals'],
            'Entries': [LdapClient._build_entry_for_user(user_groups=user_groups, user_data=user_data,
                                                         mail_attribute=mail_attribute, name_attribute=name_attribute,
                                                         phone_attribute=phone_attribute)]
        }
    def authenticate_and_roles_active_directory(self, username: str, password: str, pull_name: bool = True,
                                                pull_mail: bool = True, pull_phone: bool = False,
                                                mail_attribute: str = 'mail', name_attribute: str = 'name',
                                                phone_attribute: str = 'mobile') -> dict:
        """
        Implements authenticate and roles command for Active Directory:
        search by sAMAccountName / userPrincipalName, then authenticate as the
        found entry's DN with the supplied password.

        :rtype: ``dict``
        :return: dict with 'Controls', 'Referrals' and 'Entries' keys.
        :raises Exception: when the user does not exist in AD.
        """
        # Normalize 'DOMAIN\\user' / 'user@domain' logon forms to the bare name.
        xsoar_username = self._get_ad_username(username)
        auto_bind = self._get_auto_bind_value()

        with Connection(self._ldap_server, self._username, self._password, auto_bind=auto_bind) as ldap_conn:
            demisto.info(f'LDAP Connection Details: {ldap_conn}')

            attributes = [self.GROUPS_MEMBER, self.GROUPS_PRIMARY_ID]
            if pull_name:
                attributes.append(name_attribute)
            if pull_mail:
                attributes.append(mail_attribute)
            if pull_phone:
                attributes.append(phone_attribute)

            search_filter = f'(|(sAMAccountName={xsoar_username})(userPrincipalName={username}))'
            ldap_conn_entries = ldap_conn.extend.standard.paged_search(search_base=self._base_dn,
                                                                       search_filter=search_filter,
                                                                       attributes=attributes,
                                                                       paged_size=self._page_size,
                                                                       generator=False)

            # Parsing also performs the user's bind (password verification).
            referrals, entries = \
                self._parse_and_authenticate_ldap_group_entries_and_referrals(ldap_group_entries=ldap_conn_entries,
                                                                              password=password)

            if not entries:  # if the user not exist in AD the query returns no entries
                raise Exception("LDAP Authentication - LDAP user not found")
            return {
                'Controls': [],
                'Referrals': referrals,
                'Entries': entries
            }
def authenticate_and_roles(self, username: str, password: str, pull_name: bool = True, pull_mail: bool = True,
pull_phone: bool = False, mail_attribute: str = 'mail', name_attribute: str = 'name',
phone_attribute: str = 'mobile') -> dict:
"""
Implements authenticate and roles command.
"""
if self._ldap_server_vendor == self.ACTIVE_DIRECTORY:
return self.authenticate_and_roles_active_directory(username=username, password=password,
pull_name=pull_name, pull_mail=pull_mail,
pull_phone=pull_phone, mail_attribute=mail_attribute,
name_attribute=name_attribute,
phone_attribute=phone_attribute)
else: # ldap server is OpenLDAP
return self.authenticate_and_roles_openldap(username=username, password=password,
pull_name=pull_name, pull_mail=pull_mail, pull_phone=pull_phone,
mail_attribute=mail_attribute, name_attribute=name_attribute,
phone_attribute=phone_attribute)
    def test_module(self):
        """
        Basic test connection and validation of the Ldap integration.

        :rtype: ``str``
        :return: 'ok' when the bind succeeds and all validations pass.
        :raises Exception: on unsupported server build, non-DN credentials
            (OpenLDAP) or malformed custom attributes.
        """
        build_number = get_demisto_version().get('buildNumber', LdapClient.DEV_BUILD_NUMBER)
        # Validate the custom-attributes format early (raises on bad input).
        self._get_formatted_custom_attributes()

        if build_number != LdapClient.DEV_BUILD_NUMBER \
                and LdapClient.SUPPORTED_BUILD_NUMBER > int(build_number):
            raise Exception(f'LDAP Authentication integration is supported from build number:'
                            f' {LdapClient.SUPPORTED_BUILD_NUMBER}')

        if self._ldap_server_vendor == self.OPENLDAP:
            try:
                # OpenLDAP requires a full DN as the bind username.
                parse_dn(self._username)
            except LDAPInvalidDnError:
                raise Exception("Invalid credentials input. Credentials must be full DN.")

        self.authenticate_ldap_user(username=self._username, password=self._password)
        return 'ok'
def main():
    """ COMMANDS MANAGER / SWITCH PANEL

    Reads the integration params/args, instantiates LdapClient and routes the
    called command; any raised exception is mapped to a user-facing error.
    """
    params = demisto.params()
    command = demisto.command()
    args = demisto.args()

    demisto.info(f'Command being called is {command}')

    try:
        # initialized LDAP Authentication client
        client = LdapClient(params)

        if command == 'test-module':
            test_result = client.test_module()
            return_results(test_result)
        elif command == 'ad-authenticate':
            username = args.get('username')
            password = args.get('password')
            authentication_result = client.authenticate_ldap_user(username, password)
            demisto.info(f'ad-authenticate command - authentication result: {authentication_result}')
            return_results(authentication_result)
        elif command == 'ad-groups':
            specific_group = args.get('specific-groups')
            searched_results = client.get_ldap_groups(specific_group)
            demisto.info(f'ad-groups command - searched results: {searched_results}')
            return_results(searched_results)
        elif command == 'ad-authenticate-and-roles':
            username = args.get('username')
            password = args.get('password')
            mail_attribute = args.get('attribute-mail', 'mail')
            name_attribute = args.get('attribute-name', 'name')
            phone_attribute = args.get('attribute-phone', 'mobile')
            pull_name = argToBoolean(args.get('attribute-name-pull', True))
            pull_mail = argToBoolean(args.get('attribute-mail-pull', True))
            pull_phone = argToBoolean(args.get('attribute-phone-pull', False))
            entry_result = client.authenticate_and_roles(username=username, password=password, pull_name=pull_name,
                                                         pull_mail=pull_mail, pull_phone=pull_phone,
                                                         mail_attribute=mail_attribute, name_attribute=name_attribute,
                                                         phone_attribute=phone_attribute)
            demisto.info(f'ad-authenticate-and-roles command - entry results: {entry_result}')
            return_results(entry_result)
        else:
            raise NotImplementedError(f'Command {command} is not implemented')

    # Log exceptions - map known ldap3 errors to clearer user-facing messages.
    except Exception as e:
        msg = str(e)
        if isinstance(e, LDAPBindError):
            msg = f'LDAP Authentication - authentication connection failed. Additional details: {msg}'
        elif isinstance(e, LDAPSocketOpenError):
            msg = f'LDAP Authentication - Failed to connect to LDAP server. Additional details: {msg}'
        elif isinstance(e, LDAPInvalidPortError):
            msg = 'LDAP Authentication - Not valid ldap server input.' \
                  ' Check that server input is of form: ip or ldap://ip'
        return_error(str(msg))


if __name__ in ['__main__', '__builtin__', 'builtins']:
    main()
| mit | c378297af5dc25489fb9f86e96289a2c | 48.83056 | 120 | 0.550094 | 4.514529 | false | false | false | false |
demisto/content | Packs/XSOARStorage/Integrations/XSOARStorage/XSOARStorage.py | 2 | 3475 | import traceback
from typing import Any, Dict
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
# Hard cap (bytes) on the total size of the stored integration context (~1 MB).
SIZE_LIMIT = 1024000
# User-configurable maximum, clamped so it can never exceed SIZE_LIMIT.
MAX_SIZE = int(demisto.params().get('maxsize', SIZE_LIMIT))
MAX_SIZE = MAX_SIZE if MAX_SIZE <= SIZE_LIMIT else SIZE_LIMIT
def xsoar_store_list_command(args: Dict[str, Any]) -> CommandResults:
    """
    List the keys stored under a namespace of the integration context.

    Reuses the namespace mapping already fetched instead of re-reading the
    integration context a second time (the original performed two identical
    getIntegrationContext() round-trips).

    :param args: command arguments; 'namespace' defaults to 'default'.
    :return: CommandResults listing the key names under XSOAR.Store.<namespace>.
    """
    namespace = args.get('namespace', 'default')
    data = demisto.getIntegrationContext().get(namespace)
    if not data:
        # return_error raises, so execution stops here on a missing/empty namespace.
        if namespace == 'default':
            return_error("Namespace: <default> empty!")
        else:
            return_error(f"Namespace: <{namespace}> not found!")

    keys = list(data)
    number_of_keys = len(keys)
    r_data = "\n".join(keys)
    return CommandResults(
        readable_output=f"{number_of_keys} key(s) found: \n {r_data}",
        outputs_prefix=f"XSOAR.Store.{namespace}",
        outputs={"keys": keys},
        raw_response=keys
    )
def xsoar_store_put_command(args: Dict[str, Any]) -> CommandResults:
    """
    Store *data* under *key* inside the given namespace of the integration
    context, rejecting writes that would exceed the configured size limit.
    """
    namespace = args.get('namespace', 'default')
    key = args.get('key')
    input_data = args.get('data')
    current_data = demisto.getIntegrationContext()

    # Size guard: refuse the write when the store would grow past MAX_SIZE.
    if (sys.getsizeof(current_data) + sys.getsizeof(input_data)) > MAX_SIZE:
        return_error(f"Store cannot be larger than {MAX_SIZE} bytes")

    # Create the namespace mapping on first use, then set/overwrite the key.
    namespace_data = current_data.setdefault(namespace, {})
    namespace_data[key] = input_data
    demisto.setIntegrationContext(current_data)

    return CommandResults(
        readable_output=f"put: <{input_data}> in key: <{key}> for namespace: <{namespace}>"
    )
def xsoar_store_get_command(args: Dict[str, Any]) -> CommandResults:
    """
    Retrieve the value stored under *key* in the given namespace.

    Robustness fix: the original crashed with AttributeError when the
    namespace did not exist (``None.get(key)``); a missing namespace now
    simply yields a None value, matching the behavior of a missing key.
    """
    namespace = args.get('namespace', 'default')
    key = args.get('key')
    data = demisto.getIntegrationContext().get(namespace) or {}
    data = data.get(key)
    return CommandResults(
        readable_output=f"retrieved: <{data}> from key: <{key}> for namespace: <{namespace}>",
        outputs_prefix=f"XSOAR.Store.{namespace}.{key}",
        outputs=data
    )
''' MAIN FUNCTION '''
def main() -> None:
    """main function, parses params and runs command functions

    Routes the called command to the matching xsoar-store handler and converts
    any exception into a user-facing error entry.
    """
    demisto.debug(f'Command being called is {demisto.command()}')
    try:
        if demisto.command() == 'test-module':
            # This is the call made when pressing the integration Test button.
            return_results('ok')
        elif demisto.command() == 'xsoar-store-list':
            return_results(xsoar_store_list_command(demisto.args()))
        elif demisto.command() == 'xsoar-store-put':
            return_results(xsoar_store_put_command(demisto.args()))
        elif demisto.command() == 'xsoar-store-get':
            return_results(xsoar_store_get_command(demisto.args()))
        # NOTE(review): an unrecognized command falls through silently with no
        # output - confirm this is intended.

    # Log exceptions and return errors
    except Exception as e:
        demisto.error(traceback.format_exc())  # print the traceback
        return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')


"""
Entry Point
-----------
This is the integration code entry point. It checks whether the ``__name__``
variable is ``__main__`` , ``__builtin__`` (for Python 2) or ``builtins`` (for
Python 3) and then calls the ``main()`` function. Just keep this convention.
"""
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | 165c1ca4b36e0c88c0aae5c637656acc | 26.148438 | 94 | 0.631942 | 3.586171 | false | false | false | false |
demisto/content | Packs/Malwr/Integrations/Malwr/Malwr.py | 2 | 12712 | import ast
import hashlib
import re
from typing import Any
import demistomock as demisto # noqa: F401
import requests
from bs4 import BeautifulSoup
from CommonServerPython import * # noqa: F401
MAIN_URL = 'https://malwr.com'
# URL path templates - formatted with the analysis id.
STATUS_URL = '/submission/status/{}/'
RESULT_URL = '/analysis/{}/'
# Text marker that precedes the sample MD5 in the scraped result page.
MD5_PREFIX_STR = 'with MD5 '
SUPPORTED_COMMANDS = ['Submit', 'Status', 'Result', 'Detonate']
# Detonation defaults (seconds): overall timeout and status-poll interval.
DETONATE_DEFAULT_TIMEOUT = 600
DETONATE_POLLING_INTERVAL = 10
def md5(fname):  # pragma: no cover
    """Return the hex MD5 digest of the file at *fname*, read in 4 KiB chunks."""
    digest = hashlib.md5()  # guardrails-disable-line # nosec B324
    with open(fname, 'rb') as handle:
        while True:
            block = handle.read(4096)
            if not block:
                break
            digest.update(block)
    return digest.hexdigest()
def get_file_path(file_id):  # pragma: no cover
    """
    Resolve an XSOAR war-room entry id to a local file path.
    Posts an error entry and returns None when the entry is not a file.
    """
    filepath_result = demisto.getFilePath(file_id)
    if 'path' in filepath_result:
        return filepath_result['path']
    demisto.results(f'Error: entry {file_id} is not a file.')
    return None
# The Malwar API from https://github.com/PaulSec/API-malwr.com
class MalwrAPI:
    """
    MalwrAPI Main Handler.

    Thin scraping client for the malwr.com web UI: logs in with a scraped CSRF
    token, drives the HTML submission form and parses analysis status/results
    out of the returned pages with BeautifulSoup.
    """
    # Browser-like User-Agent sent with every request. Reassignment on an
    # instance is blocked by __setattr__ below.
    HEADERS = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:104.0) Gecko/20100101 Firefox/104.0'}

    def __init__(self, url, username=None, password=None):  # pragma: no cover
        """
        :param url: base URL of the malwr site (e.g. https://malwr.com).
        :param username: optional account name used by login().
        :param password: optional account password used by login().
        """
        self.url = url
        self.session = requests.session()
        self.username = username
        self.password = password
        self.logged = False  # flipped to True after a successful login()

    def login(self):  # pragma: no cover
        """Login on malwr.com website. Returns True on success, False on failure."""
        if self.username and self.password:
            # The login form is CSRF-protected - scrape the token first.
            soup = self.request_to_soup(self.url + '/account/login')
            csrf_input = soup.find(attrs=dict(name='csrfmiddlewaretoken'))
            csrf_token = csrf_input['value']
            payload = {
                'csrfmiddlewaretoken': csrf_token,
                'username': f'{self.username}',
                'password': f'{self.password}'
            }
            login_request = self.session.post(self.url + "/account/login/",
                                              data=payload, headers=self.HEADERS)

            if login_request.status_code == 200:
                self.logged = True
                return True
            else:
                self.logged = False
                return False

    def request_to_soup(self, url=None):  # pragma: no cover
        """Request url and return the Beautiful Soup object of html returned."""
        if not url:
            url = self.url

        req = self.session.get(url, headers=self.HEADERS)
        soup = BeautifulSoup(req.text, "html.parser")
        return soup

    @staticmethod
    def evaluate_simple_math_expr(expr: str) -> Optional[int]:
        """
        Safely evaluate a simple arithmetic expression (numbers and unary/binary
        operators only) - used to solve the submission form's math captcha.
        Returns None when *expr* is not such an expression.
        """
        # from https://stackoverflow.com/a/38860845
        try:
            tree = ast.parse(expr, mode='eval')
        except SyntaxError:
            return None  # not a Python expression
        if not all(isinstance(node, (ast.Expression,
                                     ast.UnaryOp, ast.unaryop,
                                     ast.BinOp, ast.operator,
                                     ast.Num)) for node in ast.walk(tree)):
            return None  # not a mathematical expression (numbers and operators)
        # The AST whitelist above restricts the expression, making this eval safe.
        result = eval(compile(tree, filename='', mode='eval'))  # nosec B307
        return result

    @staticmethod
    def find_submission_links(req):
        """Return all '/analysis/<id>/' links found in the response body."""
        # regex to check if the file was already submitted before
        pattern = r'(\/analysis\/[a-zA-Z0-9]{12,}\/)'
        submission_links = re.findall(pattern, req.text)
        return submission_links

    def submit_sample(self, filepath, analyze=True, share=True, private=True):
        """
        Upload the file at *filepath* through the malwr.com submission form.

        :return: (result, soup) - result is either a dict with 'md5', 'file'
            and (when available) 'analysis_link' keys, or an error string.
        """
        if self.logged is False:
            self.login()

        s = self.session
        req = s.get(self.url + '/submission/', headers=self.HEADERS)
        soup = BeautifulSoup(req.text, "html.parser")

        # Solve the form's simple math captcha ("2 + 3 =") when present.
        pattern = r'(\d [-+*] \d) ='
        math_captcha_fields = re.findall(pattern, req.text)
        math_captcha_field = None
        if math_captcha_fields:
            math_captcha_field = MalwrAPI.evaluate_simple_math_expr(math_captcha_fields[0])

        data = {
            'math_captcha_field': math_captcha_field,
            'math_captcha_question': soup.find('input', {'name': 'math_captcha_question'})['value'],
            'csrfmiddlewaretoken': soup.find('input', {'name': 'csrfmiddlewaretoken'})['value'],
            'share': 'on' if share else 'off',  # share by default
            'analyze': 'on' if analyze else 'off',  # analyze by default
            'private': 'on' if private else 'off'  # private by default
        }

        # Open the sample once and close it deterministically (the previous
        # implementation leaked two unclosed file handles per submission).
        with open(filepath, 'rb') as sample_file:
            req = s.post(self.url + '/submission/', data=data, headers=self.HEADERS,
                         files={'sample': sample_file})
            sample_file.seek(0)
            # MD5 is used only as a sample identifier, not for security.
            sample_md5 = hashlib.md5(sample_file.read()).hexdigest()  # guardrails-disable-line # nosec B324

        submission_links = MalwrAPI.find_submission_links(req)

        res: dict[str, Any] = {
            'md5': sample_md5,
            'file': filepath
        }

        if len(submission_links) > 0:
            res['analysis_link'] = submission_links[0]
            return res, soup
        else:
            # No direct analysis link - look for a pending-status link instead.
            pattern = r'(\/submission\/status\/[a-zA-Z0-9]{12,}\/)'
            submission_status = re.findall(pattern, req.text)

            if len(submission_status) > 0:
                res['analysis_link'] = submission_status[0]
                return res, soup
            elif 'file like this waiting for processing, submission aborted.' in req.text:
                return 'File already submitted, check its status.', soup
            else:
                return 'Error with the file.', soup

    def get_status(self, analysis_id):  # pragma: no cover
        """
        Return (status, analysis_link, soup) for a submission;
        status is 'complete', 'pending' or 'error'.
        """
        s = self.session
        req = s.get(self.url + STATUS_URL.format(analysis_id), headers=self.HEADERS)
        soup = BeautifulSoup(req.text, 'html.parser')

        submission_links = MalwrAPI.find_submission_links(req)
        if len(submission_links) > 0:
            status = 'complete'
            return status, submission_links[0], soup
        elif 'The analysis is still pending' in str(soup):
            status = 'pending'
        else:
            status = 'error'
        return status, None, soup

    def get_result(self, analysis_id):  # pragma: no cover
        """
        Return (status, is_malicious, soup, md5) for an analysis; all but
        status are None while the analysis is still pending.
        """
        analysis_status, _, _ = self.get_status(analysis_id)
        if analysis_status != 'complete':
            status = 'pending'
            soup = None
            is_malicious = None
            md5 = None
        else:
            status = 'complete'
            s = self.session
            req = s.get(self.url + RESULT_URL.format(analysis_id), headers=self.HEADERS)
            soup = BeautifulSoup(req.text, 'html.parser')
            is_malicious = 'malicious' in str(soup)
            # Scrape the 32-char sample MD5 that follows the 'with MD5 ' marker.
            soup_str = str(soup)
            start_index = soup_str.find(MD5_PREFIX_STR)
            if start_index == -1:
                md5 = None
            else:
                start_index += len(MD5_PREFIX_STR)
                md5 = soup_str[start_index: start_index + 32]
        return status, is_malicious, soup, md5

    def __setattr__(self, name, value):
        """Block accidental reassignment of the HEADERS pseudo-constant on instances."""
        if name == 'HEADERS':
            raise AttributeError(f"can't reassign constant '{name}'")
        else:
            self.__dict__[name] = value
def main():  # pragma: no cover
    """Entry point: dispatch the invoked XSOAR command against the Malwr API.

    Supported commands: test-module, malwr-submit, malwr-status, malwr-result
    and malwr-detonate (submit, poll until done or timeout, fetch the result).
    Results are returned via demisto.results().
    """
    if 'identifier' in demisto.params()['credentials'] and 'password' in demisto.params()['credentials']:
        username = demisto.params()['credentials']['identifier']
        password = demisto.params()['credentials']['password']
    else:
        username = None
        password = None
    malwr = MalwrAPI(
        url=demisto.params()['server'],
        username=username,
        password=password
    )
    entry: dict[str, Any] = {
        'Type': entryTypes['note'],
        'ContentsFormat': formats['text'],
        'ReadableContentsFormat': formats['text']
    }
    if demisto.command() == 'test-module':
        demisto.results('ok')
        return
    elif demisto.command() == 'malwr-submit':
        file_id = demisto.args()['fileId']
        filepath = get_file_path(file_id)
        res, soup = malwr.submit_sample(filepath)
        if isinstance(res, dict) and 'analysis_link' in res:
            analysis_id = res['analysis_link'].split('/')[-2]
            message = 'File submitted: {}{}\n'.format(MAIN_URL, res['analysis_link'])
            message += 'MD5: {}\n'.format(res['md5'])
            message += f'Analysis ID: {analysis_id}'
            entry['Contents'] = str(soup)
            entry['HumanReadable'] = message
            entry['EntryContext'] = {
                'Malwr.Submissions(val.Id==obj.Id)': {'Id': analysis_id, 'Md5': res['md5'], 'Status': 'pending'}
            }
        else:
            # submit_sample returned an error string - surface it as-is
            entry['HumanReadable'] = res
    elif demisto.command() == 'malwr-status':
        analysis_id = demisto.args()['analysisId']
        status, data, soup = malwr.get_status(analysis_id)
        if status == 'complete':
            message = f'The analysis is complete, you can view it at: {MAIN_URL}{data}.'
        elif status == 'pending':
            message = 'The analysis is still in progress.'
        else:
            message = 'Error: the specified analysis does not exist.'
        entry['Contents'] = str(soup)
        entry['HumanReadable'] = message
        entry['EntryContext'] = {'Malwr.Submissions(val.Id==obj.Id)': {'Id': analysis_id, 'Status': status}}
    elif demisto.command() == 'malwr-result':
        analysis_id = demisto.args()['analysisId']
        status, is_malicious, soup, md5 = malwr.get_result(analysis_id)
        if status == 'pending':
            message = 'The analysis is still in progress.'
            demisto.results(message)
            return
        if is_malicious:
            entry['EntryContext'] = {
                'Malwr.Submissions(val.Id==obj.Id)': {
                    'Id': analysis_id, 'Status': status, 'Malicious': {'Vendor': 'Malwr'}
                }
            }
            entry['EntryContext']['DBotScore'] = {'Indicator': md5, 'Vendor': 'Malwr', 'Score': 3}
            message = 'The file is malicious.'
        else:
            entry['EntryContext'] = {'Malwr.Submissions(val.Id==obj.Id)': {'Id': analysis_id, 'Status': status}}
            entry['EntryContext']['DBotScore'] = {'Indicator': md5, 'Vendor': 'Malwr', 'Score': 0}
            message = 'The file is not malicious.'
        entry['Contents'] = str(soup)
        entry['HumanReadable'] = message
    elif demisto.command() == 'malwr-detonate':
        status = ''
        file_id = demisto.args()['fileId']
        filepath = get_file_path(file_id)
        timeout = int(demisto.args()['timeout']) if 'timeout' in demisto.args() else DETONATE_DEFAULT_TIMEOUT
        # Submit the sample
        res, soup = malwr.submit_sample(filepath)
        # BUGFIX: submit_sample returns a plain error *string* on failure. The
        # previous guard (`isinstance(res, dict) and 'analysis_link' not in res`)
        # only fired for dicts, so error strings fell through to res.get()
        # below and raised AttributeError.
        if not isinstance(res, dict) or 'analysis_link' not in res:
            demisto.results(f'ERROR: {res}')
            return
        # Poll the status of the analysis
        analysis_id = res.get('analysis_link', '').split('/')[-2]
        start_time = time.time()
        while (time.time() - start_time) < timeout:
            status, _, _ = malwr.get_status(analysis_id)
            if status == 'error':
                demisto.results('Error analyzing file.')
                return
            demisto.info(f'status = {status}')
            if status == 'complete':
                break
            time.sleep(DETONATE_POLLING_INTERVAL)  # pylint: disable=sleep-exists
        if status == 'pending':
            # the polling loop timed out without reaching 'complete'
            demisto.results('File analysis timed out.')
            return
        # Get the result
        status, is_malicious, soup, md5 = malwr.get_result(analysis_id)
        if status != 'complete':
            demisto.results('Error analyzing file.')
            return
        if is_malicious:
            entry['EntryContext'] = {
                'Malwr.Submissions(val.Id==obj.Id)': {
                    'Id': analysis_id, 'Md5': md5, 'Status': status, 'Malicious': {'Vendor': 'Malwr'}
                }
            }
            entry['EntryContext']['DBotScore'] = {
                'Indicator': md5, 'Vendor': 'Malwr', 'Score': 3 if is_malicious else 0
            }
            message = 'The file is malicious.'
        else:
            entry['EntryContext'] = {
                'Malwr.Submissions(val.Id==obj.Id)': {'Id': analysis_id, 'Md5': res['md5'], 'Status': status}
            }
            message = 'The file is not malicious.'
        entry['Contents'] = str(soup)
        entry['HumanReadable'] = message
    demisto.results(entry)
# XSOAR executes integration code via exec(), where __name__ is 'builtins'
# (or '__builtin__' on Python 2), hence the extended guard.
if __name__ in ('__main__', '__builtin__', 'builtins'):  # pragma: no cover
    main()
| mit | 008ad95264ba6753ea7f565f5532f974 | 36.169591 | 120 | 0.557898 | 3.893415 | false | false | false | false |
demisto/content | Utils/github_workflow_scripts/handle_external_pr.py | 2 | 6195 | #!/usr/bin/env python3
import json
from typing import List
import urllib3
from blessings import Terminal
from github import Github
from github.Repository import Repository
from utils import get_env_var, timestamped_print
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Route every print() in this script through the timestamped wrapper from utils.
print = timestamped_print
# Github usernames eligible to be auto-assigned as contribution reviewers.
REVIEWERS = ['ostolero', 'omerKarkKatz', 'JasBeilin']
# PRs authored by this bot come from the Marketplace flow and need no Google form.
MARKETPLACE_CONTRIBUTION_PR_AUTHOR = 'xsoar-bot'
WELCOME_MSG = 'Thank you for your contribution. Your generosity and caring are unrivaled! Rest assured - our content ' \
              'wizard @{selected_reviewer} will very shortly look over your proposed changes.'
WELCOME_MSG_WITH_GFORM = 'Thank you for your contribution. Your generosity and caring are unrivaled! Make sure to ' \
                         'register your contribution by filling the [Contribution Registration]' \
                         '(https://forms.gle/XDfxU4E61ZwEESSMA) form, ' \
                         'so our content wizard @{selected_reviewer} will know he can start review the proposed ' \
                         'changes.'
def determine_reviewer(potential_reviewers: List[str], repo: Repository) -> str:
    """Checks the number of open 'Contribution' PRs that have either been assigned to a user or a review
    was requested from the user for each potential reviewer and returns the user with the smallest amount

    Args:
        potential_reviewers (List): The github usernames from which a reviewer will be selected
        repo (Repository): The relevant repo

    Returns:
        str: The github username to assign to a PR
    """
    label_to_consider = 'contribution'
    pulls = repo.get_pulls(state='OPEN')
    assigned_prs_per_potential_reviewer = {reviewer: 0 for reviewer in potential_reviewers}
    for pull in pulls:
        # we only consider 'Contribution' prs when computing who to assign
        pr_labels = [label.name.casefold() for label in pull.labels]
        if label_to_consider not in pr_labels:
            continue
        assignees = {assignee.login for assignee in pull.assignees}
        requested_reviewers, _ = pull.get_review_requests()
        reviewers_info = {requested_reviewer.login for requested_reviewer in requested_reviewers}
        combined_list = assignees.union(reviewers_info)
        for reviewer in potential_reviewers:
            if reviewer in combined_list:
                # keys were pre-initialized above, so a plain increment suffices
                assigned_prs_per_potential_reviewer[reviewer] += 1
    print(f'{assigned_prs_per_potential_reviewer=}')
    # min() with the tally as the key picks the least-loaded reviewer in O(n);
    # ties resolve to the reviewer listed first, exactly like the previous
    # stable sorted(...)[0] form.
    selected_reviewer = min(assigned_prs_per_potential_reviewer,
                            key=assigned_prs_per_potential_reviewer.get)  # type: ignore
    print(f'{selected_reviewer=}')
    return selected_reviewer
def main():
    """Handles External PRs (PRs from forks)
    Performs the following operations:
    1. If the external PR's base branch is master we create a new branch and set it as the base branch of the PR.
    2. Labels the PR with the "Contribution" label. (Adds the "Hackathon" label where applicable.)
    3. Assigns a Reviewer.
    4. Creates a welcome comment
    Will use the following env vars:
    - CONTENTBOT_GH_ADMIN_TOKEN: token to use to update the PR
    - EVENT_PAYLOAD: json data from the pull_request event
    """
    t = Terminal()
    payload_str = get_env_var('EVENT_PAYLOAD')
    if not payload_str:
        raise ValueError('EVENT_PAYLOAD env variable not set or empty')
    payload = json.loads(payload_str)
    print(f'{t.cyan}Processing PR started{t.normal}')
    org_name = 'demisto'
    repo_name = 'content'
    gh = Github(get_env_var('CONTENTBOT_GH_ADMIN_TOKEN'), verify=False)
    content_repo = gh.get_repo(f'{org_name}/{repo_name}')
    pr_number = payload.get('pull_request', {}).get('number')
    pr = content_repo.get_pull(pr_number)
    # Add 'Contribution' Label to PR
    contribution_label = 'Contribution'
    pr.add_to_labels(contribution_label)
    print(f'{t.cyan}Added "Contribution" label to the PR{t.normal}')
    # check base branch is master
    if pr.base.ref == 'master':
        print(f'{t.cyan}Determining name for new base branch{t.normal}')
        branch_prefix = 'contrib/'
        new_branch_name = f'{branch_prefix}{pr.head.label.replace(":", "_")}'
        existant_branches = content_repo.get_git_matching_refs(f'heads/{branch_prefix}')
        # NOTE(review): lstrip strips a *character set*, not a prefix - it works
        # here only because 'contrib/...' starts with 'c', which is outside the
        # set {'r','e','f','s','/','h','a','d'}. str.removeprefix would be the
        # robust form - confirm the minimum Python version before changing.
        potential_conflicting_branch_names = [branch.ref.lstrip('refs/heads/') for branch in existant_branches]
        # make sure new branch name does not conflict with existing branch name
        while new_branch_name in potential_conflicting_branch_names:
            # append or increment digit
            # NOTE(review): only the last single digit is incremented, so names
            # cycle ...-8, -9, then '-9' -> '-10' works once but multi-digit
            # suffixes are not incremented as whole numbers - confirm intent.
            if not new_branch_name[-1].isdigit():
                new_branch_name += '-1'
            else:
                digit = str(int(new_branch_name[-1]) + 1)
                new_branch_name = f'{new_branch_name[:-1]}{digit}'
        master_branch_commit_sha = content_repo.get_branch('master').commit.sha
        # create new branch
        print(f'{t.cyan}Creating new branch "{new_branch_name}"{t.normal}')
        content_repo.create_git_ref(f'refs/heads/{new_branch_name}', master_branch_commit_sha)
        # update base branch of the PR
        pr.edit(base=new_branch_name)
        print(f'{t.cyan}Updated base branch of PR "{pr_number}" to "{new_branch_name}"{t.normal}')
    # assign reviewers / request review from
    reviewer_to_assign = determine_reviewer(REVIEWERS, content_repo)
    pr.add_to_assignees(reviewer_to_assign)
    pr.create_review_request(reviewers=[reviewer_to_assign])
    print(f'{t.cyan}Assigned user "{reviewer_to_assign}" to the PR{t.normal}')
    print(f'{t.cyan}Requested review from user "{reviewer_to_assign}"{t.normal}')
    # create welcome comment (only users who contributed through Github need to have that contribution form filled)
    message_to_send = WELCOME_MSG if pr.user.login == MARKETPLACE_CONTRIBUTION_PR_AUTHOR else WELCOME_MSG_WITH_GFORM
    body = message_to_send.format(selected_reviewer=reviewer_to_assign)
    pr.create_issue_comment(body)
    print(f'{t.cyan}Created welcome comment{t.normal}')
# Standard script entry point guard.
if __name__ == "__main__":
    main()
| mit | 6e9efbc32e74c058b2c252bf7ea77f07 | 46.290076 | 120 | 0.676998 | 3.66568 | false | false | false | false |
demisto/content | Packs/ParseHTMLTables/Scripts/ParseHTMLTables/ParseHTMLTables.py | 2 | 12014 | import copy
from typing import Any, Dict, Generator, List, Optional, Tuple, Union
from bs4 import BeautifulSoup, NavigableString, Tag
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
TITLE_THRESHOLD = 4
class Table:
    """In-memory model of one HTML table: a title, header labels and a list of
    (row-labels, row-cells) tuples, with rowspan/colspan expansion handled as
    rows are inserted."""
    def __init__(self, title: str):
        self.__title = title
        self.__headers: List[str] = []  # header labels collected from <th> cells
        self.__rows: List[Tuple[List[str], List[str]]] = []  # (labels, cells) per row
        self.__rowspan_labels: List[Tuple[int, str]] = []  # (rows remaining, label) carried by rowspan
    def __set_rowspan_labels(self, columns: Optional[List[Tag]]):
        # Record labels that span multiple rows so subsequent rows inherit them.
        if not columns or not any(col.attrs.get('rowspan') for col in columns):
            return
        rowspan_labels: List[Tuple[int, str]] = []
        for col in columns:
            # malformed rowspan/colspan attributes fall back to 1
            try:
                rowspan = int(col.attrs.get('rowspan') or 1)
            except Exception:
                rowspan = 1
            rowspan = max(1, rowspan)
            try:
                colspan = int(col.attrs.get('colspan') or 1)
            except Exception:
                colspan = 1
            colspan = max(1, colspan)
            # repeat the entry once per spanned column
            rowspan_labels += [(rowspan, col.text.strip())] * colspan
        self.__rowspan_labels = rowspan_labels
    def get_title(self) -> str:
        return self.__title
    def set_header_labels(self, headers: List[Tag]):
        """Set the table headers from a list of <th> cells."""
        self.__headers = [header.text.strip() for header in headers]
    def get_header_labels(self) -> List[str]:
        return self.__headers
    def add_row(self, columns: List[Tag], labels: Optional[List[Tag]] = None):
        """
        Add a row with cells and labels.
        :param columns: List of data cells of the row.
        :param labels: List of header cells of the row.
        """
        rowspan_labels = self.__rowspan_labels
        # Normalize labels
        if labels and any(label.attrs.get('rowspan') for label in labels):
            self.__set_rowspan_labels(labels)
        normalized_labels = []
        if labels:
            # prepend labels inherited from an earlier rowspan header
            for i, (count, label) in enumerate(rowspan_labels):
                if count >= 2:
                    normalized_labels.append(label)
            for label in labels:
                try:
                    colspan = int(label.attrs.get('colspan') or 1)
                except Exception:
                    colspan = 1
                # duplicate the label once per spanned column
                normalized_labels += [label.text.strip()] * max(1, colspan)
        # Normalize columns
        if any(col.attrs.get('rowspan') for col in columns):
            self.__set_rowspan_labels(columns)
        normalized_columns = []
        for i, (count, label) in enumerate(rowspan_labels):
            if count >= 2:
                # inherit the spanned value and consume one row of its span
                normalized_columns.append(label)
                rowspan_labels[i] = count - 1, label
        for col in columns:
            try:
                colspan = int(col.attrs.get('colspan') or 1)
            except Exception:
                colspan = 1
            normalized_columns += [col.text.strip()] * max(1, colspan)
        self.__rows.append((normalized_labels, normalized_columns))
    def get_rows(self) -> List[Tuple[List[str], List[str]]]:
        return self.__rows
    def make_pretty_table_rows(self, default_header_line: Optional[str] = None) -> Any:
        """
        Format a table
        :param default_header_line: Which table line handles as header by default, 'first_column' or 'first_row'
        :return: The table formatted in JSON structure.
        """
        rows: List[Union[str, Dict[str, Any]]] = []
        temp_row: Dict[str, Any] = {}
        tbl_rows = self.__rows
        headers = self.__headers
        if default_header_line and default_header_line != 'none':
            # only synthesize headers when the table itself provided none
            if not headers and not any(labels for labels, cols in tbl_rows):
                if default_header_line in ('first_column', 'first_row'):
                    # The first column or row is considered as header
                    if default_header_line == 'first_column':
                        # transpose
                        tbl_rows = [([], list(cols)) for cols in zip(*[cols for labels, cols in tbl_rows])]
                    labels, headers = tbl_rows[0]
                    tbl_rows = tbl_rows[1:]
                else:
                    raise ValueError(f'Unknown default header line: {default_header_line}')
        for labels, cols in tbl_rows:
            # only the innermost row label overrides the first header
            labels = labels[-1:]
            # NOTE(review): the upper slice bound `len(headers) - len(labels)`
            # drops trailing header(s) when a row label is present - confirm
            # this truncation is intentional rather than `headers[len(labels):]`.
            headers = labels + headers[len(labels):len(headers) - len(labels)]
            if not cols:
                continue
            elif len(cols) == 1:
                if len(headers) >= 1:
                    # If there 1 header and 1 column, treat as key-value
                    key = headers[0]
                    vals = temp_row.get(key)
                    # repeated keys accumulate into a list
                    if vals is None:
                        temp_row[key] = cols[0]
                    elif type(vals) == list:
                        temp_row[key] = vals + [cols[0]]
                    else:
                        temp_row[key] = [vals, cols[0]]
                else:
                    if temp_row:
                        rows.append(temp_row)
                        temp_row = {}
                    # Single value in a table - just create an array of strings
                    rows.append(cols[0])
            elif len(cols) == 2 and len(headers) == 0:
                # If there are 2 columns and no headers, treat as key-value
                key = cols[0]
                vals = temp_row.get(key)
                if vals is None:
                    temp_row[key] = cols[1]
                elif type(vals) == list:
                    temp_row[key] = vals + [cols[1]]
                else:
                    temp_row[key] = [vals, cols[1]]
            else:
                # general case: one dict per row, keyed by header (or cellN)
                if temp_row:
                    rows.append(temp_row)
                    temp_row = {}
                rows.append({headers[i] if i < len(headers) else 'cell' + str(i): col for i, col in enumerate(cols)})
        if temp_row:
            rows.append(temp_row)
        # collapse a single-dict result to the dict itself
        if len(rows) == 1 and type(rows[0]) == dict:
            return rows[0]
        return rows
def find_table_title(base: Optional[Union[BeautifulSoup, Tag, NavigableString]],
                     node: Union[BeautifulSoup, Tag, NavigableString]) -> Optional[str]:
    """
    Search for a table title from a node.
    :param base: The top node of the tree.
    :param node: The node from which searching starts.
    :return: A title found.
    """
    title = ''
    orig = node
    # First pass: walk backwards through the document for the nearest heading.
    prev = node.previous_element
    while prev and node is not base:
        node = prev
        if isinstance(node, Tag) and node.name in ('h1', 'h2', 'h3', 'h4', 'h5', 'h6'):
            # collapse internal whitespace to single spaces
            title = ' '.join(node.text.strip().split())
            break
        prev = node.previous_element
    # Second pass: if no heading, or the heading looks too verbose (see
    # TITLE_THRESHOLD), collect the nearest preceding free-text run instead.
    if not title or title.count(' ') >= TITLE_THRESHOLD:
        message = ''
        node = orig
        prev = node.previous_element
        while prev and node is not base:
            node = prev
            if isinstance(node, NavigableString):
                # build the text backwards, stripping only the trailing edge
                message = (str(node) if message else str(node).rstrip()) + message
                # a line break marks the start of the candidate text run
                if message.lstrip() and any(c in message for c in ('\n', '\r')):
                    break
            prev = node.previous_element
        message = ' '.join(message.strip().split())
        # prefer the heading only when it is at least as concise as the text run
        title = title if title and message.count(' ') >= title.count(' ') else message
    return title
def list_columns(node: Union[BeautifulSoup, Tag, NavigableString], name: str) -> List[Tag]:
    """
    List columns of the row.
    :param node: The node which contains columns of the row.
    :param name: The name of the tag of columns.
    :return: The list of columns.
    """
    vals = []
    ancestor = node
    name_list = ['table', 'td', 'th', name]
    node = node.find(name_list)
    # Walk forward but stay inside the original row (ancestor).
    while node and is_descendant(ancestor, node):
        if node.name in name_list:
            if node.name == name:
                # copy the cell and strip any nested tables so their cells do
                # not leak into this row's text
                tnode = copy.copy(node)
                for t in tnode.find_all('table'):
                    t.decompose()
                vals.append(tnode)
            node = node.find_next_sibling(True)
        else:
            node = node.find_next(name_list)
    return vals
def is_descendant(ancestor: Optional[Union[BeautifulSoup, Tag, NavigableString]],
                  node: Optional[Union[BeautifulSoup, Tag, NavigableString]]) -> bool:
    """
    Check if a node is descendant in the tree.
    :param ancestor: The ancestor node.
    :param node: The node to be checked.
    :return: True - node is descendant, False - node is not descendant.
    """
    # Use a generator inside any() so the parent walk short-circuits on the
    # first match instead of materializing the full parent list first.
    return ancestor is not None and node is not None and any(ancestor is p for p in node.parents)
def parse_table(base: Optional[Union[BeautifulSoup, Tag, NavigableString]],
                table_node: Union[BeautifulSoup, Tag, NavigableString]) -> Generator[Table, None, None]:
    """
    Parse a HTML table and enumerate tables found in the table.
    :param base: The top node of the HTML tree.
    :param table_node: The table node to parse.
    :return: Tables found.
    """
    table = Table(title=find_table_title(base, table_node) or 'No Title')
    has_nested_tables = False
    node = table_node.find(['table', 'tr'])
    # Walk rows and nested tables, staying inside table_node.
    while node and is_descendant(table_node, node):
        if node.name == 'tr':
            ths = list_columns(node, 'th')
            tds = list_columns(node, 'td')
            if tds:
                table.add_row(columns=tds, labels=ths)
            # the first all-<th> row becomes the table header
            if ths and not table.get_header_labels():
                table.set_header_labels(ths)
            node = node.find_next(['table', 'tr'])
        elif node.name == 'table':
            # recurse into the nested table, then skip past it
            has_nested_tables = True
            yield from parse_table(base, node)
            base = node.previous_element
            node = node.find_next_sibling(True)
        else:
            node = node.find_next(['table', 'tr'])
    # Not to make a table if tr only has tables
    has_table = True
    if has_nested_tables:
        rows = table.get_rows()
        if len(rows) == 1:
            labels, cols = rows[0]
            if len(cols) == 1 and not cols[0]:
                # the only "row" was the empty shell around nested tables
                has_table = False
    if has_table:
        yield table
def parse_tables(node: Union[BeautifulSoup, Tag, NavigableString]) -> Generator[Table, None, None]:
    """
    Parse HTML tables and enumerate them.
    :param node: The node from which searching starts.
    :return: Tables found.
    """
    base = None
    node = node.find('table')
    while node:
        yield from parse_table(base, node)
        base = node.next_sibling
        # Advance to the next top-level table: try siblings first, then climb
        # to the parent and repeat. ('next_node' replaces the original local
        # named 'next', which shadowed the builtin.)
        while node:
            next_node = node.find_next_sibling(True)
            if next_node:
                if next_node.name == 'table':
                    break
                next_node = next_node.find_next('table')
                if next_node:
                    break
            node = node.parent
        node = next_node
def main():
    """Parse the HTML tables in the ``value`` argument and return them as JSON,
    optionally filtered by table index and/or title."""
    args = demisto.args()
    html_input = args.get('value') or ''
    title_override = args.get('title')
    wanted_indexes = argToList(args.get('filter_indexes'))
    wanted_titles = argToList(args.get('filter_titles'))
    header_line = args.get('default_header_line') or 'none'
    results = []
    try:
        soup = BeautifulSoup(html_input, 'html.parser')
        table_index = -1
        for table in parse_tables(soup):
            rows = table.make_pretty_table_rows(header_line)
            if not rows:
                continue
            table_index += 1
            # the index filter may contain ints or their string forms
            if wanted_indexes and table_index not in wanted_indexes and str(table_index) not in wanted_indexes:
                continue
            title = table.get_title()
            if wanted_titles and title not in wanted_titles:
                continue
            results.append({title_override or title: rows})
    except Exception as err:
        return_error(err)
    return_results(results)
# XSOAR runs scripts via exec(), where __name__ is 'builtins'/'__builtin__'.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | d17476da5c6d0fcd96966fd50079bdbc | 32.465181 | 117 | 0.536707 | 4.105947 | false | false | false | false |
demisto/content | Packs/cyberark_AIM/Integrations/CyberArkAIM_v2/CyberArkAIM_v2.py | 2 | 7151 | from requests_ntlm import HttpNtlmAuth
import tempfile
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
# disable insecure warnings
requests.packages.urllib3.disable_warnings()
class Client(BaseClient):
    """HTTP client for the CyberArk AIM Central Credential Provider.

    Supports optional NTLM (username/password) authentication and optional
    client-certificate authentication built from PEM text parameters.
    """
    def __init__(self, server_url: str, use_ssl: bool, proxy: bool, app_id: str, folder: str, safe: str,
                 credentials_object: str, username: str, password: str, cert_text: str, key_text: str):
        super().__init__(base_url=server_url, verify=use_ssl, proxy=proxy)
        self._app_id = app_id
        self._folder = folder
        self._safe = safe
        self._credentials_list = argToList(credentials_object)
        self._username = username
        self._password = password
        self._cert_text = cert_text
        self._key_text = key_text
        self.auth = self.create_windows_authentication_param()
        # crt: (cert_path, key_path) or None; cf/kf: the backing temp files
        self.crt, self.cf, self.kf = self.create_crt_param()
    def create_windows_authentication_param(self):
        """Return an NTLM auth object when a username is configured, else None."""
        auth = None
        if self._username:
            # if username and password were added - use ntlm authentication
            auth = HttpNtlmAuth(self._username, self._password)
        return auth
    def create_crt_param(self):
        """Materialize the PEM cert/key text into temp files for requests.

        Returns ((cert_path, key_path), cert_file, key_file) or (None, None, None)
        when no certificate is configured. The temp files are created with
        delete=False and are cleaned up in main()'s finally block.
        """
        if not self._cert_text and not self._key_text:
            return None, None, None
        elif not self._cert_text or not self._key_text:
            raise Exception('You can not configure either certificate text or key, both are required.')
        elif self._cert_text and self._key_text:
            # PEM bodies pasted as params lose their newlines; restore them in
            # the base64 payload between the '-----' delimiters.
            cert_text_list = self._cert_text.split('-----')
            # replace spaces with newline characters
            cert_text_fixed = '-----'.join(
                cert_text_list[:2] + [cert_text_list[2].replace(' ', '\n')] + cert_text_list[3:])
            cf = tempfile.NamedTemporaryFile(delete=False)
            cf.write(cert_text_fixed.encode())
            cf.flush()
            key_text_list = self._key_text.split('-----')
            # replace spaces with newline characters
            key_text_fixed = '-----'.join(
                key_text_list[:2] + [key_text_list[2].replace(' ', '\n')] + key_text_list[3:])
            kf = tempfile.NamedTemporaryFile(delete=False)
            kf.write(key_text_fixed.encode())
            kf.flush()
            return (cf.name, kf.name), cf, kf
    def get_credentials(self, creds_object: str):
        """GET a single credential object from the AIM web service."""
        url_suffix = '/AIMWebService/api/Accounts'
        params = {
            "AppID": self._app_id,
            "Safe": self._safe,
            "Folder": self._folder,
            "Object": creds_object,
        }
        return self._http_request("GET", url_suffix, params=params, auth=self.auth, cert=self.crt)
    def list_credentials(self):
        """Fetch every credential object configured in the instance params."""
        credential_result = [self.get_credentials(credentials) for credentials in self._credentials_list]
        return credential_result
def list_credentials_command(client):
    """List every configured credential with the secret value stripped.

    :param client: the configured ``Client`` instance
    :return: CommandResults describing the credentials (passwords removed)
    """
    credentials = client.list_credentials()
    for credential in credentials:
        # "Content" carries the actual password - drop it before output; this
        # also keeps the secret out of raw_response below.
        credential.pop("Content", None)
    return CommandResults(
        outputs=credentials,
        raw_response=credentials,
        outputs_prefix='CyberArkAIM',
        outputs_key_field='Name',
    )
def fetch_credentials(client, args: dict):
    """Fetch the available credentials and pass them to the server.

    :param client: the client object with the given params
    :param args: demisto args dict; may carry an ``identifier`` to fetch a
        single named credential instead of the whole configured list
    :return: None - results are delivered via ``demisto.credentials``
    """
    creds_name = args.get('identifier')
    # BUGFIX: demisto.debug takes a single message argument; the previous call
    # passed the credential name as a second positional argument.
    demisto.debug(f'name of cred used: {creds_name}')
    if creds_name:
        try:
            creds_list = [client.get_credentials(creds_name)]
        except Exception as e:
            # best-effort: a missing credential yields an empty result set
            demisto.debug(f"Could not fetch credentials: {creds_name}. Error: {e}")
            creds_list = []
    else:
        creds_list = client.list_credentials()
    credentials = []
    for cred in creds_list:
        credentials.append({
            "user": cred.get("UserName"),
            "password": cred.get("Content"),
            "name": cred.get("Name"),
        })
    demisto.credentials(credentials)
def test_module(client: Client) -> str:
    """Check connectivity to the AIM server with the configured params.

    :param client: the configured ``Client`` instance
    :return: "ok" when the server answered (directly or with 404/500)
    """
    if not client._credentials_list:
        try:
            # No credentials configured - probe the endpoint with a dummy
            # object name; a 404/500 answer still proves connectivity.
            client.get_credentials("test_cred")
        except DemistoException as e:
            if 'Error in API call [500]' not in e.message and 'Error in API call [404]' not in e.message:
                raise e
            return 'ok'
    else:
        client.list_credentials()
    return "ok"
def main():
    """Entry point: build the CyberArk AIM client from the instance params and
    dispatch the invoked command; always clean up the temp cert/key files."""
    params = demisto.params()
    url = params.get('url')
    use_ssl = not params.get('insecure', False)
    proxy = params.get('proxy', False)
    app_id = params.get('app_id') or ""
    folder = params.get('folder')
    safe = params.get('safe')
    credentials_object = params.get('credential_names') or ""
    cert_text = params.get('cert_text') or ""
    key_text = params.get('key_text') or ""
    username = ""
    password = ""
    if params.get('credentials'):
        # credentials are not mandatory in this integration
        username = params.get('credentials').get('identifier')
        password = params.get('credentials').get('password')
    client = None  # so the finally block is safe even if Client() raises
    try:
        client = Client(server_url=url, use_ssl=use_ssl, proxy=proxy, app_id=app_id, folder=folder, safe=safe,
                        credentials_object=credentials_object, username=username, password=password,
                        cert_text=cert_text, key_text=key_text)
        command = demisto.command()
        demisto.debug(f'Command being called in CyberArk AIM is: {command}')
        if command == 'test-module':
            return_results(test_module(client))
        elif command == 'cyberark-aim-list-credentials':
            return_results(list_credentials_command(client))
        elif command == 'fetch-credentials':
            fetch_credentials(client, demisto.args())
        else:
            raise NotImplementedError(f'{command} is not an existing CyberArk AIM command')
    except Exception as err:
        return_error(f'Unexpected error: {str(err)}', error=traceback.format_exc())
    finally:
        # Remove the temporary certificate/key files created by the client
        # (they are NamedTemporaryFile(delete=False)).
        try:
            if client and client.crt:
                cf_name, kf_name = client.crt
                if client.cf:
                    client.cf.close()
                    os.remove(cf_name)
                # BUGFIX: previously checked 'client.cf' again, so the key
                # temp file was closed/removed based on the wrong handle.
                if client.kf:
                    client.kf.close()
                    os.remove(kf_name)
        except Exception as err:
            return_error(f"CyberArk AIM error: {str(err)}")
# XSOAR executes integrations via exec(); __name__ is then 'builtin(s)'.
if __name__ in ['__main__', 'builtin', 'builtins']:
    main()
| mit | bf7f4c1f83eeb39b0dc7afe06310eb50 | 35.671795 | 110 | 0.599776 | 4.019674 | false | false | false | false |
demisto/content | Packs/FireMonSecurityManager/Integrations/FireMonSecurityManager/FireMonSecurityManager.py | 2 | 17978 | import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
""" IMPORTS """
from typing import Any, Dict
""" CONSTANTS """
# Timestamp format used for FireMon API payloads.
DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
# Relative endpoints of the Security Manager / Policy Planner / Orchestration APIs.
AUTH_URL = "securitymanager/api/authentication/login"
WORKFLOW_URL = "/policyplanner/api/domain/{0}/workflow/version/latest/all"
CREATE_PP_TICKET_URL = "/policyplanner/api/domain/{0}/workflow/{1}/packet"
PCA_URL_SUFFIX = "/orchestration/api/domain/{}/change/device/{}/pca"
RULE_REC_URL = "orchestration/api/domain/{}/change/rulerec"
PAGED_SEARCH_URL = "securitymanager/api/siql/secrule/paged-search"
# NOTE(review): module-level mutable template. Client.create_pp_ticket mutates
# it in place and appends dict() snapshots per requirement - safe today, but a
# fresh dict per call would be more robust; confirm before refactoring.
create_pp_payload = {
    "sources": [""],
    "destinations": [""],
    "action": "",
    "services": [""],
    "requirementType": "RULE",
    "childKey": "add_access",
    "variables": {},
}
def get_rule_rec_request_payload():
    """Return a fresh request-body template for the orchestration rule
    recommendation (rulerec) API; callers fill in the real values."""
    template = {
        "apps": [],
        "destinations": [""],
        "services": [""],
        "sources": [""],
        "users": [],
        "requirementType": "RULE",
        "childKey": "add_access",
        "variables": {"expiration": "null", "review": "null"},
        "action": "",
    }
    return template
def get_create_pp_ticket_payload():
    """Return a fresh request-body template for creating a Policy Planner
    ticket; the caller overrides priority/dueDate and fills the requirements."""
    template = {
        "variables": {
            "summary": "Request Test06",
            "businessNeed": "",
            "priority": "LOW",
            "dueDate": "2021-05-29 13:44:58",
            "applicationName": "",
            "customer": "",
            "externalTicketId": "",
            "notes": "",
            "requesterName": "System Administrator",
            "requesterEmail": "",
            "applicationOwner": "",
            "integrationRecord": "",
            "carbonCopy": [""],
        },
        "policyPlanRequirements": [],
    }
    return template
class Client(BaseClient):
def __init__(self, base_url: str, verify: bool, proxy: bool, username: str, password: str):
super().__init__(base_url=base_url, verify=verify, proxy=proxy)
self._username = username
self._password = password
def authenticate_user(self):
headers = {"Accept": "application/json", "Content-Type": "application/json"}
api_response = self._http_request(
method="POST",
url_suffix=AUTH_URL,
json_data={"username": self._username, "password": self._password},
headers=headers,
)
return api_response
def get_all_workflow(self, auth_token, domain_id, parameters):
headers = {"Accept": "application/json", "Content-Type": "application/json", "X-FM-Auth-Token": auth_token}
workflow_url = WORKFLOW_URL.format(domain_id)
api_response = self._http_request(method="GET", url_suffix=workflow_url, params=parameters, headers=headers)
list_of_workflow = []
for workflow in api_response.get("results"):
if workflow["workflow"]["pluginArtifactId"] == "access-request":
workflow_name = workflow["workflow"]["name"]
list_of_workflow.append(workflow_name)
return list_of_workflow
def get_list_of_workflow(self, auth_token, domain_id, parameters):
headers = {"Accept": "application/json", "Content-Type": "application/json", "X-FM-Auth-Token": auth_token}
workflow_url = WORKFLOW_URL.format(domain_id)
api_response = self._http_request(method="GET", url_suffix=workflow_url, params=parameters, headers=headers)
return api_response
def get_workflow_id_by_workflow_name(self, domain_id, workflow_name, auth_token, parameters):
list_of_workflow = self.get_list_of_workflow(auth_token, domain_id, parameters)
count_of_workflow = list_of_workflow.get("total")
if count_of_workflow > 10:
parameters = {"includeDisabled": False, "pageSize": count_of_workflow}
list_of_workflow = self.get_list_of_workflow(auth_token, domain_id, parameters)
for workflow in list_of_workflow.get("results"):
if (workflow["workflow"]["pluginArtifactId"] == "access-request") and (
workflow["workflow"]["name"] == workflow_name
):
workflow_id = workflow["workflow"]["id"]
return workflow_id
def create_pp_ticket(self, auth_token, payload):
parameters = {"includeDisabled": False, "pageSize": 10}
workflow_id = self.get_workflow_id_by_workflow_name(
payload["domainId"], payload["workflowName"], auth_token, parameters
)
headers = {"Accept": "application/json", "Content-Type": "application/json", "X-FM-Auth-Token": auth_token}
data = get_create_pp_ticket_payload()
data["variables"]["priority"] = payload["priority"]
data["variables"]["dueDate"] = payload["due_date"].replace("T", " ")[:-6]
list_of_requirements = payload["requirements"]
for i in range(len(list_of_requirements)):
req_payload = list_of_requirements[i]
input_data = create_pp_payload
input_data["sources"] = list(req_payload["sources"].split(","))
input_data["destinations"] = list(req_payload["destinations"].split(","))
input_data["services"] = list(req_payload["services"].split(","))
input_data["action"] = req_payload["action"]
data["policyPlanRequirements"].append(dict(input_data))
create_pp_ticket_url = CREATE_PP_TICKET_URL.format(payload["domainId"], workflow_id)
api_response = self._http_request(
method="POST", url_suffix=create_pp_ticket_url, headers=headers, json_data=data
)
return api_response
def validate_pca_change(self, payload_pca, pca_url_suffix, headers):
api_response = self._http_request(
method="POST", url_suffix=pca_url_suffix, json_data=payload_pca, headers=headers, params=None, timeout=40
)
return api_response
def rule_rec_api(self, auth_token, payload):
"""Calling orchestration rulerec api by passing json data as request body, headers, params and domainId
which returns you list of rule recommendations for given input as response"""
parameters = {
"deviceGroupId": payload["deviceGroupId"],
"addressMatchingStrategy": "INTERSECTS",
"modifyBehavior": "MODIFY",
"strategy": None,
}
data = get_rule_rec_request_payload()
data["destinations"] = payload["destinations"]
data["sources"] = payload["sources"]
data["services"] = payload["services"]
data["action"] = payload["action"]
rule_rec_api_response = self._http_request(
method="POST",
url_suffix=RULE_REC_URL.format(payload["domainId"]),
json_data=data,
params=parameters,
headers={
"Content-Type": "application/json",
"Accept": "application/json",
"X-FM-Auth-Token": auth_token,
},
)
return rule_rec_api_response
def rule_rec_output(self, auth_token, payload):
"""Calling orchestration rulerec api by passing json data as request body, headers, params and domainId
which returns you list of rule recommendations for given input as response"""
parameters = {
"deviceId": payload["deviceId"],
"addressMatchingStrategy": "INTERSECTS",
"modifyBehavior": "MODIFY",
"strategy": None,
}
data = get_rule_rec_request_payload()
data["destinations"] = payload["destinations"]
data["sources"] = payload["sources"]
data["services"] = payload["services"]
data["action"] = payload["action"]
rule_rec_api_response = self._http_request(
method="POST",
url_suffix=RULE_REC_URL.format(payload["domainId"]),
json_data=data,
params=parameters,
headers={
"Content-Type": "application/json",
"Accept": "application/json",
"X-FM-Auth-Token": auth_token,
},
)
return rule_rec_api_response
def get_paged_search_secrule(self, auth_token: str, payload: Dict[str, Any]):
    """Calling siql paged search api for searching security rules
    using `SIQL` language query
    Args:
        auth_token (str): authentication token
        payload (Dict[str, Any]): payload holding 'q', 'pageSize' and 'page'
    """
    query_params: Dict[str, Any] = {key: payload[key] for key in ("q", "pageSize", "page")}
    return self._http_request(
        method="GET",
        url_suffix=PAGED_SEARCH_URL,
        params=query_params,
        headers={
            "Content-Type": "application/json",
            "Accept": "application/json",
            "X-FM-Auth-Token": auth_token,
        },
    )
def test_module(client):
    """Integration connectivity check: 'ok' when authentication succeeds, error text otherwise."""
    if client.authenticate_user().get("authorized"):
        return "ok"
    return "Error in API call in FireMonSecurityManager Integrations"
def authenticate_command(client):
    """Authenticate against SecurityManager and wrap the returned token in CommandResults."""
    response = client.authenticate_user()
    token = response.get("token")
    return CommandResults(
        outputs_prefix="FireMonSecurityManager.Authentication",
        outputs_key_field="token",
        outputs=token,
        readable_output=tableToMarkdown(
            name="FireMon SecurityManager Authentication Token:", t={"token": token}, removeNull=True
        ),
        raw_response=response,
    )
def create_pp_ticket_command(client, args):
    """Create a Policy Planner ticket from the demisto arguments and return it as CommandResults."""
    auth_token = authenticate_command(client).outputs
    payload = {
        "domainId": args.get("domain_id"),
        "workflowName": args.get("workflow_name"),
        "requirements": args.get("requirement"),
        "priority": args.get("priority"),
        "due_date": args.get("due_date"),
    }
    response = client.create_pp_ticket(auth_token, payload)
    return CommandResults(
        outputs_prefix="FireMonSecurityManager.CreatePPTicket",
        outputs_key_field="pp_ticket",
        outputs=response,
        readable_output=tableToMarkdown(name="FireMon SecurityManager Create PP Ticket:", t=response, removeNull=True),
        raw_response=response,
    )
def pca_command(client, args):
    """Run a Pre-Change Assessment (PCA) for the requested rule changes.

    Fetches rule recommendations for the requirement, validates the actionable
    changes per device and returns the pruned PCA results as CommandResults.

    Args:
        client: FireMon SecurityManager client.
        args: demisto arguments (sources, destinations, services, action,
            domain_id, device_group_id).
    """
    no_match_msg = ("No matching rule found for this requirement, "
                    "Please go back and update the requirement")
    auth_token = authenticate_command(client).outputs
    payload = dict(
        sources=list(args.get("sources").split(",")),
        destinations=list(args.get("destinations").split(",")),
        services=list(args.get("services").split(",")),
        action=args.get("action"),
        domainId=args.get("domain_id"),
        deviceGroupId=args.get("device_group_id"),
    )
    payload_rule_rec = client.rule_rec_api(auth_token, payload)
    device_changes = payload_rule_rec["deviceChanges"]
    if not device_changes:
        return CommandResults(
            outputs_prefix="FireMonSecurityManager.PCA",
            outputs_key_field="pca",
            outputs=no_match_msg,
            readable_output=tableToMarkdown(
                name="FireMon SecurityManager PCA:", t={"pca": no_match_msg}, removeNull=True
            ),
            raw_response=no_match_msg,
        )
    # Headers are identical for every device, build them once.
    headers = {"Content-Type": "application/json", "accept": "application/json", "X-FM-Auth-Token": auth_token}
    result = {}
    for i, device_change in enumerate(device_changes):
        device_id = device_change["deviceId"]
        # Keep only the rule changes that actually require an action.
        filtered_rules = [rule for rule in device_change["ruleChanges"] if rule["action"] != "NONE"]
        # Bug fix: the original compared the (always non-None) list against None,
        # so the "nothing to change" branch was unreachable.
        if not filtered_rules:
            return "No Rules Needs to be changed!"
        result[i] = client.validate_pca_change(
            filtered_rules, PCA_URL_SUFFIX.format(args.get("domain_id"), device_id), headers
        )
        _prune_pca_result(result[i])
    return CommandResults(
        outputs_prefix="FireMonSecurityManager.PCA",
        outputs_key_field="pca",
        outputs=result,
        readable_output=tableToMarkdown(
            name="FireMon SecurityManager PCA:",
            t=result[0]["pcaResult"]["preChangeAssessmentControls"],
            removeNull=True,
        ),
        raw_response=device_changes,
    )


def _prune_pca_result(entry):
    """Remove noisy/transient fields from a single PCA API response, in place."""
    entry.pop("requestId", None)
    pca_result = entry.get("pcaResult")
    if not isinstance(pca_result, dict):
        return
    for key in ("startDate", "endDate", "affectedRules"):
        pca_result.pop(key, None)
    device = pca_result.get("device")
    if isinstance(device, dict):
        for key in ("parents", "children", "gpcDirtyDate", "gpcComputeDate", "gpcImplementDate",
                    "state", "managedType", "gpcStatus", "updateMemberRuleDoc", "devicePack"):
            device.pop(key, None)
def get_paged_search_secrule(client: "Client", auth_token: str, payload: Dict[str, Any]) -> List:
    """Fetch every page of a SIQL security-rule search.

    Args:
        client: `Client` class object.
        auth_token: authentication token to use.
        payload: parameter payload; must contain 'q', 'pageSize' and 'page'.

    Returns:
        List[Dict[str, Any]]: the concatenated 'results' of every page.
    """
    result: List = []
    response = client.get_paged_search_secrule(auth_token, payload)
    result.extend(response.get("results", []))
    total = response.get("total", 0)
    page_size = payload.get("pageSize")
    # Index of the last page that holds data. The previous `total // pageSize` bound
    # issued one extra (empty) request whenever `total` was an exact multiple of the
    # page size.
    last_page = (total - 1) // page_size if total else 0
    while payload.get("page") < last_page:  # NOTE: Check if we can implement async here
        payload["page"] += 1
        response = client.get_paged_search_secrule(auth_token, payload)
        result.extend(response.get("results", []))
    return result
def secmgr_secrule_search_command(client: Client, args: Dict[str, Any]):
    """Searches for security rules using the SIQL language query
    Args:
        client (Client): `Client` class object
        args (Dict[str, Any]): demisto arguments passed
    """
    auth_token = authenticate_command(client).outputs
    requested_size = int(args.get("pageSize", 10))
    # page size can't be less than 1
    page_size = requested_size if requested_size >= 1 else 1
    payload = dict(
        q=str(args.get("q")),
        pageSize=page_size,
        page=int(args.get("page", 0)),
    )
    results = get_paged_search_secrule(client, auth_token, payload)
    return CommandResults(
        outputs_prefix="FireMonSecurityManager.SIQL",
        outputs_key_field="matchId",
        outputs=results,
        readable_output=tableToMarkdown(
            name="FireMon SecurityManager SIQL:",
            t=results,
            removeNull=True,
            headerTransform=pascalToSpace,
        ),
        raw_response=results,
    )
def main():
    """Entry point: build the client and dispatch the invoked command."""
    params = demisto.params()
    credentials = params.get("credentials")
    username = credentials.get("identifier")
    password = credentials.get("password")
    verify_certificate = not params.get("insecure", False)
    base_url = urljoin(params["url"])
    proxy = params.get("proxy", False)
    try:
        client = Client(
            base_url=base_url, verify=verify_certificate, proxy=proxy, username=username, password=password
        )
        command = demisto.command()
        if command == "test-module":
            demisto.results(test_module(client))
        elif command == "firemon-user-authentication":
            return_results(authenticate_command(client))
        elif command == "firemon-create-pp-ticket":
            return_results(create_pp_ticket_command(client, demisto.args()))
        elif command == "firemon-pca":
            return_results(pca_command(client, demisto.args()))
        elif command == "firemon-secmgr-secrule-search":
            return_results(secmgr_secrule_search_command(client, demisto.args()))
    except Exception as e:
        return_error(f"Failed to execute {demisto.command()} command. Error: {str(e)}")


if __name__ in ("__main__", "__builtin__", "builtins"):
    main()
| mit | b1184062b5e6efb36a3cbca76e4347e5 | 39.674208 | 119 | 0.595339 | 3.995999 | false | false | false | false |
demisto/content | Packs/qualys/Scripts/QualysCreateIncidentFromReport/QualysCreateIncidentFromReport_test.py | 2 | 1514 | import demistomock as demisto
from QualysCreateIncidentFromReport import main, get_asset_id_for_ip
def test_main(mocker):
    """
    Tests the full flow of the script
    Given: A valid report and successful responses
    When: Running the QualysCreateIncidentReport script
    Then: Return a successful response
    """
    with open('test_data/qualys_host_list_rawresponse.xml') as xml_file:
        host_list_xml = xml_file.read()
    mocker.patch.object(demisto, 'args', return_value=dict())
    # NOTE(review): 'id': id stores the *builtin* id function, probably meant to be a
    # string id; kept as-is because the script under test does not read this key.
    mocker.patch.object(demisto, 'getFilePath',
                        return_value={'id': id, 'path': 'test_data/test_report.xml', 'name': 'test_report.xml'})
    # First executeCommand call returns the host list, the second the incident creation result.
    mocker.patch.object(demisto, 'executeCommand',
                        side_effect=[[{'Contents': host_list_xml, 'Type': 'notes'}],
                                     [{"Contents": {"total": 1}, 'Type': 'notes'}]])
    demisto_results = mocker.spy(demisto, 'results')
    main()
    demisto_results.assert_called_once_with("Done.")
def test_get_asset_id_for_ip(mocker):
    """
    Tests parsing the data returned by qualys-host-list
    Given: A valid response from qualys
    When: Parsing for the incidentid
    Then: Return a valid id
    """
    with open('test_data/qualys_host_list_rawresponse.xml') as xml_file:
        host_list_xml = xml_file.read()
    mocker.patch.object(demisto, 'executeCommand', return_value=[{'Contents': host_list_xml, 'Type': 'note'}])
    assert get_asset_id_for_ip('1.1.1.1') == '69291564'
| mit | 886a48666321dd9bda2f4b505fcda0d6 | 37.820513 | 112 | 0.61823 | 3.692683 | false | true | false | false |
demisto/content | Tests/sdknightly/create_entities_for_nightly_sdk.py | 2 | 5120 | import argparse
import json
import os
import shutil
import subprocess
from pathlib import Path
from typing import Tuple
def run_command(cmd: str) -> Tuple[str, str]:
    """Run *cmd* (whitespace-split, no shell) and return its (stdout, stderr) as text."""
    process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                               encoding='utf-8')
    return process.communicate()
def create_incident_field(path: Path, incident_to_associate: str) -> str:
    """
    Creates an incident field
    Args:
        path: A path of the pack
        incident_to_associate: an incident type to associate the incident field
    Returns:
        The path to the incident field
    """
    sample_path = 'Packs/HelloWorld/IncidentFields/incidentfield-Hello_World_Status.json'
    with open(sample_path) as stream:
        field = json.load(stream)
    name = 'Hello World IncidentField Test'
    cli_name = name.lower().replace(' ', '')
    field.update({
        'name': name,
        'cliName': cli_name,
        'id': f'incident_{cli_name}',
        'associatedTypes': [incident_to_associate],
    })
    dest_dir = path / 'IncidentFields'
    if not os.path.isdir(dest_dir):
        os.mkdir(dest_dir)
    field_path = dest_dir / f'incidentfield-{name.replace(" ", "_")}.json'
    with open(field_path, 'w+') as stream:
        json.dump(field, stream, indent=4)
    return str(field_path)
def create_layout(path: Path, layout_name: str) -> str:
    """
    Creates a layout
    Args:
        path: A path of the pack
        layout_name: a layout name to create
    Returns:
        The path to the layout
    """
    sample_path = Path('Packs/HelloWorld/Layouts/layoutscontainer-Hello_World_Alert.json')
    with open(sample_path) as stream:
        layout = json.load(stream)
    layout.update({'id': layout_name, 'name': layout_name})
    dest_dir = path / 'Layouts'
    if not os.path.isdir(dest_dir):
        os.mkdir(dest_dir)
    layout_path = dest_dir / f'layoutscontainer-{layout_name.replace(" ", "_")}.json'
    with open(layout_path, 'w+') as stream:
        json.dump(layout, stream, indent=4)
    return str(layout_path)
def create_incident_type(path: Path, layout_name: str) -> str:
    """
    Creates an incident type
    Args:
        path: A path of the pack
        layout_name: a layout to associate the incident type with
    Returns:
        The path to the incident type
    """
    sample_path = Path('Packs/HelloWorld/IncidentTypes/incidenttype-Hello_World_Alert.json')
    with open(sample_path) as stream:
        incident_type = json.load(stream)
    name = 'Hello World Alert Test'
    incident_type.update({'name': name, 'id': name, 'layout': layout_name})
    dest_dir = path / 'IncidentTypes'
    if not os.path.isdir(dest_dir):
        os.mkdir(dest_dir)
    incident_path = dest_dir / f'incidenttype-{name.replace(" ", "_")}.json'
    with open(incident_path, 'w+') as stream:
        json.dump(incident_type, stream, indent=4)
    return str(incident_path)
def create_mapper(path: Path) -> str:
    """
    Creates a mapper
    Args:
        path: A path of the pack
    Returns:
        The path to the mapper
    """
    sample_path = Path('Packs/HelloWorld/Classifiers/classifier-mapper-incoming-HelloWorld.json')
    with open(sample_path) as stream:
        mapper = json.load(stream)
    mapper.update({
        'name': 'Hello World Test - Incoming Mapper',
        'id': 'HelloWorld-mapper Test',
    })
    dest_dir = path / 'Classifiers'
    if not os.path.isdir(dest_dir):
        os.mkdir(dest_dir)
    mapper_path = dest_dir / 'classifier-mapper-incoming-HelloWorldTest.json'
    with open(mapper_path, 'w+') as stream:
        json.dump(mapper, stream, indent=4)
    return str(mapper_path)
def main():
    """Create the test entities inside the given pack and optionally copy them to artifacts."""
    parser = argparse.ArgumentParser(description="Creates incident field, incident type, mapper and a "
                                                 "layout in a given pack.")
    parser.add_argument('pack_name')
    parser.add_argument('--artifacts-folder', required=False)
    args = parser.parse_args()
    pack_path = Path('Packs') / args.pack_name
    layout_name = 'Hello World Test Layout'
    uploaded_entities = [
        create_layout(pack_path, layout_name),
        create_incident_field(pack_path, 'Hello World Alert Test'),
        create_incident_type(pack_path, layout_name),
        create_mapper(pack_path),
    ]
    print("Created entities:")
    print("\t" + "\n\t".join(uploaded_entities))
    if args.artifacts_folder:
        # Keep a copy of every created entity under <artifacts>/UploadedEntities.
        entities_folder = Path(args.artifacts_folder) / 'UploadedEntities'
        if not os.path.isdir(entities_folder):
            os.mkdir(entities_folder)
        print(f"Storing files to {entities_folder}")
        for file in uploaded_entities:
            file_name = file.split('/')[-1]
            shutil.copyfile(file, entities_folder / file_name)
            print(f"file: {file_name} stored.")
# Bug fix: `__name__ in '__main__'` is a substring-containment test (e.g. '' or 'main'
# would also match); an equality check is the intended and conventional guard.
if __name__ == '__main__':
    main()
| mit | e64d42ff636457b9dd606b479bf3e445 | 30.411043 | 120 | 0.631641 | 3.664996 | false | false | false | false |
demisto/content | Packs/EmailCommunication/Scripts/PreprocessEmail/PreprocessEmail.py | 2 | 19552 | import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
import json
import re
import random
from datetime import datetime as dt
ERROR_TEMPLATE = 'ERROR: PreprocessEmail - {function_name}: {reason}'
# List of strings that mail clients use to separate new message content from previous thread messages when replying
QUOTE_MARKERS = ['<div class="gmail_quote">',
                 '<hr tabindex="-1" style="display:inline-block; width:98%"><div id="divRplyFwdMsg"',
                 '<hr style="display:inline-block;width:98%" tabindex="-1"><div id="divRplyFwdMsg"']


def create_email_html(email_html='', entry_id_list=None):
    """Modify the email's html body to use entry IDs instead of CIDs and remove the original
    message body if it exists.

    Args:
        email_html (str): The HTML body of the email.
        entry_id_list (list): (image_name, file_entry_id) pairs; may be None/empty.

    Returns:
        str. Email Html.
    """
    # Bug fix: the original iterated entry_id_list directly, which raised a TypeError
    # whenever the function was called with the default value (None).
    entry_id_list = entry_id_list or []
    # Removing the conversation's history: keep only the content before the first marker.
    for marker in QUOTE_MARKERS:
        index = email_html.find(marker)
        if index != -1:
            email_html = f'{email_html[:index]}</body></html>'
    # Replacing the images' sources
    for image_name, image_entry_id in entry_id_list:
        if re.search(f'src="[^>]+"(?=[^>]+alt="{image_name}")', email_html):
            email_html = re.sub(f'src="[^>]+"(?=[^>]+alt="{image_name}")',
                                f'src=entry/download/{image_entry_id}',
                                email_html
                                )
        # Handling inline attachments from Outlook mailboxes
        # Note: when tested, entry id list and inline attachments were in the same order, so there was no need in
        # special validation that the right src was being replaced.
        else:
            email_html = re.sub('(src="cid(.*?"))',
                                f'src=entry/download/{image_entry_id}',
                                email_html, count=1,
                                )
    return email_html
def get_entry_id_list(attachments, files):
    """Map each email attachment to the entry ID of the matching incident file.

    Args:
        attachments (list): The attachments of the email.
        files (list|dict): The uploaded files in the context of the related incident.

    Returns:
        list of tuples. (attachment_name, file_entry_id).
    """
    if not (attachments and files):
        return []
    if not isinstance(files, list):
        files = [files]
    pairs = []
    for attachment in attachments:
        attachment_name = attachment.get('name', '')
        pairs.extend((attachment_name, file.get('EntryID'))
                     for file in files if file.get('Name') == attachment_name)
    return pairs
def add_entries(email_reply, email_related_incident):
    """Add the email reply as an 'email-thread' note entry on the related incident.

    Args:
        email_reply: The email reply (rendered HTML).
        email_related_incident: The related incident id.

    Raises:
        DemistoException: when the addEntries server command fails.
    """
    entries_str = json.dumps(
        [{"Type": 1, "ContentsFormat": 'html', "Contents": email_reply, "tags": ['email-thread']}])
    res = demisto.executeCommand("addEntries", {"entries": entries_str, 'id': email_related_incident})
    if is_error(res):
        # Bug fix: ERROR_TEMPLATE uses named placeholders ({function_name}, {reason}),
        # so the previous positional .format() call raised KeyError instead of formatting.
        error_msg = ERROR_TEMPLATE.format(function_name='addEntries', reason=res['Contents'])
        demisto.error(error_msg)
        raise DemistoException(error_msg)
def set_email_reply(email_from, email_to, email_cc, html_body, attachments):
    """Build the text/HTML entry body that represents a received email reply.

    Args:
        email_from: The email author mail.
        email_to: The email recipients.
        email_cc: The email cc.
        html_body: The email body.
        attachments: The email attachments.

    Returns:
        str. Email reply.
    """
    email_reply = f"""
From: *{email_from}*
To: *{email_to}*
CC: *{email_cc}*
"""
    if attachments:
        attachment_names = [attachment.get('name', '') for attachment in attachments]
        email_reply += f'Attachments: {attachment_names}\n\n'
    email_reply += f'{html_body}\n'
    return email_reply
def get_incident_by_query(query):
    """
    Get a query and return all incidents details matching the given query.

    Args:
        query: Query for the incidents that should be returned.

    Returns:
        dict. The details of all incidents matching the query.

    Raises:
        DemistoException: when the GetIncidentsByQuery command fails.
    """
    # In order to avoid performance issues, limit the number of days to query back for modified incidents. By default
    # the limit is 60 days and can be modified by the user by adding a list called
    # `XSOAR - Email Communication Days To Query` (see README for more information).
    query_time = get_query_window()
    query_from_date = str(parse_date_range(query_time)[0])
    res = demisto.executeCommand("GetIncidentsByQuery", {"query": query, "fromDate": query_from_date,
                                                         "timeField": "modified",
                                                         "populateFields": "id,status,type,emailsubject"})[0]
    if is_error(res):
        # Bug fix: ERROR_TEMPLATE uses named placeholders, so positional .format()
        # arguments raised KeyError instead of producing the error message.
        error_msg = ERROR_TEMPLATE.format(function_name='GetIncidentsByQuery', reason=res['Contents'])
        return_results(error_msg)
        raise DemistoException(error_msg)
    return json.loads(res['Contents'])
def check_incident_status(incident_details, email_related_incident):
    """Reopen the related incident when it is closed.

    Args:
        incident_details: The incident details (as returned by GetIncidentsByQuery).
        email_related_incident: The related incident id.

    Raises:
        DemistoException: when reopenInvestigation fails.
    """
    if incident_details.get('status') == 2:  # closed incident status
        res = demisto.executeCommand("reopenInvestigation", {"id": email_related_incident})
        if is_error(res):
            # Bug fix: ERROR_TEMPLATE has named placeholders; positional .format()
            # arguments raised KeyError instead of building the message.
            error_msg = ERROR_TEMPLATE.format(function_name=f'Reopen incident {email_related_incident}',
                                              reason=res['Contents'])
            demisto.error(error_msg)
            raise DemistoException(error_msg)
def get_attachments_using_instance(email_related_incident, labels):
    """Use the instance from which the email was received to fetch the attachments.
    Only supported with: EWS V2, EWSO365, Gmail, Gmail Single User.

    Args:
        email_related_incident (str): ID of the incident to attach the files to.
        labels (Dict): Incident's labels to fetch the relevant data from.
    """
    # Collect the label values we need; a later label of the same type wins,
    # matching the original scan order.
    label_values = {label.get('type'): label.get('value') for label in labels}
    message_id = label_values.get('Email/ID', '')
    instance_name = label_values.get('Instance', '')
    integration_name = label_values.get('Brand', '')
    if integration_name in ['EWS v2', 'EWSO365']:
        demisto.executeCommand("executeCommandAt",
                               {'command': 'ews-get-attachment', 'incidents': email_related_incident,
                                'arguments': {'item-id': str(message_id), 'using': instance_name}})
    elif integration_name in ['Gmail', 'Gmail Single User']:
        demisto.executeCommand("executeCommandAt",
                               {'command': 'gmail-get-attachments', 'incidents': email_related_incident,
                                'arguments': {'user-id': 'me', 'message-id': str(message_id), 'using': instance_name}})
    # Note: attachments are downloaded by default when emails are fetched using the graph integrations,
    # so this method isn't needed for them.
    else:
        demisto.debug('Attachments could only be retrieved from EWS v2 or Gmail')
def get_incident_related_files(incident_id):
    """Get the email reply attachments after they were uploaded to the server and saved
    to context of the email reply related incident.

    Args:
        incident_id (str): The ID of the incident whose context we'd like to get.

    Returns:
        list: the 'File' context entries, or [] on any failure.
    """
    try:
        res = demisto.executeCommand("getContext", {'id': incident_id})
        return dict_safe_get(res[0], ['Contents', 'context', 'File'], default_return_value=[])
    except Exception:
        # Best-effort lookup: a missing/partial context simply means "no files".
        return []
def update_latest_message_field(incident_id, item_id):
    """Update the 'emaillatestmessage' field on the email related incident with the ID of
    the latest email reply.

    Args:
        incident_id (str): The ID of the incident whose field we'd like to set.
        item_id (str): The email reply ID.
    """
    try:
        demisto.executeCommand('setIncident', {'id': incident_id, 'customFields': {'emaillatestmessage': item_id}})
    except Exception:
        # Non-fatal: log and continue, the thread handling does not depend on this field.
        demisto.debug(f'SetIncident Failed.'
                      f'"emaillatestmessage" field was not updated with {item_id} value for incident: {incident_id}')
def get_email_related_incident_id(email_related_incident_code, email_original_subject):
    """
    Get the email generated code and the original text subject of an email and return the incident matching to the
    email code and original subject. Returns None when no incident matches.
    """
    query = f'(emailgeneratedcode:{email_related_incident_code}) or (emailgeneratedcodes:*{email_related_incident_code}*)'
    incidents_details = get_incident_by_query(query)
    for incident in incidents_details:
        if email_original_subject in incident.get('emailsubject', ''):
            return incident.get('id')
        # If 'emailsubject' doesn't match, check 'EmailThreads' context entries.
        # Bug fix: initialize before the try block — previously the variable stayed
        # unbound (NameError) when getContext raised before the assignment.
        incident_email_threads = None
        try:
            incident_context = demisto.executeCommand("getContext", {"id": incident.get('id')})
            incident_email_threads = dict_safe_get(incident_context[0], ['Contents', 'context', 'EmailThreads'])
        except Exception as e:
            demisto.error(f'Exception while retrieving thread context: {e}')
        if incident_email_threads:
            if isinstance(incident_email_threads, dict):
                incident_email_threads = [incident_email_threads]
            search_result = next((i for i, item in enumerate(incident_email_threads)
                                  if email_original_subject in item["EmailSubject"]), None)
            if search_result is not None:
                return incident.get('id')
    return None
def get_unique_code():
    """
    Create an 8-digit unique random code that should be used to identify new created incidents.
    Args: None
    Returns:
        8-digit code returned as a string
    """
    while True:
        candidate = f'{random.randrange(1, 10 ** 8):08}'
        query = f'(emailgeneratedcode:*{candidate}*) or (emailgeneratedcodes:*{candidate}*)'
        # The code is unique only when no existing incident already carries it.
        if not get_incident_by_query(query):
            return candidate
def create_thread_context(email_code, email_cc, email_bcc, email_text, email_from, email_html,
                          email_latest_message, email_received, email_replyto, email_subject, email_to,
                          incident_id, attachments):
    """Creates a new context entry to store the email in the incident context. Checks current threads
    stored on the incident to get the thread number associated with this new message, if present.

    Args:
        email_code: The random code that was generated when the email was received
        email_cc: The email CC
        email_bcc: The email BCC
        email_text: The email body plaintext
        email_from: The email sender address
        email_html: The email body HTML
        email_latest_message: The email message ID
        email_received: Mailbox that received the email at XSOAR is fetching from
        email_replyto: The replyTo address from the email
        email_subject: The email subject
        email_to: The address the email was delivered to
        incident_id: ID of the related incident
        attachments: File attachments from the email
    """
    thread_number = ''
    thread_found = False
    try:
        # Get current email threads from context if any are present
        incident_context = demisto.executeCommand("getContext", {'id': incident_id})
        incident_email_threads = dict_safe_get(incident_context[0], ['Contents', 'context', 'EmailThreads'])
        if incident_email_threads:
            if isinstance(incident_email_threads, dict):
                incident_email_threads = [incident_email_threads]
            # Reuse the thread number when a thread already exists for this email code.
            matching_index = next((i for i, item in enumerate(incident_email_threads)
                                   if item["EmailCommsThreadId"] == email_code), None)
            if matching_index is not None:
                thread_number = incident_email_threads[matching_index]['EmailCommsThreadNumber']
                thread_found = True
            if not thread_found:
                # No related thread found - allocate the next number after the current maximum.
                max_thread_number = max((int(message['EmailCommsThreadNumber'])
                                         for message in incident_email_threads), default=0)
                thread_number = str(max_thread_number + 1)
        else:
            thread_number = '0'
        if len(thread_number) == 0:
            demisto.error('Failed to identify a Thread Number to set. Email not appended to incident context')
        if attachments:
            attachment_names = [attachment.get('name', '') for attachment in attachments]
        else:
            attachment_names = ["None"]
        email_message = {
            'EmailCommsThreadId': email_code,
            'EmailCommsThreadNumber': thread_number,
            'EmailCC': email_cc,
            'EmailBCC': email_bcc,
            'EmailBody': email_text,
            'EmailFrom': email_from,
            'EmailHTML': email_html,
            'MessageID': email_latest_message,
            'EmailReceived': email_received,
            'EmailReplyTo': email_replyto,
            'EmailSubject': email_subject,
            'EmailTo': email_to,
            'EmailAttachments': f'{attachment_names}',
            'MessageDirection': 'inbound',
            'MessageTime': get_utc_now().strftime("%Y-%m-%dT%H:%M:%SUTC")
        }
        # Add email message to context key
        try:
            demisto.executeCommand('executeCommandAt', {
                'command': 'Set', 'incidents': incident_id, 'arguments':
                    {'key': 'EmailThreads', 'value': email_message, 'append': 'true'}})
        except Exception as e:
            demisto.error(f"Failed to append new email to context of incident {incident_id}. Reason: {e}")
    except Exception as e:
        demisto.error(f"Unable to add new email message to Incident {incident_id}. Reason: \n {e}")
def main():
    """Pre-processing entry point: append the incoming email to its related incident when one
    exists (return False = don't create an incident), otherwise tag the new incident with a
    fresh email code (return True = create it)."""
    incident = demisto.incident()
    custom_fields = incident.get('CustomFields')
    email_from = custom_fields.get('emailfrom', '')
    email_cc = custom_fields.get('emailcc', '')
    email_bcc = custom_fields.get('emailbcc', '')
    email_to = custom_fields.get('emailto', '')
    email_subject = custom_fields.get('emailsubject', '')
    email_text = custom_fields.get('emailbody', '')
    email_html = custom_fields.get('emailhtml', '')
    email_received = custom_fields.get('emailreceived', '')
    email_replyto = custom_fields.get('emailreplyto', '')
    attachments = incident.get('attachment', [])
    email_latest_message = custom_fields.get('emaillatestmessage', '')
    try:
        # The subject is expected to look like "<12345678> original subject".
        email_related_incident_code = email_subject.split('<')[1].split('>')[0]
        email_original_subject = email_subject.split('<')[-1].split('>')[1].strip()
        email_related_incident = get_email_related_incident_id(email_related_incident_code, email_original_subject)
        update_latest_message_field(email_related_incident, email_latest_message)
        incident_details = get_incident_by_query(f"id:{email_related_incident}")[0]
        check_incident_status(incident_details, email_related_incident)
        get_attachments_using_instance(email_related_incident, incident.get('labels'))
        # Adding a 5 seconds sleep in order to wait for all the attachments to get uploaded to the server.
        time.sleep(5)
        files = get_incident_related_files(email_related_incident)
        entry_id_list = get_entry_id_list(attachments, files)
        html_body = create_email_html(email_html, entry_id_list)
        if incident_details['type'] == 'Email Communication':
            # Add new email message as Entry if type is 'Email Communication'
            demisto.debug(
                f"Incoming email related to Email Communication Incident {email_related_incident}. Appending message there.")
            email_reply = set_email_reply(email_from, email_to, email_cc, html_body, attachments)
            add_entries(email_reply, email_related_incident)
        else:
            # For all other incident types, add message details as context entry
            demisto.debug(
                f"Incoming email related to Incident {email_related_incident}. Appending message there.")
            create_thread_context(email_related_incident_code, email_cc, email_bcc, email_text, email_from, html_body,
                                  email_latest_message, email_received, email_replyto, email_subject, email_to,
                                  email_related_incident, attachments)
        # Return False - tell pre-processing to not create new incident
        return_results(False)
    except (IndexError, ValueError, DemistoException) as e:
        demisto.executeCommand('setIncident', {'id': incident.get('id'),
                                               'customFields': {'emailgeneratedcode': get_unique_code()}})
        # Return True - tell pre-processing to create new incident
        return_results(True)
        if type(e).__name__ == 'IndexError':
            demisto.debug('No related incident was found. A new incident was created.')
        else:
            demisto.debug(f"A new incident was created. Reason: \n {e}")


if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | a33931faa330f5ab07597cf2614b884e | 42.838565 | 125 | 0.62275 | 4.103253 | false | false | false | false |
demisto/content | Packs/PassiveTotal/Scripts/RiskIQPassiveTotalPDNSWidgetScript/RiskIQPassiveTotalPDNSWidgetScript.py | 2 | 1554 | from CommonServerPython import *
import traceback
from typing import Dict, Union, Any
def set_arguments_for_widget_view(indicator_data: Dict[str, Any]) -> Union[Dict[str, str], str]:
    """
    Prepare argument for commands or message to set custom layout of indicator
    """
    indicator_value = indicator_data.get('value', '')
    # Non-RiskIQAsset indicators are always queried by their value.
    if indicator_data.get('indicator_type', '').lower() != 'riskiqasset':
        return {'query': indicator_value}
    asset_type = indicator_data.get('CustomFields', {}).get('riskiqassettype', '')
    if asset_type == '':
        return 'Please provide value in the "RiskIQAsset Type" field to fetch detailed information of the asset.'
    if asset_type in ('Domain', 'IP Address'):
        return {'query': indicator_value}
    return 'No PDNS Record(s) were found for the given argument(s).'
def main() -> None:
    """Widget entry point: run pt-get-pdns-details when arguments are available,
    otherwise surface the informational message."""
    try:
        command_args = set_arguments_for_widget_view(demisto.args().get('indicator'))
        if isinstance(command_args, str):
            # A string means there is nothing to query - show the message as-is.
            demisto.results(command_args)
        else:
            demisto.results(demisto.executeCommand('pt-get-pdns-details', command_args))
    except Exception as e:
        demisto.error(traceback.format_exc())  # print the traceback
        return_error(f'Could not load widget:\n{e}')


# python2 uses __builtin__ python3 uses builtins
if __name__ == '__builtin__' or __name__ == 'builtins':
    main()
| mit | 29208e0cbda6134ef2212da78a32ff38 | 36 | 117 | 0.614543 | 3.964286 | false | false | false | false |
demisto/content | Packs/SignalSciences/Integrations/SignalSciences/SignalSciences.py | 2 | 56744 | import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
''' IMPORTS '''
import json
import requests
''' GLOBAL VARS '''
''' GLOBAL VARS '''
USE_SSL = not demisto.params().get('insecure', False)
handle_proxy()
EMAIL = demisto.params()['Email']
TOKEN = demisto.params()['Token']
CORPNAME = demisto.params()['corpName']
FETCH_INTERVAL = demisto.params()['fetch_interval']
SITES_TO_FETCH = demisto.params().get('sites_to_fetch', None)
SERVER_URL = 'https://dashboard.signalsciences.net/api/v0/'
'''SUFFIX ENDPOINTS'''
GET_SITES_SUFFIX = 'corps/{0}/sites'
WHITELIST_SUFFIX = 'corps/{0}/sites/{1}/whitelist'
BLACKLIST_SUFFIX = 'corps/{0}/sites/{1}/blacklist'
DELETE_WHITELIST_IP_SUFFIX = 'corps/{0}/sites/{1}/whitelist/{2}'
DELETE_BLACKLIST_IP_SUFFIX = 'corps/{0}/sites/{1}/blacklist/{2}'
SITE_CREATE_LIST_SUFFIX = 'corps/{0}/sites/{1}/lists'
SITE_ACCESS_LIST_SUFFIX = 'corps/{0}/sites/{1}/lists/{2}'
SITE_CREATE_ALERT_SUFFIX = 'corps/{0}/sites/{1}/alerts'
SITE_ACCESS_ALERT_SUFFIX = 'corps/{0}/sites/{1}/alerts/{2}'
CREATE_CORP_LIST_SUFFIX = 'corps/{0}/lists'
ACCESS_CORP_LIST_SUFFIX = 'corps/{0}/lists/{1}'
GET_EVENTS_SUFFIX = '/corps/{0}/sites/{1}/events'
ACCESS_EVENT_SUFFIX = '/corps/{0}/sites/{1}/events/{2}'
EXPIRE_EVENT_SUFFIX = '/corps/{0}/sites/{1}/events/{2}/expire'
GET_REQUESTS_SUFFIX = '/corps/{0}/sites/{1}/requests'
ACCESS_REQUEST_SUFFIX = '/corps/{0}/sites/{1}/requests/{2}'
'''TABLE TITLES'''
WHITELIST_TITLE = 'Signal Sciences - Whitelist'
BLACKLIST_TITLE = 'Signal Sciences - Blacklist'
SITES_LIST_TITLE = "Sites list"
ADD_IP_TO_WHITELIST_TITLE = 'Signal Sciences - Adding an IP to Whitelist'
ADD_IP_TO_BLACKLIST_TITLE = 'Signal Sciences - Adding an IP to Blacklist'
ADD_ALERT_TITLE = 'Signal Sciences - Adding a new custom alert'
UPDATE_LIST_TITLE = 'Signal Sciences - Updating a list'
ALERT_LIST_TITLE = 'Signal Sciences - Alert list'
LIST_OF_SITE_LISTS_TITLE = 'Signal Sciences - list of site lists'
LIST_OF_CORP_LISTS_TITLE = 'Signal Sciences - list of corp lists'
LIST_OF_EVENTS_TITLE = 'Signal Sciences - list of events'
LIST_OF_REQUESTS_TITLE = 'Signal Sciences - list of requests'
CREATE_SITE_LIST_TITLE = "Signal Sciences - creating a new site list \n\n List {0} has been successfully created"
CREATE_CORP_LIST_TITLE = "Signal Sciences - creating a new corp list \n\n List {0} has been successfully created"
DELETE_CORP_LIST_TITLE = "### Signal Sciences - deleting corp list \n\n List {0} has been successfully removed"
EXPIRE_EVENT_TITLE = "### Signal Sciences - expiring event \n\n Event {0} has been successfully expired"
WHITELIST_REMOVE_IP_TITLE = '### Signal Sciences - Removing an IP from Whitelist \n\n ' \
'The IP {0} has been successfully removed from Whitelist.'
DELETE_SITE_LIST_TITLE = "### Signal Sciences - deleting site list \n\n The list has been succesfully removed"
BLACKLIST_REMOVE_IP_TITLE = '### Signal Sciences - Removing an IP from Blacklist \n\n ' \
'The IP {0} has been successfully removed from Blacklist.'
IP_ADDED_TO_WHITELIST_TITLE = "The IP {0} has been successfully added to whitelist."
IP_ADDED_TO_BLACKLIST_TITLE = "The IP {0} has been successfully added to blacklist."
'''TABLE HEADERS'''
ADD_IP_HEADERS = ['Source', 'Note', 'Expiration date']
WHITELIST_OR_BLACKLIST_HEADERS = ['ID', 'Source', 'Expiry Date', 'Note', 'Created Date', 'Created By']
LIST_HEADERS = ['Name', 'ID', 'Type', 'Entries', 'Description', 'Created By', 'Created Date', 'Updated Date']
GET_SITE_HEADERS = ['Name', 'Created Date']
EVENT_HEADERS = ['ID', 'Timestamp', 'Source', 'Remote Country Code', 'Action', 'Reasons', 'Remote Hostname',
'User Agents', 'Request Count', 'Tag Count', 'Window', 'Date Expires', 'Expired By']
REQUEST_HEADER = ['ID', 'Timestamp', 'Remote Country Code', 'Remote Hostname', 'Remote IP', 'User Agent',
'Method', 'Server Name', 'Protocol', 'Path', 'URI', 'Response Code', 'Response Size',
'Response Millis', 'Agent Response Code', 'Tags']
ALERT_HEADERS = ['ID', 'Site ID', 'Created Date', 'Tag Name', 'Action', 'Long Name', 'Interval (In Minutes)',
'Threshold', 'Block Duration Seconds', 'Skip Notifications', 'Enabled']
'''List Types dict'''
LEGAL_SIGSCI_LIST_TYPES = {
'ip',
'country',
'string',
'wildcard'
}
''' HELPER FUNCTIONS '''
def camel_case_to_spaces(string_in_camel_case):
    """Convert a camelCase string into Title Case words separated by spaces.

    Args:
        string_in_camel_case(String): the string in camel case

    Returns:
        A new string, separated by spaces and every word starts with a capital letter
    """
    underscored = camel_case_to_underscore(string_in_camel_case)
    return underscored.replace('_', ' ').title()
def dict_keys_from_camelcase_to_spaces(dict_with_camelcase_keys):
    """Return a copy of the dict with every camelCase key converted to spaced Title Case.

    Args:
        dict_with_camelcase_keys(Dictionary): the original dictionary, with keys in camelcase

    Returns:
        A new dictionary, with keys separated by spaces
    """
    return {camel_case_to_spaces(key): value for key, value in dict_with_camelcase_keys.items()}
def return_list_of_dicts_with_spaces(list_of_camelcase_dicts):
    """Convert the keys of every dict in the list from camelCase to spaced Title Case.

    Args:
        list_of_camelcase_dicts(List): array of dictionaries

    Returns:
        A new array of dictionaries, with keys including spaces instead of camelcase
    """
    return [dict_keys_from_camelcase_to_spaces(single_dict) for single_dict in list_of_camelcase_dicts]
def has_api_call_failed(res):
    """Return True if the Signal Sciences response indicates a failed call.

    Note: In SigSci, if an API call fails it returns a json with only 'message' in it.
    """
    # membership test already yields the boolean; no need for an if/return pair
    return 'message' in res
def is_error_status(status):
    """Return True for HTTP status codes of 400 and above (client/server errors)."""
    return int(status) >= 400
def return_error_message(results_json):
    """Abort the command with the API-provided error message (or a generic one)."""
    api_message = results_json.get("message", None)
    if api_message is None:
        # return_error raises, so the generic message wins when none was supplied
        return_error("Error: An error occured")
    return_error(f"Error: {api_message}")
def http_request(method, url, params_dict=None, data=None, use_format_instead_of_raw=False):
    """Send an authenticated request to the Signal Sciences API.

    Args:
        method (str): HTTP verb ('GET', 'POST', 'PUT', 'PATCH', 'DELETE').
        url (str): full endpoint URL.
        params_dict (dict): optional URL query parameters.
        data (dict): optional request body.
        use_format_instead_of_raw (bool): send the body form-encoded via 'data='
            instead of as a JSON body via 'json='.

    Returns:
        dict. Parsed JSON response, or {} for empty/failed-but-not-erroring responses.
        Aborts via return_error_message on an HTTP error status.
    """
    LOG(f'running {method} request with url={url}\nparams={json.dumps(params_dict)}')
    # x-api-user / x-api-token are the Signal Sciences API credentials
    headers = {
        'Content-Type': 'application/json',
        'x-api-user': EMAIL,
        'x-api-token': TOKEN
    }
    try:
        # Some commands in Signal Sciences require sending the data in raw, and some in format
        # To send in format, we use the 'data' argument in requests. for raw, we use the 'json' argument.
        if use_format_instead_of_raw:
            res = requests.request(method,
                                   url,
                                   verify=USE_SSL,
                                   params=params_dict,
                                   headers=headers,
                                   data=json.dumps(data))
        else:
            res = requests.request(method,
                                   url,
                                   verify=USE_SSL,
                                   params=params_dict,
                                   headers=headers,
                                   json=data)
        if is_error_status(res.status_code):
            # return_error_message aborts the command; nothing below runs on error
            return_error_message(res.json())
        # references to delete from whitelist/blacklist only
        # (only the per-IP delete endpoints contain 'whitelist/'/'blacklist/' with
        # a trailing slash; those responses carry no usable JSON body)
        if 'whitelist/' in url or 'blacklist/' in url:
            return {}
        if res.status_code == 204:
            # No Content — nothing to parse
            return {}
        res_json = res.json()
        if has_api_call_failed(res_json):
            return {}
        return res_json
    except Exception as e:
        LOG(e)
        raise (e)
def is_legal_list_type(list_type):
    """Check whether the (case-insensitive) type is one Signal Sciences supports."""
    lowered_type = list_type.lower()
    return lowered_type in LEGAL_SIGSCI_LIST_TYPES
def represents_int(string_var):
    """Return True if the string represents a (optionally signed) base-10 integer.

    Fix: the original indexed string_var[0] without checking for the empty
    string, raising IndexError on ''.
    """
    if not string_var or '.' in string_var:
        return False
    if string_var[0] in ('-', '+'):
        return string_var[1:].isdigit()
    return string_var.isdigit()
def is_legal_interval_for_alert(interval):
    """
    Note: legal values for the interval on an alert are only 1, 10 or 60.
    This function verifies the value given is compatible with this demand.
    """
    # membership test replaces the chained equality checks of the original
    return represents_int(interval) and int(interval) in (1, 10, 60)
def validate_list_description_length(description):
    """Abort if the optional description exceeds the 140-character API limit."""
    if description is not None and len(description) > 140:
        return_error("Error: Description given is too long. Description must be 140 characters or shorter")
def validate_update_list_args(method, description):
    """Validate the method and description for a list-update request."""
    if method not in ("Add", "Remove"):
        return_error("Error: Method given is illegal. Method must be 'Add' or 'Remove'")
    validate_list_description_length(description)
def validate_create_list_args(list_type, description):
    """Validate the type and description for a list-creation request."""
    if not is_legal_list_type(list_type):
        return_error(f"Error: {list_type} is not a legal type for a list. Legal types are IP, String, "
                     "Country or Wildcard")
    validate_list_description_length(description)
def validate_alert_args(siteName, long_name, tag_name, interval, threshold, enabled, action):
    """Validate arguments for creating a custom alert; aborts on the first bad value.

    siteName and tag_name are accepted for interface compatibility and are not
    range-checked here.
    """
    if not represents_int(threshold):
        return_error(f"Error: {threshold} is not a valid threshold value. Threshold must be an integer")
    if not is_legal_interval_for_alert(interval):
        return_error(f"Error: {interval} is not a valid interval value. Interval value must be 1, 10 or 60")
    if not 3 <= len(long_name) <= 25:
        return_error("Error: Illegal value for long_name argument - long_name must be between 3 and 25 characters long")
    if enabled.lower() not in ('true', 'false'):
        return_error("Error: Illegal value for 'enabled' argument - value must be 'True' or 'False'")
    if action not in ('info', 'flagged'):
        return_error("Error: Illegal value for 'action' argument - value must be 'info' or 'flagged'")
def validate_get_events_args(from_time, until_time, sort, limit, page, action, ip, status):
    """Validate the optional filter arguments for a get-events request.

    Each argument is only checked when it was actually supplied (is not None);
    any invalid value aborts the command via return_error.
    """
    if from_time is not None and not represents_int(str(from_time)):
        return_error("Error: from_time must be an integer.")
    if until_time is not None and not represents_int(str(until_time)):
        return_error("Error: until_time must be an integer.")
    if sort is not None and not (sort == "asc" or sort == "desc"):
        return_error("Error: sort value must be 'asc' or 'desc'.")
    if limit is not None and (not represents_int(str(limit)) or int(limit) < 0 or int(limit) > 1000):
        return_error("Error: limit must be an integer, larger than 0 and at most 1000")
    if action is not None and not (action == "flagged" or action == "info"):
        return_error("Error: action value must be 'flagged' or 'info'")
    if ip is not None and not is_ip_valid(str(ip)):
        return_error("Error: illegal value for 'ip' argument. Must be a valid ip address")
    if status is not None and not (status == 'active' or status == 'expired'):
        return_error("Error: status value must be 'active' or 'expired'")
    if page is not None and not represents_int(str(page)):
        return_error("Error: page must be an integer.")
def create_get_event_data_from_args(from_time, until_time, sort, since_id, max_id,
                                    limit, page, action, tag, ip, status):
    """Assemble the query-parameter dict for a get-events call, skipping unset args.

    from/until/limit/page are coerced to int; the rest are passed through as-is.
    """
    request_params = {}
    int_valued = (('from', from_time), ('until', until_time), ('limit', limit), ('page', page))
    for param_name, param_value in int_valued:
        if param_value is not None:
            request_params[param_name] = int(param_value)
    passthrough = (('sort', sort), ('since_id', since_id), ('max_id', max_id),
                   ('action', action), ('tag', tag), ('ip', ip), ('status', status))
    for param_name, param_value in passthrough:
        if param_value is not None:
            request_params[param_name] = param_value
    return request_params
def event_entry_context_from_response(response_data):
    """Map a Signal Sciences event API payload to the Demisto context format.

    Args:
        response_data (dict): a single event object as returned by the API.

    Returns:
        dict. The event fields under PascalCase context keys ('' for missing fields).
    """
    entry_context = {
        'ID': response_data.get('id', ''),
        'Timestamp': response_data.get('timestamp', ''),
        'Source': response_data.get('source', ''),
        'Action': response_data.get('action', ''),
        'Reasons': response_data.get('reasons', ''),
        'RemoteCountryCode': response_data.get('remoteCountryCode', ''),
        # the API uses camelCase keys; the original looked up 'RemoteHostname'
        # (PascalCase) and therefore always yielded ''. Try the camelCase key
        # first and keep the old key as a backward-compatible fallback.
        'RemoteHostname': response_data.get('remoteHostname', response_data.get('RemoteHostname', '')),
        'UserAgents': response_data.get('userAgents', ''),
        'RequestCount': response_data.get('requestCount', ''),
        'TagCount': response_data.get('tagCount', ''),
        'Window': response_data.get('window', ''),
        'DateExpires': response_data.get('expires', ''),
        'ExpiredBy': response_data.get('expiredBy', ''),
    }
    return entry_context
def adjust_event_human_readable(entry_context_with_spaces, entry_context):
    """Change keys in human readable data to match the headers."""
    event_id = entry_context.get("ID", "")
    entry_context_with_spaces["ID"] = event_id
def validate_fetch_requests_args(page, limit):
    """Validate paging arguments for a fetch-requests call; aborts on bad values."""
    limit_is_bad = limit is not None and (not represents_int(limit) or not 0 <= int(limit) <= 1000)
    if limit_is_bad:
        return_error("Error: limit must be an integer, larger than 0 and at most 1000")
    if page is not None and not represents_int(page):
        return_error("Error: page must be an integer")
def request_entry_context_from_response(response_data):
    """Map a Signal Sciences request API payload to the Demisto context format.

    Args:
        response_data (dict): a single request object as returned by the API.

    Returns:
        dict. The request fields under PascalCase context keys ('' for missing fields).
    """
    entry_context = {
        'ID': response_data.get('id', ''),
        'ServerHostName': response_data.get('serverHostName', ''),
        'RemoteIP': response_data.get('remoteIP', ''),
        # the API uses camelCase keys; the original looked up 'RemoteHostname'
        # (PascalCase) and therefore always yielded ''. Try the camelCase key
        # first and keep the old key as a backward-compatible fallback.
        'RemoteHostname': response_data.get('remoteHostname', response_data.get('RemoteHostname', '')),
        'RemoteCountryCode': response_data.get('remoteCountryCode', ''),
        'UserAgent': response_data.get('userAgent', ''),
        'Timestamp': response_data.get('timestamp', ''),
        'Method': response_data.get('method', ''),
        'ServerName': response_data.get('serverName', ''),
        'Protocol': response_data.get('protocol', ''),
        'Path': response_data.get('path', ''),
        'URI': response_data.get('uri', ''),
        'ResponseCode': response_data.get('responseCode', ''),
        'ResponseSize': response_data.get('responseSize', ''),
        'ResponseMillis': response_data.get('responseMillis', ''),
        'AgentResponseCode': response_data.get('agentResponseCode', ''),
        'Tags': response_data.get('tags', ''),
    }
    return entry_context
def adjust_request_human_readable(entry_context_with_spaces, entry_context):
    """Change keys in human readable data to match the headers."""
    header_to_context = (("ID", "ID"), ("URI", "URI"), ("Remote IP", "RemoteIP"))
    for header_key, context_key in header_to_context:
        entry_context_with_spaces[header_key] = entry_context.get(context_key, "")
def list_entry_context_from_response(response_data):
    """Map a Signal Sciences list API payload to the Demisto context format."""
    key_map = (
        ('ID', 'id'),
        ('Name', 'name'),
        ('Type', 'type'),
        ('Entries', 'entries'),
        ('Description', 'description'),
        ('CreatedBy', 'createdBy'),
        ('CreatedDate', 'created'),
        ('UpdatedDate', 'updated'),
    )
    return {context_key: response_data.get(api_key, '') for context_key, api_key in key_map}
def adjust_list_human_readable(entry_context_with_spaces, entry_context):
    """Change keys in human readable data to match the headers."""
    list_id = entry_context.get("ID", "")
    entry_context_with_spaces["ID"] = list_id
def alert_entry_context_from_response(response_data):
    """Map a Signal Sciences alert API payload to the Demisto context format."""
    key_map = (
        ('ID', 'id'),
        ('LongName', 'longName'),
        ('SiteID', 'siteId'),
        ('TagName', 'tagName'),
        ('Interval', 'interval'),
        ('Threshold', 'threshold'),
        ('BlockDurationSeconds', 'blockDurationSeconds'),
        ('SkipNotifications', 'skipNotifications'),
        ('Enabled', 'enabled'),
        ('Action', 'action'),
        ('CreatedDate', 'created'),
    )
    return {context_key: response_data.get(api_key, '') for context_key, api_key in key_map}
def adjust_alert_human_readable(entry_context_with_spaces, entry_context):
    """Change keys in human readable data to match the headers.

    Fix: the original read entry_context.get("siteID"), but the context built by
    alert_entry_context_from_response uses the key 'SiteID', so the 'Site ID'
    column was always empty.
    """
    entry_context_with_spaces["Interval (In Minutes)"] = entry_context_with_spaces.get("Interval", "")
    entry_context_with_spaces["ID"] = entry_context.get("ID", "")
    entry_context_with_spaces["Site ID"] = entry_context.get("SiteID", "")
def check_ip_is_valid(ip):
    """Abort the command with an error unless ip is a valid IP address."""
    if is_ip_valid(ip):
        return
    return_error(f"Error: {ip} is invalid IP. Please enter a valid IP address")
def gen_entries_data_for_update_list_request(entries_list, method):
    """Build the entries payload the API expects when updating a list.

    Args:
        entries_list (string): comma-separated entries (e.g. IP addresses).
        method (string): 'Add' puts the entries under additions, anything else
            (i.e. 'Remove') under deletions.

    Returns:
        dict. {'additions': [...], 'deletions': [...]} with exactly one side filled.
    """
    parsed_entries = entries_list.split(',')
    if method == "Add":
        return {"additions": parsed_entries, "deletions": []}
    return {"additions": [], "deletions": parsed_entries}
def gen_context_for_add_to_whitelist_or_blacklist(response_data):
    """Build context entries from whitelist/blacklist addition API responses."""
    return [{
        'ID': item.get('id', ''),
        'Note': item.get('note', ''),
        'Source': item.get('source', ''),
        'CreatedBy': item.get('createdBy', ''),
        'CreatedDate': item.get('created', ''),
        'ExpiryDate': item.get('expires', '')
    } for item in response_data]
def gen_human_readable_for_add_to_whitelist_or_blacklist(ip_context):
    """Build the human-readable rows for whitelist/blacklist additions."""
    return [{
        'Note': single_context['Note'],
        'Source': single_context['Source'],
        # an empty/falsy expiry renders as "Not Set"
        'Expiration date': single_context['ExpiryDate'] or "Not Set",
    } for single_context in ip_context]
def add_ip_to_whitelist_or_blacklist(url, ip, note, expires=None):
    """PUT each IP in `ip` (CSV string or list) to the given whitelist/blacklist URL.

    Args:
        url (str): full API endpoint of the site whitelist or blacklist.
        ip (str/list): one or more IP addresses.
        note (str): note to attach to each entry.
        expires (str): optional expiration timestamp for the entries.

    Returns:
        tuple. (list of API responses, list of per-IP error strings)
    """
    # explicit import: traceback is used below but is not among this file's
    # explicit imports (it may or may not arrive via the wildcard imports)
    import traceback
    res_list = []
    error_list = []
    for single_ip in argToList(ip):
        try:
            check_ip_is_valid(single_ip)
            data = {
                'source': single_ip,
                'note': note
            }
            if expires is not None:
                data['expires'] = expires
            res_list.append(http_request('PUT', url, data=data))
        except SystemExit:
            # return_error raises SystemExit; skip the invalid IP and continue
            pass
        except Exception as e:
            # fix: original messages misspelled 'blacklist' and always claimed a
            # blacklist failure even when adding to a whitelist — keep them neutral
            error_list.append(f'failed adding ip: {single_ip}, error: {e}')
            demisto.error(f'failed adding ip: {single_ip}\n{traceback.format_exc()}')
    return res_list, error_list
def get_all_sites_in_corp():
    """Return the 'data' list from the get-sites API response."""
    return get_sites().get('data', [])
def get_list_of_all_site_names_in_corp():
    """Return the names of every site in the corp."""
    return [site_data['name'] for site_data in get_all_sites_in_corp()]
def get_list_of_site_names_to_fetch():
    """Return the sites named in the SITES_TO_FETCH param, or every site in the corp."""
    if SITES_TO_FETCH:
        return SITES_TO_FETCH.split(',')
    return get_list_of_all_site_names_in_corp()
def remove_milliseconds_from_iso(date_in_iso_format):
    """Drop the fractional-seconds suffix from an ISO-format datetime string."""
    date_without_ms, _, _ = date_in_iso_format.partition('.')
    return date_without_ms
def get_events_from_given_sites(list_of_site_names_to_fetch, desired_from_time_in_posix):
    """Collect events from each named site, starting at the given POSIX time."""
    collected_events = []  # type: List[Any]
    for site_name in list_of_site_names_to_fetch:
        site_response = get_events(siteName=site_name, from_time=desired_from_time_in_posix)
        collected_events.extend(site_response.get('data', []))
    return collected_events
def datetime_to_posix_without_milliseconds(datetime_object):
    """Convert a datetime object to a POSIX-style timestamp string.

    NOTE(review): date_to_timestamp returns a unix timestamp in milliseconds, and
    splitting on ',' does not actually strip anything (the string contains no
    commas), so despite the name the returned value still appears to be in
    milliseconds — confirm the intended units against the callers before changing.
    """
    timestamp_in_unix_millisecond = date_to_timestamp(datetime_object, 'datetime.datetime')
    posix_with_ms = timestamp_in_unix_millisecond
    posix_without_ms = str(posix_with_ms).split(',')[0]
    return posix_without_ms
'''COMMANDS'''
def test_module():
    """Connectivity check: list corps with the configured credentials, then report 'ok'.

    Fix: the original wrapped failures in `raise Exception(e)`, which discarded
    the exception type and traceback without adding information — any failure
    now propagates unchanged.
    """
    url = SERVER_URL + 'corps'
    http_request('GET', url)
    demisto.results("ok")
def create_corp_list(list_name, list_type, entries_list, description=None):
    """Create a new corp-level list via the Signal Sciences API.

    Note:
        Illegal entries (not compatible with the type) will result in a 404.
        They will be handled by the http_request function.

    Args:
        list_name (string): A name for the newly created list.
        list_type (string): The desired type for the newly created list.
        entries_list (string): Comma-separated entries, consistent with the given type.
        description (string): A description for the newly created list.

    Returns:
        dict. The data returned from the Signal Sciences API, loaded into a json.
    """
    validate_create_list_args(list_type, description)
    request_body = {
        'name': list_name.lower(),
        'type': list_type.lower(),
        'entries': entries_list.split(','),
    }
    if description is not None:
        request_body['description'] = description
    url = SERVER_URL + CREATE_CORP_LIST_SUFFIX.format(CORPNAME)
    return http_request('POST', url, data=request_body)
def create_corp_list_command():
    """Command handler: create a corp list and present the result.

    Fix: the original called adjust_list_human_readable AFTER tableToMarkdown,
    so the rendered table's 'ID' column was always empty; every sibling command
    adjusts before rendering.
    """
    args = demisto.args()
    response_data = create_corp_list(args['list_name'], args['list_type'], args['entries_list'],
                                     args.get('description', None))
    entry_context = list_entry_context_from_response(response_data)
    entry_context_with_spaces = dict_keys_from_camelcase_to_spaces(entry_context)
    adjust_list_human_readable(entry_context_with_spaces, entry_context)
    human_readable = tableToMarkdown(CREATE_CORP_LIST_TITLE.format(args['list_name']), entry_context_with_spaces,
                                     headers=LIST_HEADERS, removeNull=True)
    return_outputs(
        raw_response=response_data,
        readable_output=human_readable,
        outputs={
            'SigSciences.Corp.List(val.ID==obj.ID)': entry_context,
        }
    )
def get_corp_list(list_id):
    """Fetch a corp list by its ID."""
    return http_request('GET', SERVER_URL + ACCESS_CORP_LIST_SUFFIX.format(CORPNAME, list_id))
def get_corp_list_command():
    """Command handler: fetch a corp list by ID and present it."""
    args = demisto.args()
    response_data = get_corp_list(args['list_id'])
    entry_context = list_entry_context_from_response(response_data)
    readable_context = dict_keys_from_camelcase_to_spaces(entry_context)
    adjust_list_human_readable(readable_context, entry_context)
    title = f"Found data about list with ID: {args['list_id']}"
    human_readable = tableToMarkdown(title, readable_context, headers=LIST_HEADERS, removeNull=True)
    return_outputs(
        raw_response=response_data,
        readable_output=human_readable,
        outputs={
            'SigSciences.Corp.List(val.ID==obj.ID)': entry_context,
        }
    )
def delete_corp_list(list_id):
    """Delete a corp list by its ID."""
    return http_request('DELETE', SERVER_URL + ACCESS_CORP_LIST_SUFFIX.format(CORPNAME, list_id))
def delete_corp_list_command():
    """Command handler: delete a corp list by ID and report success."""
    args = demisto.args()
    response_data = delete_corp_list(args['list_id'])
    demisto.results({
        'Type': entryTypes['note'],
        'ContentsFormat': formats['markdown'],
        'Contents': response_data,
        'HumanReadable': DELETE_CORP_LIST_TITLE.format(args['list_id'])
    })
def update_corp_list(list_id, method, entries_list, description=None):
    """Add or remove entries on an existing corp list."""
    validate_update_list_args(method, description)
    request_body = {
        'entries': gen_entries_data_for_update_list_request(entries_list, method)
    }
    if description is not None:
        request_body['description'] = description
    url = SERVER_URL + ACCESS_CORP_LIST_SUFFIX.format(CORPNAME, list_id)
    return http_request('PATCH', url, data=request_body)
def update_corp_list_command():
    """Command handler: update a corp list's entries and present the result."""
    args = demisto.args()
    response_data = update_corp_list(args['list_id'], args['method'], args['entries_list'],
                                     args.get('description', None))
    entry_context = list_entry_context_from_response(response_data)
    readable_context = dict_keys_from_camelcase_to_spaces(entry_context)
    adjust_list_human_readable(readable_context, entry_context)
    human_readable = tableToMarkdown(UPDATE_LIST_TITLE, readable_context,
                                     headers=LIST_HEADERS, removeNull=True)
    return_outputs(
        raw_response=response_data,
        readable_output=human_readable,
        outputs={
            'SigSciences.Corp.List(val.ID==obj.ID)': entry_context,
        }
    )
def get_all_corp_lists():
    """Fetch every corp-level list."""
    return http_request('GET', SERVER_URL + CREATE_CORP_LIST_SUFFIX.format(CORPNAME))
def get_all_corp_lists_command():
    """Command handler: list all corp lists and render them as a table."""
    response_data = get_all_corp_lists()
    list_of_corp_lists = response_data.get('data', [])
    corp_lists_contexts = [list_entry_context_from_response(corp_list) for corp_list in list_of_corp_lists]
    sidedata = f"Number of corp lists in corp: {len(list_of_corp_lists)}"
    readable_contexts = return_list_of_dicts_with_spaces(corp_lists_contexts)
    for readable, context in zip(readable_contexts, corp_lists_contexts):
        adjust_list_human_readable(readable, context)
    human_readable = tableToMarkdown(LIST_OF_CORP_LISTS_TITLE, readable_contexts, headers=LIST_HEADERS,
                                     removeNull=True, metadata=sidedata)
    return_outputs(
        raw_response=response_data,
        readable_output=human_readable,
        outputs={
            'SigSciences.Corp.List(val.ID==obj.ID)': corp_lists_contexts,
        }
    )
def get_events(siteName, from_time=None, until_time=None, sort=None, since_id=None, max_id=None, limit=None, page=None,
               action=None, tag=None, ip=None, status=None):
    """Fetch events for a site, filtered by the optional query arguments."""
    validate_get_events_args(from_time, until_time, sort, limit, page, action, ip, status)
    query_params = create_get_event_data_from_args(from_time, until_time, sort, since_id, max_id,
                                                   limit, page, action, tag, ip, status)
    url = SERVER_URL + GET_EVENTS_SUFFIX.format(CORPNAME, siteName)
    return http_request('GET', url, params_dict=query_params)
def get_events_command():
    """Command handler: fetch a site's events and render them as a table."""
    args = demisto.args()
    response_data = get_events(args['siteName'], args.get('from_time', None),
                               args.get('until_time', None), args.get('sort', None),
                               args.get('since_id', None), args.get('max_id', None),
                               args.get('limit', None), args.get('page', None),
                               args.get('action', None), args.get('tag', None),
                               args.get('ip', None), args.get('status', None))
    list_of_events = response_data.get('data', [])
    events_contexts = [event_entry_context_from_response(event_data) for event_data in list_of_events]
    events_readable = return_list_of_dicts_with_spaces(events_contexts)
    # NOTE: the list adjuster only sets the 'ID' key, which is the same effect
    # adjust_event_human_readable has — kept as in the original
    for readable, context in zip(events_readable, events_contexts):
        adjust_list_human_readable(readable, context)
    sidedata = f"Number of events in site: {len(list_of_events)}"
    human_readable = tableToMarkdown(LIST_OF_EVENTS_TITLE, events_readable, removeNull=True,
                                     headers=EVENT_HEADERS, metadata=sidedata)
    return_outputs(
        raw_response=response_data,
        readable_output=human_readable,
        outputs={
            'SigSciences.Corp.Site.Event(val.ID==obj.ID)': events_contexts,
        }
    )
def get_event_by_id(siteName, event_id):
    """Fetch a single event of a site by its ID."""
    return http_request('GET', SERVER_URL + ACCESS_EVENT_SUFFIX.format(CORPNAME, siteName, event_id))
def get_event_by_id_command():
    """Command handler: fetch one event by ID and present it."""
    args = demisto.args()
    response_data = get_event_by_id(args['siteName'], args['event_id'])
    entry_context = event_entry_context_from_response(response_data)
    readable_context = dict_keys_from_camelcase_to_spaces(entry_context)
    adjust_event_human_readable(readable_context, entry_context)
    title = f"Found data about event with ID: {args['event_id']}"
    human_readable = tableToMarkdown(title, readable_context, headers=EVENT_HEADERS, removeNull=True)
    return_outputs(
        raw_response=response_data,
        readable_output=human_readable,
        outputs={
            'SigSciences.Corp.Site.Event(val.ID==obj.ID)': entry_context,
        }
    )
def expire_event(siteName, event_id):
    """Expire a site event by its ID."""
    return http_request('POST', SERVER_URL + EXPIRE_EVENT_SUFFIX.format(CORPNAME, siteName, event_id))
def expire_event_command():
    """Command handler: expire an event by ID and report success."""
    args = demisto.args()
    response_data = expire_event(args['siteName'], args['event_id'])
    demisto.results({
        'Type': entryTypes['note'],
        'ContentsFormat': formats['markdown'],
        'Contents': response_data,
        'HumanReadable': EXPIRE_EVENT_TITLE.format(args['event_id'])
    })
def get_requests(siteName, page, limit, query):
    """Fetch requests for a site.

    Args:
        siteName (str): name of the site to fetch from.
        page (str): page of results to fetch.
        limit (str): maximum number of results (0-1000).
        query (str): Signal Sciences search query string.

    Returns:
        dict. The API response loaded into a json.
    """
    url = SERVER_URL + GET_REQUESTS_SUFFIX.format(CORPNAME, siteName)
    validate_fetch_requests_args(page, limit)
    query_params = {}
    if page is not None:
        query_params['page'] = page
    if limit is not None:
        query_params['limit'] = limit
    if query is not None:
        query_params['q'] = query
    # fix: send the filters as URL query parameters (as get_events does) — the
    # original passed them via data=, i.e. as a GET request body, where the API
    # ignores them, so page/limit/q had no effect
    requests_data_response = http_request('GET', url, params_dict=query_params)
    return requests_data_response
def get_requests_command():
    """Command handler: fetch a site's requests and render them as a table.

    Fix: the original adjusted the human-readable rows with
    adjust_list_human_readable, which only sets 'ID'; the 'URI' and
    'Remote IP' columns of REQUEST_HEADER were therefore always empty
    (camelcase conversion renders them as 'Uri'/'Remote Ip').
    """
    args = demisto.args()
    response_data = get_requests(args['siteName'], args.get('page', None), args.get('limit', None),
                                 args.get('query', None))
    list_of_requests = response_data.get('data', [])
    requests_contexts = []
    for request_data in list_of_requests:
        requests_contexts.append(request_entry_context_from_response(request_data))
    requests_contexts_with_spaces = return_list_of_dicts_with_spaces(requests_contexts)
    for i in range(len(requests_contexts)):
        # request-specific adjuster populates 'ID', 'URI' and 'Remote IP'
        adjust_request_human_readable(requests_contexts_with_spaces[i], requests_contexts[i])
    sidedata = f"Number of requests in site: {len(list_of_requests)}"
    human_readable = tableToMarkdown(LIST_OF_REQUESTS_TITLE, requests_contexts_with_spaces, headers=REQUEST_HEADER,
                                     removeNull=True, metadata=sidedata)
    return_outputs(
        raw_response=response_data,
        readable_output=human_readable,
        outputs={
            'SigSciences.Corp.Site.Request(val.ID==obj.ID)': requests_contexts,
        }
    )
def get_request_by_id(siteName, request_id):
    """Fetch a single request of a site by its ID."""
    return http_request('GET', SERVER_URL + ACCESS_REQUEST_SUFFIX.format(CORPNAME, siteName, request_id))
def get_request_by_id_command():
    """Command handler: fetch one request by ID and present it."""
    args = demisto.args()
    response_data = get_request_by_id(args['siteName'], args['request_id'])
    entry_context = request_entry_context_from_response(response_data)
    readable_context = dict_keys_from_camelcase_to_spaces(entry_context)
    adjust_request_human_readable(readable_context, entry_context)
    title = f"Found data about request with ID: {args['request_id']}"
    human_readable = tableToMarkdown(title, readable_context, headers=REQUEST_HEADER, removeNull=True)
    return_outputs(
        raw_response=response_data,
        readable_output=human_readable,
        outputs={
            'SigSciences.Corp.Site.Request(val.ID==obj.ID)': entry_context,
        }
    )
def create_site_list(siteName, list_name, list_type, entries_list, description=None):
    """Create a new site-level list via the Signal Sciences API."""
    validate_create_list_args(list_type, description)
    request_body = {
        'name': list_name.lower(),
        'type': list_type.lower(),
        'entries': entries_list.split(','),
    }
    if description is not None:
        request_body['description'] = description
    url = SERVER_URL + SITE_CREATE_LIST_SUFFIX.format(CORPNAME, siteName)
    return http_request('POST', url, data=request_body)
def create_site_list_command():
    """Command handler: create a site list and present the result."""
    args = demisto.args()
    response_data = create_site_list(args['siteName'], args['list_name'],
                                     args['list_type'], args['entries_list'], args.get('description', None))
    entry_context = list_entry_context_from_response(response_data)
    readable_context = dict_keys_from_camelcase_to_spaces(entry_context)
    adjust_list_human_readable(readable_context, entry_context)
    human_readable = tableToMarkdown(CREATE_SITE_LIST_TITLE.format(args['list_name']), readable_context,
                                     headers=LIST_HEADERS, removeNull=True)
    return_outputs(
        raw_response=response_data,
        readable_output=human_readable,
        outputs={
            'SigSciences.Corp.Site.List(val.ID==obj.ID)': entry_context,
        }
    )
def get_site_list(siteName, list_id):
    """Fetch a site list by its ID."""
    return http_request('GET', SERVER_URL + SITE_ACCESS_LIST_SUFFIX.format(CORPNAME, siteName, list_id))
def get_site_list_command():
    """Command handler: fetch a site list by ID and present it."""
    args = demisto.args()
    response_data = get_site_list(args['siteName'], args['list_id'])
    entry_context = list_entry_context_from_response(response_data)
    readable_context = dict_keys_from_camelcase_to_spaces(entry_context)
    adjust_list_human_readable(readable_context, entry_context)
    title = f"Found data about list with ID: {args['list_id']}"
    human_readable = tableToMarkdown(title, readable_context, headers=LIST_HEADERS, removeNull=True)
    return_outputs(
        raw_response=response_data,
        readable_output=human_readable,
        outputs={
            'SigSciences.Corp.Site.List(val.ID==obj.ID)': entry_context,
        }
    )
def delete_site_list(siteName, list_id):
    """Delete a site list by its ID via the SigSciences API."""
    return http_request('DELETE', SERVER_URL + SITE_ACCESS_LIST_SUFFIX.format(CORPNAME, siteName, list_id))
def delete_site_list_command():
    """Delete a site list and report the deletion to the war room."""
    args = demisto.args()
    raw = delete_site_list(args['siteName'], args['list_id'])
    demisto.results({
        'Type': entryTypes['note'],
        'ContentsFormat': formats['markdown'],
        'Contents': raw,
        'HumanReadable': DELETE_SITE_LIST_TITLE.format(args['list_id'])
    })
def update_site_list(siteName, list_id, method, entries_list, description=None):
    """Update a site list's entries (and optionally its description) via PATCH."""
    validate_update_list_args(method, description)
    url = SERVER_URL + SITE_ACCESS_LIST_SUFFIX.format(CORPNAME, siteName, list_id)
    body = {'entries': gen_entries_data_for_update_list_request(entries_list, method)}
    if description is not None:
        body['description'] = description
    return http_request('PATCH', url, data=body)
def update_site_list_command():
    """Update a site list from demisto args and output its new state."""
    args = demisto.args()
    response_data = update_site_list(args['siteName'], args['list_id'], args['method'],
                                     args['entries_list'], args.get('description', None))
    context = list_entry_context_from_response(response_data)
    readable_context = dict_keys_from_camelcase_to_spaces(context)
    adjust_list_human_readable(readable_context, context)
    markdown = tableToMarkdown(UPDATE_LIST_TITLE, readable_context,
                               headers=LIST_HEADERS, removeNull=True)
    return_outputs(
        raw_response=response_data,
        readable_output=markdown,
        outputs={'SigSciences.Corp.Site.List(val.ID==obj.ID)': context},
    )
def get_all_site_lists(siteName):
    """Fetch every list defined on the given site."""
    return http_request('GET', SERVER_URL + SITE_CREATE_LIST_SUFFIX.format(CORPNAME, siteName))
def get_all_site_lists_command():
    """List every list defined on a site and output them all."""
    args = demisto.args()
    response_data = get_all_site_lists(args['siteName'])
    raw_lists = response_data.get('data', [])
    contexts = [list_entry_context_from_response(item) for item in raw_lists]
    contexts_with_spaces = return_list_of_dicts_with_spaces(contexts)
    for spaced, original in zip(contexts_with_spaces, contexts):
        adjust_list_human_readable(spaced, original)
    sidedata = f"Number of site lists in site: {len(raw_lists)}"
    markdown = tableToMarkdown(LIST_OF_SITE_LISTS_TITLE, contexts_with_spaces, headers=LIST_HEADERS,
                               removeNull=True, metadata=sidedata)
    return_outputs(
        raw_response=response_data,
        readable_output=markdown,
        outputs={'SigSciences.Corp.Site.List(val.ID==obj.ID)': contexts},
    )
def add_alert(siteName, long_name, tag_name, interval, threshold, enabled, action):
    """Create a new alert on a site.

    interval and threshold are converted to ints for the API; `enabled` is
    parsed into a real boolean before being sent.
    """
    validate_alert_args(siteName, long_name, tag_name, interval, threshold, enabled, action)
    url = SERVER_URL + SITE_CREATE_ALERT_SUFFIX.format(CORPNAME, siteName)
    # Bug fix: `enabled` arrives from demisto.args() as a string, and
    # bool('false') is True, so every alert was created enabled. Parse the
    # string value instead; real booleans are passed through unchanged.
    enabled_flag = enabled if isinstance(enabled, bool) else str(enabled).strip().lower() == 'true'
    data_for_request = {
        'tagName': tag_name,
        'longName': long_name,
        'interval': int(interval),
        'threshold': int(threshold),
        'enabled': enabled_flag,
        'action': action
    }
    return http_request('POST', url, data=data_for_request)
def add_alert_command():
    """Create a new alert on a site and output the created alert."""
    args = demisto.args()
    response_data = add_alert(args['siteName'], args['long_name'], args['tag_name'],
                              args['interval'], args['threshold'], args['enabled'], args['action'])
    context = alert_entry_context_from_response(response_data)
    readable_context = dict_keys_from_camelcase_to_spaces(context)
    # Renames the "Interval" key to "Interval (In Minutes)" for the table only.
    adjust_alert_human_readable(readable_context, context)
    markdown = tableToMarkdown(ADD_ALERT_TITLE, readable_context, headers=ALERT_HEADERS, removeNull=True)
    return_outputs(
        raw_response=response_data,
        readable_output=markdown,
        outputs={'SigSciences.Corp.Site.Alert(val.ID==obj.ID)': context},
    )
def get_alert(siteName, alert_id):
    """Fetch a single alert by its ID from the SigSciences API."""
    return http_request('GET', SERVER_URL + SITE_ACCESS_ALERT_SUFFIX.format(CORPNAME, siteName, alert_id))
def get_alert_command():
    """Fetch one alert by ID and output its details."""
    args = demisto.args()
    response_data = get_alert(args['siteName'], args['alert_id'])
    context = alert_entry_context_from_response(response_data)
    readable_context = dict_keys_from_camelcase_to_spaces(context)
    # Renames the "Interval" key to "Interval (In Minutes)" for the table only.
    adjust_alert_human_readable(readable_context, context)
    markdown = tableToMarkdown(f"Data found for alert id: {args['alert_id']}",
                               readable_context, headers=ALERT_HEADERS, removeNull=True)
    return_outputs(
        raw_response=response_data,
        readable_output=markdown,
        outputs={'SigSciences.Corp.Site.Alert(val.ID==obj.ID)': context},
    )
def delete_alert(siteName, alert_id):
    """Delete an alert by its ID via the SigSciences API."""
    return http_request('DELETE', SERVER_URL + SITE_ACCESS_ALERT_SUFFIX.format(CORPNAME, siteName, alert_id))
def delete_alert_command():
    """Delete an alert by ID and report the result to the war room."""
    args = demisto.args()
    response_data = delete_alert(args['siteName'], args['alert_id'])
    # Fixed typo in the user-facing message ("succesfully" -> "successfully").
    title = f"Alert {args['alert_id']} deleted successfully"
    demisto.results({
        'Type': entryTypes['note'],
        'ContentsFormat': formats['markdown'],
        'Contents': response_data,
        'HumanReadable': title
    })
def update_alert(siteName, alert_id, tag_name, long_name, interval, threshold, enabled, action):
    """Update an existing alert via PATCH.

    interval and threshold are converted to ints for the API; `enabled` is
    parsed into a real boolean before being sent.
    """
    validate_alert_args(siteName, long_name, tag_name, interval, threshold, enabled, action)
    url = SERVER_URL + SITE_ACCESS_ALERT_SUFFIX.format(CORPNAME, siteName, alert_id)
    # Bug fix: `enabled` arrives from demisto.args() as a string, and
    # bool('false') is True, so alerts could never be disabled via this
    # command. Parse the string value; real booleans pass through unchanged.
    enabled_flag = enabled if isinstance(enabled, bool) else str(enabled).strip().lower() == 'true'
    data_for_request = {
        'tagName': tag_name,
        'longName': long_name,
        'interval': int(interval),
        'threshold': int(threshold),
        'action': action,
        'enabled': enabled_flag
    }
    return http_request('PATCH', url, data=data_for_request)
def update_alert_command():
    """Update an alert from demisto args and output its new values."""
    args = demisto.args()
    response_data = update_alert(args['siteName'], args['alert_id'], args['tag_name'], args['long_name'],
                                 args['interval'], args['threshold'], args['enabled'], args['action'])
    context = alert_entry_context_from_response(response_data)
    readable_context = dict_keys_from_camelcase_to_spaces(context)
    # Renames the "Interval" key to "Interval (In Minutes)" for the table only.
    adjust_alert_human_readable(readable_context, context)
    markdown = tableToMarkdown(f"Updated alert {args['alert_id']}. new values:",
                               readable_context, headers=ALERT_HEADERS, removeNull=True)
    return_outputs(
        raw_response=response_data,
        readable_output=markdown,
        outputs={'SigSciences.Corp.Site.Alert(val.ID==obj.ID)': context},
    )
def get_all_alerts(siteName):
    """Fetch every alert configured on the given site."""
    return http_request('GET', SERVER_URL + SITE_CREATE_ALERT_SUFFIX.format(CORPNAME, siteName))
def get_all_alerts_command():
    """List all alerts configured on a site and output them."""
    args = demisto.args()
    response_data = get_all_alerts(args['siteName'])
    raw_alerts = response_data.get('data', [])
    contexts = [alert_entry_context_from_response(alert) for alert in raw_alerts]
    contexts_with_spaces = return_list_of_dicts_with_spaces(contexts)
    # Renames the "Interval" key to "Interval (In Minutes)" in every table row.
    for spaced, original in zip(contexts_with_spaces, contexts):
        adjust_alert_human_readable(spaced, original)
    sidedata = f"Number of alerts in site: {len(raw_alerts)}"
    return_outputs(
        raw_response=response_data,
        readable_output=tableToMarkdown(ALERT_LIST_TITLE, contexts_with_spaces,
                                        headers=ALERT_HEADERS, removeNull=True, metadata=sidedata),
        outputs={'SigSciences.Corp.Site.Alert(val.ID==obj.ID)': contexts},
    )
def get_whitelist(siteName):
    """Fetch the IP whitelist of the given site."""
    return http_request('GET', SERVER_URL + WHITELIST_SUFFIX.format(CORPNAME, siteName))
def get_whitelist_command():
    """Get the whitelist data for siteName"""
    args = demisto.args()
    site_whitelist = get_whitelist(args['siteName'])
    entries = site_whitelist.get('data', [])
    contexts = gen_context_for_add_to_whitelist_or_blacklist(entries)
    contexts_with_spaces = return_list_of_dicts_with_spaces(contexts)
    sidedata = f"Number of IPs in the Whitelist {len(entries)}"
    return_outputs(
        raw_response=site_whitelist,
        readable_output=tableToMarkdown(WHITELIST_TITLE, contexts_with_spaces,
                                        WHITELIST_OR_BLACKLIST_HEADERS, removeNull=True, metadata=sidedata),
        outputs={'SigSciences.Corp.Site.Whitelist(val.ID==obj.ID)': contexts},
    )
def get_blacklist(siteName):
    """Fetch the IP blacklist of the given site."""
    return http_request('GET', SERVER_URL + BLACKLIST_SUFFIX.format(CORPNAME, siteName))
def get_blacklist_command():
    """Get blacklist data for siteName"""
    args = demisto.args()
    site_blacklist = get_blacklist(args['siteName'])
    entries = site_blacklist.get('data', [])
    contexts = gen_context_for_add_to_whitelist_or_blacklist(entries)
    contexts_with_spaces = return_list_of_dicts_with_spaces(contexts)
    sidedata = f"Number of IPs in the Blacklist {len(entries)}"
    return_outputs(
        raw_response=site_blacklist,
        readable_output=tableToMarkdown(BLACKLIST_TITLE, contexts_with_spaces,
                                        WHITELIST_OR_BLACKLIST_HEADERS, removeNull=True, metadata=sidedata),
        outputs={'SigSciences.Corp.Site.Blacklist(val.ID==obj.ID)': contexts},
    )
def add_ip_to_whitelist(siteName, ip, note, expires=None):
    """Add an IP to a site's whitelist; returns the shared helper's (response, errors) result."""
    return add_ip_to_whitelist_or_blacklist(
        SERVER_URL + WHITELIST_SUFFIX.format(CORPNAME, siteName), ip, note, expires)
def add_ip_to_whitelist_command():
    """Add an ip to the whitelist"""
    args = demisto.args()
    response_data, errors_data = add_ip_to_whitelist(args['siteName'], args['ip'],
                                                     args['note'], args.get('expires', None))
    if response_data:
        context = gen_context_for_add_to_whitelist_or_blacklist(response_data)
        readable = gen_human_readable_for_add_to_whitelist_or_blacklist(context)
        return_outputs(
            raw_response=response_data,
            readable_output=tableToMarkdown(ADD_IP_TO_WHITELIST_TITLE, readable, headers=ADD_IP_HEADERS,
                                            removeNull=True, metadata=IP_ADDED_TO_WHITELIST_TITLE.format(args['ip'])),
            outputs={'SigSciences.Corp.Site.Whitelist(val.ID==obj.ID)': context},
        )
    if errors_data:
        return_error('\n'.join(errors_data))
def add_ip_to_blacklist(siteName, ip, note, expires=None):
    """Add an IP to a site's blacklist; returns the shared helper's (response, errors) result."""
    return add_ip_to_whitelist_or_blacklist(
        SERVER_URL + BLACKLIST_SUFFIX.format(CORPNAME, siteName), ip, note, expires)
def add_ip_to_blacklist_command():
    """Add an ip to the blacklist"""
    args = demisto.args()
    response_data, errors_data = add_ip_to_blacklist(args['siteName'], args['ip'], args['note'],
                                                     args.get('expires', None))
    if response_data:
        blacklist_ip_context = gen_context_for_add_to_whitelist_or_blacklist(response_data)
        human_readable = gen_human_readable_for_add_to_whitelist_or_blacklist(blacklist_ip_context)
        return_outputs(
            raw_response=response_data,
            readable_output=tableToMarkdown(ADD_IP_TO_BLACKLIST_TITLE, human_readable,
                                            headers=ADD_IP_HEADERS, removeNull=True,
                                            metadata=IP_ADDED_TO_BLACKLIST_TITLE.format(args['ip'])),
            outputs={
                'SigSciences.Corp.Site.Blacklist(val.ID==obj.ID)': blacklist_ip_context,
            }
        )
    if errors_data:
        # Bug fix: the separator was the literal two characters '/n' instead of
        # the newline escape '\n' (the whitelist counterpart already used '\n').
        return_error('\n'.join(errors_data))
def whitelist_remove_ip(siteName, ip):
    """Remove every whitelist entry whose source matches `ip`.

    Returns the whitelist as fetched (before the deletions). Reports an error
    via return_error if no entry matched.
    """
    check_ip_is_valid(ip)
    site_whitelist = get_whitelist(siteName)
    deleted = False
    for item in site_whitelist.get('data', []):
        if item.get('source', '') == ip:
            url = SERVER_URL + DELETE_WHITELIST_IP_SUFFIX.format(CORPNAME, siteName, item.get('id', ''))
            http_request('DELETE', url)
            deleted = True
    # Replaces the fragile `'res' not in locals()` check with an explicit flag
    # that expresses the same "did we delete anything" intent.
    if not deleted:
        return_error(f"The IP {ip} was not found on the Whitelist")
    return site_whitelist
def whitelist_remove_ip_command():
    """Remove an ip from the whitelist"""
    args = demisto.args()
    raw = whitelist_remove_ip(args['siteName'], args['IP'])
    demisto.results({
        'Type': entryTypes['note'],
        'ContentsFormat': formats['markdown'],
        'Contents': raw,
        'HumanReadable': WHITELIST_REMOVE_IP_TITLE.format(args['IP']),
    })
def blacklist_remove_ip(siteName, ip):
    """Remove every blacklist entry whose source matches `ip`.

    Returns the blacklist as fetched (before the deletions). Reports an error
    via return_error if no entry matched.
    """
    check_ip_is_valid(ip)
    site_blacklist = get_blacklist(siteName)
    deleted = False
    for item in site_blacklist.get('data', []):
        if item.get('source', '') == ip:
            url = SERVER_URL + DELETE_BLACKLIST_IP_SUFFIX.format(CORPNAME, siteName, item.get('id', ''))
            http_request('DELETE', url)
            deleted = True
    # Replaces the fragile `'res' not in locals()` check with an explicit flag
    # that expresses the same "did we delete anything" intent.
    if not deleted:
        return_error(f"The IP {ip} was not found on the Blacklist")
    return site_blacklist
def blacklist_remove_ip_command():
    """Remove an ip from the blacklist"""
    args = demisto.args()
    raw = blacklist_remove_ip(args['siteName'], args['IP'])
    demisto.results({
        'Type': entryTypes['note'],
        'ContentsFormat': formats['markdown'],
        'Contents': raw,
        'HumanReadable': BLACKLIST_REMOVE_IP_TITLE.format(args['IP']),
    })
def get_sites():
    """Fetch all sites of the configured corporation."""
    return http_request('GET', SERVER_URL + GET_SITES_SUFFIX.format(CORPNAME))
def get_sites_command():
    """Get the sites list"""
    res = get_sites()
    outputs = [
        {'Name': item.get('name', ''), 'CreatedDate': item.get('created', '')}
        for item in res.get('data', [])
    ]
    outputs_with_spaces = return_list_of_dicts_with_spaces(outputs)
    return_outputs(
        raw_response=res,
        readable_output=tableToMarkdown(SITES_LIST_TITLE, outputs_with_spaces,
                                        headers=GET_SITE_HEADERS, removeNull=True),
        outputs={'SigSciences.Sites(val.Name==obj.Name)': outputs},
    )
def fetch_incidents():
    """Fetch new SigSciences events from all configured sites as XSOAR incidents.

    Uses demisto.getLastRun()['time'] (posix seconds) as the lower bound; on the
    first run it falls back to now minus FETCH_INTERVAL minutes. Stores the most
    recent event time back via demisto.setLastRun().
    """
    last_run_data = demisto.getLastRun()
    if last_run_data:
        last_run_time = last_run_data['time']
    else:
        date_time_interval_ago = datetime.utcnow() - timedelta(minutes=int(FETCH_INTERVAL))
        last_run_time = datetime_to_posix_without_milliseconds(date_time_interval_ago)
    # Bug fix: this was initialized to "" — comparing an int timestamp against a
    # str raises TypeError on Python 3, and a fetch with no new events would
    # overwrite the stored run time with "". Start from the previous run time.
    most_recent_event_time = last_run_time
    list_of_sites_to_fetch = get_list_of_site_names_to_fetch()
    events_array = get_events_from_given_sites(list_of_sites_to_fetch, last_run_time)
    incidents = []
    for event in events_array:
        # Event timestamps look like "2020-01-01T00:00:00Z"; strip the trailing
        # 'Z' before parsing, then convert to posix seconds for comparison.
        event_time = datetime.strptime(event['timestamp'][:-1], "%Y-%m-%dT%H:%M:%S")
        event_time = datetime_to_posix_without_milliseconds(event_time)
        if event_time > last_run_time:
            incidents.append({
                'name': str(event['id']) + " - SignalSciences",
                'occurred': event['timestamp'],
                'rawJSON': json.dumps(event)
            })
            if event_time > most_recent_event_time:
                most_recent_event_time = event_time
    demisto.incidents(incidents)
    demisto.setLastRun({'time': most_recent_event_time})
''' EXECUTION CODE '''
# Top-level dispatcher: routes the invoked demisto command to its handler and
# surfaces any unhandled exception to the war room via return_error.
LOG(f'command is {demisto.command()}')
try:
    # NOTE(review): re.match only anchors at the start of the string, so a corp
    # name like "corpA" still passes this validation — re.fullmatch may have
    # been intended; confirm before changing.
    if not re.match(r'[0-9a-z_.-]+', CORPNAME):
        raise ValueError('Corporation Name should match the pattern [0-9a-z_.-]+')
    if demisto.command() == 'test-module':
        test_module()
    elif demisto.command() == 'fetch-incidents':
        fetch_incidents()
    elif demisto.command() == 'sigsci-get-whitelist':
        get_whitelist_command()
    elif demisto.command() == 'sigsci-get-blacklist':
        get_blacklist_command()
    elif demisto.command() == 'sigsci-whitelist-add-ip':
        add_ip_to_whitelist_command()
    elif demisto.command() == 'sigsci-blacklist-add-ip':
        add_ip_to_blacklist_command()
    elif demisto.command() == 'sigsci-whitelist-remove-ip':
        whitelist_remove_ip_command()
    elif demisto.command() == 'sigsci-blacklist-remove-ip':
        blacklist_remove_ip_command()
    elif demisto.command() == 'sigsci-get-sites':
        get_sites_command()
    elif demisto.command() == 'sigsci-create-corp-list':
        create_corp_list_command()
    elif demisto.command() == 'sigsci-get-corp-list':
        get_corp_list_command()
    elif demisto.command() == 'sigsci-delete-corp-list':
        delete_corp_list_command()
    elif demisto.command() == 'sigsci-update-corp-list':
        update_corp_list_command()
    elif demisto.command() == 'sigsci-get-all-corp-lists':
        get_all_corp_lists_command()
    elif demisto.command() == 'sigsci-create-site-list':
        create_site_list_command()
    elif demisto.command() == 'sigsci-get-site-list':
        get_site_list_command()
    elif demisto.command() == 'sigsci-delete-site-list':
        delete_site_list_command()
    elif demisto.command() == 'sigsci-update-site-list':
        update_site_list_command()
    elif demisto.command() == 'sigsci-get-all-site-lists':
        get_all_site_lists_command()
    elif demisto.command() == 'sigsci-add-alert':
        add_alert_command()
    elif demisto.command() == 'sigsci-get-alert':
        get_alert_command()
    elif demisto.command() == 'sigsci-delete-alert':
        delete_alert_command()
    elif demisto.command() == 'sigsci-update-alert':
        update_alert_command()
    elif demisto.command() == 'sigsci-get-all-alerts':
        get_all_alerts_command()
    elif demisto.command() == 'sigsci-get-events':
        get_events_command()
    elif demisto.command() == 'sigsci-expire-event':
        expire_event_command()
    elif demisto.command() == 'sigsci-get-event-by-id':
        get_event_by_id_command()
    elif demisto.command() == 'sigsci-get-requests':
        get_requests_command()
    elif demisto.command() == 'sigsci-get-request-by-id':
        get_request_by_id_command()
except Exception as e:
    # Any unhandled error is reported as an error entry in the war room.
    return_error(e)
| mit | 1bc9bc8bb03a1bfe62cfb8658634c1e3 | 38.570432 | 123 | 0.642376 | 3.55294 | false | false | false | false |
demisto/content | Utils/tests/update_contribution_pack_in_base_branch_test.py | 2 | 5715 | from Utils.update_contribution_pack_in_base_branch import get_pr_files
import os
github_response_1 = [
{
"sha": "1",
"filename": "hmm",
"status": "modified",
"additions": 1,
"deletions": 0,
"changes": 1,
"blob_url": "https://github.com/demisto/content/blob/1/Packs/Slack/Integrations/Slack/README.md",
"raw_url": "https://github.com/demisto/content/raw/1/Packs/Slack/Integrations/Slack/README.md",
"contents_url": "https://api.github.com/repos/demisto/content",
"patch": "@@ -1,4 +1,5 @@\n <p>\n+ shtak\n Send messages and notifications to your Slack Team.\n"
},
{
"sha": "2",
"filename": "what",
"status": "modified",
"additions": 2,
"deletions": 2,
"changes": 4,
"blob_url": "https://github.com/demisto/content/blob/1/Packs/Slack/pack_metadata.json",
"raw_url": "https://github.com/demisto/content/raw/1/Packs/Slack/pack_metadata.json",
"contents_url": "https://api.github.com/repos/demisto/content/contents",
"patch": "@@ -13,7 +13,7 @@\n \"tags\": [],\n \"useCases\": []"
}
]
github_response_2 = [
{
"sha": "3",
"filename": "nope",
"status": "modified",
"additions": 1,
"deletions": 0,
"changes": 1,
"blob_url": "https://github.com/demisto/content/blob/1/Packs/Slack/Integrations/Slack/README.md",
"raw_url": "https://github.com/demisto/content/raw/1/Packs/Slack/Integrations/Slack/README.md",
"contents_url": "https://api.github.com/repos/demisto/content",
"patch": "@@ -1,4 +1,5 @@\n <p>\n+ shtak\n Send messages and notifications to your Slack Team.\n"
},
{
"sha": "4",
"filename": "Packs/Slack/pack_metadata.json",
"status": "modified",
"additions": 2,
"deletions": 2,
"changes": 4,
"blob_url": "https://github.com/demisto/content/blob/1/Packs/Slack/pack_metadata.json",
"raw_url": "https://github.com/demisto/content/raw/1/Packs/Slack/pack_metadata.json",
"contents_url": "https://api.github.com/repos/demisto/content/contents",
"patch": "@@ -13,7 +13,7 @@\n \"tags\": [],\n \"useCases\": []"
}
]
github_response_3 = [
{
"sha": "1",
"filename": "hmm",
"status": "modified",
"additions": 1,
"deletions": 0,
"changes": 1,
"blob_url": "https://github.com/demisto/content/blob/1/Packs/Slack1/Integrations/Slack/README.md",
"raw_url": "https://github.com/demisto/content/raw/1/Packs/Slack1/Integrations/Slack/README.md",
"contents_url": "https://api.github.com/repos/demisto/content",
"patch": "@@ -1,4 +1,5 @@\n <p>\n+ shtak\n Send messages and notifications to your Slack Team.\n"
},
{
"sha": "2",
"filename": "Packs/AnotherPackName/pack_metadata.json",
"status": "modified",
"additions": 2,
"deletions": 2,
"changes": 4,
"blob_url": "https://github.com/demisto/content/blob/1/Packs/Slack1/pack_metadata.json",
"raw_url": "https://github.com/demisto/content/raw/1/Packs/Slack1/pack_metadata.json",
"contents_url": "https://api.github.com/repos/demisto/content/contents",
"patch": "@@ -13,7 +13,7 @@\n \"tags\": [],\n \"useCases\": []"
}
]
github_response_4: list = []
def pack_names(files):
    """Return the set of pack directory names (second path component) for the given file paths."""
    return {file_path.split(os.path.sep)[1] for file_path in files}
def test_get_pr_files(requests_mock):
    """
    Scenario: Get a pack dir name from pull request files
    Given
    - A pull request
    - A file in the pull request is in a pack
    When
    - Getting the pack dir name from a pull request
    Then
    - Ensure the pack dir name is returned correctly
    """
    pr_number = '1'
    # requests_mock serves these payloads in order, one per request; the final
    # empty payload presumably tells get_pr_files to stop requesting more
    # pages — TODO confirm against get_pr_files' pagination logic.
    requests_mock.get(
        'https://api.github.com/repos/demisto/content/pulls/1/files',
        [{'json': github_response_1, 'status_code': 200},
         {'json': github_response_2, 'status_code': 200},
         {'json': github_response_4, 'status_code': 200}]
    )
    # Only github_response_2 contains a path under "Packs/", so exactly one
    # pack name is expected.
    pack_dir = pack_names(get_pr_files(pr_number))
    assert pack_dir == {'Slack'}
def test_get_multiple_pr_files(requests_mock):
    """
    Scenario: Get a list of pack dir names from pull request files
    Given
    - A pull request
    - Files in the pull request are in a pack
    When
    - Getting the pack dir names from a pull request
    Then
    - Ensure pack dir names are returned correctly
    """
    pr_number = '1'
    # Response 2 carries a "Packs/Slack/..." path and response 3 a
    # "Packs/AnotherPackName/..." path; the empty response 4 ends the sequence.
    requests_mock.get(
        'https://api.github.com/repos/demisto/content/pulls/1/files',
        [{'json': github_response_1, 'status_code': 200},
         {'json': github_response_2, 'status_code': 200},
         {'json': github_response_3, 'status_code': 200},
         {'json': github_response_4, 'status_code': 200}]
    )
    pack_dir = pack_names(get_pr_files(pr_number))
    assert pack_dir == {'Slack', 'AnotherPackName'}
def test_get_pr_files_no_pack(requests_mock):
    """
    Scenario: Get a pack dir name from pull request files
    Given
    - A pull request
    - No file in the pull request is in a pack
    When
    - Getting the pack dir name from a pull request
    Then
    - Ensure the pack dir name is empty
    """
    pr_number = '1'
    # Neither payload contains a "Packs/..." filename, so no pack should be
    # detected; the empty response 4 ends the sequence.
    requests_mock.get(
        'https://api.github.com/repos/demisto/content/pulls/1/files',
        [{'json': github_response_1, 'status_code': 200},
         {'json': github_response_4, 'status_code': 200}]
    )
    pack_dir = pack_names(get_pr_files(pr_number))
    assert pack_dir == set()
| mit | 4d6b63ad47ff45506808b0192eda84bd | 32.421053 | 108 | 0.574453 | 3.192737 | false | false | false | false |
demisto/content | Packs/XMatters/Integrations/xMatters/xMatters.py | 2 | 28630 | import demistomock as demisto
from CommonServerPython import *
import requests
import json
import dateparser
import traceback
import urllib.parse
from typing import Any, Dict, Tuple, List, Optional, cast
''' CONSTANTS '''
# Timestamp format for xMatters API date fields (UTC, ISO-8601 "Zulu" suffix).
DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
# Disable insecure warnings
# Silences urllib3's InsecureRequestWarning — presumably because the
# integration can run with certificate verification disabled; confirm.
requests.packages.urllib3.disable_warnings()
''' CLIENT CLASS '''
class Client(BaseClient):
    """Client class to interact with the xMatters API.

    This Client implements API calls and does not contain any Demisto logic.
    It should only perform requests and return data. It inherits from
    BaseClient defined in CommonServerPython; calls use _http_request(),
    which handles proxy, SSL verification, etc.
    """

    def xm_get_user(self, user: str):
        """Retrieves a user in xMatters. Good for testing authentication.

        :type user: ``str``
        :param user: The web login of the user to retrieve

        :return: Result from getting the user
        :rtype: ``Dict[str, Any]``
        """
        # The login is URL-quoted since it may contain characters such as '@'.
        return self._http_request(
            method='GET',
            url_suffix='/api/xm/1/people?webLogin=' + urllib.parse.quote(user)
        )

    def xm_trigger_workflow(self, recipients: Optional[str] = None,
                            subject: Optional[str] = None, body: Optional[str] = None,
                            incident_id: Optional[str] = None,
                            close_task_id: Optional[str] = None) -> Dict[str, Any]:
        """Triggers a workflow in xMatters (POST to the configured base URL).

        :type recipients: ``Optional[str]``
        :param recipients: recipients for the xMatters alert.
        :type subject: ``Optional[str]``
        :param subject: Subject for the message in xMatters.
        :type body: ``Optional[str]``
        :param body: Body for the message in xMatters.
        :type incident_id: ``Optional[str]``
        :param incident_id: ID of incident that the message is related to.
        :type close_task_id: ``Optional[str]``
        :param close_task_id: Task ID from playbook to close.

        :return: result of the http request
        :rtype: ``Dict[str, Any]``
        """
        # Only send the parameters the caller actually supplied.
        request_params: Dict[str, Any] = {}
        if recipients:
            request_params['recipients'] = recipients
        if subject:
            request_params['subject'] = subject
        if body:
            request_params['body'] = body
        if incident_id:
            request_params['incident_id'] = incident_id
        if close_task_id:
            request_params['close_task_id'] = close_task_id
        return self._http_request(
            method='POST',
            url_suffix='',
            params=request_params,
        )

    def search_alerts(self, max_fetch: int = 100, alert_status: Optional[str] = None, priority: Optional[str] = None,
                      start_time: Optional[int] = None, property_name: Optional[str] = None,
                      property_value: Optional[str] = None, request_id: Optional[str] = None,
                      from_time: Optional[str] = None, to_time: Optional[str] = None,
                      workflow: Optional[str] = None, form: Optional[str] = None) -> List[Dict[str, Any]]:
        """Searches for xMatters alerts using the '/events' API endpoint.

        All parameters are passed straight through as HTTP query parameters,
        and every page of results is collected by following the API's
        pagination links.

        :type max_fetch: ``int``
        :param max_fetch: The maximum number of events per page ('limit').
        :type alert_status: ``Optional[str]``
        :param alert_status: status of the alert: 'ACTIVE' or 'SUSPENDED'.
        :type priority: ``Optional[str]``
        :param priority: comma-separated values of "LOW", "MEDIUM", "HIGH".
        :type start_time: ``Optional[int]``
        :param start_time: fallback lower bound for the event search, used only
            when ``from_time`` is not given (sent verbatim as 'from').
        :type property_name: ``Optional[str]``
        :param property_name: Name of property to match when searching.
        :type property_value: ``Optional[str]``
        :param property_value: Value of property to match when searching.
        :type request_id: ``Optional[str]``
        :param request_id: Matches requestId in xMatters.
        :type from_time: ``Optional[str]``
        :param from_time: UTC time of the beginning time to search for events.
        :type to_time: ``Optional[str]``
        :param to_time: UTC time of the end time to search for events.
        :type workflow: ``Optional[str]``
        :param workflow: Workflow ('plan') that events are from in xMatters.
        :type form: ``Optional[str]``
        :param form: Form that events are from in xMatters.

        :return: list containing the found xMatters events as dicts
        :rtype: ``List[Dict[str, Any]]``
        """
        request_params: Dict[str, Any] = {'limit': max_fetch}
        if alert_status:
            request_params['status'] = alert_status
        if priority:
            request_params['priority'] = priority
        # 'from_time' takes precedence over 'start_time' when both are given.
        if from_time:
            request_params['from'] = from_time
        elif start_time:
            request_params['from'] = start_time
        if to_time:
            request_params['to'] = to_time
        # A property filter is only meaningful as a name/value pair.
        if property_value and property_name:
            request_params['propertyName'] = property_name
            request_params['propertyValue'] = property_value
        if request_id:
            request_params['requestId'] = request_id
        if workflow:
            request_params['plan'] = workflow
        if form:
            request_params['form'] = form
        res = self._http_request(
            method='GET',
            url_suffix='/api/xm/1/events',
            params=request_params
        )
        data = res.get('data')
        # Follow the API's pagination links until no 'next' link remains.
        # (Replaces the previous manual has_next flag and per-item append loop.)
        while 'links' in res and 'next' in res['links']:
            res = self._http_request(
                method='GET',
                url_suffix=res['links']['next']
            )
            data.extend(res.get('data'))
        return data

    def search_alert(self, event_id: str):
        """Retrieves a single xMatters event via the '/events/{id}' endpoint.

        :type event_id: ``str``
        :param event_id: The event ID or UUID of the event to retrieve
        """
        # 404 is accepted so a missing event is returned to the caller instead
        # of raising, letting commands report "not found" gracefully.
        return self._http_request(
            method='GET',
            url_suffix='/api/xm/1/events/' + event_id,
            ok_codes=(200, 404)
        )
''' HELPER FUNCTIONS '''
def convert_to_demisto_severity(severity: str) -> int:
    """Maps xMatters alert priority to Cortex XSOAR incident severity.

    Converts the xMatters priority ('Low', 'Medium', 'High', case-insensitive)
    to the numeric Cortex XSOAR severity (1=Low, 2=Medium, 3=High) used for
    incident mapping.

    :type severity: ``str``
    :param severity: priority as returned from the xMatters API (str)

    :return: Cortex XSOAR Severity (1 to 3)
    :rtype: ``int``

    :raises KeyError: if the value is not one of LOW/MEDIUM/HIGH.
    """
    # Docstring previously referenced the "HelloWorld API" and a "1 to 4"
    # range — leftovers from the integration template; the actual mapping
    # below is and always was 1-3.
    severity_map = {
        'low': 1,     # low severity
        'medium': 2,  # medium severity
        'high': 3,    # high severity
    }
    return severity_map[severity.lower()]
def arg_to_timestamp(arg: Any, arg_name: str, required: bool = False) -> Optional[int]:
    """Converts an XSOAR argument to a timestamp (seconds from epoch).

    Accepts an int/float, a string of digits, or a date string parseable by
    dateparser (ISO8601 such as 2019-10-23T00:00:00, or relative forms such
    as "3 days"). Returns ``None`` when the input is ``None`` and not
    required; raises ``ValueError`` for missing or unparseable input.

    :type arg: ``Any``
    :param arg: argument to convert
    :type arg_name: ``str``
    :param arg_name: argument name, used in error messages
    :type required: ``bool``
    :param required: throws exception if ``True`` and argument provided is None

    :return: timestamp in epoch seconds, or ``None``
    :rtype: ``Optional[int]``
    """
    if arg is None:
        if required is True:
            raise ValueError(f'Missing "{arg_name}"')
        return None
    if isinstance(arg, (int, float)):
        # Numeric input is already an epoch value; truncate floats.
        return int(arg)
    if isinstance(arg, str):
        if arg.isdigit():
            # A string of digits is treated as an epoch value directly.
            return int(arg)
        # Otherwise let dateparser handle ISO8601 / relative date strings.
        parsed = dateparser.parse(arg, settings={'TIMEZONE': 'UTC'})
        if parsed is None:
            raise ValueError(f'Invalid date: {arg_name}')
        return int(parsed.timestamp())
    raise ValueError(f'Invalid date: "{arg_name}"')
''' COMMAND FUNCTIONS '''


def fetch_incidents(client: Client,
                    max_fetch: int = 100,
                    last_run: Dict[str, int] = {},
                    first_fetch_time: Optional[int] = None,
                    alert_status: Optional[str] = None,
                    priority: Optional[str] = None,
                    property_name: Optional[str] = None,
                    property_value: Optional[str] = None
                    ) -> Tuple[Dict[str, int], List[dict]]:
    """This function retrieves new alerts every interval (default is 1 minute).
    This function has to implement the logic of making sure that incidents are
    fetched only onces and no incidents are missed. By default it's invoked by
    XSOAR every minute. It will use last_run to save the timestamp of the last
    incident it processed. If last_run is not provided, it should use the
    integration parameter first_fetch_time to determine when to start fetching
    the first time.
    :type client: ``Client``
    :param Client: xMatters client to use
    :type last_run: ``Optional[Dict[str, int]]``
    :param last_run:
        A dict with a key containing the latest incident created time we got
        from last fetch
    :type first_fetch_time: ``Optional[int]``
    :param first_fetch_time:
        If last_run is None (first time we are fetching), it contains
        the timestamp in milliseconds on when to start fetching incidents
    :type alert_status: ``Optional[str]``
    :param alert_status:
        status of the alert to search for. Options are: 'ACTIVE',
        'SUSPENDED', or 'TERMINATED'
    :type max_fetch: ``str``
    :param max_fetch:
        The maximum number of events or incidents to fetch.
    :type priority: ``str``
    :param priority:
        Comma-separated list of the priority to search for.
        Options are: "LOW", "MEDIUM", "HIGH"
    :type property_name: ``Optional[str]``
    :param property_name: Property name to match with events.
    :type property_value: ``Optional[str]``
    :param property_value: Property value to match with events.
    :return:
        A tuple containing two elements:
            next_run (``Dict[str, int]``): Contains the timestamp that will be
            used in ``last_run`` on the next fetch.
            incidents (``List[dict]``): List of incidents that will be created in XSOAR
    :rtype: ``Tuple[Dict[str, int], List[dict]]``
    """
    # NOTE(review): ``last_run={}`` is a mutable default argument. It is only
    # read (never mutated) below, so it is harmless here, but ``None`` with an
    # explicit fallback would be the conventional form.
    # Get the last fetch time, if exists
    # last_run is a dict with a single key, called last_fetch
    last_fetch = last_run.get('last_fetch', None)
    # Handle first fetch time
    if last_fetch is None:
        # if missing, use what provided via first_fetch_time
        last_fetch = first_fetch_time
    else:
        # otherwise use the stored last fetch
        last_fetch = int(last_fetch)
    # for type checking, making sure that latest_created_time is int
    latest_created_time = cast(int, last_fetch)
    # Initialize an empty list of incidents to return
    # Each incident is a dict with a string as a key
    incidents: List[Dict[str, Any]] = []
    # last_fetch is kept as an epoch in seconds; the search API takes a date
    # string, so convert (epoch * 1000 ms) only when we actually have a value.
    if last_fetch is not None:
        start_time = timestamp_to_datestring(last_fetch * 1000)
    else:
        start_time = None
    # demisto.info("This is the current timestamp: " + str(start_time))
    # demisto.info("MS - last_fetch: " + str(last_fetch))
    alerts = client.search_alerts(
        max_fetch=max_fetch,
        alert_status=alert_status,
        start_time=start_time,
        priority=priority,
        property_name=property_name,
        property_value=property_value
    )
    for alert in alerts:
        try:
            # If no created_time set is as epoch (0). We use time in ms so we must
            # convert it from the HelloWorld API response
            incident_created_time = alert.get('created')
            # If no name is present it will throw an exception
            if "name" in alert:
                incident_name = alert['name']
            else:
                incident_name = "No Message Subject"
            datetimeformat = '%Y-%m-%dT%H:%M:%S.000Z'
            # Normalize the alert's 'created' string to an epoch in seconds
            # plus a milliseconds variant; anything unparsable falls back to 0.
            if isinstance(incident_created_time, str):
                parseddate = dateparser.parse(incident_created_time)
                if isinstance(parseddate, datetime):
                    occurred = parseddate.strftime(datetimeformat)
                    date = dateparser.parse(occurred, settings={'TIMEZONE': 'UTC'})
                    if isinstance(date, datetime):
                        incident_created_time = int(date.timestamp())
                        incident_created_time_ms = incident_created_time * 1000
                    else:
                        incident_created_time = 0
                        incident_created_time_ms = 0
                else:
                    date = None
                    incident_created_time = 0
                    incident_created_time_ms = 0
            else:
                date = None
                incident_created_time = 0
                incident_created_time_ms = 0
            # NOTE(review): this log line is labelled incident_created_time
            # but actually prints last_fetch.
            demisto.info("MS - incident_created_time: " + str(last_fetch))
            # to prevent duplicates, we are only adding incidents with creation_time > last fetched incident
            if last_fetch:
                if incident_created_time <= last_fetch:
                    continue
            # Human-readable details: "<plan name> - <form name>" when present.
            details = ""
            if 'plan' in alert:
                details = details + alert['plan']['name'] + " - "
            if 'form' in alert:
                details = details + alert['form']['name']
            incident = {
                'name': incident_name,
                'details': details,
                'occurred': timestamp_to_datestring(incident_created_time_ms),
                'rawJSON': json.dumps(alert),
                'type': 'xMatters Alert',  # Map to a specific XSOAR incident Type
                'severity': convert_to_demisto_severity(alert.get('priority', 'Low')),
            }
            incidents.append(incident)
            # Update last run and add incident if the incident is newer than last fetch
            if isinstance(date, datetime) and date.timestamp() > latest_created_time:
                latest_created_time = incident_created_time
        except Exception as e:
            # Best-effort fetch: a malformed alert is logged and skipped so
            # one bad event does not abort the whole fetch cycle.
            demisto.info("Issue with event")
            demisto.info(str(alert))
            demisto.info(str(e))
            pass
    # Save the next_run as a dict with the last_fetch key to be stored
    next_run = {'last_fetch': latest_created_time}
    return next_run, incidents
def event_reduce(e):
    """Map a raw xMatters event dict onto the flat context-output shape.

    Nested lookups (``plan``, ``form``, ``submitter``) are guarded with
    ``or {}`` so that events missing those keys (or carrying ``None``)
    yield ``None`` fields instead of raising ``AttributeError`` — the
    previous ``e.get('plan').get('name')`` chain crashed on such events.

    :param e: raw event/alert dict as returned by the xMatters API.
    :return: dict with normalized, capitalized keys for XSOAR context.
    """
    return {"Created": e.get('created'),
            "Terminated": e.get('terminated'),
            "ID": e.get('id'),
            "EventID": e.get('eventId'),
            "Name": e.get('name'),
            "PlanName": (e.get('plan') or {}).get('name'),
            "FormName": (e.get('form') or {}).get('name'),
            "Status": e.get('status'),
            "Priority": e.get('priority'),
            "Properties": e.get('properties'),
            "SubmitterName": (e.get('submitter') or {}).get('targetName')}
def xm_trigger_workflow_command(client: Client, recipients: str,
                                subject: str, body: str, incident_id: str,
                                close_task_id: str) -> CommandResults:
    """
    This function runs when the xm-trigger-workflow command is run.
    :type client: ``Client``
    :param Client: xMatters client to use
    :type recipients: ``str``
    :param recipients: Recipients to alert from xMatters.
    :type subject: ``str``
    :param subject: Subject of the alert in xMatters.
    :type body: ``str``
    :param body: Body of the alert in xMatters.
    :type incident_id: ``str``
    :param incident_id: Incident ID of the event in XSOAR.
    :type close_task_id: ``str``
    :param close_task_id: ID of task to close in a playbook.
    :return: Output of xm-trigger-workflow command being run.
    :rtype: ``CommandResults``
    """
    # Fix: the docstring above was previously placed AFTER this call, where
    # Python treats it as a dead string expression rather than documentation.
    out = client.xm_trigger_workflow(
        recipients=recipients,
        subject=subject,
        body=body,
        incident_id=incident_id,
        close_task_id=close_task_id
    )
    # Only the request id is surfaced to context.
    outputs = {'requestId': out['requestId']}
    return CommandResults(
        readable_output="Successfully sent a message to xMatters.",
        outputs=outputs,
        outputs_prefix='xMatters.Workflow',
        outputs_key_field='requestId'
    )
def xm_get_events_command(client: Client, request_id: Optional[str] = None, status: Optional[str] = None,
                          priority: Optional[str] = None, from_time: Optional[str] = None,
                          to_time: Optional[str] = None, workflow: Optional[str] = None,
                          form: Optional[str] = None, property_name: Optional[str] = None,
                          property_value: Optional[str] = None) -> CommandResults:
    """
    This function runs when the xm-get-events command is run.
    :type client: ``Client``
    :param Client: xMatters client to use
    :type request_id: ``Optional[str]```
    :param request_id: The the request ID associated with the events.
    :type status: ``Optional[str]``
    :param status:
        status of the alert to search for. Options are: 'ACTIVE',
        'SUSPENDED', or 'TERMINATED'
    :type priority: ``Optional[str]``
    :param priority:
        Comma-separated list of the priority to search for.
        Options are: "LOW", "MEDIUM", "HIGH"
    :type from_time: ``Optional[str]``
    :param from_time: UTC time for the start of the search.
    :type to_time: ``Optional[str]``
    :param to_time: UTC time for the end of the search.
    :type workflow: ``Optional[str]``
    :param workflow: Name of workflow to match the search.
    :type form: ``Optional[str]``
    :param form: Name of form to match in the search.
    :type property_name: ``Optional[str]``
    :param property_name: Property name to match with events.
    :type property_value: ``Optional[str]``
    :param property_value: Property value to match with events.
    :return: Events from the search.
    :rtype: ``CommandResults``
    """
    out = client.search_alerts(
        request_id=request_id,
        alert_status=status,
        priority=priority,
        from_time=from_time,
        to_time=to_time,
        workflow=workflow,
        form=form,
        property_name=property_name,
        property_value=property_value
    )
    reduced_out: Dict[str, List[Any]]
    if len(out) == 0:
        # Fix: the empty-result branch previously used the singular key
        # "xMatters.GetEvent.Event", inconsistent with the populated branch
        # and with the 'xMatters.GetEvents' outputs_prefix below.
        reduced_out = {"xMatters.GetEvents.Events": []}
        readable_output = "Could not find Events with given criteria in xMatters"
    else:
        reduced_out = {"xMatters.GetEvents.Events": [event_reduce(event) for event in out]}
        readable_output = f'Retrieved Events from xMatters: {reduced_out}'
    return CommandResults(
        readable_output=readable_output,
        outputs=reduced_out,
        outputs_prefix='xMatters.GetEvents',
        outputs_key_field='event_id'
    )
def xm_get_event_command(client: Client, event_id: str) -> CommandResults:
    """Handle the xm-get-event command.

    :type client: ``Client``
    :param Client: xMatters client to use
    :type event_id: ``str``
    :param event_id: Event ID to search for in xMatters
    :return: Output of xm-get-event command
    :rtype: ``CommandResults``
    """
    alert = client.search_alert(event_id=event_id)
    context: Dict[str, Any]
    if alert.get('code') == 404:
        # API signalled "not found" -> empty context entry.
        context = {"xMatters.GetEvent.Event": {}}
        summary = f'Could not find Event "{event_id}" from xMatters'
    else:
        slim = event_reduce(alert)
        context = {"xMatters.GetEvent.Event": slim}
        summary = f'Retrieved Event "{event_id}" from xMatters:\nEventID: {slim.get("EventID")}\n' \
                  f'Created: {slim.get("Created")}\nTerminated: {slim.get("Terminated")}\n' \
                  f'Name: {slim.get("Name")}\nStatus: {slim.get("Status")}'
    return CommandResults(
        readable_output=summary,
        outputs=context,
        outputs_prefix='xMatters.GetEvent',
        outputs_key_field='event_id'
    )
def test_module(from_xm: Client, to_xm: Client, user: str, max_fetch: int) -> str:
    """Tests API connectivity and authentication.

    Returning 'ok' indicates that the integration works like it is supposed
    to; Cortex XSOAR prints any other return value (or raised exception) as
    an error in the UI.

    :type from_xm: ``Client``
    :param from_xm: xMatters client to use to pull events from.
    :type to_xm: ``Client``
    :param to_xm: xMatters client to use to post an event to.
    :type user: ``str``
    :param user: user looked up to validate the "from" credentials.
    :type max_fetch: ``int``
    :param max_fetch: configured fetch size; must be in 1..200.
    :return: 'ok' if test passed, anything else will fail the test.
    :rtype: ``str``
    """
    # Validate max_fetch up front. The int() conversion now sits inside the
    # try so a non-numeric configured value produces the same readable error
    # instead of an unhandled ValueError (previously int() ran before the try).
    try:
        max_fetch_int = int(max_fetch)
        if max_fetch_int <= 0 or max_fetch_int > 200:
            raise ValueError
    except (ValueError, TypeError):
        raise ValueError("Max Fetch must be between 0 and 201")
    try:
        to_xm.xm_trigger_workflow(
            recipients='nobody',
            subject='Test - please ignore',
            body='Test - please ignore'
        )
    except DemistoException as e:
        # A 403 here usually means the HTTP-trigger URL is wrong.
        if 'Forbidden' in str(e):
            return 'Authorization Error: Check the URL of an HTTP trigger in a flow'
        raise
    try:
        from_xm.xm_get_user(user=user)
    except DemistoException as e:
        # A 403 here points at bad credentials / unknown user.
        if 'Forbidden' in str(e):
            return 'Authorization Error: Username and Password fields and verify the user exists'
        raise
    return 'ok'
''' MAIN FUNCTION '''


def main() -> None:
    """main function, parses params and runs command functions
    :return:
    :rtype:
    """
    # Integration parameters configured by the user in XSOAR.
    instance = demisto.params().get('instance')
    username = demisto.params().get('username')
    password = demisto.params().get('password')
    property_name = demisto.params().get('property_name')
    property_value = demisto.params().get('property_value')
    base_url = demisto.params().get('url')
    max_fetch = demisto.params().get('max_fetch', 20)
    # if your Client class inherits from BaseClient, SSL verification is
    # handled out of the box by it, just pass ``verify_certificate`` to
    # the Client constructor
    verify_certificate = not demisto.params().get('insecure', False)
    # How much time before the first fetch to retrieve incidents
    first_fetch_time = arg_to_timestamp(
        arg=demisto.params().get('first_fetch', '3 days'),
        arg_name='First fetch time',
        required=True
    )
    # Using assert as a type guard (since first_fetch_time is always an int when required=True)
    assert isinstance(first_fetch_time, int)
    # if your Client class inherits from BaseClient, system proxy is handled
    # out of the box by it, just pass ``proxy`` to the Client constructor
    proxy = demisto.params().get('proxy', False)
    # INTEGRATION DEVELOPER TIP
    # You can use functions such as ``demisto.debug()``, ``demisto.info()``,
    # etc. to print information in the XSOAR server log. You can set the log
    # level on the server configuration
    # See: https://xsoar.pan.dev/docs/integrations/code-conventions#logging
    demisto.debug(f'Command being called is {demisto.command()}')
    try:
        # Two clients: ``to_xm_client`` posts events via the configured
        # workflow URL, ``from_xm_client`` reads events/users from the
        # instance API.
        to_xm_client = Client(
            base_url=base_url,
            verify=verify_certificate,
            auth=(username, password),
            proxy=proxy)
        from_xm_client = Client(
            base_url="https://" + instance,
            verify=verify_certificate,
            auth=(username, password),
            proxy=proxy)
        # Dispatch on the command name XSOAR is invoking.
        if demisto.command() == 'xm-trigger-workflow':
            return_results(xm_trigger_workflow_command(
                to_xm_client,
                demisto.args().get('recipients'),
                demisto.args().get('subject'),
                demisto.args().get('body'),
                demisto.args().get('incident_id'),
                demisto.args().get('close_task_id')
            ))
        elif demisto.command() == 'fetch-incidents':
            # Set and define the fetch incidents command to run after activated via integration settings.
            alert_status = demisto.params().get('status', None)
            priority = demisto.params().get('priority', None)
            next_run, incidents = fetch_incidents(
                client=from_xm_client,
                last_run=demisto.getLastRun(),  # getLastRun() gets the last run dict
                first_fetch_time=first_fetch_time,
                max_fetch=max_fetch,
                alert_status=alert_status,
                priority=priority,
                property_name=property_name,
                property_value=property_value
            )
            # saves next_run for the time fetch-incidents is invoked
            demisto.setLastRun(next_run)
            # fetch-incidents calls ``demisto.incidents()`` to provide the list
            # of incidents to crate
            demisto.incidents(incidents)
        elif demisto.command() == 'xm-get-events':
            return_results(xm_get_events_command(
                client=from_xm_client,
                request_id=demisto.args().get('request_id'),
                status=demisto.args().get('status'),
                priority=demisto.args().get('priority'),
                from_time=demisto.args().get('from'),
                to_time=demisto.args().get('to'),
                workflow=demisto.args().get('workflow'),
                form=demisto.args().get('form'),
                property_name=demisto.args().get('property_name'),
                property_value=demisto.args().get('property_value')
            ))
        elif demisto.command() == 'xm-get-event':
            return_results(xm_get_event_command(
                client=from_xm_client,
                event_id=demisto.args().get('event_id')
            ))
        elif demisto.command() == 'test-module':
            return_results(test_module(
                from_xm=from_xm_client,
                to_xm=to_xm_client,
                user=username,
                max_fetch=max_fetch
            ))
    # Log exceptions and return errors
    except Exception as e:
        demisto.error(traceback.format_exc())  # print the traceback
        return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
''' ENTRY POINT '''
# Demisto executes integration code via exec(), where __name__ can be
# '__builtin__' (Python 2) or 'builtins' (Python 3) instead of '__main__'.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | 3e6ea05b56b33e90526879910ac5ea0b | 34 | 117 | 0.593014 | 4.072546 | false | false | false | false |
demisto/content | Packs/DeveloperTools/Scripts/CompareIndicators/CompareIndicators_test.py | 2 | 3339 | from CompareIndicators import collect_unique_indicators_from_lists, extract_list_from_args, demisto
def test_collect_unique_indicators_from_lists__empty():
    """
    Given:
        - Empty lists
    When:
        - Calling collect_unique_indicators_from_lists
    Then:
        - Return 2 empty lists
    """
    assert collect_unique_indicators_from_lists([], []) == ([], [])
def test_collect_unique_indicators_from_lists__partial_empty1():
    """
    Given:
        - Base list is empty
        - Compare-to list is populated
    When:
        - Calling collect_unique_indicators_from_lists
    Then:
        - First result is empty
        - Second result holds every indicator of the compare-to list
    """
    compare_to = ['1.1.1.1', '2.2.2.2/20', 'abcd']
    only_base, only_compare = collect_unique_indicators_from_lists([], compare_to)
    assert only_base == []
    for indicator in ('1.1.1.1', '2.2.0.0-2.2.15.255', 'abcd'):
        assert indicator in only_compare
def test_collect_unique_indicators_from_lists__partial_empty2():
    """
    Given:
        - Base list is populated
        - Compare-to list is empty
    When:
        - Calling collect_unique_indicators_from_lists
    Then:
        - First result holds every indicator of the base list
        - Second result is empty
    """
    base = ['1.1.1.1', '2.2.2.2/20', 'abcd']
    only_base, only_compare = collect_unique_indicators_from_lists(base, [])
    assert only_compare == []
    for indicator in ('1.1.1.1', '2.2.0.0-2.2.15.255', 'abcd'):
        assert indicator in only_base
def test_collect_unique_indicators_from_lists__populated_lists():
    """
    Given:
        - Both lists are populated
        - The lists share fully-matching and partially-matching iocs and
          also hold unique iocs
    When:
        - Calling collect_unique_indicators_from_lists
    Then:
        - Partial results and unique results are returned back
    """
    base = ['abcd', '1.1.1.0/30', '2.2.2.2', '3.3.3.3-3.3.3.6']
    compare_to = ['bcde', '1.1.1.2', '2.2.2.2', '3.3.3.2-3.3.3.5']
    only_base, only_compare = collect_unique_indicators_from_lists(base, compare_to)
    # partial / partial / partial / unique
    for indicator in ('1.1.1.3', '3.3.3.6', '1.1.1.0-1.1.1.1', 'abcd'):
        assert indicator in only_base
    # partial / unique
    for indicator in ('3.3.3.2', 'bcde'):
        assert indicator in only_compare
def test_extract_list_from_args__file_doesnt_exist(mocker):
    """
    Given:
        - A single indicator is provided directly (not as a file entry id)
    When:
        - Calling extract_list_from_args
    Then:
        - Return a list containing that indicator
    """
    mocker.patch.object(demisto, 'getFilePath', side_effect=ValueError)
    assert extract_list_from_args({'test': '1.1.1.1'}, 'test') == ['1.1.1.1']
def test_extract_list_from_args__file_exists(mocker):
    """
    Given:
        - A list is provided via entry id
        - The file exists
    When:
        - Calling extract_list_from_args
    Then:
        - Return a list with the iocs stored in the file
    """
    mocker.patch.object(
        demisto, 'getFilePath', return_value={'path': 'test_files/ips.txt'})
    assert extract_list_from_args({'test': '12@1'}, 'test') == ['1.1.1.1', '2.2.2.2']
| mit | 4f8a606394e6e83ed64f58a6194ddd32 | 29.633028 | 99 | 0.580413 | 3.204415 | false | true | false | false |
demisto/content | Packs/ApiModules/Scripts/SiemApiModule/SiemApiModule.py | 2 | 4888 | # pylint: disable=no-name-in-module
# pylint: disable=no-self-argument
from abc import ABC
from typing import Any, Callable, Optional
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
from enum import Enum
from pydantic import BaseConfig, BaseModel, AnyUrl, validator, Field
from requests.auth import HTTPBasicAuth
class Method(str, Enum):
    """HTTP request methods accepted by ``IntegrationHTTPRequest.method``.

    Inherits from ``str`` so members compare equal to — and serialize as —
    their plain string values.
    """
    GET = 'GET'
    POST = 'POST'
    PUT = 'PUT'
    HEAD = 'HEAD'
    PATCH = 'PATCH'
    DELETE = 'DELETE'
def load_json(v: Any) -> dict:
    """Normalize *v* into a dict.

    Accepts a dict (returned unchanged) or a string containing a JSON
    object. Raises ``ValueError`` for any other input type, for strings
    that are not valid JSON, and for JSON that decodes to a non-object.
    """
    if not isinstance(v, (dict, str)):
        raise ValueError('headers are not dict or a valid json')
    if isinstance(v, str):
        try:
            v = json.loads(v)
        except json.decoder.JSONDecodeError as exc:
            raise ValueError('headers are not valid Json object') from exc
        if not isinstance(v, dict):
            raise ValueError('headers are not from dict type')
    return v
class IntegrationHTTPRequest(BaseModel):
    """Declarative description of a single HTTP request, validated by
    pydantic. ``headers`` may be supplied either as a dict or as a JSON
    string (normalized through :func:`load_json` by the validator below).
    """
    method: Method
    url: AnyUrl
    verify: bool = True
    headers: dict = dict()  # type: ignore[type-arg]
    auth: Optional[HTTPBasicAuth]
    data: Any = None
    params: dict = dict()  # type: ignore[type-arg]

    class Config(BaseConfig):
        # HTTPBasicAuth is not a pydantic type, so arbitrary types must be
        # allowed for the ``auth`` field to validate.
        arbitrary_types_allowed = True

    # pre=True runs before field validation, so string headers are parsed
    # into a dict before pydantic checks the ``dict`` annotation.
    _normalize_headers = validator('headers', pre=True, allow_reuse=True)(
        load_json
    )
class Credentials(BaseModel):
    """Authentication credentials consumed by :func:`set_authorization`.
    ``identifier`` (username) is optional; ``password`` is required.
    """
    identifier: Optional[str]
    password: str
def set_authorization(request: IntegrationHTTPRequest, auth_credendtials):
    """Automatic authorization.

    Supports {Authorization: Bearer __token__}
    or Basic Auth.
    """
    creds = Credentials.parse_obj(auth_credendtials)
    if creds.password and creds.identifier:
        # Full identifier/password pair -> HTTP Basic Auth. Return here so
        # the password is not *also* attached as a bearer token (previously
        # the function fell through and set both).
        request.auth = HTTPBasicAuth(creds.identifier, creds.password)
        return
    # Password only -> treat it as a bearer token.
    auth = {'Authorization': f'Bearer {creds.password}'}
    if request.headers:
        request.headers |= auth  # type: ignore[assignment, operator]
    else:
        request.headers = auth  # type: ignore[assignment]
class IntegrationOptions(BaseModel):
    """Add here any option you need to add to the logic"""
    proxy: Optional[bool] = False
    # Maximum number of events returned by IntegrationGetEvents.run();
    # when provided it must be >= 1 (enforced by the pydantic Field).
    limit: Optional[int] = Field(None, ge=1)
class IntegrationEventsClient(ABC):
    """Base HTTP client for event-fetching integrations.

    Holds the request description, the integration options and a
    ``requests.Session``, and applies the proxy / certificate-verification
    options on construction.
    """

    def __init__(
        self,
        request: IntegrationHTTPRequest,
        options: IntegrationOptions,
        session: Optional[requests.Session] = None,
    ):
        """
        :param request: the (mutable) request template to send.
        :param options: integration-level options (proxy, limit, ...).
        :param session: optional pre-configured session; when omitted a
            fresh one is created per instance. The previous default
            (``session=requests.Session()``) was evaluated once at import
            time and silently shared one session between every client.
        """
        self.request = request
        self.options = options
        self.session = session if session is not None else requests.Session()
        self._set_proxy()
        self._skip_cert_verification()

    @abstractmethod
    def set_request_filter(self, after: Any):
        """TODO: set the next request's filter.
        Example:
        """
        self.request.headers['after'] = after

    def __del__(self):
        try:
            self.session.close()
        except AttributeError as err:
            demisto.debug(
                f'ignore exceptions raised due to session not used by the client. {err=}'
            )

    def call(self, request: IntegrationHTTPRequest) -> requests.Response:
        """Send *request*; raise DemistoException on any HTTP/network error."""
        try:
            response = self.session.request(**request.dict())
            response.raise_for_status()
            return response
        except Exception as exc:
            msg = f'something went wrong with the http call {exc}'
            demisto.debug(msg)
            raise DemistoException(msg) from exc

    def _skip_cert_verification(
        self, skip_cert_verification: Callable = skip_cert_verification
    ):
        # Only disable verification when the request explicitly asks for it.
        if not self.request.verify:
            skip_cert_verification()

    def _set_proxy(self):
        if self.options.proxy:
            ensure_proxy_has_http_prefix()
        else:
            skip_proxy()
class IntegrationGetEvents(ABC):
    """Template for the fetch-events flow: aggregates the batches yielded by
    ``_iter_events`` until exhaustion or until ``options.limit`` is reached.
    """

    def __init__(
        self, client: IntegrationEventsClient, options: IntegrationOptions
    ) -> None:
        self.client = client
        self.options = options

    def run(self):
        # Collect every batch produced by the concrete _iter_events
        # implementation; stop early once the optional limit is hit.
        stored = []
        for logs in self._iter_events():
            stored.extend(logs)
            if self.options.limit:
                demisto.debug(
                    f'{self.options.limit=} reached. \
slicing from {len(logs)=}. \
limit must be presented ONLY in commands and not in fetch-events.'
                )
                if len(stored) >= self.options.limit:
                    return stored[: self.options.limit]
        return stored

    def call(self) -> requests.Response:
        # Delegate to the client using its own stored request template.
        return self.client.call(self.client.request)

    @staticmethod
    @abstractmethod
    def get_last_run(events: list) -> dict:
        """Logic to get the last run from the events
        Example:
        """
        return {'after': events[-1]['created']}

    @abstractmethod
    def _iter_events(self):
        """Create iterators with Yield"""
        pass
| mit | d4e818f5bd65ab7c1e1b545924deaebf | 28.095238 | 89 | 0.611088 | 4.250435 | false | false | false | false |
demisto/content | Packs/TrendMicroVisionOne/Integrations/TrendMicroVisionOne/TrendMicroVisionOne_test.py | 2 | 26715 | from TrendMicroVisionOne import (
Client,
add_or_remove_from_block_list,
quarantine_or_delete_email_message,
isolate_or_restore_connection,
terminate_process,
add_or_delete_from_exception_list,
add_to_suspicious_list,
delete_from_suspicious_list,
get_file_analysis_status,
get_file_analysis_report,
collect_file,
download_information_collected_file,
submit_file_to_sandbox,
get_task_status,
get_endpoint_info,
add_note,
update_status,
)
# Provide valid API KEY
# Dummy credentials only: every HTTP call in these tests is mocked, so the
# values never reach the real Vision One API.
api_key = "test api key"
# NOTE(review): proxy/verify are passed as the strings "false"/"true" — this
# mirrors how the Client constructor is called throughout these tests.
proxy = "false"
verify = "true"
# Mocked Vision One response shared by the add/remove block-list tests.
def add_remove_blocklist_mock_response(*args, **kwargs):
    """Return a canned add/remove block-list API payload."""
    return {
        "actionId": "88139521",
        "taskStatus": "pending",
        "data": {
            "createdTime": 1589525651,
            "executedTime": 1589525725,
            "finishedTime": 1589525725,
            "taskStatus": "success",
            "error": {}
        }
    }
# Test cases for add to block list
def test_add_blocklist(mocker):
    """Test add to block list with positive scenario."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        add_remove_blocklist_mock_response)
    v1_client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    block_args = {
        "valueType": "file_sha1",
        "targetValue": "2de5c1125d5f991842727ed8ea8b5fda0ffa249b",
        "productId": "sao",
        "description": "block info",
    }
    cmd_result = add_or_remove_from_block_list(
        v1_client, "trendmicro-visionone-add-to-block-list", block_args)
    assert cmd_result.outputs_prefix == "VisionOne.BlockList"
    assert cmd_result.outputs_key_field == "actionId"
    assert cmd_result.outputs["taskStatus"] == "pending"
    assert isinstance(cmd_result.outputs["actionId"], str)
# Test cases for remove from block list
def test_remove_block_list(mocker):
    """Test remove from block list with positive scenario."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        add_remove_blocklist_mock_response)
    v1_client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    block_args = {
        "valueType": "file_sha1",
        "targetValue": "2de5c1125d5f991842727ed8ea8b5fda0ffa249b",
        "productId": "sao",
        "description": "block info",
    }
    cmd_result = add_or_remove_from_block_list(
        v1_client, "trendmicro-visionone-remove-from-block-list", block_args)
    assert cmd_result.outputs_prefix == "VisionOne.BlockList"
    assert cmd_result.outputs_key_field == "actionId"
    assert cmd_result.outputs["taskStatus"] == "pending"
    assert isinstance(cmd_result.outputs["actionId"], str)
# Mocked Vision One response shared by the quarantine/delete email tests.
def quarantine_delete_email_mock_response(*args, **kwargs):
    """Return a canned quarantine/delete email API payload."""
    return {
        "actionId": "88139521",
        "taskStatus": "pending",
        "data": {
            "createdTime": 1589525651,
            "executedTime": 1589525725,
            "finishedTime": 1589525725,
            "taskStatus": "success",
            "error": {},
        },
    }
# Test cases for quarantine email message
def test_quarantine_email_message(mocker):
    """Test quarantine email message with positive scenario."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        quarantine_delete_email_mock_response)
    v1_client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    email_args = {
        "messageId": (
            "<CANUJTKTjto9GAHTr9V=TFqMZhRXqVn="
            "MfSqmTdAMyv9PDX3k+vQ0w@mail.gmail.com>"
        ),
        "mailBox": "kjshdfjksahd@trendenablement.com",
        "messageDeliveryTime": "2021-12-09T14:00:12.000Z",
        "productId": "sca",
        "description": "quarantine info",
    }
    cmd_result = quarantine_or_delete_email_message(
        v1_client, "trendmicro-visionone-quarantine-email-message", email_args)
    assert cmd_result.outputs_prefix == "VisionOne.Email"
    assert cmd_result.outputs_key_field == "actionId"
    assert cmd_result.outputs["taskStatus"] == "pending"
    assert isinstance(cmd_result.outputs["actionId"], str)
# Test cases for delete email message
def test_delete_email_message(mocker):
    """Test delete email message with positive scenario."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        quarantine_delete_email_mock_response)
    v1_client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    email_args = {
        "messageId": (
            "<CANUJTKTqmuCT12v7mpbxZih_crrP"
            "MfSqmTdAMyv9PDX3k+vQ0w@mail.gmail.com>"
        ),
        "mailBox": "kjshdfjksahd@trendenablement.com",
        "messageDeliveryTime": "2021-12-09T14:00:55.000Z",
        "productId": "sca",
        "description": "quarantine info",
    }
    cmd_result = quarantine_or_delete_email_message(
        v1_client, "trendmicro-visionone-delete-email-message", email_args)
    assert cmd_result.outputs_prefix == "VisionOne.Email"
    assert cmd_result.outputs_key_field == "actionId"
    assert cmd_result.outputs["taskStatus"] == "pending"
    assert isinstance(cmd_result.outputs["actionId"], str)
# Mocked Vision One response shared by the isolate/restore/terminate tests.
def isolate_restore_mock_response(*args, **kwargs):
    """Return a canned isolate/restore endpoint API payload."""
    return {
        "status": "string",
        "actionId": "88139521",
        "taskStatus": "pending",
        "result": {
            "computerId": "string",
        },
        "data": {
            "createdTime": 1589525651,
            "executedTime": 1589525725,
            "finishedTime": 1589525725,
            "taskStatus": "success",
            "error": {},
        },
    }
# Test cases for isolate endpoint
def test_isolate_endpoint(mocker):
    """Test isolate endpoint with positive scenario."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        isolate_restore_mock_response)
    v1_client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    endpoint_args = {
        "endpoint": "hostname",
        "productId": "sao",
        "description": "isolate endpoint info",
    }
    cmd_result = isolate_or_restore_connection(
        v1_client, "trendmicro-visionone-isolate-endpoint", endpoint_args)
    assert cmd_result.outputs_prefix == "VisionOne.Endpoint_Connection"
    assert cmd_result.outputs_key_field == "actionId"
    assert cmd_result.outputs["taskStatus"] == "pending"
# Test cases for restore endpoint
def test_restore_endpoint(mocker):
    """Test restore endpoint connection with positive scenario."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        isolate_restore_mock_response)
    v1_client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    endpoint_args = {
        "endpoint": "hostname",
        "productId": "sao",
        "description": "restore endpoint info",
    }
    cmd_result = isolate_or_restore_connection(
        v1_client, "trendmicro-visionone-restore-endpoint-connection", endpoint_args)
    assert cmd_result.outputs_prefix == "VisionOne.Endpoint_Connection"
    assert cmd_result.outputs_key_field == "actionId"
    assert cmd_result.outputs["taskStatus"] == "pending"
# Test cases for terminate process endpoint
def test_terminate_process_endpoint(mocker):
    """Test terminate process with positive scenario."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        isolate_restore_mock_response)
    v1_client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    process_args = {
        "endpoint": "00:50:56:81:87:A8",
        "fileSha1": "12a08b7a3c5a10b64700c0aca1a47941b50a4f8b",
        "productId": "sao",
        "description": "terminate info",
        "filename": "testfile",
    }
    cmd_result = terminate_process(v1_client, process_args)
    assert cmd_result.outputs_prefix == "VisionOne.Terminate_Process"
    assert cmd_result.outputs_key_field == "actionId"
    assert cmd_result.outputs["taskStatus"] == "pending"
    assert isinstance(cmd_result.outputs["actionId"], str)
# Mocked exception-list item count used by the exception-list tests.
def add_delete_exception_mock_response(*args, **kwargs):
    """Return the canned exception-list item count."""
    return 20
# Test cases for add exception list endpoint.
def test_add_object_to_exception_list(mocker):
    """Test add to exception list with positive scenario."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        add_delete_exception_mock_response)
    mocker.patch(
        "TrendMicroVisionOne.Client.exception_list_count",
        add_delete_exception_mock_response)
    v1_client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    exception_args = {
        "type": "domain",
        "value": "1.alisiosanguera.com",
        "description": "new key"
    }
    cmd_result = add_or_delete_from_exception_list(
        v1_client, "trendmicro-visionone-add-objects-to-exception-list", exception_args)
    assert cmd_result.outputs_prefix == "VisionOne.Exception_List"
    assert cmd_result.outputs_key_field == "message"
    assert cmd_result.outputs["status_code"] is None
    assert isinstance(cmd_result.outputs["total_items"], int)
# Test cases for delete exception list.
def test_delete_object_to_exception_list(mocker):
    """Test delete from exception list with positive scenario."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        add_delete_exception_mock_response)
    mocker.patch(
        "TrendMicroVisionOne.Client.exception_list_count",
        add_delete_exception_mock_response)
    v1_client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    exception_args = {
        "type": "domain",
        "value": "1.alisiosanguera.com.cn",
        "description": "testing exception",
    }
    cmd_result = add_or_delete_from_exception_list(
        v1_client, "trendmicro-visionone-delete-objects-from-exception-list", exception_args)
    assert cmd_result.outputs_prefix == "VisionOne.Exception_List"
    assert cmd_result.outputs_key_field == "message"
    assert cmd_result.outputs["status_code"] is None
    assert isinstance(cmd_result.outputs["total_items"], int)
# Mocked suspicious-list item count used by the suspicious-list tests.
def add_delete_suspicious_mock_response(*args, **kwargs):
    """Return the canned suspicious-list item count."""
    return 20
# Test cases for add suspicious object list
def test_add_object_to_suspicious_list(mocker):
    """Test add to suspicious list with positive scenario."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        add_delete_suspicious_mock_response)
    mocker.patch(
        "TrendMicroVisionOne.Client.suspicious_list_count",
        add_delete_suspicious_mock_response)
    v1_client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    suspicious_args = {
        "type": "domain",
        "value": "1.alisiosanguera.com.cn",
        "description": "Example Suspicious Object.",
        "scanAction": "log",
        "riskLevel": "high",
        "expiredDay": 15,
    }
    cmd_result = add_to_suspicious_list(v1_client, suspicious_args)
    assert cmd_result.outputs_prefix == "VisionOne.Suspicious_List"
    assert cmd_result.outputs_key_field == "message"
    assert cmd_result.outputs["status_code"] is None
    assert isinstance(cmd_result.outputs["total_items"], int)
# Test cases for delete suspicious object list
def test_delete_object_from_suspicious_list(mocker):
    """Test delete object from suspicious list."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        add_delete_suspicious_mock_response)
    mocker.patch(
        "TrendMicroVisionOne.Client.suspicious_list_count",
        add_delete_suspicious_mock_response)
    v1_client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    cmd_result = delete_from_suspicious_list(
        v1_client, {"type": "domain", "value": "1.alisiosanguera.com.cn"})
    assert cmd_result.outputs_prefix == "VisionOne.Suspicious_List"
    assert cmd_result.outputs_key_field == "message"
    assert cmd_result.outputs["status_code"] is None
    assert isinstance(cmd_result.outputs["total_items"], int)
# Mocked Vision One response for the get-file-analysis-status tests.
def mock_file_status_response(*args, **kwargs):
    """Return a canned file-analysis status payload."""
    return {
        "code": "Success",
        "message": "Success",
        "data": {
            "taskId": "012e4eac-9bd9-4e89-95db-77e02f75a6f3",
            "taskStatus": "finished",
            "digest": {
                "md5": "4ac174730d4143a119037d9fda81c7a9",
                "sha1": "fb5608fa03de204a12fe1e9e5275e4a682107471",
                "sha256": (
                    "65b0f656e79ab84ca17807158e3ea"
                    "c206bd58be6689ddeb95956a48748d138f9"
                ),
            },
            "analysisSummary": {
                "analysisCompletionTime": "2021-05-07T03:08:40Z",
                "riskLevel": "high",
                "description": "",
                "detectionNameList": [],
                "threatTypeList": [],
                "trueFileType": "exe",
            },
            "reportId": "012e4eac-9bd9-4e89-95db-77e02f75a6f3",
        },
    }
# Test Cases for Get file analysis status
def test_get_file_status(mocker):
    """Test to get status of file."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        mock_file_status_response,
    )
    client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    status_args = {"taskId": "921674d0-9735-4f79-b7de-c852e00a003d"}
    cmd_result = get_file_analysis_status(client, status_args)
    assert cmd_result.outputs_prefix == "VisionOne.File_Analysis_Status"
    assert cmd_result.outputs_key_field == "message"
    assert cmd_result.outputs["message"] == "Success"
    assert cmd_result.outputs["code"] == "Success"
    assert cmd_result.outputs["task_id"] == "012e4eac-9bd9-4e89-95db-77e02f75a6f3"
    assert cmd_result.outputs["taskStatus"] == "finished"
    assert cmd_result.outputs["report_id"] == "012e4eac-9bd9-4e89-95db-77e02f75a6f3"
def test_get_report_id(mocker):
    """Test to get status of file with report id."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        mock_file_status_response,
    )
    client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    status_args = {"taskId": "921674d0-9735-4f79-b7de-c852e00a003d"}
    cmd_result = get_file_analysis_status(client, status_args)
    assert cmd_result.outputs_prefix == "VisionOne.File_Analysis_Status"
    assert cmd_result.outputs_key_field == "message"
    assert cmd_result.outputs["message"] == "Success"
    assert cmd_result.outputs["code"] == "Success"
    assert cmd_result.outputs["report_id"] == "012e4eac-9bd9-4e89-95db-77e02f75a6f3"
# Mock response for Get file analysis report
def mock_file_report_response(*args, **kwargs):
    """Return a canned 'get file analysis report' API payload."""
    report_entry = {
        "type": "ip",
        "value": "6.6.6.6",
        "riskLevel": "high",
        "analysisCompletionTime": "2021-05-07T03:08:40Z",
        "expiredTime": "2021-06-07T03:08:40Z",
        "rootFileSha1": "fb5608fa03de204a12fe1e9e5275e4a682107471",
    }
    return {
        "code": "Success",
        "message": "Success",
        "data": [report_entry],
    }
# Test cases for get file analysis report
def test_get_file_analysis_report(mocker):
    """Test get file analysis report data."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        mock_file_report_response,
    )
    client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    report_args = {
        "reportId": "800f908d-9578-4333-91e5-822794ed5483",
        "type": "suspiciousObject",
    }
    cmd_result = get_file_analysis_report(client, report_args)
    assert cmd_result.outputs["message"] == "Success"
    assert cmd_result.outputs["code"] == "Success"
    first_entry = cmd_result.outputs["data"][0]
    for field in ("type", "value", "risk_level",
                  "analysis_completion_time", "expired_time", "root_file_sha1"):
        assert isinstance(first_entry[field], str)
def test_get_file_analysis_report_1(mocker):
    """Test get file analysis report returns a non-empty data list."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        mock_file_report_response,
    )
    client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    report_args = {
        "reportId": "800f908d-9578-4333-91e5-822794ed5483",
        "type": "suspiciousObject",
    }
    cmd_result = get_file_analysis_report(client, report_args)
    assert cmd_result.outputs["message"] == "Success"
    assert cmd_result.outputs["code"] == "Success"
    assert len(cmd_result.outputs["data"]) > 0
# Mock response for the collect forensic file task.
def mock_collect_file(*args, **kwargs):
    """Return a canned 'collect file' task payload (pending action, finished task)."""
    return {
        "status": "string",
        "actionId": "88139521",
        "taskStatus": "pending",
        "result": {"computerId": "string"},
        "data": {
            "createdTime": 1589525651,
            "executedTime": 1589525725,
            "finishedTime": 1589525725,
            "taskStatus": "success",
            "error": {},
        },
    }
# Test cases for collect forensic file.
def test_collect_forensic_file(mocker):
    """Test collect file with positive scenario."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        mock_collect_file,
    )
    client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    collect_args = {
        "endpoint": "hostname",
        "description": "collect file",
        "productId": "sao",
        "filePath": "/file_path/sample.txt",
        "os": "linux",
    }
    cmd_result = collect_file(client, collect_args)
    assert cmd_result.outputs["taskStatus"] == "pending"
    assert isinstance(cmd_result.outputs["actionId"], str)
    assert cmd_result.outputs_prefix == "VisionOne.Collect_Forensic_File"
    assert cmd_result.outputs_key_field == "actionId"
# Mock for downloaded file information
def mock_download_collected_file_info_response(*args, **kwargs):
    """Return a canned 'download collected file information' payload."""
    return {
        "data": {
            "url": "string",
            "expires": "2011-10-05T14:48:00.000Z",
            "password": "string",
            "filename": "string",
        }
    }
# Test Cases for collected downloaded file information.
def test_get_forensic_file_information(mocker):
    """Test endpoint to get collected file information based on action id."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        mock_download_collected_file_info_response,
    )
    client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    cmd_result = download_information_collected_file(client, {"actionId": "00000700"})
    for field in ("url", "expires", "password", "filename"):
        assert isinstance(cmd_result.outputs[field], str)
# Mock response for submit file to sandbox.
# NOTE(review): the function name contains a typo ('reponse'); it is kept
# unchanged so any external references keep working.
def mock_submit_file_to_sandbox_reponse(*args, **kwargs):
    """Return a canned 'submit file to sandbox' API payload."""
    digest = {
        "md5": "4ac174730d4143a119037d9fda81c7a9",
        "sha1": "fb5608fa03de204a12fe1e9e5275e4a682107471",
        "sha256": (
            "65b0f656e79ab84ca17807158e3ea"
            "c206bd58be6689ddeb95956a48748d138f9"
        )
    }
    return {
        "code": "Success",
        "message": "Success",
        "data": {
            "taskId": "012e4eac-9bd9-4e89-95db-77e02f75a6f3",
            "digest": digest,
        },
    }
# Mock replacement for requests.get used by the sandbox submission test.
def mocked_requests_get(*args, **kwargs):
    """Return a MockResponse keyed on the requested URL; 404 for unknown URLs."""
    class MockResponse:
        def __init__(self, json_data, status_code, content):
            self.json_data = json_data
            self.status_code = status_code
            self.content = content

        def json(self):
            return self.json_data

    known_urls = {
        'http://someurl.com/test.json': ({"key1": "value1"}, 200, "response"),
        'http://someotherurl.com/anothertest.json': ({"key2": "value2"}, 200, "response"),
    }
    return MockResponse(*known_urls.get(args[0], (None, 404, None)))
# Mock replacement for requests.post used by the sandbox submission test.
def mocked_requests_post(*args, **kwargs):
    """Return a MockResponse whose json() mimics a successful sandbox submission.

    Bug fix: the 'sha256' value previously had a stray comma between the two
    string fragments, which turned the intended implicit string concatenation
    into a tuple (compare mock_submit_file_to_sandbox_reponse, which has no
    comma). The comma is removed so 'sha256' is a single string.
    """
    class MockResponse:
        def __init__(self, json_data, status_code, content):
            self.json_data = json_data
            self.status_code = status_code
            self.content = content

        def json(self):
            return {
                "code": "Success",
                "message": "Success",
                "data": {
                    "taskId": "012e4eac-9bd9-4e89-95db-77e02f75a6f3",
                    "digest": {
                        "md5": "4ac174730d4143a119037d9fda81c7a9",
                        "sha1": "fb5608fa03de204a12fe1e9e5275e4a682107471",
                        # Adjacent literals concatenate into one sha256 string.
                        "sha256": (
                            "65b0f656e79ab84ca17807158e3ea"
                            "c206bd58be6689ddeb95956a48748d138f9"
                        )
                    },
                },
            }

        def raise_for_status(self):
            return True

    if args[0] == 'http://someurl.com/test.json':
        return MockResponse({"key1": "value1"}, 200, "response")
    elif args[0] == 'http://someotherurl.com/anothertest.json':
        return MockResponse({"key2": "value2"}, 200, "response")
    return MockResponse(None, 404, None)
def test_submit_file_to_sandbox(mocker):
    """Test submitting a file to the sandbox with mocked HTTP transport."""
    mocker.patch("TrendMicroVisionOne.requests.get", mocked_requests_get)
    mocker.patch("TrendMicroVisionOne.requests.post", mocked_requests_post)
    client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    sandbox_args = {
        "fileUrl": "http://adsd.com",
        "fileName": "XDR_ResponseApp_CollectFile_ID00000700_20211206T134158Z.7z",
        "archivePassword": "6hn467c8",
        "documentPassword": "",
    }
    cmd_result = submit_file_to_sandbox(client, sandbox_args)
    assert cmd_result.outputs["message"] == "Success"
    assert cmd_result.outputs["code"] == "Success"
# Mock function for check task status
def check_task_status_mock_response(*args, **kwargs):
    """Return a canned task-status payload describing a finished task."""
    return {
        "data": {
            "createdTime": 1589525651,
            "executedTime": 1589525725,
            "finishedTime": 1589525725,
            "taskStatus": "success",
            "error": {},
        }
    }
def test_check_task_status(mocker):
    """Test polling a task status with mocked transport."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        check_task_status_mock_response,
    )
    mocker.patch(
        "CommonServerPython.ScheduledCommand.raise_error_if_not_supported",
        lambda: None,
    )
    client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    cmd_result = get_task_status({"actionId": "00001108"}, client)
    assert cmd_result.outputs["taskStatus"] == "success"
# Mock response for get endpoint information.
def mock_get_endpoint_info_response(*args, **kwargs):
    """Return a canned endpoint-information payload."""
    endpoint_details = {
        "logonAccount": {"value": ["DOMAIN\\username"], "updateAt": 0},
        "hostname": {"value": "hostname", "updateAt": 0},
        "macAddr": {"value": "00:11:22:33:44:55", "updateAt": 0},
        "ip": {"value": "192.168.1.1", "updateAt": 0},
        "osName": "Windows",
        "osVersion": "10.0.19042",
        "osDescription": "Windows 10 Pro (64 bit) build 19042",
        "productCode": "xes",
    }
    return {
        "status": "SUCCESS",
        "errorCode": 0,
        "message": "message",
        "result": endpoint_details,
    }
# Test case for get endpoint information.
def test_get_endpoint_information(mocker):
    """Test get information from endpoint based on computerid."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        mock_get_endpoint_info_response,
    )
    client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    cmd_result = get_endpoint_info(client, {"endpoint": "hostname"})
    assert cmd_result.outputs["status"] == "SUCCESS"
    for field in ("message", "hostname", "ip", "macAddr", "osDescription",
                  "osName", "osVersion", "productCode"):
        assert isinstance(cmd_result.outputs[field], str)
# Mock function for add note.
def add_note_mock_response(*args, **kwargs):
    """Return a canned 'add alert note' API payload."""
    return {
        "data": {"id": 123},
        "info": {
            "code": 3021000,
            "msg": "Alert notes added successfully."
        },
    }
# Test case for add note
def test_add_note(mocker):
    """Test adding a note to a workbench alert."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        add_note_mock_response,
    )
    client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    note_args = {
        "workbench_id": "WB-20837-20220418-00000",
        "content": "This is a new note.",
    }
    cmd_result = add_note(client, note_args)
    assert cmd_result.outputs["response_msg"] == "Alert notes added successfully."
    assert isinstance(cmd_result.outputs["Workbench_Id"], str)
    assert isinstance(cmd_result.outputs["noteId"], int)
    assert isinstance(cmd_result.outputs["response_code"], int)
# Mock function for update alert status
def update_status_mock_response(*args, **kwargs):
    """Return a canned 'update alert status' API payload."""
    return {
        "data": {},
        "info": {
            "code": 3006000,
            "msg": "Alert status changed successfully."
        },
    }
# Test case for update alert status
def test_update_status(mocker):
    """Test updating the status of a workbench alert."""
    mocker.patch(
        "TrendMicroVisionOne.Client.http_request",
        update_status_mock_response,
    )
    client = Client("https://api.xdr.trendmicro.com", api_key, proxy, verify)
    status_args = {
        "workbench_id": "WB-20837-20220418-00000",
        "status": "in_progress",
    }
    cmd_result = update_status(client, status_args)
    assert cmd_result.outputs["response_msg"] == "Alert status changed successfully."
    assert isinstance(cmd_result.outputs["Workbench_Id"], str)
    assert isinstance(cmd_result.outputs["response_code"], int)
from typing import Tuple
from CommonServerPython import *
''' IMPORTS '''
import traceback
import requests
import zipfile
import io
import re
from datetime import datetime as dt
# Disable insecure warnings
requests.packages.urllib3.disable_warnings()
# disable-secrets-detection-start
# Whether compromised websites are considered malicious or not. See the blacklists output in
# https://urlhaus-api.abuse.ch/
# disable-secrets-detection-end
# NOTE(review): evaluated at import time, so demisto.params() must be available when the module loads.
COMPROMISED_IS_MALICIOUS = demisto.params().get('compromised_is_malicious', False)
# Headers to be sent in requests
HEADERS = {
    'Content-Type': 'application/x-www-form-urlencoded',
    'Accept': 'application/json'
}
''' HELPER FUNCTIONS '''
def http_request(method, command, api_url, use_ssl, data=None):
    """
    Send an API request to URLhaus, retrying on non-200 responses.

    Args:
        method (str): HTTP method (e.g. 'GET', 'POST').
        command (str): API endpoint suffix (e.g. 'url', 'host', 'payload').
        api_url (str): Base API URL.
        use_ssl (bool): Whether to verify the server certificate.
        data (str): Optional form-encoded request body.

    Returns:
        requests.Response: The first response with HTTP status 200.

    Raises:
        Exception: If no attempt returned HTTP 200 after all retries.
    """
    retry = int(demisto.params().get('retry', 3))
    url = f'{api_url}/{command}/'
    # Bug fix: the original loop raised on the first non-200 response, so the
    # configured retry count was never honored. Now we only raise after all
    # attempts are exhausted, keeping the original exception message format.
    error_message = f'Error in API call {url}'
    for _ in range(max(retry, 1)):
        res = requests.request(method,
                               url,
                               verify=use_ssl,
                               data=data,
                               headers=HEADERS)
        if res.status_code == 200:
            return res
        error_message = f'Error in API call {url} [{res.status_code}] - {res.reason}'
    raise Exception(error_message)
def reformat_date(date):
    """
    Convert a URLhaus 'YYYY-MM-DD HH:MM:SS UTC' timestamp to ISO-8601 form.

    Args:
        date (str): Timestamp string as returned by the URLhaus API.

    Returns:
        str: 'YYYY-MM-DDTHH:MM:SS', or 'Unknown' if the input cannot be parsed.
    """
    try:
        # Bug fix: str.rstrip(' UTC') strips any trailing characters from the
        # set {' ', 'U', 'T', 'C'} rather than the literal suffix; remove the
        # exact ' UTC' suffix explicitly instead.
        if date.endswith(' UTC'):
            date = date[:-len(' UTC')]
        return dt.strptime(date, '%Y-%m-%d %H:%M:%S').strftime('%Y-%m-%dT%H:%M:%S')
    except Exception:
        return 'Unknown'
def extract_zipped_buffer(buffer):
    """Return the contents of the first file inside a zipped byte buffer."""
    with io.BytesIO(buffer) as bio, zipfile.ZipFile(bio) as archive:
        first_member = archive.namelist()[0]
        return archive.read(first_member)
def query_url_information(url, api_url, use_ssl):
    """Query the URLhaus 'url' endpoint for information about a URL."""
    return http_request('POST', 'url', api_url, use_ssl, data=f'url={url}')
def query_host_information(host, api_url, use_ssl):
    """Query the URLhaus 'host' endpoint for information about a host (domain or IP)."""
    return http_request('POST', 'host', api_url, use_ssl, data=f'host={host}')
def query_payload_information(hash_type, api_url, use_ssl, hash):
    """Query the URLhaus 'payload' endpoint by hash (hash_type is 'md5' or 'sha256')."""
    return http_request('POST', 'payload', api_url, use_ssl, data=f'{hash_type}_hash={hash}')
def download_malware_sample(sha256, api_url, use_ssl):
    """Download the zipped malware sample identified by its SHA256."""
    return http_request('GET', f'download/{sha256}', api_url, use_ssl)
''' COMMANDS + REQUESTS FUNCTIONS '''
def test_module(**kwargs):
    """
    Integration connectivity check: perform a minimal URLhaus query.
    Raises on any non-200 response via http_request.
    """
    api_url = kwargs.get('api_url')
    use_ssl = kwargs.get('use_ssl')
    http_request('POST', 'url', api_url, use_ssl)
def url_calculate_score(status: str) -> Tuple[int, str]:
    """
    Calculate DBot Score for the url command using url status.
    Args:
        status (str): A URL status ('online', 'offline' or 'unknown').
    Returns:
        dbot_score,description (tuple): The DBot Score and the description associated with it.
    Raises:
        Exception: If the status is not one of the known values.
    """
    # Typo fix: 'inadctive' -> 'inactive' in the offline description.
    status_dict = {'online': (Common.DBotScore.BAD, "The URL is active (online) and currently serving a payload"),
                   'offline': (Common.DBotScore.SUSPICIOUS, "The URL is inactive (offline) and serving no payload"),
                   'unknown': (Common.DBotScore.NONE, "The URL status could not be determined")}
    if status_dict.get(status):
        return status_dict[status]
    raise Exception("Got bad url status")
def domain_calculate_score(blacklist: dict) -> Tuple[int, str]:
    """
    Calculate DBot Score for the domain command using blacklist.
    Args:
        blacklist (dict): Containing spamhaus_dbl and surbl.
    Returns:
        dbot_score,description (tuple): The DBot Score and the description associated with it.
    """
    spamhaus_dbl = blacklist.get('spamhaus_dbl', '')
    surbl = blacklist.get('surbl', '')
    # Known-bad Spamhaus classifications checked first, then SURBL listing,
    # then explicit 'not listed' answers, falling back to GOOD.
    bad_spamhaus_descriptions = {
        'spammer_domain': "The queried Domain is a known spammer domain",
        'phishing_domain': "The queried Domain is a known phishing domain",
        'botnet_cc_domain': "The queried Domain is a known botnet C&C domain",
    }
    if spamhaus_dbl in bad_spamhaus_descriptions:
        return Common.DBotScore.BAD, bad_spamhaus_descriptions[spamhaus_dbl]
    if surbl == 'listed':
        return Common.DBotScore.BAD, "The queried Domain is listed on SURBL"
    if spamhaus_dbl == 'not listed':
        return Common.DBotScore.NONE, "The queried Domain is not listed on Spamhaus DBL"
    if surbl == 'not listed':
        return Common.DBotScore.NONE, "The queried Domain is not listed on SURBL"
    return Common.DBotScore.GOOD, "There is no information about Domain in the blacklist"
def file_calculate_score() -> Tuple[int, str]:
    """
    Calculate DBot Score for the file command.
    Every file known to URLhaus is scored as malicious.
    Returns:
        dbot_score,description (tuple): The DBot Score and the description associated with it.
    """
    return Common.DBotScore.BAD, 'This file is malicious'
def determine_host_ioc_type(host: str) -> str:
    """
    Determine the host ioc type.
    Args:
        host (str): The host.
    Returns:
        type (str): 'ip' when the host is a valid IP address, otherwise 'domain'.
    """
    if is_ip_valid(host):
        return 'ip'
    return 'domain'
def url_create_relationships(uri: str, host: str, files: List[dict], create_relationships: bool,
                             max_num_of_relationships: int) -> List[EntityRelationship]:
    """
    Returns a list of relationships if create_relationships is true (limited to max_num_of_relationships).
    Args:
        uri (str): The queried URL.
        host (str): A host associated with the URL.
        files (list): Files associated with the URL.
        create_relationships (bool): Indicator for create relationships table.
        max_num_of_relationships (int): Indicator for how many relationships to display.
    Returns:
        relationships (list): The EntityRelationship objects representing the URL relationships.
    """
    relationships = []
    if create_relationships and max_num_of_relationships is not None:
        # URL <-> host relationship; the kind depends on whether the host is a
        # domain (URL hosted-on Domain) or an IP (URL related-to IP).
        if host:
            parsed_host = determine_host_ioc_type(host)
            if parsed_host == 'domain':
                relationships.append(EntityRelationship(
                    name=EntityRelationship.Relationships.HOSTED_ON, entity_a=uri,
                    entity_a_type=FeedIndicatorType.URL,
                    entity_b=host, entity_b_type=FeedIndicatorType.Domain,
                    reverse_name=EntityRelationship.Relationships.HOSTS))
            if parsed_host == 'ip':
                relationships.append(EntityRelationship(
                    name=EntityRelationship.Relationships.RELATED_TO, entity_a=uri, entity_a_type=FeedIndicatorType.URL,
                    entity_b=host, entity_b_type=FeedIndicatorType.IP,
                    reverse_name=EntityRelationship.Relationships.RELATED_TO))
        # One URL -> File relationship per payload SHA256. The cap is applied
        # to the whole list, so a host relationship counts against the limit.
        if files:
            for file in files:
                if len(relationships) >= max_num_of_relationships:
                    break
                file_sh256 = file.get('SHA256')
                if file_sh256:
                    relationships.append(EntityRelationship(
                        name=EntityRelationship.Relationships.RELATED_TO,
                        entity_a=uri,
                        entity_a_type=FeedIndicatorType.URL,
                        entity_b=file_sh256, entity_b_type=FeedIndicatorType.File,
                        reverse_name=EntityRelationship.Relationships.RELATED_TO))
    return relationships
def url_create_tags(urlhaus_data: dict) -> List[str]:
    """
    Create url tags.
    Args:
        urlhaus_data (dict): The data retrieved from URLHaus db.
    Returns:
        tags (list): a list of tags to add (URLhaus tags plus the threat name, if any).
    """
    # Bug fix: copy the list so appending the threat does not mutate the
    # caller's urlhaus_data['Tags'] in place.
    tags = list(urlhaus_data.get('Tags', []))
    threat = urlhaus_data.get('Threat')
    if threat:
        tags.append(threat)
    return tags
def url_create_payloads(url_information: dict) -> List[dict]:
    """
    Returns a list of payloads.
    Args:
        url_information (dict): The data retrieved from URLHaus db.
    Returns:
        payloads (list): list of payloads associated with the URL.
    """
    results = []
    for item in url_information.get('payloads') or []:
        vt_data = item.get('virustotal', None)
        # VirusTotal details are optional per payload.
        vt_information = {
            'Result': float(vt_data.get('percent', 0)),
            'Link': vt_data.get('link', '')
        } if vt_data else None
        results.append({
            'Name': item.get('filename', 'unknown'),
            'Type': item.get('file_type', ''),
            'MD5': item.get('response_md5', ''),
            'SHA256': item.get('response_sha256', ''),
            'VT': vt_information,
        })
    return results
def url_create_blacklist(url_information: dict) -> List[dict]:
    """
    Create blacklist for url command.
    Args:
        url_information (dict): The data retrieved from URLHaus db.
    Returns:
        Blacklist (list): one {'Name', 'Status'} entry per blacklist.
    """
    blacklists = url_information.get('blacklists', {})
    return [{'Name': bl_name, 'Status': bl_status}
            for bl_name, bl_status in blacklists.items()]
def build_context_url_ok_status(url_information: dict, uri: str, params: dict) -> CommandResults:
    """
    Build the output context if the status is ok.
    Args:
        url_information (dict): The data retrieved from URLHaus db.
        uri (str): The queried URL.
        params (dict): The integration params.
    Returns:
        result (CommandResults): The CommandResults object representing the url command results.
    """
    blacklist_information = url_create_blacklist(url_information)
    date_added = reformat_date(url_information.get('date_added'))
    payloads = url_create_payloads(url_information)
    # Context data published under the URLhaus.URL prefix.
    urlhaus_data = {
        'ID': url_information.get('id', ''),
        'Status': url_information.get('url_status', ''),
        'Host': url_information.get('host', ''),
        'DateAdded': date_added,
        'Threat': url_information.get('threat', ''),
        'Blacklist': blacklist_information,
        'Tags': url_information.get('tags', []),
        'Payload': payloads
    }
    # DBot score calculation
    score, description = url_calculate_score(url_information.get('url_status', {}))
    dbot_score = Common.DBotScore(
        indicator=uri,
        integration_name='URLhaus',
        indicator_type=DBotScoreType.URL,
        reliability=params.get('reliability'),
        score=score,
        malicious_description=description
    )
    relationships = url_create_relationships(uri, url_information.get('host', ''), payloads,
                                             params.get('create_relationships', True),
                                             params.get('max_num_of_relationships', 10))
    # Indicator tags are derived from urlhaus_data ('Tags' plus 'Threat').
    url_indicator = Common.URL(url=uri, dbot_score=dbot_score, tags=url_create_tags(urlhaus_data),
                               relationships=relationships)
    human_readable = tableToMarkdown(f'URLhaus reputation for {uri}',
                                     {
                                         'URLhaus link': url_information.get('urlhaus_reference', 'None'),
                                         'Description': description,
                                         'URLhaus ID': urlhaus_data['ID'],
                                         'Status': urlhaus_data['Status'],
                                         'Threat': url_information.get('threat', ''),
                                         'Date added': date_added
                                     })
    return CommandResults(
        readable_output=human_readable,
        outputs_prefix='URLhaus.URL',
        outputs_key_field='ID',
        outputs=urlhaus_data,
        raw_response=url_information,
        indicator=url_indicator,
        relationships=relationships)
def process_query_info(url_information: dict, uri: str, params: dict) -> CommandResults:
    """
    Process the response.
    Args:
        url_information (dict): The data retrieved from URLHaus db.
        uri (str): The queried URL.
        params (dict): The integration params.
    Returns:
        result (CommandResults): The CommandResults object representing the url command results.
    Raises:
        DemistoException: On any query_status other than 'ok', 'no_results' or 'invalid_url'.
    """
    if url_information['query_status'] == 'ok':
        return build_context_url_ok_status(url_information, uri, params)
    elif url_information['query_status'] == 'no_results' or url_information['query_status'] == 'invalid_url':
        # Valid-looking URLs unknown to URLhaus get an Unknown verdict; anything
        # else is reported back as an invalid URL.
        if re.match(urlRegex, uri):
            return create_indicator_result_with_dbotscore_unknown(indicator=uri,
                                                                  indicator_type=DBotScoreType.URL,
                                                                  reliability=params.get('reliability'))
        human_readable = f'## URLhaus reputation for {uri}\n' \
                         f'Invalid URL!'
        return CommandResults(
            readable_output=human_readable,
            raw_response=url_information,
        )
    else:
        raise DemistoException(f'Query results = {url_information["query_status"]}', res=url_information)
def run_url_command(url: str, params: dict) -> CommandResults:
    """
    Query the url information from URLhaus db and build command results.
    Args:
        url (str): The queried URL.
        params (dict): The integration params.
    Returns:
        result (CommandResults): The CommandResults object representing the url command results.
    """
    try:
        raw_information = query_url_information(url, params.get('api_url'), params.get('use_ssl')).json()
    except UnicodeEncodeError:
        # The URLhaus API cannot handle non-ASCII characters in the URL.
        return CommandResults(
            readable_output='Service Does not support special characters.',
        )
    return process_query_info(raw_information, url, params)
def url_command(params: dict):
    """
    Split the 'url' argument and run the url command on each value.
    Args:
        params (dict): The integration params.
    """
    for single_url in argToList(demisto.args().get('url', '')):
        return_results(results=run_url_command(single_url, params))
def domain_create_relationships(urls: List[dict], domain: str, create_relationships: bool,
                                max_num_of_relationships: int) -> List[EntityRelationship]:
    """
    Returns a list of relationships if create_relationships is true (limited to max_num_of_relationships).
    Args:
        domain (str): The queried Domain.
        urls (list): Urls associated with the Domain.
        create_relationships (bool): Indicator for create relationships table.
        max_num_of_relationships (int): Indicator for how many relationships to display.
    Returns:
        relationships (list): The EntityRelationship objects representing the Domain relationships.
    """
    relationships: list = []
    if create_relationships and max_num_of_relationships is not None:
        # One Domain -> URL 'hosts' relationship per known URL, capped at
        # max_num_of_relationships.
        for url in urls:
            if len(relationships) >= max_num_of_relationships:
                break
            relationships.append(EntityRelationship(
                name=EntityRelationship.Relationships.HOSTS, entity_a=domain,
                entity_a_type=FeedIndicatorType.Domain,
                entity_b=url.get('url'), entity_b_type=FeedIndicatorType.URL,
                reverse_name=EntityRelationship.Relationships.HOSTED_ON))
    return relationships
def domain_add_tags(bl_status: str, tags: List[str]) -> None:
    """
    Append a tag derived from a blacklist status to the given tags list (in place).
    A status ending with 'domain' contributes the status without the '_domain'
    suffix; a status starting with 'abused' is added verbatim; anything else
    adds nothing.
    Args:
        bl_status (str): The Blacklist status associated with the Domain.
        tags (list): The tag list to append to.
    """
    if not bl_status:
        return
    if bl_status.endswith('domain'):
        tag_to_add = bl_status.replace('_domain', '')
    elif bl_status.startswith('abused'):
        tag_to_add = bl_status
    else:
        tag_to_add = ''
    if tag_to_add:
        tags.append(tag_to_add)
def run_domain_command(domain: str, params: dict) -> CommandResults:
    """
    Query the domain_information from URLHaus db.
    Args:
        domain (str): Domain to query.
        params (dict): The integration params.
    Returns:
        result (CommandResults): The CommandResults object representing the domain command results.
    Raises:
        DemistoException: On any query_status other than 'ok', 'no_results' or 'invalid_host'.
    """
    domain_information = query_host_information(domain, params.get('api_url'), params.get('use_ssl')).json()
    tags: list = []
    if domain_information['query_status'] == 'ok':
        # URLHaus output
        # NOTE(review): blacklist_information (list form) is built but only the
        # raw 'blacklists' dict is placed in urlhaus_data below — unlike the URL
        # command, which outputs the list form. Confirm which shape is intended.
        blacklist_information = []
        blacklists = domain_information.get('blacklists', {})
        for bl_name, bl_status in blacklists.items():
            blacklist_information.append({'Name': bl_name,
                                          'Status': bl_status})
            domain_add_tags(bl_status, tags)
        first_seen = reformat_date(domain_information.get('firstseen'))
        urlhaus_data = {
            'FirstSeen': first_seen,
            'Blacklist': blacklists,
            'URL': domain_information.get('urls', [])
        }
        # DBot score calculation
        score, description = domain_calculate_score(domain_information.get('blacklists', {}))
        dbot_score = Common.DBotScore(
            indicator=domain,
            integration_name='URLhaus',
            indicator_type=DBotScoreType.DOMAIN,
            reliability=params.get('reliability'),
            score=score,
            malicious_description=description
        )
        # NOTE(review): max_num_of_relationships defaults to False here, while
        # the URL command defaults to 10 — confirm this asymmetry is intended.
        relationships = domain_create_relationships(urlhaus_data.get('URL', ''), domain,
                                                    params.get('create_relationships', True),
                                                    params.get('max_num_of_relationships', False))
        domain_indicator = Common.Domain(domain=domain, dbot_score=dbot_score, tags=tags,
                                         relationships=relationships)
        human_readable = tableToMarkdown(f'URLhaus reputation for {domain}',
                                         {
                                             'URLhaus link': domain_information.get('urlhaus_reference', 'None'),
                                             'Description': description,
                                             'First seen': first_seen,
                                         })
        return CommandResults(
            readable_output=human_readable,
            outputs_prefix='URLhaus.Domain',
            outputs=urlhaus_data,
            raw_response=domain_information,
            indicator=domain_indicator,
            relationships=relationships)
    elif domain_information['query_status'] == 'no_results':
        # Unknown domain: emit an Unknown-score indicator.
        return create_indicator_result_with_dbotscore_unknown(indicator=domain,
                                                              indicator_type=DBotScoreType.DOMAIN,
                                                              reliability=params.get('reliability'))
    elif domain_information['query_status'] == 'invalid_host':
        human_readable = f'## URLhaus reputation for {domain}\n' \
                         f'Invalid domain!'
        return CommandResults(
            readable_output=human_readable,
            raw_response=domain_information)
    else:
        raise DemistoException(f'Query results = {domain_information["query_status"]}', res=domain_information)
def domain_command(params: dict):
    """
    Split the 'domain' argument and run the domain command on each value.
    Args:
        params (dict): The integration params.
    """
    for single_domain in argToList(demisto.args().get('domain', '')):
        return_results(results=run_domain_command(single_domain, params))
def file_create_relationships(urls: List[dict], sig: str, file: str, create_relationships: bool,
                              max_num_of_relationships: int) -> List[EntityRelationship]:
    """
    Returns a list of relationships if create_relationships is true (limited to max_num_of_relationships).
    Args:
        urls (list): Urls associated with the Domain.
        sig (str): The signature of the File.
        file (str): The queried File.
        create_relationships (bool): Indicator for create relationships table.
        max_num_of_relationships (int): Indicator for how many relationships to display.
    Returns:
        relationships (list): The EntityRelationship objects representing the File relationships.
    """
    relationships = []
    if create_relationships and max_num_of_relationships is not None:
        # File -> Malware 'indicator-of' relationship when a signature exists.
        if sig:
            relationships.append(EntityRelationship(
                name=EntityRelationship.Relationships.INDICATOR_OF, entity_a=file,
                entity_a_type=FeedIndicatorType.File,
                entity_b=sig, entity_b_type=ThreatIntel.ObjectsNames.MALWARE,
                reverse_name=EntityRelationship.Relationships.INDICATED_BY))
        # One File -> URL relationship per hosting URL; the cap applies to the
        # whole list, so the signature relationship counts against the limit.
        for url in urls:
            if len(relationships) >= max_num_of_relationships:
                break
            relationships.append(EntityRelationship(
                name=EntityRelationship.Relationships.RELATED_TO, entity_a=file,
                entity_a_type=FeedIndicatorType.File,
                entity_b=url.get('url'), entity_b_type=FeedIndicatorType.URL,
                reverse_name=EntityRelationship.Relationships.RELATED_TO))
    return relationships
def run_file_command(hash: str, params: dict) -> CommandResults:
    """
    Query the file_information from URLHaus db.
    Args:
        hash (str): file hash to query (MD5 or SHA256, detected by length).
        params (dict): The integration params.
    Returns:
        result (CommandResults): The CommandResults object representing the file command results.
    Raises:
        DemistoException: On any unexpected query_status value.
    """
    # Hash type is inferred from the hex-string length (32 = MD5, 64 = SHA256).
    if len(hash) == 32:
        hash_type = 'md5'
    elif len(hash) == 64:
        hash_type = 'sha256'
    else:
        # NOTE(review): execution presumably stops inside return_error (else
        # hash_type would be unbound below) — confirm against CommonServerPython.
        return_error('Only accepting MD5 (32 bytes) or SHA256 (64 bytes) hash types')
    file_information = query_payload_information(hash_type, params.get('api_url'), params.get('use_ssl'),
                                                hash).json()
    if file_information['query_status'] == 'ok' and file_information['md5_hash']:
        # URLhaus output
        first_seen = reformat_date(file_information.get('firstseen'))
        last_seen = reformat_date(file_information.get('lastseen'))
        urlhaus_data = {
            'MD5': file_information.get('md5_hash', ''),
            'SHA256': file_information.get('sha256_hash', ''),
            'Type': file_information.get('file_type', ''),
            # NOTE(review): int('') raises ValueError — this relies on the API
            # always returning file_size when the query succeeds.
            'Size': int(file_information.get('file_size', '')),
            'Signature': file_information.get('signature', ''),
            'FirstSeen': first_seen,
            'LastSeen': last_seen,
            'DownloadLink': file_information.get('urlhaus_download', ''),
            'URL': file_information.get('urls', [])
        }
        virus_total_data = file_information.get('virustotal')
        if virus_total_data:
            urlhaus_data['VirusTotal'] = {
                'Percent': float(file_information.get('virustotal', {'percent': 0})['percent']),
                'Link': file_information.get('virustotal', {'link': ''})['link']
            }
        # Files known to URLhaus are always scored malicious.
        score, description = file_calculate_score()
        dbot_score = Common.DBotScore(
            indicator=hash,
            integration_name='URLhaus',
            indicator_type=DBotScoreType.FILE,
            reliability=params.get('reliability'),
            score=score,
            malicious_description=description
        )
        relationships = file_create_relationships(urlhaus_data['URL'], urlhaus_data.get('Signature', ''), hash,
                                                  params.get('create_relationships', True),
                                                  params.get('max_num_of_relationships', 10))
        file_indicator = Common.File(sha256=hash, dbot_score=dbot_score, relationships=relationships,
                                     ssdeep=file_information.get('ssdeep'), file_type=file_information.get('file_type'))
        human_readable = tableToMarkdown(f'URLhaus reputation for {hash_type.upper()} : {hash}',
                                         {
                                             'URLhaus link': urlhaus_data.get('DownloadLink', ''),
                                             'Signature': urlhaus_data.get('Signature', ''),
                                             'MD5': urlhaus_data.get('MD5', ''),
                                             'SHA256': urlhaus_data.get('SHA256', ''),
                                             'First seen': first_seen,
                                             'Last seen': last_seen,
                                             'SSDeep': file_information.get('ssdeep'),
                                             'Type': file_information.get('file_type')
                                         })
        return CommandResults(
            readable_output=human_readable,
            outputs_prefix='URLhaus.File',
            outputs=urlhaus_data,
            raw_response=file_information,
            indicator=file_indicator,
            relationships=relationships)
    elif (file_information['query_status'] == 'ok' and not file_information['md5_hash']) or \
            file_information['query_status'] == 'no_results':
        # Unknown hash: emit an Unknown-score indicator.
        return create_indicator_result_with_dbotscore_unknown(indicator=hash,
                                                              indicator_type=DBotScoreType.FILE,
                                                              reliability=params.get('reliability'))
    elif file_information['query_status'] in ['invalid_md5', 'invalid_sha256']:
        # NOTE(review): lstrip("invalid_") strips a character *set*, not the
        # prefix — it yields 'MD5'/'SHA256' here only because those strings do
        # not start with any of the stripped characters.
        human_readable = f'## URLhaus reputation for {hash_type.upper()} : {hash}\n' \
                         f'Invalid {file_information["query_status"].lstrip("invalid_").upper()}!'
        return CommandResults(
            readable_output=human_readable,
            raw_response=file_information)
    else:
        raise DemistoException(f'Query results = {file_information["query_status"]}', res=file_information)
def file_command(params: dict):
    """
    Split the comma-separated file hashes and call run_file_command on each of them.

    Note: each result is returned individually via return_results inside the loop.

    Args:
        params (dict): The integration params.
    """
    files = demisto.args().get('file', '')
    for file in argToList(files):
        return_results(results=run_file_command(file, params))
def urlhaus_download_sample_command(**kwargs):
    """
    Download a malware sample from URLhaus and return it as a War Room file entry.

    The response can be either the zipped sample (content-type = application/zip), or JSON (content-type = text/html)
    containing the query status.

    Expected kwargs: 'api_url' and 'use_ssl' (forwarded to download_malware_sample).
    """
    file_sha256 = demisto.args()['file']
    res = download_malware_sample(file_sha256, kwargs.get('api_url'), kwargs.get('use_ssl'))
    try:
        # Empty body: the sample exists but nothing was returned.
        if len(res.content) == 0:
            demisto.results({
                'Type': entryTypes['note'],
                'HumanReadable': f'No results for SHA256: {file_sha256}',
                'HumanReadableFormat': formats['markdown']
            })
        # JSON/HTML body with an explicit "not_found" status from the API.
        elif res.headers['content-type'] in ['text/html', 'application/json'] and \
                res.json()['query_status'] == 'not_found':
            demisto.results({
                'Type': entryTypes['note'],
                'ContentsFormat': formats['json'],
                'Contents': res.json(),
                'HumanReadable': f'No results for SHA256: {file_sha256}',
                'HumanReadableFormat': formats['markdown']
            })
        # A zip body is the actual sample; unzip it and hand it to the War Room.
        elif res.headers['content-type'] == 'application/zip':
            demisto.results(fileResult(file_sha256, extract_zipped_buffer(res.content)))
        else:
            # Deliberately funnels any unexpected content-type into the except
            # branch below, which reports the raw body as an error entry.
            raise Exception
        # Handle like an exception
    except Exception:
        demisto.results({
            'Type': entryTypes['error'],
            'ContentsFormat': formats['text'],
            'Contents': str(res.content)
        })
''' COMMANDS MANAGER / SWITCH PANEL '''
LOG('Command being called is %s' % (demisto.command()))
def main():
    """Parse the integration parameters and dispatch the invoked command."""
    # Resolve the command name before the try block so the error handler below
    # can always reference it, even if reading the params raises.
    command = demisto.command()
    try:
        demisto_params = demisto.params()
        params = {
            'api_url': demisto_params['url'].rstrip('/'),
            'use_ssl': not demisto_params.get('insecure', False),
            'threshold': int(demisto_params.get('threshold', 1)),
            'create_relationships': demisto_params.get('create_relationships', True),
            'max_num_of_relationships': min(1000, int(demisto_params.get('max_num_of_relationships', 10))),
        }
        reliability = demisto_params.get('integrationReliability', DBotScoreReliability.C)
        if DBotScoreReliability.is_valid_type(reliability):
            params['reliability'] = DBotScoreReliability.get_dbot_score_reliability_from_str(reliability)
        else:
            # BUG FIX: the exception was previously constructed but never raised,
            # silently accepting invalid reliability values.
            raise Exception('Please provide a valid value for the Source Reliability parameter.')
        # Remove proxy if not set to true in params
        handle_proxy()
        if command == 'test-module':
            # This is the call made when pressing the integration test button.
            test_module(**params)
            demisto.results('ok')
        elif command == 'url':
            url_command(params)
        elif command == 'domain':
            domain_command(params)
        elif command == 'file':
            file_command(params)
        elif command == 'urlhaus-download-sample':
            urlhaus_download_sample_command(**params)
    # Log exceptions
    except Exception as exc:
        demisto.debug(traceback.format_exc())
        return_error(f'Failed to execute command "{command}".\nError: {exc}', error=exc)
if __name__ in ['__main__', '__builtin__', 'builtins']:
main()
| mit | 4f57b24bfa44ba22d48253d0364d6870 | 38.650856 | 120 | 0.576209 | 4.285816 | false | false | false | false |
demisto/content | Packs/RSANetWitnessEndpoint/Integrations/RSANetWitnessEndpoint/RSANetWitnessEndpoint_test.py | 2 | 2014 | from CommonServerPython import *
class ResponseMock:
    """Minimal stand-in for a `requests.Response` used by the tests below.

    Always reports HTTP 404 with an HTML-looking body, so the error-parsing
    helpers in RSANetWitnessEndpoint can be exercised without a live server.
    """

    def __init__(self, _json):
        # Fixed failure status for every mocked call.
        self.status_code = 404
        self._json = _json
        # NOTE(review): 'text\html' contains the invalid escape '\h' — this was
        # presumably meant to be 'text/html'; confirm against what
        # is_html_response actually inspects before changing it.
        self.headers = {'Content-Type': 'text\html'}
        self.text = "<div>" \
                    "<html> some text </html>" \
                    "</div>"
        self.ResponseStatus = ResponseStatus(_json)

    def json(self):
        # Mirrors the real API: parsing the body yields another response-like object.
        return ResponseMock(JSON_RESP)
class ResponseStatus:
    """Mimics the ``ResponseStatus`` attribute of an RSA NetWitness error response."""

    def __init__(self, _json):
        # The mocked endpoint always reports error code 404; the payload passed
        # in becomes the error message verbatim.
        self.ErrorCode, self.Message = 404, _json
JSON_RESP = {
"message": "test message"
}
def test_is_html_response(mocker):
    """is_html_response should flag the mocked 404/HTML payload as HTML."""
    instance_params = {
        'server': 'mock_server',
        'insecure': False,
        'proxy': '',
        'credentials': {'identifier': '', 'password': ''},
    }
    mocker.patch.object(demisto, 'params', return_value=instance_params)
    from RSANetWitnessEndpoint import is_html_response
    assert is_html_response(ResponseMock(JSON_RESP))
def test_get_html_from_response(mocker):
    """get_html_from_response should extract only the inner <html> fragment."""
    instance_params = {
        'server': 'mock_server',
        'insecure': False,
        'proxy': '',
        'credentials': {'identifier': '', 'password': ''},
    }
    mocker.patch.object(demisto, 'params', return_value=instance_params)
    from RSANetWitnessEndpoint import get_html_from_response
    assert get_html_from_response(ResponseMock(JSON_RESP)) == '<html> some text </html>'
def test_parse_error_response(mocker):
    """parse_error_response should render status code, reason and payload."""
    instance_params = {
        'server': 'mock_server',
        'insecure': False,
        'proxy': '',
        'credentials': {'identifier': '', 'password': ''},
    }
    mocker.patch.object(demisto, 'params', return_value=instance_params)
    from RSANetWitnessEndpoint import parse_error_response
    expected = 'Request failed with status code: 404\nReason: 404\n{\'message\': \'test message\'}'
    assert parse_error_response(ResponseMock(JSON_RESP)) == expected
demisto/content | Packs/PagerDuty/Integrations/PagerDuty/PagerDuty.py | 2 | 33138 | import demistomock as demisto
from CommonServerUserPython import *
from CommonServerPython import *
# Disable insecure warnings
requests.packages.urllib3.disable_warnings()
''' GLOBAL VARS '''
# PagerDuty API works only with secured communication.
USE_SSL = not demisto.params().get('insecure', False)
USE_PROXY = demisto.params().get('proxy', True)
API_KEY = demisto.params()['APIKey']
SERVICE_KEY = demisto.params()['ServiceKey']
FETCH_INTERVAL = demisto.params()['FetchInterval']
DEFAULT_REQUESTOR = demisto.params().get('DefaultRequestor', '')
SERVER_URL = 'https://api.pagerduty.com/'
CREATE_EVENT_URL = 'https://events.pagerduty.com/v2/enqueue'
DEFAULT_HEADERS = {
'Authorization': 'Token token=' + API_KEY,
'Accept': 'application/vnd.pagerduty+json;version=2'
}
'''HANDLE PROXY'''
# When the proxy checkbox is unchecked, scrub every proxy environment variable
# so `requests` does not silently route traffic through a system-wide proxy.
# BUG FIX: pop() with a default avoids the KeyError that `del` raised whenever
# one of these variables was not defined in the environment.
if not USE_PROXY:
    for proxy_env_var in ('HTTP_PROXY', 'HTTPS_PROXY', 'http_proxy', 'https_proxy'):
        os.environ.pop(proxy_env_var, None)
'''PARAMS'''
UTC_PARAM = '&time_zone=UTC'
STATUSES = 'statuses%5B%5D'
INCLUDED_FIELDS = '&include%5B%5D=first_trigger_log_entries&include%5B%5D=assignments'
'''SUFFIX ENDPOINTS'''
GET_SCHEDULES_SUFFIX = 'schedules'
CREATE_INCIDENT_SUFFIX = 'incidents'
GET_INCIDENT_SUFFIX = 'incidents/'
GET_SERVICES_SUFFIX = 'services'
ON_CALL_BY_SCHEDULE_SUFFIX = 'schedules/{0}/users'
ON_CALLS_USERS_SUFFIX = 'oncalls?include%5B%5D=users'
USERS_NOTIFICATION_RULE = 'users/{0}/notification_rules'
GET_INCIDENTS_SUFFIX = 'incidents?include%5B%5D=assignees'
USERS_CONTACT_METHODS_SUFFIX = 'users/{0}/contact_methods'
RESPONDER_REQUESTS_SUFFIX = 'incidents/{0}/responder_requests'
RESPONSE_PLAY_SUFFIX = 'response_plays/{0}/run'
'''CONTACT_METHOD_TYPES'''
SMS_CONTACT_TYPE = 'sms_contact_method'
EMAIL_CONTACT_TYPE = 'email_contact_method'
PHONE_CONTACT_TYPE = 'phone_contact_method'
PUSH_CONTACT_TYPE = 'push_notification_contact_method'
CONTACT_METHODS_TO_HUMAN_READABLE = {
'': 'Unknown',
SMS_CONTACT_TYPE: 'SMS',
PUSH_CONTACT_TYPE: 'Push',
EMAIL_CONTACT_TYPE: 'Email',
PHONE_CONTACT_TYPE: 'Phone'
}
'''TABLE NAMES'''
SERVICES = 'Service List'
SCHEDULES = 'All Schedules'
TRIGGER_EVENT = 'Trigger Event'
RESOLVE_EVENT = 'Resolve Event'
ACKNOWLEDGE_EVENT = 'Acknowledge Event'
USERS_ON_CALL = 'Users On Call'
INCIDENTS_LIST = 'PagerDuty Incidents'
INCIDENT = 'PagerDuty Incident'
CONTACT_METHODS = 'Contact Methods'
USERS_ON_CALL_NOW = 'Users On Call Now'
NOTIFICATION_RULES = 'User notification rules'
'''TABLE HEADERS'''
CONTACT_METHODS_HEADERS = ['ID', 'Type', 'Details']
SERVICES_HEADERS = ['ID', 'Name', 'Status', 'Created At', 'Integration']
NOTIFICATION_RULES_HEADERS = ['ID', 'Type', 'Urgency', 'Notification timeout(minutes)']
SCHEDULES_HEADERS = ['ID', 'Name', 'Today', 'Time Zone', 'Escalation Policy', 'Escalation Policy ID']
USERS_ON_CALL_NOW_HEADERS = ['ID', 'Schedule ID', 'Email', 'Name', 'Role', 'User Url', 'Time Zone']
INCIDENTS_HEADERS = ['ID', 'Title', 'Description', 'Status', 'Created On', 'Urgency', 'Html Url', 'Incident key',
'Assigned To User', 'Service ID', 'Service Name', 'Escalation Policy', 'Last Status Change On',
'Last Status Change By', 'Number Of Escalations', 'Resolved By User', 'Resolve Reason']
''' HELPER FUNCTIONS '''
def http_request(method, url, params_dict=None, data=None, json_data=None, additional_headers=None): # pragma: no cover
    """Issue a request to the PagerDuty API and return the parsed JSON body.

    Merges `additional_headers` over the module-level DEFAULT_HEADERS (which
    carry the auth token), raises on any non-2xx status, and re-raises any
    request/parse failure after logging it.
    """
    LOG('running %s request with url=%s\nparams=%s' % (method, url, json.dumps(params_dict)))
    headers = DEFAULT_HEADERS.copy()
    if not additional_headers:
        additional_headers = {}
    headers.update(additional_headers)
    try:
        res = requests.request(method,
                               url,
                               verify=USE_SSL,
                               params=params_dict,
                               headers=headers,
                               data=data,
                               json=json_data
                               )
        res.raise_for_status()
        # unicode_to_str_recur is a no-op on Python 3; kept for Py2 legacy support.
        return unicode_to_str_recur(res.json())
    except Exception as e:
        LOG(e)
        raise
def translate_severity(sev):
    """Map a PagerDuty urgency string to a Demisto severity number.

    'high' -> 3, 'low' -> 1, anything else -> 0 (unknown). Matching is
    case-insensitive.
    """
    severity_by_urgency = {'high': 3, 'low': 1}
    return severity_by_urgency.get(sev.lower(), 0)
def unicode_to_str_recur(obj):
    """Converts unicode elements of obj (incl. dictionary and list) to string recursively.

    Python 2 legacy helper: on Python 3 (IS_PY3 from CommonServerPython) the
    object is returned untouched, since all strings are already unicode there.
    """
    if IS_PY3:
        return obj
    # Below here runs only on Python 2: walk containers and utf-8-encode strings.
    if isinstance(obj, dict):
        obj = {unicode_to_str_recur(k): unicode_to_str_recur(v) for k, v in list(obj.items())}
    elif isinstance(obj, list):
        obj = list(map(unicode_to_str_recur, obj))
    elif isinstance(obj, str):
        obj = obj.encode('utf-8', 'ignore')
    return obj
def test_module():  # pragma: no cover
    """Integration test button: verify connectivity/credentials by fetching the on-call users."""
    get_on_call_now_users_command()
    demisto.results('ok')
def extract_on_call_user_data(users, schedule_id=None):
    """Extract data about each on-call user of a given schedule.

    Builds a human-readable row ('outputs') and a context entry ('contexts')
    per user and wraps them in a CommandResults under the PagerDutyUser prefix.

    Args:
        users (list): raw user dicts from the PagerDuty API.
        schedule_id (str): optional schedule the users belong to; when given it
            is attached to every row/context entry.
    """
    outputs = []
    contexts = []
    for user in users:
        output = {}
        context = {}
        if schedule_id:
            output['Schedule ID'] = schedule_id
            context['ScheduleID'] = output['Schedule ID']
        output['ID'] = user.get('id')
        output['Name'] = user.get('name')
        output['Role'] = user.get('role')
        output['Email'] = user.get('email')
        output['Time Zone'] = user.get('time_zone')
        output['User Url'] = user.get('html_url')
        context['ID'] = output['ID']
        context['Role'] = output['Role']
        context['Email'] = output['Email']
        context['Username'] = output['Name']
        context['DisplayName'] = output['Name']
        context['TimeZone'] = output['Time Zone']
        outputs.append(output)
        contexts.append(context)
    return CommandResults(
        outputs_prefix='PagerDutyUser',
        outputs_key_field='ID',
        outputs=contexts,
        raw_response=users,
        readable_output=tableToMarkdown(USERS_ON_CALL, outputs, USERS_ON_CALL_NOW_HEADERS, removeNull=True),
    )
def extract_on_call_now_user_data(users_on_call_now):
    """Extract the user data from the oncalls json.

    Rows are inserted at position (escalation_level - 1) so the resulting table
    is ordered by escalation level, lowest level first.
    """
    outputs = []  # type: List[Dict]
    contexts = []  # type: List[Dict]
    oncalls = users_on_call_now.get('oncalls', {})
    for i in range(len(oncalls)):
        output = {}
        context = {}
        data = oncalls[i]
        user = data.get('user')
        schedule_id = (data.get('schedule') or {}).get('id')
        if schedule_id:
            output['Schedule ID'] = schedule_id
            context['ScheduleID'] = output['Schedule ID']
        output['ID'] = user.get('id')
        output['Name'] = user.get('name')
        output['Role'] = user.get('role')
        output['Email'] = user.get('email')
        output['User Url'] = user.get('html_url')
        output['Time Zone'] = user.get('time_zone')
        context['ID'] = output['ID']
        context['Role'] = output['Role']
        context['Email'] = output['Email']
        context['Username'] = output['Name']
        context['DisplayName'] = output['Name']
        context['TimeZone'] = output['Time Zone']
        # Order results by the user's escalation level (defaults to 1).
        escal_level = data.get('escalation_level', 1)
        outputs.insert(escal_level - 1, output)
        contexts.insert(escal_level - 1, context)
    return CommandResults(
        outputs_prefix='PagerDutyUser',
        outputs_key_field='ID',
        outputs=contexts,
        raw_response=users_on_call_now,
        readable_output=tableToMarkdown(USERS_ON_CALL_NOW, outputs, USERS_ON_CALL_NOW_HEADERS, removeNull=True),
    )
def parse_incident_data(incidents):
    """Parse raw PagerDuty incident dicts into (outputs, contexts, raw_response).

    Returns three parallel lists: human-readable rows for tableToMarkdown,
    context entries for the entry context, and the untouched raw incidents.

    Fixes applied:
    - The acknowledgement entry previously read from the *assignments* list
      instead of the acknowledgements list.
    - The readable key is now 'Resolve Reason' to match INCIDENTS_HEADERS
      (it was 'Resolve reason', leaving the table column empty).
    - context['resolved_by'] now reflects 'Resolved By User' instead of being
      copied from the assignee.
    """
    outputs = []
    contexts = []
    raw_response = []
    for incident in incidents:
        output = {}
        context = {}
        context['ID'] = output['ID'] = incident.get('id')
        context['Title'] = output['Title'] = incident.get('summary')
        output['Description'] = incident.get('first_trigger_log_entry', {}).get('channel', {}).get('details', '')
        context['Description'] = output['Description']
        context['Status'] = output['Status'] = incident.get('status')
        context['created_at'] = output['Created On'] = incident.get('created_at')
        context['urgency'] = output['Urgency'] = incident.get('urgency', '')
        output['Html Url'] = incident.get('html_url')
        context['incident_key'] = incident.get('incident_key')
        output['Incident key'] = incident.get('incident_key')
        if len(incident.get('assignments', [])) > 0:
            output['Assigned To User'] = incident['assignments'][0].get('assignee', {}).get('name')
        else:
            output['Assigned To User'] = '-'
        context['assignee'] = output['Assigned To User']
        context['service_id'] = output['Service ID'] = incident.get('service', {}).get('id')
        context['service_name'] = output['Service Name'] = incident.get('service', {}).get('summary')
        output['Escalation Policy'] = incident.get('escalation_policy', {}).get('summary')
        context['escalation_policy'] = output['Escalation Policy']
        context['last_status_change_at'] = output['Last Status Change On'] = incident.get('last_status_change_at')
        output['Last Status Change By'] = incident.get('last_status_change_by', {}).get('summary')
        context['last_status_change_by'] = output['Last Status Change By']
        context['number_of_escalations'] = output['Number Of Escalations'] = incident.get('number_of_escalations')
        # A resolved incident was resolved by whoever last changed its status.
        if output['Status'] == 'resolved':
            output['Resolved By User'] = output['Last Status Change By']
        else:
            output['Resolved By User'] = '-'
        # BUG FIX: was copied from 'Assigned To User'.
        context['resolved_by'] = output['Resolved By User']
        # BUG FIX: readable key must match the 'Resolve Reason' table header.
        context['resolve_reason'] = output['Resolve Reason'] = incident.get('resolve_reason', '')
        context['teams'] = []
        for team in incident.get('teams', []):
            team_id = team.get('id', '')
            team_name = team.get('summary', '')
            team_data = {
                "ID": team_id,
                "Name": team_name
            }
            context['teams'].append(team_data)
        assignment = incident.get('assignments', [{}, ])
        if len(assignment) > 0:
            context['assignment'] = {
                "time": assignment[0].get('at', ''),
                "assignee": assignment[0].get('assignee', {}).get('summary', ''),
                "assigneeId": assignment[0].get('assignee', {}).get('id', ''),
            }
        else:
            context['assignment'] = {}
        acknowledgements = incident.get('acknowledgements', [{}, ])
        if len(acknowledgements) > 0:
            # BUG FIX: previously read from assignment[0] instead of acknowledgements[0].
            context['acknowledgement'] = {
                "time": acknowledgements[0].get('at', ''),
                "acknowledger": acknowledgements[0].get('acknowledger', {}).get('summary', ''),
                "acknowledgerId": acknowledgements[0].get('acknowledger', {}).get('id', ''),
            }
        else:
            context['acknowledgement'] = {}
        outputs.append(output)
        contexts.append(context)
        raw_response.append(incident)
    return outputs, contexts, raw_response
def extract_incidents_data(incidents, table_name):
    """Extract data about incidents into a Demisto entry.

    Args:
        incidents (list): raw incident dicts from the PagerDuty API.
        table_name (str): title for the markdown table.
    """
    outputs, contexts, _ = parse_incident_data(incidents)
    return {
        'Type': entryTypes['note'],
        'Contents': incidents,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown(table_name, outputs, INCIDENTS_HEADERS, removeNull=True),
        'EntryContext': {
            'PagerDuty.Incidents(val.ID==obj.ID)': contexts
        }
    }
def extract_all_schedules_data(schedules):
    """Extract the data about all the schedules into a Demisto entry.

    Only the first escalation policy of each schedule is surfaced; schedules
    without one get '-' placeholders in the readable output.
    """
    outputs = []
    contexts = []
    for i in range(len(schedules)):
        output = {}
        context = {}  # type: Dict
        data = schedules[i]
        output['ID'] = data.get('id')
        output['Name'] = data.get('name')
        output['Time Zone'] = data.get('time_zone')
        output['Today'] = datetime.today().strftime('%Y-%m-%d')
        escalation_policies = data.get('escalation_policies', [])
        if len(escalation_policies) > 0:
            output['Escalation Policy ID'] = escalation_policies[0].get('id')
            output['Escalation Policy'] = escalation_policies[0].get('summary')
            context['escalation_policies'] = [{}, ]
            context['escalation_policies'][0]['name'] = output['Escalation Policy']
            context['escalation_policies'][0]['id'] = output['Escalation Policy ID']
        else:
            output['Escalation Policy'] = '-'
            output['Escalation Policy ID'] = '-'
        context['id'] = output['ID']
        context['name'] = output['Name']
        context['today'] = output['Today']
        context['time_zone'] = output['Time Zone']
        outputs.append(output)
        contexts.append(context)
    return {
        'Type': entryTypes['note'],
        'Contents': schedules,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown(SCHEDULES, outputs, SCHEDULES_HEADERS),
        'EntryContext': {
            'PagerDuty.Schedules(val.id==obj.id)': contexts,
        }
    }
def create_new_incident(source, summary, severity, action, description='No description', group='',
                        event_class='', component='', incident_key=None, service_key=SERVICE_KEY):
    """Create a new incident in the PagerDuty instance via the Events v2 API.

    Args:
        source (str): the unique location of the affected system.
        summary (str): a brief text summary of the event.
        severity (str): 'critical' | 'error' | 'warning' | 'info' per Events v2.
        action (str): the event action (e.g. 'trigger').
        incident_key (str): optional dedup key; reused events with the same key
            are deduplicated by PagerDuty.
        service_key (str): Events v2 routing (integration) key.
    """
    payload = {
        'routing_key': service_key,
        'event_action': action,
        'dedup_key': incident_key,
        'images': [],
        'links': [],
        'payload': {
            'summary': summary,
            'source': source,
            'severity': severity,
            'group': group,
            'class': event_class,
            'component': component,
            'custom_details': {
                'description': description
            }
        }
    }
    return http_request('POST', CREATE_EVENT_URL, data=json.dumps(payload))
def resolve_or_ack_incident(action, incident_key, service_key=SERVICE_KEY):
    """Resolve or Acknowledge an incident in the PagerDuty instance.

    Args:
        action (str): 'resolve' or 'acknowledge'.
        incident_key (str): dedup key identifying the incident.
        service_key (str): Events v2 routing (integration) key.
    """
    payload = {
        'routing_key': service_key,
        'event_action': action,
        'dedup_key': incident_key
    }
    return http_request('POST', CREATE_EVENT_URL, data=json.dumps(payload))
def extract_new_event_data(table_name, response):
    """Build a Demisto entry from an Events v2 trigger/resolve/ack response.

    Args:
        table_name (str): title for the markdown table (e.g. TRIGGER_EVENT).
        response (dict): raw Events v2 API response (status/message/dedup_key).
    """
    output = {}
    context = {}
    output['Status'] = response.get('status', '')
    output['Message'] = response.get('message', '')
    output['Incident key'] = response.get('dedup_key', '')
    context['Status'] = output['Status']
    context['Message'] = output['Message']
    context['incident_key'] = output['Incident key']
    return {
        'Type': entryTypes['note'],
        'Contents': response,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown(table_name, output),
        'EntryContext': {
            'PagerDuty.Event(val.incident_key==obj.dedup_key)': context,
            'Event.ID(val.ID==obj.dedup_key)': context['incident_key']
        }
    }
def extract_users_contact_methods(user_contact_methods):
    """Extract all the contact methods of a given user into a Demisto entry.

    Note: the raw contact-method dicts are mutated in place (keys are removed
    or renamed) before being used as context entries.

    Fixes applied:
    - Unknown contact-method types no longer raise KeyError; they are mapped
      to 'Unknown'.
    - 'address'/'country_code' are removed with pop() so a missing key no
      longer raises.
    """
    outputs = []
    contexts = []
    contact_methods = user_contact_methods.get('contact_methods')
    for contact_method in contact_methods:
        output = {
            'ID': contact_method.get('id'),
            # BUG FIX: .get with a default instead of [] — unseen method types
            # previously crashed the command with a KeyError.
            'Type': CONTACT_METHODS_TO_HUMAN_READABLE.get(contact_method.get('type', ''), 'Unknown')
        }
        country_code = str(contact_method.get('country_code', ''))
        address = contact_method.get('address', '')
        output['Details'] = country_code + address
        outputs.append(output)
        # BUG FIX: pop() tolerates responses that omit these keys.
        contact_method.pop('address', None)
        if output['Type'] == 'SMS' or output['Type'] == 'Phone':
            contact_method.pop('country_code', None)
            contact_method['phone'] = output['Details']
        else:
            contact_method['email'] = output['Details']
        contexts.append(contact_method)
    return {
        'Type': entryTypes['note'],
        'Contents': user_contact_methods,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown(CONTACT_METHODS, outputs, CONTACT_METHODS_HEADERS),
        'EntryContext': {
            'PagerDuty.Contact_methods(val.id==obj.id)': contexts,
        }
    }
def extract_users_notification_role(user_notification_role):
    """Extract the notification rules of a given user into a Demisto entry.

    Args:
        user_notification_role (dict): raw API response holding a
            'notification_rules' list.
    """
    outputs = []
    notification_rules = user_notification_role.get('notification_rules')
    for notification_rule in notification_rules:
        output = {
            'ID': notification_rule.get('id'),
            'Type': notification_rule.get('type', ''),
            'Urgency': notification_rule.get('urgency'),
            'Notification timeout(minutes)': notification_rule.get('start_delay_in_minutes')}
        outputs.append(output)
    return {
        'Type': entryTypes['note'],
        'Contents': user_notification_role,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown(NOTIFICATION_RULES, outputs, NOTIFICATION_RULES_HEADERS),
        'EntryContext': {
            'PagerDuty.Notification_rules(val.id==obj.id)': notification_rules,
        }
    }
def extract_responder_request(responder_request_response):
    """Extract the users that were requested to respond.

    Fixes applied:
    - For escalation-policy targets, incidents_responders holds a *list* of
      users; calling .get() on that list previously raised AttributeError.
      The responders' types/names are now joined with commas.
    - The readable output now uses a 'Responder Requests' table instead of the
      unrelated Contact Methods title/headers (which left most columns empty).
    """
    outputs = []
    responder_request = responder_request_response.get("responder_request", {})
    for request in responder_request.get("responder_request_targets", []):
        request = request.get("responder_request_target", {})
        output = {}
        output["Type"] = request.get("type")
        output["ID"] = request.get("id")
        responders = request.get("incidents_responders", [])
        if output["Type"] == "user":
            responder_user = responders[0].get("user", {}) if responders else {}
            output["ResponderType"] = responder_user.get("type")
            output["ResponderName"] = responder_user.get("summary")
        else:
            # Escalation-policy target: one responder entry per notified user.
            responder_users = [(responder or {}).get("user", {}) for responder in responders]
            output["ResponderType"] = ','.join(user.get("type", '') for user in responder_users)
            output["ResponderName"] = ','.join(user.get("summary", '') for user in responder_users)
        output["Message"] = responder_request.get("message")
        output["IncidentID"] = (responder_request.get("incident") or {}).get("id")
        output["RequesterID"] = responder_request.get("requester", {}).get("id")
        output["IncidentSummary"] = (responder_request.get("incident") or {}).get("summary")
        outputs.append(output)
    return CommandResults(
        outputs_prefix='PagerDuty.ResponderRequests',
        outputs_key_field='id',
        outputs=outputs,
        raw_response=outputs,
        readable_output=tableToMarkdown('Responder Requests', outputs, removeNull=True)
    )
'''COMMANDS'''
def fetch_incidents():
    """Fetch triggered/acknowledged incidents created since the last run.

    On the first run, looks back FETCH_INTERVAL minutes. Pushes the incidents
    to Demisto and persists the current time as the next run's lower bound.
    """
    param_dict = {}
    now_time = datetime.utcnow()
    now = datetime.isoformat(now_time)
    lastRunObject = demisto.getLastRun()
    if lastRunObject:
        param_dict['since'] = lastRunObject['time']
    else:
        # First fetch: look back the configured interval.
        param_dict['since'] = datetime.isoformat(now_time - timedelta(minutes=int(FETCH_INTERVAL)))
    param_dict['until'] = now
    url = SERVER_URL + GET_INCIDENTS_SUFFIX + configure_status()
    res = http_request('GET', url, param_dict)
    _, parsed_incidents, raw_responses = parse_incident_data(res.get('incidents', []))
    incidents = []
    for incident, raw_response in zip(parsed_incidents, raw_responses):
        incidents.append({
            'name': incident['ID'] + ' - ' + incident['Title'],
            'occurred': incident['created_at'],
            'severity': translate_severity(incident['urgency']),
            'rawJSON': json.dumps(raw_response)
        })
    demisto.incidents(incidents)
    demisto.setLastRun({'time': now})
def configure_status(status='triggered,acknowledged'):
    """Build the query-string suffix that filters incidents by status.

    Produces one '&statuses[]=<status>' pair (URL-encoded) per comma-separated
    status, followed by the standard include-fields and UTC time-zone params.
    """
    status_pairs = ''.join(
        '&{}={}'.format(STATUSES, single_status) for single_status in status.split(',')
    )
    return status_pairs + INCLUDED_FIELDS + UTC_PARAM
def get_incidents_command(since=None, until=None, status='triggered,acknowledged', sortBy=None, incident_key=None):
    """Get incidents command.

    Args:
        since/until (str): optional ISO-8601 time window.
        status (str): comma-separated statuses to include.
        sortBy (str): optional API sort field.
        incident_key (str): optional dedup key filter.
    """
    param_dict = {}
    if since is not None:
        param_dict['since'] = since
    if until is not None:
        param_dict['until'] = until
    if sortBy is not None:
        param_dict['sortBy'] = sortBy
    if incident_key:
        param_dict['incident_key'] = incident_key
    url = SERVER_URL + GET_INCIDENTS_SUFFIX + configure_status(status)
    res = http_request('GET', url, param_dict)
    return extract_incidents_data(res.get('incidents', []), INCIDENTS_LIST)
def submit_event_command(source, summary, severity, action, description='No description', group='',
                         event_class='', component='', incident_key=None, serviceKey=SERVICE_KEY):
    """Create a new event and return the trigger entry.

    Raises:
        Exception: when no service key is configured on the instance or passed
            to the command.
    """
    if serviceKey is None:
        raise Exception('You must enter a ServiceKey at the integration '
                        'parameters or in the command to process this action.')
    res = create_new_incident(source, summary, severity, action, description,
                              group, event_class, component, incident_key, serviceKey)
    return extract_new_event_data(TRIGGER_EVENT, res)
def get_all_schedules_command(query=None, limit=None):
    """Get all the schedules, optionally filtered by name query and limited in count."""
    param_dict = {}
    if query is not None:
        param_dict['query'] = query
    if limit is not None:
        param_dict['limit'] = limit
    url = SERVER_URL + GET_SCHEDULES_SUFFIX
    res = http_request('GET', url, param_dict)
    schedules = res.get('schedules', [])
    return extract_all_schedules_data(schedules)
def get_on_call_users_command(scheduleID, since=None, until=None):
    """Get the list of users on call in a given schedule, optionally within a time window."""
    param_dict = {}
    if since is not None:
        param_dict['since'] = since
    if until is not None:
        param_dict['until'] = until
    url = SERVER_URL + ON_CALL_BY_SCHEDULE_SUFFIX.format(scheduleID)
    users_on_call = http_request('GET', url, param_dict)
    return extract_on_call_user_data(users_on_call.get('users', []), scheduleID)
def get_on_call_now_users_command(limit=None, escalation_policy_ids=None, schedule_ids=None):
    """Get the list of users that are on call now, optionally filtered by
    escalation policies and/or schedules."""
    param_dict = {}
    if limit is not None:
        param_dict['limit'] = limit
    if escalation_policy_ids is not None:
        param_dict['escalation_policy_ids[]'] = argToList(escalation_policy_ids)
    if schedule_ids is not None:
        param_dict['schedule_ids[]'] = argToList(schedule_ids)
    url = SERVER_URL + ON_CALLS_USERS_SUFFIX
    users_on_call_now = http_request('GET', url, param_dict)
    return extract_on_call_now_user_data(users_on_call_now)
def get_users_contact_methods_command(UserID):
    """Get the contact methods of a given user by ID."""
    url = SERVER_URL + USERS_CONTACT_METHODS_SUFFIX.format(UserID)
    user_contact_methods = http_request('GET', url, {})
    return extract_users_contact_methods(user_contact_methods)
def get_users_notification_command(UserID):
    """Get the notification rules of a given user by ID."""
    url = SERVER_URL + USERS_NOTIFICATION_RULE.format(UserID)
    user_notification_role = http_request('GET', url, {})
    return extract_users_notification_role(user_notification_role)
def resolve_event(incident_key=None, serviceKey=SERVICE_KEY):
    """Resolve an event via the Events API and verify the incident actually resolved.

    Raises:
        Exception: missing service key, incident not found, or incident still
            unresolved (typically a service-key mismatch).
    """
    if serviceKey is None:
        raise Exception('You must enter a ServiceKey at the integration '
                        'parameters or in the command to process this action.')
    action_response = resolve_or_ack_incident('resolve', incident_key, serviceKey)
    time.sleep(3)  # wait until the incident will update
    res = http_request('GET', SERVER_URL + GET_INCIDENTS_SUFFIX, {'incident_key': incident_key})
    _, contexts, _ = parse_incident_data(res.get('incidents', []))
    # BUG FIX: guard the lookup result — indexing an empty list raised an
    # opaque IndexError when no incident matched the key.
    if not contexts:
        raise Exception('Could not find an incident with key {}.'.format(incident_key))
    if contexts[0]['Status'] != "resolved":
        raise Exception('Could not resolve incident, you may have created it with different Service Key')
    return extract_new_event_data(RESOLVE_EVENT, action_response)
def acknowledge_event(incident_key=None, serviceKey=SERVICE_KEY):
    """Acknowledge an event via the Events API and verify the incident was acknowledged.

    Raises:
        Exception: missing service key, incident not found, or incident not
            acknowledged (typically a service-key mismatch).
    """
    if serviceKey is None:
        raise Exception('You must enter a ServiceKey at the integration '
                        'parameters or in the command to process this action.')
    action_response = resolve_or_ack_incident('acknowledge', incident_key, serviceKey)
    time.sleep(3)  # wait until the incident will update
    res = http_request('GET', SERVER_URL + GET_INCIDENTS_SUFFIX, {'incident_key': incident_key})
    _, contexts, _ = parse_incident_data(res.get('incidents', []))
    # BUG FIX: guard the lookup result — indexing an empty list raised an
    # opaque IndexError when no incident matched the key.
    if not contexts:
        raise Exception('Could not find an incident with key {}.'.format(incident_key))
    if contexts[0]['Status'] != "acknowledged":
        raise Exception('Could not acknowledge incident, you may have created it with different Service Key')
    return extract_new_event_data(ACKNOWLEDGE_EVENT, action_response)
def get_incident_data():
    """Fetch a single incident (ID taken from the command args) and return its entry."""
    incident_id = demisto.args().get('incident_id')
    url = SERVER_URL + GET_INCIDENT_SUFFIX + incident_id
    res = http_request('GET', url, {})
    return extract_incidents_data([res.get('incident', {})], INCIDENT)
def get_service_keys():
    """List all services with their integrations (name, vendor, integration key).

    Pages through the services endpoint 25 records at a time; for every
    integration it performs an extra GET to resolve its details.

    Fix applied: the readable rows previously only contained 'Integration',
    leaving the ID/Name/Status/'Created At' header columns empty — those
    fields are now populated in the output rows as well.
    """
    offset = 0
    raw_response = []
    url = SERVER_URL + GET_SERVICES_SUFFIX
    res = http_request('GET', url, {"offset": offset})
    raw_response.append(res)
    outputs = []
    contexts = []
    while res.get('services', []):
        services = res.get('services', [])
        for service in services:
            context = {'ID': service.get('id'), 'Name': service.get('name'), 'Status': service.get('status'),
                       'CreatedAt': service.get('created_at')}
            # BUG FIX: populate the readable row to match SERVICES_HEADERS.
            output = {'ID': context['ID'], 'Name': context['Name'], 'Status': context['Status'],
                      'Created At': context['CreatedAt']}
            integration_list = []
            integration_string = ""
            for integration in service.get('integrations', []):
                integration_url = integration.get('self', '')
                if integration_url:
                    integration_data = {}
                    integration_res = http_request('GET', integration_url, {}).get('integration', {})
                    integration_data['Name'] = integration_res.get('service', {}).get('summary', '')
                    integration_data['Key'] = integration_res.get('integration_key', '')
                    vendor_value = integration_res.get('vendor', {})
                    if not vendor_value:
                        integration_data['Vendor'] = 'Missing Vendor information'
                    else:
                        integration_data['Vendor'] = vendor_value.get('summary', 'Missing Vendor information')
                    integration_list.append(integration_data)
                    integration_string += "Name: {}, Vendor: {}, Key: {}\n".format(integration_data['Name'],
                                                                                   integration_data['Vendor'],
                                                                                   integration_data['Key'])
            output['Integration'] = integration_string
            context['Integration'] = integration_list
            outputs.append(output)
            contexts.append(context)
        offset += 25
        res = http_request('GET', url, {"offset": offset})
        raw_response.append(res)
    return {
        'Type': entryTypes['note'],
        'Contents': raw_response,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown(SERVICES, outputs, SERVICES_HEADERS),
        'EntryContext': {
            'PagerDuty.Service(val.ID==obj.ID)': contexts,
        }
    }
def add_responders_to_incident(incident_id, message, user_requests=None, escalation_policy_requests="",
                               requestor_id=None):
    """
    Send a new responder request for the specified incident. A responder is a specific User to respond to the Incident.
    If the Requestor ID is not specified in command arguments, the Default Requestor defined in instance
    parameter is used.
    Args:
        incident_id (str): The ID of the PagerDuty Incident
        message (str): The message sent with the responder request.
        user_requests (str): Comma separated list of User targets the responder request is being sent to
        escalation_policy_requests (str): Comma separated list of
            escalation policy targets the responder request is being sent to.
        requestor_id (str): The user id of the requester.
    """
    if not user_requests:
        user_requests = DEFAULT_REQUESTOR
    if not requestor_id:
        requestor_id = DEFAULT_REQUESTOR
    url = SERVER_URL + RESPONDER_REQUESTS_SUFFIX.format(incident_id)
    body = {
        'requester_id': requestor_id,
        'message': message,
        'responder_request_targets': []
    }
    # argToList splits the comma-separated string into IDs (and yields an
    # empty list for ''), stripping surrounding whitespace.
    for user_id in argToList(user_requests):
        body['responder_request_targets'].append({
            'responder_request_target': {
                "id": user_id,
                "type": 'user_reference'
            }
        })
    # BUG FIX: this previously iterated the raw string character by character,
    # producing one bogus single-letter target per character.
    for escalation_policy_id in argToList(escalation_policy_requests):
        body['responder_request_targets'].append({
            'responder_request_target': {
                "id": escalation_policy_id,
                "type": 'escalation_policy_reference'
            }
        })
    response = http_request('POST', url, json_data=body)
    return extract_responder_request(response)
def run_response_play(incident_id, from_email, response_play_uuid):
    """
    Run a specified response play on a given incident.
    Response Plays are a package of Incident Actions that can be applied during an Incident's life cycle.
    Args:
        incident_id (str): The ID of the PagerDuty Incident.
        from_email (str): The email address of a valid user associated with the account making the request.
        response_play_uuid (str): The response play ID of the response play associated with the request.
    Raises:
        Exception: when the API does not answer with {"status": "ok"}.
    """
    url = SERVER_URL + RESPONSE_PLAY_SUFFIX.format(response_play_uuid)
    body = {
        'incident': {
            'id': incident_id,
            'type': 'incident_reference'
        }
    }
    # The API requires the acting user's email in the 'From' header.
    response = http_request('POST', url, json_data=body, additional_headers={"From": from_email})
    if response != {"status": "ok"}:
        raise Exception("Status NOT Ok - {}".format(response))
    return CommandResults(
        readable_output="Response play successfully run to the incident " + incident_id + " by " + from_email,
        raw_response=response
    )
''' EXECUTION CODE '''
def main():
    """Route the invoked Demisto command to its PagerDuty implementation."""
    # Resolve the command name before the try block: previously it was assigned
    # inside the try *after* demisto.params(), so a failure there made the
    # except handler crash with NameError instead of reporting the real error.
    command = demisto.command()
    LOG('command is %s' % (command,))
    try:
        demisto_params = demisto.params()
        params = {
            'api_url': demisto_params['url'].rstrip('/'),
            'use_ssl': not demisto_params.get('insecure', False),
            'threshold': int(demisto_params.get('threshold', 1)),
            'create_relationships': demisto_params.get('create_relationships', True),
            'max_num_of_relationships': min(1000, int(demisto_params.get('max_num_of_relationships', 10))),
        }
        reliability = demisto_params.get('integrationReliability', DBotScoreReliability.C)
        if DBotScoreReliability.is_valid_type(reliability):
            params['reliability'] = DBotScoreReliability.get_dbot_score_reliability_from_str(reliability)
        else:
            Exception('Please provide a valid value for the Source Reliability parameter.')
        # Remove proxy if not set to true in params
        handle_proxy()
        if command == 'test-module':
            # This is the call made when pressing the integration test button.
            test_module(**params)
            demisto.results('ok')
        elif command == 'url':
            url_command(params)
        elif command == 'domain':
            domain_command(params)
        elif command == 'file':
            file_command(params)
        elif command == 'urlhaus-download-sample':
            urlhaus_download_sample_command(**params)
    # Log exceptions
    except Exception as exc:
        demisto.debug(traceback.format_exc())
        return_error(f'Failed to execute command "{command}".\nError: {exc}', error=exc)
if __name__ in ['__main__', '__builtin__', 'builtins']:
main()
| mit | ce28ef17332da39944bf0799b30f7ae8 | 37.577416 | 120 | 0.609059 | 3.815105 | false | false | false | false |
demisto/content | Packs/DeHashed/Integrations/DeHashed/DeHashed_test.py | 2 | 12255 | import json
import urllib
DEHASHED_URL = "https://url.com/" # disable-secrets-detection
INTEGRATION_CONTEXT_BRAND = "DeHashed"
def load_test_data(json_path):
    """Load a JSON test fixture from *json_path* and return the parsed object.

    The file is opened as UTF-8 explicitly so the tests do not depend on the
    platform's locale-default encoding.
    """
    with open(json_path, encoding='utf-8') as f:
        return json.load(f)
def test_module_command(requests_mock):
    """
    Given:
        - Performs a basic GET request to check if the API is reachable and authentication is successful.
    When
        - Setting a new instance of the integration.
    Then
        - returns "ok".
    """
    # Imported inside the test so the demisto mock is in place before the module loads.
    from DeHashed import Client, test_module
    test_data = load_test_data("test_data/search.json")
    url_params = {"query": 'vin:"test" "test1"'}
    encoded = urllib.parse.urlencode(url_params)
    # Register the mocked API response for the exact encoded query.
    requests_mock.get(f"{DEHASHED_URL}search?{encoded}", json=test_data["api_response"])
    client = Client(base_url=f"{DEHASHED_URL}", email='', api_key='')
    # Reset headers so the mocked request needs no real credentials.
    client._headers = {}
    res = test_module(client)
    assert res == "ok"
def test_search_command_using_is_operator_without_filter(requests_mock):
    """
    Given:
        - "Is" operator, value to search, and not using any filters.
    When
        - Searching an object that matches the specified value.
    Then
        - returns Demisto outputs.
    """
    from DeHashed import Client, dehashed_search_command
    test_data = load_test_data("test_data/search.json")
    # Expected context: the full fixture results plus the last-query metadata.
    expected_result = {
        "DeHashed.Search(val.Id==obj.Id)": test_data["expected_results"][
            "full_results"
        ],
        "DeHashed.LastQuery(true)": {
            "ResultsFrom": 1,
            "ResultsTo": 2,
            "DisplayedResults": 2,
            "TotalResults": 2,
            "PageNumber": 1
        },
    }
    # "is" with no filter produces an exact-match quoted query.
    url_params = {"query": '"testgamil.co"'}
    encoded = urllib.parse.urlencode(url_params)
    requests_mock.get(f"{DEHASHED_URL}search?{encoded}", json=test_data["api_response"])
    client = Client(base_url=f"{DEHASHED_URL}", email='', api_key='')
    # Reset headers so the mocked request needs no real credentials.
    client._headers = {}
    markdown, context, raw = dehashed_search_command(client, test_data["is_op_single"])
    assert expected_result == context
def test_search_command_using_contains_operator_without_filter(requests_mock):
    """
    Given:
        - "Contains" operator, value to search.
    When
        - Searching an object that contains the specified value.
    Then
        - returns Demisto outputs.
    """
    from DeHashed import Client, dehashed_search_command
    test_data = load_test_data("test_data/search.json")
    # Expected context: the full fixture results plus the last-query metadata.
    expected_result = {
        "DeHashed.Search(val.Id==obj.Id)": test_data["expected_results"][
            "full_results"
        ],
        "DeHashed.LastQuery(true)": {
            "ResultsFrom": 1,
            "ResultsTo": 2,
            "DisplayedResults": 2,
            "TotalResults": 2,
            "PageNumber": 1
        },
    }
    # "contains" yields an unquoted query term.
    url_params = {"query": "testgamil.co"}
    encoded = urllib.parse.urlencode(url_params)
    requests_mock.get(f"{DEHASHED_URL}search?{encoded}", json=test_data["api_response"])
    client = Client(base_url=f"{DEHASHED_URL}", email='', api_key='')
    # Reset headers so the mocked request needs no real credentials.
    client._headers = {}
    markdown, context, raw = dehashed_search_command(
        client, test_data["contains_op_single"]
    )
    assert expected_result == context
def test_search_command_using_regex_operator_without_filter(requests_mock):
    """
    Given:
        - "Regex" operator, value to search.
    When
        - Searching an object that contains the specified value.
    Then
        - returns Demisto outputs.
    """
    from DeHashed import Client, dehashed_search_command
    test_data = load_test_data("test_data/search.json")
    # Expected context: the full fixture results plus the last-query metadata.
    expected_result = {
        "DeHashed.Search(val.Id==obj.Id)": test_data["expected_results"][
            "full_results"
        ],
        "DeHashed.LastQuery(true)": {
            "ResultsFrom": 1,
            "ResultsTo": 2,
            "DisplayedResults": 2,
            "TotalResults": 2,
            "PageNumber": 1
        },
    }
    # "regex" wraps the expression in slashes.
    url_params = {"query": "/joh?n(ath[oa]n)/"}
    encoded = urllib.parse.urlencode(url_params)
    requests_mock.get(f"{DEHASHED_URL}search?{encoded}", json=test_data["api_response"])
    client = Client(base_url=f"{DEHASHED_URL}", email='', api_key='')
    # Reset headers so the mocked request needs no real credentials.
    client._headers = {}
    markdown, context, raw = dehashed_search_command(
        client, test_data["regex_op_single"]
    )
    assert expected_result == context
def test_search_command_using_is_operator_with_filter_and_multi_values(requests_mock):
    """
    Given:
        - "Is" operator, value to search and "email" as a filter.
    When
        - Searching an object that matches the specified value.
    Then
        - returns Demisto outputs.
    """
    from DeHashed import Client, dehashed_search_command
    test_data = load_test_data("test_data/search.json")
    # Expected context: the full fixture results plus the last-query metadata.
    expected_result = {
        "DeHashed.Search(val.Id==obj.Id)": test_data["expected_results"][
            "full_results"
        ],
        "DeHashed.LastQuery(true)": {
            "ResultsFrom": 1,
            "ResultsTo": 2,
            "DisplayedResults": 2,
            "TotalResults": 2,
            "PageNumber": 1
        },
    }
    # Filtered "is" search: field prefix plus multiple quoted values.
    url_params = {"query": 'email:"testgamil.co" "test1gmail.com"'}
    encoded = urllib.parse.urlencode(url_params)
    requests_mock.get(f"{DEHASHED_URL}search?{encoded}", json=test_data["api_response"])
    client = Client(base_url=f"{DEHASHED_URL}", email='', api_key='')
    # Reset headers so the mocked request needs no real credentials.
    client._headers = {}
    markdown, context, raw = dehashed_search_command(client, test_data["is_op_multi"])
    assert expected_result == context
def test_search_command_using_contains_operator_with_filter_and_multi_values(
    requests_mock,
):
    """
    Given:
        - "Contains" operator, value to search and "name" as a filter.
    When
        - Searching an object that contains the specified value.
    Then
        - returns Demisto outputs.
    """
    from DeHashed import Client, dehashed_search_command
    test_data = load_test_data("test_data/search.json")
    # Expected context: the full fixture results plus the last-query metadata.
    expected_result = {
        "DeHashed.Search(val.Id==obj.Id)": test_data["expected_results"][
            "full_results"
        ],
        "DeHashed.LastQuery(true)": {
            "ResultsFrom": 1,
            "ResultsTo": 2,
            "DisplayedResults": 2,
            "TotalResults": 2,
            "PageNumber": 1
        },
    }
    # Multiple "contains" values are OR-ed inside a field-scoped group.
    url_params = {"query": "name:(test1 OR test2)"}
    encoded = urllib.parse.urlencode(url_params)
    requests_mock.get(f"{DEHASHED_URL}search?{encoded}", json=test_data["api_response"])
    client = Client(base_url=f"{DEHASHED_URL}", email='', api_key='')
    # Reset headers so the mocked request needs no real credentials.
    client._headers = {}
    markdown, context, raw = dehashed_search_command(
        client, test_data["contains_op_multi"]
    )
    assert expected_result == context
def test_search_command_using_regex_operator_with_filter_and_multi_values(
    requests_mock,
):
    """
    Given:
        - "Regex" operator, value to search and "vin" as a filter.
    When
        - Searching an object that contains the specified value.
    Then
        - returns Demisto outputs.
    """
    from DeHashed import Client, dehashed_search_command
    test_data = load_test_data("test_data/search.json")
    # Expected context: the full fixture results plus the last-query metadata.
    expected_result = {
        "DeHashed.Search(val.Id==obj.Id)": test_data["expected_results"][
            "full_results"
        ],
        "DeHashed.LastQuery(true)": {
            "ResultsFrom": 1,
            "ResultsTo": 2,
            "DisplayedResults": 2,
            "TotalResults": 2,
            "PageNumber": 1
        },
    }
    # Field-scoped regex search with two slash-wrapped expressions.
    url_params = {"query": "vin:/joh?n(ath[oa]n)/ /joh?n11(ath[oa]n)/"}
    encoded = urllib.parse.urlencode(url_params)
    requests_mock.get(f"{DEHASHED_URL}search?{encoded}", json=test_data["api_response"])
    client = Client(base_url=f"{DEHASHED_URL}", email='', api_key='')
    # Reset headers so the mocked request needs no real credentials.
    client._headers = {}
    markdown, context, raw = dehashed_search_command(
        client, test_data["regex_op_multi"]
    )
    assert expected_result == context
def test_search_command_using_regex_operator_with_filter_and_change_result_range(
    requests_mock,
):
    """
    Given:
        - "Regex" operator, value to search, "vin" as a filter and a range of results amount to return.
    When
        - Searching an object that contains the specified value.
    Then
        - returns Demisto outputs.
    """
    from DeHashed import Client, dehashed_search_command
    test_data = load_test_data("test_data/search.json")
    # Only one of the two total results is displayed because of the requested range.
    expected_result = {
        "DeHashed.Search(val.Id==obj.Id)": test_data["expected_results_range"][
            "full_results"
        ],
        "DeHashed.LastQuery(true)": {
            "ResultsFrom": 1,
            "ResultsTo": 1,
            "DisplayedResults": 1,
            "TotalResults": 2,
            "PageNumber": 1
        },
    }
    url_params = {"query": "vin:/joh?n(ath[oa]n)/ /joh?n11(ath[oa]n)/"}
    encoded = urllib.parse.urlencode(url_params)
    requests_mock.get(f"{DEHASHED_URL}search?{encoded}", json=test_data["api_response"])
    client = Client(base_url=f"{DEHASHED_URL}", email='', api_key='')
    # Reset headers so the mocked request needs no real credentials.
    client._headers = {}
    markdown, context, raw = dehashed_search_command(
        client, test_data["regex_op_multi_range"]
    )
    assert expected_result == context
def test_email_command_malicious_dbot_score(mocker):
    """
    Given:
        - The email address to check and that user defined the default dbot score to be 'MALICIOUS'.
    When:
        - Searching an object that contains the given email address.
    Then:
        - Return the demisto outputs and validate that the dbot score is malicious.
    """
    from DeHashed import Client, email_command
    test_data = load_test_data('test_data/search.json')
    expected_result = {
        'DeHashed.Search(val.Id==obj.Id)': test_data['expected_results'][
            'full_results'
        ],
        'DBotScore': {
            'Indicator': 'testgamil.com',
            'Type': 'email',
            'Vendor': 'DeHashed',
            # 3 == Bad/Malicious on the DBot scale.
            'Score': 3
        }
    }
    client = Client(base_url=f'{DEHASHED_URL}', email_dbot_score='MALICIOUS', email='', api_key='')
    # Stub the search so no HTTP request is made.
    mocker.patch.object(client, 'dehashed_search', return_value=test_data['api_response'])
    markdown, context, raw = email_command(client, test_data['email_command'])
    assert expected_result == context
def test_email_command_suspicious_dbot_score(mocker):
    """
    Given:
        - The email address to check and that user defined the default dbot score to be 'SUSPICIOUS'.
    When:
        - Searching an object that contains the given email address.
    Then:
        - Return the demisto outputs and validate that the dbot score is suspicious.
    """
    from DeHashed import Client, email_command
    test_data = load_test_data('test_data/search.json')
    expected_result = {
        'DeHashed.Search(val.Id==obj.Id)': test_data['expected_results'][
            'full_results'
        ],
        'DBotScore': {
            'Indicator': 'testgamil.com',
            'Type': 'email',
            'Vendor': 'DeHashed',
            # 2 == Suspicious on the DBot scale.
            'Score': 2
        }
    }
    client = Client(base_url=f'{DEHASHED_URL}', email_dbot_score='SUSPICIOUS', email='', api_key='')
    # Stub the search so no HTTP request is made.
    mocker.patch.object(client, 'dehashed_search', return_value=test_data['api_response'])
    markdown, context, raw = email_command(client, test_data['email_command'])
    assert expected_result == context
def test_email_command_no_entries_returned(mocker):
    """
    Given:
        - The email address to check.
    When:
        - Searching an object that contains the given email address and no results are returned.
    Then:
        - Validate that the DBotScore is set to 0.
    """
    from DeHashed import Client, email_command
    test_data = load_test_data('test_data/search.json')
    # No search hits -> only a DBotScore entry with score 0 (Unknown).
    expected_result = {
        'DBotScore': {
            'Indicator': 'testgamil.com',
            'Type': 'email',
            'Vendor': 'DeHashed',
            'Score': 0
        }
    }
    client = Client(base_url=f'{DEHASHED_URL}', email='', api_key='')
    # Stub the search to return an empty response.
    mocker.patch.object(client, 'dehashed_search', return_value={})
    markdown, context, raw = email_command(client, test_data['email_command'])
    assert expected_result == context
| mit | 7dc17b8ce5033ade0dc38b0dabcdb283 | 31.081152 | 105 | 0.60408 | 3.622524 | false | true | false | false |
demisto/content | Packs/CommonScripts/Scripts/GetFieldsByIncidentType/GetFieldsByIncidentType.py | 2 | 1942 | import demistomock as demisto
from CommonServerPython import *
from pprint import pformat
def main():
    """List the incident-field names associated with a given incident type.

    Reads the incident type and the boolean flags ``exclude_system``,
    ``short_names``, ``explicit_only`` and ``pprint`` from the script
    arguments and outputs the matching field names.
    """
    # get incident fields from the server via the REST API
    res = demisto.executeCommand('demisto-api-get', {'uri': '/incidentfields'})
    if is_error(res):
        return_error(res[0]['Contents'])
    fields = res[0]['Contents']['response']

    # 'fields' contains non-incident fields (evidence and indicator), as well, so let's make a version
    # containing only incident fields
    incident_fields = [field for field in fields if field['id'].startswith('incident_')]

    # get arguments; absent flags default to False
    args = demisto.args()
    incident_type = args['incident_type']
    exclude_system = argToBoolean(args.get('exclude_system', False))
    # 'cliName' is the machine-readable (short) field name
    name_key = 'cliName' if argToBoolean(args.get('short_names', False)) else 'name'
    explicit_only = argToBoolean(args.get('explicit_only', False))

    # generate results
    matched_fields = []
    for field in incident_fields:  # using multiple if statements for readability
        if exclude_system and not field['system']:
            # skip non-system fields if exclude_system is true
            continue
        elif field['associatedToAll'] and not explicit_only:
            # if explicit_only is false, include fields associated to all incident types
            matched_fields.append(field[name_key])
        elif field['associatedTypes'] is not None and incident_type in field['associatedTypes']:
            # include fields where incident type is in associatedTypes
            matched_fields.append(field[name_key])

    # output results, pretty-printed when requested
    if argToBoolean(args.get('pprint', False)):
        demisto.results(pformat(matched_fields))
    else:
        demisto.results(matched_fields)


if __name__ in ["__builtin__", "builtins"]:
    main()
| mit | 5802c53821476a9e6ada7f0048453cac | 33.070175 | 102 | 0.659114 | 4.123142 | false | false | false | false |
demisto/content | Packs/SMB/Integrations/SMB/SMB.py | 2 | 4445 | import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
''' IMPORTS '''
import tempfile
from smb.SMBConnection import SMBConnection
''' GLOBAL VARS '''
# Integration instance parameters, resolved once at module load.
USER = demisto.params()['credentials']['identifier']
PASSWORD = demisto.params()['credentials']['password']
HOSTNAME = demisto.params()['hostname']
PORT = int(demisto.params()['port'])  # SMB over direct TCP, typically 445
NBNAME = demisto.params()['nbname']
DOMAIN = demisto.params().get('domain', None)  # optional; None means no domain
''' HELPER FUNCTIONS '''
def split_path(path):
    """Split an SMB path of the form ``share/sub/path`` into ``[share, rest]``.

    The separator is '/' when the path contains one, otherwise '\\'.
    Leading/trailing separators are trimmed first; a single-component path
    yields a one-element list.
    """
    separator = '/' if '/' in path else '\\'
    return path.strip(separator).split(separator, 1)
def connect(hostname, domain, user, password, nb_name, port):
    """Open a direct-TCP SMB session; abort the command on authentication failure."""
    session_kwargs = {'is_direct_tcp': True}
    if domain:
        # Only pass the domain keyword when one is actually configured.
        session_kwargs['domain'] = domain
    connection = SMBConnection(user, password, 'Demisto', nb_name, **session_kwargs)
    if not connection.connect(hostname, port):
        return_error('Authentication failed, verify instance configuration parameters and try again.')
    return connection
''' FUNCTIONS '''
def test_module():
    """Connectivity check: open and close an SMB session using the instance parameters."""
    if HOSTNAME and NBNAME:
        connection = connect(hostname=HOSTNAME, domain=DOMAIN, user=USER, password=PASSWORD, nb_name=NBNAME, port=PORT)
        demisto.results('ok')
        connection.close()
    else:
        # Without a hostname and NetBIOS name there is nothing to connect to.
        demisto.results('No hostname or NetBIOS name was configured, cannot perform a connection test.')
def smb_download():
    """Retrieve a file from an SMB share and return it as a war-room entry.

    The share and in-share path come from the ``file_path`` argument.
    Per-command ``hostname``/``nbname``/``domain`` arguments override the
    instance configuration when supplied.
    """
    share, path = split_path(demisto.getArg('file_path'))
    args = demisto.args()
    # Command-level overrides fall back to the instance configuration.
    hostname = args.get('hostname') or HOSTNAME
    nbname = args.get('nbname') or NBNAME
    domain = args.get('domain') or DOMAIN

    if not hostname:
        return_error('No hostname was configured for the integration, cannot establish connection.')
    elif not nbname:
        return_error('No NetBIOS name was configured for the integration, cannot establish connection.')

    connection = connect(hostname=hostname, domain=domain, user=USER, password=PASSWORD, nb_name=nbname, port=PORT)
    try:
        with tempfile.NamedTemporaryFile() as file_obj:
            file_attributes, filesize = connection.retrieveFile(share, path, file_obj)
            file_obj.seek(0)
            # File name is the last path component, whichever separator is used.
            filename = path.split('/')[-1] if '/' in path else path.split('\\')[-1]
            if demisto.getArg('download_and_attach') == "yes":
                demisto.results(fileResult(filename, file_obj.read()))
            else:
                demisto.results(file_obj.read())
    finally:
        connection.close()
def smb_upload():
    """Upload a file (war-room entry or raw content) to an SMB share.

    Exactly one of ``entryID``/``content`` must be supplied; that is now
    validated *before* the SMB session is opened, so a bad invocation no
    longer creates a connection it never uses.
    """
    share, path = split_path(demisto.getArg('file_path'))
    entry_id = demisto.getArg('entryID')
    content = demisto.getArg('content')
    args = demisto.args()
    # Command-level overrides fall back to the instance configuration.
    hostname = args.get('hostname') or HOSTNAME
    nbname = args.get('nbname') or NBNAME
    domain = args.get('domain') or DOMAIN

    if not hostname:
        return_error('No hostname was configured for the integration, cannot establish connection.')
    elif not nbname:
        return_error('No NetBIOS name was configured for the integration, cannot establish connection.')
    if not entry_id and not content:
        raise Exception("smb-upload requires one of the following arguments: content, entryID.")

    connection = connect(hostname=hostname, domain=domain, user=USER, password=PASSWORD, nb_name=nbname, port=PORT)
    try:
        if entry_id:
            # Resolve the war-room entry to a local file and read its bytes.
            file_info = demisto.getFilePath(entry_id)
            with open(file_info['path'], mode='rb') as f:
                content = f.read()
        with tempfile.NamedTemporaryFile() as file_obj:
            file_obj.write(content)
            file_obj.seek(0)
            file_bytes_transfered = connection.storeFile(share, path, file_obj)
        # NOTE(review): "Transfered" typo kept to preserve the exact output text.
        demisto.results("Transfered {} bytes of data.".format(file_bytes_transfered))
    finally:
        connection.close()
''' EXECUTION CODE '''
# Route the invoked command; any handler exception is surfaced via return_error.
LOG('command is %s' % (demisto.command(),))
try:
    if demisto.command() == 'test-module':
        test_module()
    elif demisto.command() == 'smb-download':
        smb_download()
    elif demisto.command() == 'smb-upload':
        smb_upload()
except Exception as e:
    return_error(str(e))
| mit | ca65fe8829c5375e4073ce9fb0e68bc5 | 35.735537 | 119 | 0.659393 | 3.878709 | false | false | false | false |
demisto/content | Packs/AnsibleCiscoIOS/Integrations/AnsibleCiscoIOS/AnsibleCiscoIOS.py | 2 | 4355 | import ssh_agent_setup
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
# Import Generated code
from AnsibleApiModule import * # noqa: E402
host_type = 'ios'
# MAIN FUNCTION
def main() -> None:
    """main function, parses params and runs command functions

    :return:
    :rtype:
    """
    # SSH Key integration requires ssh_agent to be running in the background
    ssh_agent_setup.setup()

    # Common Inputs
    command = demisto.command()
    args = demisto.args()
    int_params = demisto.params()

    # Each ios-* command maps one-to-one onto an Ansible ios_* module.
    command_to_module = {
        'ios-banner': 'ios_banner',
        'ios-bgp': 'ios_bgp',
        'ios-command': 'ios_command',
        'ios-config': 'ios_config',
        'ios-facts': 'ios_facts',
        'ios-interfaces': 'ios_interfaces',
        'ios-l2-interfaces': 'ios_l2_interfaces',
        'ios-l3-interfaces': 'ios_l3_interfaces',
        'ios-lacp': 'ios_lacp',
        'ios-lacp-interfaces': 'ios_lacp_interfaces',
        'ios-lag-interfaces': 'ios_lag_interfaces',
        'ios-linkagg': 'ios_linkagg',
        'ios-lldp': 'ios_lldp',
        'ios-lldp-global': 'ios_lldp_global',
        'ios-lldp-interfaces': 'ios_lldp_interfaces',
        'ios-logging': 'ios_logging',
        'ios-ntp': 'ios_ntp',
        'ios-ping': 'ios_ping',
        'ios-static-route': 'ios_static_route',
        'ios-system': 'ios_system',
        'ios-user': 'ios_user',
        'ios-vlans': 'ios_vlans',
        'ios-vrf': 'ios_vrf',
    }

    try:
        if command == 'test-module':
            # This is the call made when pressing the integration Test button.
            result = generic_ansible('CiscoIOS', 'ios_facts', args, int_params, host_type)

            if result:
                return_results('ok')
            else:
                return_results(result)
        elif command in command_to_module:
            return_results(generic_ansible('CiscoIOS', command_to_module[command], args, int_params, host_type))
    # Log exceptions and return errors
    except Exception as e:
        return_error(f'Failed to execute {command} command.\nError:\n{str(e)}')


# ENTRY POINT
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | f3176852190a46d2573c424ce0242fb7 | 45.329787 | 107 | 0.623881 | 3.77383 | false | false | false | false |
demisto/content | Packs/VulnDB/Integrations/VulnDB/VulnDB.py | 2 | 16142 | import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
''' IMPORTS '''
import requests
import urllib.parse
# Disable insecure warnings
requests.packages.urllib3.disable_warnings()
''' HELPER FUNCTIONS '''
class Client(BaseClient):
    """HTTP client for the VulnDB API.

    Acquires an OAuth access token on construction (client-credentials grant)
    and attaches it as a Bearer header to every subsequent request.
    """

    def __init__(self, proxy, use_ssl, base_url, client_id, client_secret):
        super().__init__(base_url=base_url, verify=use_ssl, proxy=proxy)
        access_token = self.get_oath_token(client_id, client_secret)
        # All API calls are JSON and authenticated with the bearer token.
        self._headers = {'Content-Type': 'application/json',
                         'Authorization': f'Bearer {access_token}'}

    def get_oath_token(self, client_id, client_secret):
        """Request an OAuth access token and return it (or None if absent)."""
        # ParseResult is a namedtuple: _replace() swaps the path immutably,
        # avoiding the list(urlparse(...)) / urlunparse round-trip.
        oath_url = urllib.parse.urlparse(self._base_url)._replace(path='/oauth/token').geturl()
        res = self._http_request('POST',
                                 '',
                                 json_data={
                                     'client_id': client_id,
                                     'client_secret': client_secret,
                                     'grant_type': 'client_credentials'
                                 },
                                 full_url=oath_url)
        return res.get('access_token')

    def http_request(self, url_suffix, size=None):
        """GET ``url_suffix``, optionally limiting the result size.

        Raises DemistoException when the response carries an error payload.
        """
        params = {'size': size} if size else None
        res = self._http_request("GET",
                                 url_suffix=url_suffix,
                                 params=params)
        # The details could reside in either error or details, not both
        for error_attribute in ['error', 'details']:
            if error_attribute in res:
                raise DemistoException(res[error_attribute])
        return res
def vulndb_vulnerability_to_entry(vuln):
    """Convert a raw VulnDB vulnerability record into a context entry.

    Missing keys now fall back to neutral defaults ('' / 0 / empty lists), so
    a partial API response no longer raises KeyError (the original indexed
    'cvss_metrics', 'vendors', 'products' and 'classifications' directly).
    Trailing 'Z' characters are stripped from date strings.
    """
    vulnerability_details = {
        'ID': vuln.get('vulndb_id', 0),
        'Title': vuln.get('title', ''),
        'Description': vuln.get('description', '').rstrip('Z'),
        'Keywords': vuln.get('keywords', ''),
        'PublishedDate': vuln.get('vulndb_published_date', '').rstrip('Z'),
        'TDescription': vuln.get('t_description', ''),
        'SolutionDate': vuln.get('solution_date', '').rstrip('Z'),
        'DiscoveryDate': vuln.get('disclosure_date', '').rstrip('Z'),
        'ExploitPublishDate': vuln.get('exploit_publish_date', '').rstrip('Z'),
    }

    cve_ext_reference_values = [ext_reference['value'] for ext_reference in
                                vuln.get('ext_references', [])]

    cvss_metrics_details = [{
        'Id': cvss_metrics_data.get('cve_id', 0),
        'AccessVector': cvss_metrics_data.get('access_vector', ''),
        'AccessComplexity': cvss_metrics_data.get('access_complexity', ''),
        'Authentication': cvss_metrics_data.get('authentication', ''),
        'ConfidentialityImpact': cvss_metrics_data.get('confidentiality_impact', ''),
        'IntegrityImpact': cvss_metrics_data.get('integrity_impact', ''),
        'AvailabilityImpact': cvss_metrics_data.get('availability_impact', ''),
        'GeneratedOn': cvss_metrics_data.get('generated_on', ''),
        'Score': cvss_metrics_data.get('score', 0),
    } for cvss_metrics_data in vuln.get('cvss_metrics', [])]

    vendor_details = [{'Id': vendor.get('vendor', {}).get('id', 0),
                       'Name': vendor.get('vendor', {}).get('name', '')}
                      for vendor in vuln.get('vendors', [])]

    product_details = []
    for product in vuln.get('products', []):
        product_versions = [{'Id': version.get('id', ''), 'Name': version.get('name', '')} for version in
                            product.get('versions', [])]
        product_details.append({
            'Id': product.get('id', ''),
            'Name': product.get('name', ''),
            'Versions': product_versions
        })

    classification_details = [{'Longname': classification.get('classification', {}).get('longname', ''),
                               'Description': classification.get('classification', {}).get('description', '')}
                              for classification in vuln.get('classifications', [])]

    return {
        'Vulnerability': vulnerability_details,
        'CVE-ExtReference': {
            'Value': cve_ext_reference_values
        },
        'CvssMetrics': cvss_metrics_details,
        'Vendor': vendor_details,
        'Products': product_details,
        'Classification': classification_details
    }
def vulndb_vulnerability_results_to_demisto_results(res):
    """Output one war-room entry per vulnerability found in the API response.

    Accepts either a single-record response ('vulnerability' key) or a list
    response ('results' key); anything else aborts via return_error.
    """
    if 'vulnerability' in res:
        results = [res['vulnerability']]
    elif 'results' in res:
        results = res['results']
    else:
        # return_error raises, so 'results' is always bound below.
        return_error('No "vulnerability" or "results" keys in the returned JSON')
    for result in results:
        ec = {
            'VulnDB': vulndb_vulnerability_to_entry(result)
        }
        human_readable = tableToMarkdown(f'Result for vulnerability ID: {ec["VulnDB"]["Vulnerability"]["ID"]}', {
            'Title': ec['VulnDB']['Vulnerability']['Title'],
            'Description': ec['VulnDB']['Vulnerability']['Description'],
            'Publish Date': ec['VulnDB']['Vulnerability']['PublishedDate'],
            'Solution Date': ec['VulnDB']['Vulnerability']['SolutionDate']
        })
        return_outputs(readable_output=human_readable, outputs=ec, raw_response=res)
def vulndb_vendor_to_entry(vendor):
    """Map a raw VulnDB vendor record onto the 'Results' context layout."""
    field_map = (
        ('Id', 'id'),
        ('Name', 'name'),
        ('ShortName', 'short_name'),
        ('VendorUrl', 'vendor_url'),
    )
    return {'Results': {target: vendor.get(source, '') for target, source in field_map}}
def vulndb_vendor_results_to_demisto_results(res):
    """Output one war-room entry per vendor found in the API response.

    Accepts either a single-record response ('vendor' key) or a list
    response ('results' key); anything else emits an error entry and returns.
    """
    if 'vendor' in res:
        results = [res['vendor']]
    elif 'results' in res:
        results = res['results']
    else:
        demisto.results({
            'Type': entryTypes['error'],
            'Contents': res,
            'ContentsFormat': formats['json'],
            'HumanReadable': 'No "vendor" or "results" keys in the returned JSON',
            'HumanReadableFormat': formats['text']
        })
        return
    for result in results:
        ec = {
            'VulnDB': vulndb_vendor_to_entry(result)
        }
        human_readable = tableToMarkdown(f'Result for vendor ID: {ec["VulnDB"]["Results"]["Id"]}', {
            'ID': ec['VulnDB']['Results']['Id'],
            'Name': ec['VulnDB']['Results']['Name'],
            'Short Name': ec['VulnDB']['Results']['ShortName'],
            'Vendor URL': ec['VulnDB']['Results']['VendorUrl']
        })
        demisto.results({
            'Type': entryTypes['note'],
            'Contents': res,
            'ContentsFormat': formats['json'],
            'HumanReadable': human_readable,
            'HumanReadableFormat': formats['markdown'],
            'EntryContext': ec
        })
def vulndb_product_to_entry(product):
    """Map a raw VulnDB product record onto the 'Results' context layout."""
    return {'Results': {key.capitalize(): product.get(key, '') for key in ('id', 'name')}}
def vulndb_product_results_to_demisto_results(res):
    """Output one war-room entry per product in the API response.

    Only the list form ('results' key) is accepted; anything else emits an
    error entry and returns.
    """
    if 'results' in res:
        results = res['results']
    else:
        demisto.results({
            'Type': entryTypes['error'],
            'Contents': res,
            'ContentsFormat': formats['json'],
            'HumanReadable': 'No "results" key in the returned JSON',
            'HumanReadableFormat': formats['text']
        })
        return
    for result in results:
        ec = {
            'VulnDB': vulndb_product_to_entry(result)
        }
        human_readable = tableToMarkdown(f'Result for product ID: {ec["VulnDB"]["Results"]["Id"]}', {
            'ID': ec['VulnDB']['Results']['Id'],
            'Name': ec['VulnDB']['Results']['Name']
        })
        demisto.results({
            'Type': entryTypes['note'],
            'Contents': res,
            'ContentsFormat': formats['json'],
            'HumanReadable': human_readable,
            'HumanReadableFormat': formats['markdown'],
            'EntryContext': ec
        })
''' COMMANDS + REQUESTS FUNCTIONS '''
def test_module(client: Client, client_id, client_secret):
    """
    Performs basic get request to get item samples.

    Re-requests an OAuth token; any failure raises and is reported by main().
    """
    client.get_oath_token(client_id, client_secret)
def vulndb_get_vuln_by_id_command(args: dict, client: Client):
    """Fetch a single vulnerability by its VulnDB ID and output it."""
    vulndb_id = args['vuln_id']
    res = client.http_request(f'/vulnerabilities/{vulndb_id}')
    vulndb_vulnerability_results_to_demisto_results(res)
def vulndb_get_vuln_by_vendor_and_product_name_command(args: dict, client: Client):
    """Fetch vulnerabilities matching a vendor name + product name pair."""
    vendor_name = args['vendor_name']
    product_name = args['product_name']
    max_size = args.get('max_size')  # optional cap on the number of results
    res = client.http_request(
        f'/vulnerabilities/find_by_vendor_and_product_name?vendor_name={vendor_name}&product_name={product_name}',
        max_size)
    vulndb_vulnerability_results_to_demisto_results(res)
def vulndb_get_vuln_by_vendor_and_product_id_command(args: dict, client: Client):
    """Fetch vulnerabilities matching a vendor ID + product ID pair."""
    vendor_id = args['vendor_id']
    product_id = args['product_id']
    max_size = args.get('max_size')  # optional cap on the number of results
    res = client.http_request(
        f'/vulnerabilities/find_by_vendor_and_product_id?vendor_id={vendor_id}&product_id={product_id}', max_size)
    vulndb_vulnerability_results_to_demisto_results(res)
def vulndb_get_vuln_by_vendor_id_command(args: dict, client: Client):
    """Fetch vulnerabilities for a specific vendor ID."""
    vendor_id = args['vendor_id']
    max_size = args.get('max_size')  # optional cap on the number of results
    res = client.http_request(f'/vulnerabilities/find_by_vendor_id?vendor_id={vendor_id}', max_size)
    vulndb_vulnerability_results_to_demisto_results(res)
def vulndb_get_vuln_by_product_id_command(args: dict, client: Client):
    """Fetch vulnerabilities for a specific product ID."""
    product_id = args['product_id']
    max_size = args.get('max_size')  # optional cap on the number of results
    res = client.http_request(f'/vulnerabilities/find_by_product_id?product_id={product_id}', max_size)
    vulndb_vulnerability_results_to_demisto_results(res)
def vulndb_get_vuln_by_cve_id_command(args: dict, client: Client):
    """Fetch vulnerabilities associated with a CVE identifier."""
    cve_id = args['cve_id']
    max_size = args.get('max_size')  # optional cap on the number of results
    res = client.http_request(f'/vulnerabilities/{cve_id}/find_by_cve_id', max_size)
    vulndb_vulnerability_results_to_demisto_results(res)
def vulndb_get_updates_by_dates_or_hours_command(args: dict, client: Client):
    """Fetch vulnerabilities updated within a date range or the last N hours.

    Exactly one of start_date / hours_ago must be supplied; end_date is only
    honored together with start_date.
    """
    start_date = args.get('start_date')
    end_date = args.get('end_date')
    hours_ago = args.get('hours_ago')
    max_size = args.get('max_size')  # optional cap on the number of results
    if start_date:
        url_suffix = f'/vulnerabilities/find_by_date?start_date={start_date}'
        if end_date:
            url_suffix += f'&end_date={end_date}'
        res = client.http_request(url_suffix, max_size)
    elif hours_ago is not None:
        res = client.http_request(f'/vulnerabilities/find_by_time?hours_ago={hours_ago}', max_size)
    else:
        # return_error raises, so 'res' is always bound below.
        return_error('Must provide either start date or hours ago.')
    vulndb_vulnerability_results_to_demisto_results(res)
def vulndb_get_vendor_command(args: dict, client: Client):
    """Fetch a vendor by ID or name, or list all vendors when neither is given."""
    vendor_id = args.get('vendor_id')
    vendor_name = args.get('vendor_name')
    max_size = args.get('max_size')  # optional cap on the number of results
    if vendor_id is not None and vendor_name is not None:
        return_error('Provide either vendor id or vendor name or neither, not both.')
    elif vendor_id:
        res = client.http_request(f'/vendors/{vendor_id}', max_size)
    elif vendor_name:
        res = client.http_request(f'/vendors/by_name?vendor_name={vendor_name}', max_size)
    else:
        # No selector: return the full vendor list.
        res = client.http_request('/vendors', max_size)
    vulndb_vendor_results_to_demisto_results(res)
def vulndb_get_product_command(args: dict, client: Client):
    """Fetch products by vendor ID or vendor name, or list all products."""
    vendor_id = args.get('vendor_id')
    vendor_name = args.get('vendor_name')
    max_size = args.get('max_size')  # optional cap on the number of results
    if vendor_id is not None and vendor_name is not None:
        return_error('Provide either vendor id or vendor name or neither, not both.')
    elif vendor_id:
        res = client.http_request(f'/products/by_vendor_id?vendor_id={vendor_id}', max_size)
    elif vendor_name:
        res = client.http_request(f'/products/by_vendor_name?vendor_name={vendor_name}', max_size)
    else:
        # No selector: return the full product list.
        res = client.http_request('/products', max_size)
    vulndb_product_results_to_demisto_results(res)
def vulndb_get_version_command(args: dict, client: Client):
    """Fetch product versions by product ID or product name.

    Fixes two defects in the original: when neither argument was supplied,
    ``res`` was never bound and the final call raised UnboundLocalError; and
    the both-arguments error message wrongly referred to "vendor name".
    """
    product_id = args.get('product_id')
    product_name = args.get('product_name')
    max_size = args.get('max_size')  # optional cap on the number of results
    if product_id is not None and product_name is not None:
        return_error('Provide either product id or product name, not both.')
    elif product_id:
        res = client.http_request(f'/versions/by_product_id?product_id={product_id}', max_size)
    elif product_name:
        res = client.http_request(f'/versions/by_product_name?product_name={product_name}', max_size)
    else:
        # return_error raises, so 'res' is always bound below.
        return_error('Must provide either product id or product name.')
    vulndb_product_results_to_demisto_results(res)
def vulndb_get_cve_command(args: dict, client: Client):
    """Implement the generic ``cve`` reputation command via the VulnDB API.

    Outputs the first matching record into the standard CVE context.
    """
    cve_id = args['cve_id']
    max_size = args.get('max_size')  # optional cap on the number of results
    res = client.http_request(f'/vulnerabilities/{cve_id}/find_by_cve_id', max_size)
    results = res.get("results")
    if not results:
        return_error('Could not find "results" in the returned JSON')
    # Only the first record is mapped into the CVE context.
    result = results[0]
    cvss_metrics_details = result.get("cvss_metrics", [])
    data = {
        "ID": cve_id,
        # "0" when no CVSS metrics are present.
        "CVSS": cvss_metrics_details[0].get("score", "0") if cvss_metrics_details else "0",
        "Published": result.get('vulndb_published_date', '').rstrip('Z'),
        "Modified": result.get('vulndb_last_modified', '').rstrip('Z'),
        "Description": result.get("description", '')
    }
    human_readable = tableToMarkdown(f'Result for CVE ID: {cve_id}', data)
    ec = {'CVE(val.ID === obj.ID)': data}
    return_outputs(human_readable, outputs=ec, raw_response=res)
''' COMMANDS MANAGER / SWITCH PANEL '''
def main():
    """Dispatch the invoked VulnDB integration command.

    Reads connection parameters from the integration settings, builds the
    API client and routes ``demisto.command()`` to its handler.  Any
    exception is converted into a Demisto error entry via ``return_error``.
    """
    params = demisto.params()
    # Remove trailing slash to prevent wrong URL path to service
    api_url = params['api_url']
    client_id = params['client_id']
    client_secret = params['client_secret']
    use_ssl = not params.get('insecure', False)
    proxy = params.get('proxy', False)
    client = Client(proxy, use_ssl, api_url, client_id, client_secret)
    args = demisto.args()
    command = demisto.command()
    LOG(f'Command being called is {command}')
    try:
        if command == 'test-module':
            # This is the call made when pressing the integration test button.
            test_module(client, client_id, client_secret)
            demisto.results('ok')
        # `elif` (was a second `if`): keeps the dispatch a single chain so
        # no further comparisons run after test-module succeeded.
        elif command == 'vulndb-get-vuln-by-id':
            vulndb_get_vuln_by_id_command(args, client)
        elif command == 'vulndb-get-vuln-by-vendor-and-product-name':
            vulndb_get_vuln_by_vendor_and_product_name_command(args, client)
        elif command == 'vulndb-get-vuln-by-vendor-and-product-id':
            vulndb_get_vuln_by_vendor_and_product_id_command(args, client)
        elif command == 'vulndb-get-vuln-by-vendor-id':
            vulndb_get_vuln_by_vendor_id_command(args, client)
        elif command == 'vulndb-get-vuln-by-product-id':
            vulndb_get_vuln_by_product_id_command(args, client)
        elif command == 'vulndb-get-vuln-by-cve-id':
            vulndb_get_vuln_by_cve_id_command(args, client)
        elif command == 'vulndb-get-vendor':
            vulndb_get_vendor_command(args, client)
        elif command == 'vulndb-get-product':
            vulndb_get_product_command(args, client)
        elif command == 'vulndb-get-version':
            vulndb_get_version_command(args, client)
        elif command == 'vulndb-get-updates-by-dates-or-hours':
            vulndb_get_updates_by_dates_or_hours_command(args, client)
        elif command == 'cve':
            vulndb_get_cve_command(args, client)
    except Exception as e:
        error_message = f'Failed to execute {command} command. Error: {str(e)}'
        return_error(error_message)


if __name__ in ('__main__', 'builtins'):
    main()
| mit | c004b4ba33e9765894624c1b1ecfe526 | 36.62704 | 119 | 0.599244 | 3.471398 | false | false | false | false |
demisto/content | Packs/SafeBreach/Scripts/JoinListsOfDicts/JoinListsOfDicts_test.py | 2 | 1464 | import demistomock as demisto
from JoinListsOfDicts import main as JoinListsOfDicts
# Fixtures: "left" records referencing insight ids, and "right" insight
# records to be joined onto them by id.
LEFT_DICTS = [{
    "insightIds": ['1', '2', '3'],
    "value": "value123"
}, {
    "insightIds": ['1'],
    "value": "value1"
}, {
    "insightIds": ['3'],
    "value": "value3"
}]

RIGHT_DICTS = [{
    "id": 1,
    "name": "someInsight 1"
}, {
    "id": 3,
    "name": "someInsight 3"
}]

# Single-dict variants used to verify the script also accepts non-list input.
LEFT_DICT = {
    "insightIds": ['1', '2', '3'],
    "value": "value123"
}

RIGHT_DICT = {
    "id": 2,
    "name": "someInsight 2"
}
def test_JoinListsOfDicts(mocker):
    """Run JoinListsOfDicts over the fixtures and inspect demisto.results.

    First with lists of dicts on both sides, then with single dicts to
    confirm non-list inputs are handled as well.
    """
    mocker.patch.object(demisto, 'args', return_value={
        'rightkey': 'id',
        'key': 'insightIds',
        'right': RIGHT_DICTS,
        'value': LEFT_DICTS, # left
    })
    mocker.patch.object(demisto, 'results')
    JoinListsOfDicts(demisto.args())
    # The script reports through demisto.results; grab its first positional arg.
    list_join_indicators = demisto.results.call_args[0][0]
    assert len(list_join_indicators) == 4
    assert list_join_indicators[0]['insightIds'] == ['1', '2', '3']
    # verify that also single dict will work.
    mocker.patch.object(demisto, 'args', return_value={
        'rightkey': 'id',
        'key': 'insightIds',
        'right': RIGHT_DICT,
        'value': LEFT_DICT, # left
    })
    mocker.patch.object(demisto, 'results')
    JoinListsOfDicts(demisto.args())
    single_join_indicator = demisto.results.call_args[0][0]
    assert len(single_join_indicator) == 1
    assert single_join_indicator[0]['insightIds'] == ['1', '2', '3']
demisto/content | Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/AbnormalSecurityEventCollector.py | 2 | 4399 | from CommonServerPython import *
import demistomock as demisto
VENDOR = 'Abnormal_Security'
PRODUCT = 'Email_Protection'
class Client(BaseClient):
    """Thin wrapper over BaseClient for the Abnormal Security threats API."""

    def list_threats(self, params):
        # GET /threats with filter/paging query parameters.
        return self._http_request('GET', params=params, url_suffix='threats')

    def get_threat(self, threat_id):
        # GET /threats/{id}: full threat details, including its messages.
        return self._http_request('GET', url_suffix=f'threats/{threat_id}')
def get_events(client: Client, after: str):
    """Retrieves messages by time range & ordered by datetime

    Args:
        client (Client): Abnormal Security client.
        after (str): the start datetime to search messages

    Returns:
        list: messages ordered by datetime.
        str: the last run to be set for the next run.
    """
    before = arg_to_datetime(arg='now', arg_name='before', required=True).strftime("%Y-%m-%dT%H:%M:%SZ")  # type: ignore
    threat_ids = get_list_threats(client, after, before)
    if not threat_ids:
        return [], before
    collected = []
    # Threats arrive newest-first; walk them oldest-first instead.
    for threat in reversed(threat_ids):
        collected.extend(
            get_messages_by_datetime(client, threat.get('threatId'), after, before))
    collected.sort(key=lambda message: message['receivedTime'])
    return collected, before
def get_messages_by_datetime(client: Client, threat_id: str, after: str, before: str):
    """get messages from a threat and return only the messages that are in the time range

    Args:
        client (Client): Abnormal Security client.
        threat_id (str): the threat to get messages.
        after (str): the datetime to search messages after that.
        before (str): the datetime to search messages before that.

    Returns:
        list: messages filtered by the time range.
    """
    threat = client.get_threat(threat_id)
    in_range = []
    # Messages are ordered newest-first, so the first one older than
    # `after` ends the scan (ISO-8601 strings compare lexicographically).
    for message in threat.get('messages'):
        received = message.get('receivedTime')
        if received < after:
            break
        if before >= received:
            in_range.append(message)
    return in_range
def get_list_threats(client: Client, after: str, before: str):
    """get list of all threats ids in the time range

    Args:
        client (Client): Abnormal Security client.
        after (str): the datetime to search threats after that.
        before (str): the datetime to search threats before that.

    Returns:
        list: list of threats ids.
    """
    collected = []
    page_number = 1
    # Page through /threats until the API stops returning a next page.
    while True:
        params = assign_params(
            pageSize=1000,
            filter=f'receivedTime gte {after} lte {before}',
            pageNumber=page_number)
        page = client.list_threats(params)
        collected += page.get('threats')
        next_page = page.get('nextPageNumber')
        if not next_page:
            return collected
        page_number = next_page
def main():
    """Entry point: collect Abnormal Security threat messages as events."""
    # Args is always stronger. Get last run even stronger
    params = demisto.params()
    token = params['token']['password']
    verify = params['verify']
    proxy = params['proxy']
    after = arg_to_datetime(
        arg=params.get('after'), arg_name='after', required=True).strftime("%Y-%m-%dT%H:%M:%SZ")  # type: ignore
    client = Client(base_url='https://api.abnormalplatform.com/v1',
                    verify=verify,
                    proxy=proxy,
                    headers={"Authorization": f"Bearer {token}"})
    last_run = demisto.getLastRun().get('last_run')
    if last_run:
        # Resume from the previous fetch instead of the configured start time.
        after = last_run
    command = demisto.command()
    try:
        threats, last_run = get_events(client, after)
        if command == 'test-module':
            return_results('ok')
        elif command == 'fetch-events':
            # Persist the new checkpoint before shipping events to XSIAM.
            demisto.setLastRun({'last_run': last_run})
            send_events_to_xsiam(threats, VENDOR, PRODUCT)
        elif command == 'abnormal-security-event-collector-get-events':
            command_results = CommandResults(
                readable_output=tableToMarkdown(f'{VENDOR} - {PRODUCT} events', threats),
                raw_response=threats,
            )
            return_results(command_results)
            # Manual run may optionally push the fetched events as well.
            if argToBoolean(demisto.args().get('should_push_events', False)):
                send_events_to_xsiam(threats, VENDOR, PRODUCT)
    except Exception as e:
        return_error(str(e))


if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | 499ac98342c0ecb8e2a42a871220adfc | 32.838462 | 120 | 0.622414 | 3.753413 | false | false | false | false |
demisto/content | Packs/CohesityHelios/Integrations/CohesityHelios/CohesityHelios_test.py | 2 | 3520 | """Cohesity Helios Cortex XSOAR - Unit Tests file
"""
import json
import io
# Cohesity Helios API endpoints exercised by the tests below.
BASE_URL = "https://helios.cohesity.com/"
ALERTS_URL = BASE_URL + "mcm/alerts"
ALERT_DETAIL_URL = BASE_URL + "mcm/alerts/6595940238747379:1630539139046817"
RESTORE_OBJECT_URL = BASE_URL + "irisservices/api/v1/public/restore/recover"

# Name of the anomalous object referenced by the canned API responses.
MOCK_OBJECT_NAME = "mock-testing03"
MOCK_ALERTS_RESP_FILE = "test_data/get_ransomware_alerts_resp.json"
MOCK_ALERT_DETAIL_RESP_FILE = "test_data/get_ransomware_alert_detail_resp.json"
def util_load_json(path):
    """Load and parse a UTF-8 JSON test-data file from ``path``."""
    with open(path, encoding='utf-8') as f:
        # json.load streams straight from the file object; no need for
        # the legacy io.open + read + loads round-trip.
        return json.load(f)
def test_test_module(requests_mock):
    """Tests test-module command function.

    Checks the output of the command function with the expected output.
    """
    from CohesityHelios import Client, test_module
    client = Client(
        base_url=BASE_URL,
        verify=False)

    # set up mock response.
    # An empty alerts payload is enough for a successful connectivity check.
    mock_response = {}
    requests_mock.get(ALERTS_URL, json=mock_response)

    response = test_module(client)
    assert response == "ok"
def test_fetch_incidents_command(requests_mock):
    """Tests fetch incidents. Since fetch_incidents_command calls
    get_ransomware_alerts_command(), that command is also tested.

    Checks the output of the command function with the expected output.
    """
    from CohesityHelios import Client, fetch_incidents_command
    client = Client(
        base_url=BASE_URL,
        verify=False)

    # set up mock response.
    # The canned response contains exactly one ransomware alert.
    mock_response = util_load_json(MOCK_ALERTS_RESP_FILE)
    requests_mock.get(ALERTS_URL, json=mock_response)

    response = fetch_incidents_command(client)
    assert len(response) == 1

    incident = response[0]
    assert incident['CustomFields']['anomalous_object'] == MOCK_OBJECT_NAME
    assert incident['CustomFields']['environment'] == 'kVMware'
def test_ignore_ransomware_anomaly_command(requests_mock):
    """Tests ignore_ransomware_anomaly_command.

    Checks the output of the command function with the expected output.
    """
    from CohesityHelios import Client, ignore_ransomware_anomaly_command
    client = Client(
        base_url=BASE_URL,
        verify=False)

    # set up mock response.
    # The command first lists alerts, then PATCHes the matching alert.
    mock_response_alerts = util_load_json(MOCK_ALERTS_RESP_FILE)
    requests_mock.get(ALERTS_URL, json=mock_response_alerts)
    mock_response_alert_detail = util_load_json(MOCK_ALERT_DETAIL_RESP_FILE)
    requests_mock.patch(ALERT_DETAIL_URL, json=mock_response_alert_detail)

    args = {'object_name': MOCK_OBJECT_NAME}
    response = ignore_ransomware_anomaly_command(client, args)
    assert response == f"Ignored object {MOCK_OBJECT_NAME}."
def test_restore_latest_clean_snapshot(requests_mock):
    """Tests restore_latest_clean_snapshot

    Checks the output of the command function with the expected output.
    """
    from CohesityHelios import Client, restore_latest_clean_snapshot
    client = Client(
        base_url=BASE_URL,
        verify=False)

    # set up mock response.
    # Alert listing + alert detail feed the restore, which POSTs a recover task.
    mock_response_alerts = util_load_json(MOCK_ALERTS_RESP_FILE)
    requests_mock.get(ALERTS_URL, json=mock_response_alerts)
    mock_response_alert_detail = util_load_json(MOCK_ALERT_DETAIL_RESP_FILE)
    requests_mock.patch(ALERT_DETAIL_URL, json=mock_response_alert_detail)
    requests_mock.post(RESTORE_OBJECT_URL, json={})

    args = {'object_name': MOCK_OBJECT_NAME}
    response = restore_latest_clean_snapshot(client, args)
    assert response == f"Restored object {MOCK_OBJECT_NAME}."
| mit | 86d769af73b245d290123ef4c78ba5a4 | 31.293578 | 79 | 0.705966 | 3.365201 | false | true | false | false |
missionpinball/mpf | mpf/tests/test_Delay.py | 1 | 4931 | from mpf.tests.MpfTestCase import MpfTestCase
from unittest.mock import MagicMock
class TestDelay(MpfTestCase):
    """Exercises self.machine.delay: add/remove/check/reset/clear,
    add_if_doesnt_exist and run_now, using mocked callbacks and the
    test clock advanced via advance_time_and_run()."""

    def callback(self):
        # Placeholder; each test replaces this with a MagicMock.
        pass

    def test_basic_functions(self):
        """Add a delay, let it fire; add another and cancel it before firing."""
        self.callback = MagicMock()
        self.assertEqual(len(self.machine.delay.delays), 0)

        # Create a one second delay
        self.machine.delay.add(1000, self.callback, "delay_test")
        self.assertEqual(len(self.machine.delay.delays), 1)
        self.assertIn("delay_test", self.machine.delay.delays.keys())

        # Advance 0.5 sec (callback should not have been called yet)
        self.advance_time_and_run(0.5)
        self.callback.assert_not_called()

        # Advance another 0.5 sec (callback should have been called)
        self.advance_time_and_run(0.5)
        self.callback.assert_called_with()
        self.assertEqual(len(self.machine.delay.delays), 0)
        self.assertNotIn("delay_test", self.machine.delay.delays.keys())

        # Create another one second delay
        self.callback.reset_mock()
        self.machine.delay.add(1000, self.callback, "delay_test2")
        self.assertEqual(len(self.machine.delay.delays), 1)
        self.assertIn("delay_test2", self.machine.delay.delays.keys())

        # Advance 0.5 sec (callback should not have been called yet)
        self.advance_time_and_run(0.5)
        self.callback.assert_not_called()

        # Now cancel the delay
        self.machine.delay.remove("delay_test2")
        self.assertEqual(len(self.machine.delay.delays), 0)
        self.assertNotIn("delay_test2", self.machine.delay.delays.keys())

        # Advance another 0.5 sec (callback should not be called since it was cancelled)
        self.advance_time_and_run(0.5)
        self.callback.assert_not_called()

    def test_remove(self):
        """A removed delay never fires its callback."""
        self.callback = MagicMock()
        self.machine.delay.add(1000, self.callback, "delay_test")
        self.assertTrue(self.machine.delay.check('delay_test'))
        self.machine.delay.remove('delay_test')
        self.advance_time_and_run(2)
        self.callback.assert_not_called()

    def test_check(self):
        """check() is true for a pending name and false for an unknown one."""
        self.callback = MagicMock()
        self.machine.delay.add(1000, self.callback, "delay_test")
        self.assertTrue(self.machine.delay.check('delay_test'))
        self.assertFalse(self.machine.delay.check('delay_test_fake'))

    def test_double_add(self):
        """Re-adding the same name restarts its countdown."""
        self.callback = MagicMock()
        # add delay
        self.machine.delay.add(100, self.callback, "delay_test")
        self.advance_time_and_run(.05)
        # add same name again. it should reset the time
        self.machine.delay.add(100, self.callback, "delay_test")
        self.advance_time_and_run(.06)
        self.callback.assert_not_called()
        self.advance_time_and_run(.05)
        self.callback.assert_any_call()

    def test_reset(self):
        """reset() replaces an existing delay and creates a missing one."""
        self.callback = MagicMock()
        self.machine.delay.add(1000, self.callback, "delay_test")
        self.assertTrue(self.machine.delay.check('delay_test'))
        self.machine.delay.reset(2000, self.callback, "delay_test")
        self.advance_time_and_run(1.1)
        self.callback.assert_not_called()
        self.advance_time_and_run(1)
        self.callback.assert_any_call()

        # make sure reset works if there is no delay with that name
        self.machine.delay.reset(1000, self.callback, "delay_test2")
        self.advance_time_and_run(1.1)
        self.callback.assert_any_call()

    def test_clear(self):
        """clear() cancels every pending delay at once."""
        self.callback = MagicMock()
        self.machine.delay.add(1000, self.callback)
        self.machine.delay.add(2000, self.callback)
        self.machine.delay.clear()
        self.advance_time_and_run(3)
        self.callback.assert_not_called()

    def test_add_if_doesnt_exist(self):
        """add_if_doesnt_exist() is a no-op when the name is already pending."""
        self.callback = MagicMock()
        self.machine.delay.add_if_doesnt_exist(1000, self.callback,
                                               "delay_test")
        self.advance_time_and_run(1.1)
        self.callback.assert_any_call()

        self.callback = MagicMock()
        self.machine.delay.add_if_doesnt_exist(1000, self.callback,
                                               "delay_test")
        self.machine.delay.add_if_doesnt_exist(500, self.callback,
                                               "delay_test")
        self.advance_time_and_run(.6)
        self.callback.assert_not_called()
        self.advance_time_and_run(.5)
        self.callback.assert_any_call()

    def test_run_now(self):
        """run_now() fires the callback immediately and consumes the delay."""
        self.callback = MagicMock()
        self.machine.delay.add(1000, self.callback, "delay_test")
        self.advance_time_and_run(.1)
        self.callback.assert_not_called()
        self.machine.delay.run_now("delay_test")
        self.advance_time_and_run(.1)
        self.callback.assert_any_call()

        # The consumed delay must not fire a second time later.
        self.callback = MagicMock()
        self.advance_time_and_run(1)
        self.callback.assert_not_called()
| mit | 075e8be9303cce4034d69875e6e7f038 | 35.525926 | 88 | 0.627053 | 3.641802 | false | true | false | false |
missionpinball/mpf | mpf/core/file_manager.py | 1 | 3664 | """Contains the FileManager base classes."""
import logging
import os
from mpf.file_interfaces.pickle_interface import PickleInterface
from mpf.file_interfaces.yaml_interface import YamlInterface
MYPY = False
if MYPY: # pragma: no cover
from typing import Dict, List # pylint: disable-msg=cyclic-import,unused-import
class FileManager:

    """Manages file interfaces.

    Class-level registry mapping file extensions to interface objects;
    lazily initialised on first use via ``init()``.
    """

    __slots__ = []  # type: List[str]

    log = logging.getLogger('FileManager')
    # Registry of extension -> interface instance, shared by all callers.
    file_interfaces = dict()    # type: Dict[str, YamlInterface]
    initialized = False

    @classmethod
    def init(cls):
        """Initialise file interfaces."""
        cls.file_interfaces[".yaml"] = YamlInterface()
        cls.file_interfaces[".bin"] = PickleInterface()

        FileManager.initialized = True

    @staticmethod
    def locate_file(filename) -> str:
        """Find a file location.

        Args:
        ----
            filename: Filename to locate

        Returns: Location of file

        Raises FileNotFoundError when no filename is given or no file
        (with any known extension) can be found.
        """
        if not filename:
            raise FileNotFoundError("No filename provided")

        if not FileManager.initialized:
            FileManager.init()

        ext = os.path.splitext(filename)[1]

        if not os.path.isfile(filename):
            # If the file doesn't have an extension, let's see if we can find
            # one
            if not ext:
                for config_processor in set(FileManager.file_interfaces.values()):
                    questionable_file, ext = config_processor.find_file(filename)
                    if isinstance(questionable_file, str):
                        return questionable_file

            raise FileNotFoundError("File not found: {}".format(filename))

        return filename

    @staticmethod
    def get_file_interface(filename):
        """Return a file interface.

        Returns None when the file cannot be located or its extension has
        no registered interface.
        """
        try:
            FileManager.locate_file(filename)
        except FileNotFoundError:
            return None

        ext = os.path.splitext(filename)[1]
        try:
            return FileManager.file_interfaces[ext]
        except KeyError:
            return None

    @staticmethod
    def load(filename, verify_version=False, halt_on_error=True):
        """Load a file by name.

        With ``halt_on_error=False`` a missing file yields an empty dict
        instead of raising.
        """
        if not FileManager.initialized:
            FileManager.init()

        try:
            file = FileManager.locate_file(filename)
        except FileNotFoundError:
            if halt_on_error:
                raise IOError("Could not find file {}".format(filename))

            return dict()

        if not file and halt_on_error:
            raise IOError(
                "Could not find file '{}'. Resolved abs path to {}".format(
                    filename, os.path.abspath(filename)))

        ext = os.path.splitext(file)[1]

        try:
            interface = FileManager.file_interfaces[ext]
        except KeyError:
            raise AssertionError("No config file processor available for file type {}".format(ext))

        return interface.load(file, verify_version, halt_on_error)

    @staticmethod
    def save(filename, data):
        """Save data to file."""
        if not FileManager.initialized:
            FileManager.init()

        ext = os.path.splitext(filename)[1]

        # save to temp file and move afterwards. prevents broken files
        temp_file = os.path.dirname(filename) + os.sep + "_" + os.path.basename(filename)

        try:
            FileManager.file_interfaces[ext].save(temp_file, data)
        except KeyError:
            raise AssertionError("No config file processor available for file type {}".format(ext))

        # move temp file
        os.replace(temp_file, filename)
| mit | ac6906e7b2464a89ec5e2fde3c2148b8 | 28.788618 | 99 | 0.599345 | 4.540273 | false | false | false | false |
pennersr/django-allauth | allauth/socialaccount/providers/sharefile/views.py | 2 | 1432 | import requests
from allauth.socialaccount import app_settings
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2CallbackView,
OAuth2LoginView,
)
from .provider import ShareFileProvider
class ShareFileOAuth2Adapter(OAuth2Adapter):
    """OAuth2 adapter for Citrix ShareFile.

    Endpoint hosts are assembled from the provider settings: SUBDOMAIN
    and APICP select the OAuth token host, DEFAULT_URL the authorize host.
    """

    provider_id = ShareFileProvider.id
    settings = app_settings.PROVIDERS.get(provider_id, {})

    subdomain = settings.get("SUBDOMAIN", "secure")
    apicp = settings.get("APICP", "sharefile.com")
    provider_default_url = settings.get("DEFAULT_URL", "https://secure.sharefile.com")
    provider_default_api_url = "https://{}.sf-api.com".format(subdomain)
    provider_api_version = "v3"

    access_token_url = "https://{}.{}/oauth/token".format(subdomain, apicp)
    refresh_token_url = "https://{}.{}/oauth/token".format(subdomain, apicp)
    authorize_url = "{}/oauth/authorize".format(provider_default_url)
    profile_url = "{}/sf/{}/Users".format(
        provider_default_api_url, provider_api_version
    )

    def complete_login(self, request, app, token, response):
        # Fetch the user record with the freshly issued bearer token.
        headers = {"Authorization": "Bearer {}".format(token.token)}
        extra_data = requests.get(self.profile_url, headers=headers).json()
        return self.get_provider().sociallogin_from_response(request, extra_data)


oauth2_login = OAuth2LoginView.adapter_view(ShareFileOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(ShareFileOAuth2Adapter)
| mit | 12a56e1c164aaf9a5d3b7649d13179e8 | 37.702703 | 86 | 0.717179 | 3.509804 | false | false | false | false |
pennersr/django-allauth | allauth/socialaccount/providers/exist/provider.py | 2 | 1152 | from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class ExistAccount(ProviderAccount):
    """Wraps an Exist.io social account's extra data."""

    def get_profile_url(self):
        # The profile endpoint is keyed by the Exist username.
        return "https://exist.io/api/1/users/{}/profile/".format(
            self.account.extra_data.get("username")
        )

    def get_avatar_url(self):
        return self.account.extra_data.get("avatar")

    def to_str(self):
        fallback = super(ExistAccount, self).to_str()
        return self.account.extra_data.get("name", fallback)
class ExistProvider(OAuth2Provider):
    """OAuth2 provider for Exist.io."""

    id = "exist"
    name = "Exist.io"
    account_class = ExistAccount

    def get_default_scope(self):
        return ["read"]

    def extract_uid(self, data):
        return data.get("id")

    def extract_common_fields(self, data):
        # Start from the base fields and layer the Exist-specific ones on top.
        common = super(ExistProvider, self).extract_common_fields(data)
        common.update(
            username=data.get("username"),
            first_name=data.get("first_name"),
            last_name=data.get("last_name"),
        )
        return common


provider_classes = [ExistProvider]
| mit | c2d3a5e43424a81d65232519a84d6229 | 28.538462 | 77 | 0.657986 | 3.74026 | false | false | false | false |
pennersr/django-allauth | allauth/socialaccount/providers/gitea/provider.py | 2 | 1165 | from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class GiteaAccount(ProviderAccount):
    """Wraps a Gitea social account's extra data."""

    def get_profile_url(self):
        return self.account.extra_data.get("html_url")

    def get_avatar_url(self):
        return self.account.extra_data.get("avatar_url")

    def to_str(self):
        fallback = super(GiteaAccount, self).to_str()
        # Prefer username, then login, falling back to the base rendering.
        candidates = (
            self.account.extra_data.get("username", None),
            self.account.extra_data.get("login", None),
            fallback,
        )
        return next(value for value in candidates if value is not None)
class GiteaProvider(OAuth2Provider):
    """OAuth2 provider for Gitea."""

    id = "gitea"
    name = "Gitea"
    account_class = GiteaAccount

    def get_default_scope(self):
        # Basic login needs no extra OAuth scopes.
        return []

    def extract_uid(self, data):
        return str(data["id"])

    def extract_common_fields(self, data):
        return dict(
            email=data.get("email"),
            username=data.get("login"),
            name=data.get("name"),
        )


provider_classes = [GiteaProvider]
| mit | 0e9e414a73879bcd6c828620c875163b | 24.888889 | 74 | 0.592275 | 3.832237 | false | false | false | false |
pennersr/django-allauth | allauth/socialaccount/providers/mailchimp/views.py | 2 | 1071 | """Views for MailChimp API v3."""
import requests
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2CallbackView,
OAuth2LoginView,
)
from .provider import MailChimpProvider
class MailChimpOAuth2Adapter(OAuth2Adapter):
    """OAuth2Adapter for MailChimp API v3."""

    provider_id = MailChimpProvider.id
    authorize_url = "https://login.mailchimp.com/oauth2/authorize"
    access_token_url = "https://login.mailchimp.com/oauth2/token"
    profile_url = "https://login.mailchimp.com/oauth2/metadata"

    def complete_login(self, request, app, token, **kwargs):
        """Complete login, ensuring correct OAuth header."""
        # MailChimp uses the "OAuth" scheme, not "Bearer".
        headers = {"Authorization": "OAuth {0}".format(token.token)}
        metadata = requests.get(self.profile_url, headers=headers)
        extra_data = metadata.json()
        return self.get_provider().sociallogin_from_response(request, extra_data)


oauth2_login = OAuth2LoginView.adapter_view(MailChimpOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(MailChimpOAuth2Adapter)
| mit | 0fc4108e5dcb7c49f5e7bc7e69cc4d70 | 32.46875 | 81 | 0.730159 | 3.57 | false | false | false | false |
pennersr/django-allauth | allauth/socialaccount/providers/mailchimp/provider.py | 2 | 1411 | """Customise Provider classes for MailChimp API v3."""
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class MailChimpAccount(ProviderAccount):
    """ProviderAccount subclass for MailChimp."""

    def get_profile_url(self):
        """Return base profile url."""
        # NOTE(review): indexing assumes "api_endpoint" is always present
        # in the metadata response — KeyError otherwise.
        return self.account.extra_data["api_endpoint"]

    def get_avatar_url(self):
        """Return avatar url."""
        return self.account.extra_data["login"]["avatar"]
class MailChimpProvider(OAuth2Provider):
    """OAuth2Provider subclass for MailChimp v3."""

    id = "mailchimp"
    name = "MailChimp"
    account_class = MailChimpAccount

    def extract_uid(self, data):
        """Extract uid ('user_id') and ensure it's a str."""
        return str(data["user_id"])

    def get_default_scope(self):
        """Ensure scope is null to fit their API."""
        return [""]

    def extract_common_fields(self, data):
        """Extract fields from a metadata query."""
        # Note the key rename: the API spells it "accountname".
        fields = {
            "dc": data.get("dc"),
            "role": data.get("role"),
            "account_name": data.get("accountname"),
        }
        for key in ("user_id", "login", "login_url", "api_endpoint"):
            fields[key] = data.get(key)
        return fields


provider_classes = [MailChimpProvider]
| mit | f5e453c33582cafc37b22062b1e85273 | 28.395833 | 74 | 0.627215 | 3.952381 | false | false | false | false |
pennersr/django-allauth | allauth/socialaccount/providers/trello/views.py | 2 | 1155 | import requests
from django.utils.http import urlencode
from allauth.socialaccount.providers.oauth.views import (
OAuthAdapter,
OAuthCallbackView,
OAuthLoginView,
)
from .provider import TrelloProvider
class TrelloOAuthAdapter(OAuthAdapter):
    """OAuth 1.0a adapter for Trello."""

    provider_id = TrelloProvider.id
    request_token_url = "https://trello.com/1/OAuthGetRequestToken"
    authorize_url = "https://trello.com/1/OAuthAuthorizeToken"
    access_token_url = "https://trello.com/1/OAuthGetAccessToken"

    def complete_login(self, request, app, token, response):
        # we need to get the member id and the other information
        # Trello expects the app key and the obtained token as query params.
        info_url = "{base}?{query}".format(
            base="https://api.trello.com/1/members/me",
            query=urlencode({"key": app.key, "token": response.get("oauth_token")}),
        )
        resp = requests.get(info_url)
        resp.raise_for_status()
        extra_data = resp.json()
        result = self.get_provider().sociallogin_from_response(request, extra_data)
        return result


oauth_login = OAuthLoginView.adapter_view(TrelloOAuthAdapter)
oauth_callback = OAuthCallbackView.adapter_view(TrelloOAuthAdapter)
| mit | 83859a6d6f148741256444f59ade449a | 32.970588 | 84 | 0.698701 | 3.553846 | false | false | false | false |
pennersr/django-allauth | allauth/socialaccount/providers/twitter/views.py | 2 | 1445 | import json
from allauth.socialaccount.app_settings import QUERY_EMAIL
from allauth.socialaccount.providers.oauth.client import OAuth
from allauth.socialaccount.providers.oauth.views import (
OAuthAdapter,
OAuthCallbackView,
OAuthLoginView,
)
from .provider import TwitterProvider
class TwitterAPI(OAuth):
    """
    Verifying twitter credentials
    """

    _base_url = "https://api.twitter.com/1.1/account/verify_credentials.json"
    # Request the e-mail address only when the app is configured to query it.
    url = _base_url + "?include_email=true" if QUERY_EMAIL else _base_url

    def get_user_info(self):
        # query() performs the signed OAuth request; response body is JSON.
        user = json.loads(self.query(self.url))
        return user
class TwitterOAuthAdapter(OAuthAdapter):
    """OAuth 1.0a adapter for Twitter."""

    provider_id = TwitterProvider.id
    request_token_url = "https://api.twitter.com/oauth/request_token"
    access_token_url = "https://api.twitter.com/oauth/access_token"
    # Issue #42 -- this one authenticates over and over again...
    # authorize_url = 'https://api.twitter.com/oauth/authorize'
    authorize_url = "https://api.twitter.com/oauth/authenticate"

    def complete_login(self, request, app, token, response):
        client = TwitterAPI(request, app.client_id, app.secret, self.request_token_url)
        extra_data = client.get_user_info()
        return self.get_provider().sociallogin_from_response(request, extra_data)


oauth_login = OAuthLoginView.adapter_view(TwitterOAuthAdapter)
oauth_callback = OAuthCallbackView.adapter_view(TwitterOAuthAdapter)
| mit | c40d3c269caf3ee115a452457e53ad9b | 33.404762 | 87 | 0.725952 | 3.639798 | false | false | false | false |
mosra/m.css | plugins/m/vk.py | 1 | 3102 | #
# This file is part of m.css.
#
# Copyright © 2017, 2018, 2019, 2020, 2021, 2022
# Vladimír Vondruš <mosra@centrum.cz>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
import re
from docutils import nodes, utils
from docutils.parsers import rst
from docutils.parsers.rst.roles import set_classes
# to avoid dependencies, link_regexp and parse_link() is common for m.abbr,
# m.gh, m.gl, m.link and m.vk
link_regexp = re.compile(r'(?P<title>.*) <(?P<link>.+)>')

def parse_link(text):
    """Split ``"title <target>"`` into (title, target); title is None when absent."""
    unescaped = utils.unescape(text)
    match = link_regexp.match(unescaped)
    if match:
        return match.group('title', 'link')
    return None, unescaped
def _vk_reference(rawtext, title, url, options):
    """Shared body for the Vulkan roles: build a single reference node."""
    set_classes(options)
    node = nodes.reference(rawtext, title, refuri=url, **options)
    return [node], []

def vkext(name, rawtext, text, lineno, inliner, options={}, content=[]):
    """Role linking to a Vulkan extension anchor in the 1.1 specification."""
    title, extension = parse_link(text)
    if not title: title = extension
    url = "https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VK_{}".format(extension)
    return _vk_reference(rawtext, title, url, options)

def vkfn(name, rawtext, text, lineno, inliner, options={}, content=[]):
    """Role linking to a Vulkan function man page; default title is vk<fn>()."""
    title, fn = parse_link(text)
    if not title: title = "vk{}()".format(fn)
    url = "https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vk{}.html".format(fn)
    return _vk_reference(rawtext, title, url, options)

def vktype(name, rawtext, text, lineno, inliner, options={}, content=[]):
    """Role linking to a Vulkan type man page; default title is Vk<type>."""
    title, fn = parse_link(text)
    if not title: title = "Vk{}".format(fn)
    url = "https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/Vk{}.html".format(fn)
    return _vk_reference(rawtext, title, url, options)
def register_mcss(**kwargs):
    """Register the vkext/vkfn/vktype roles with docutils (m.css hook)."""
    rst.roles.register_local_role('vkext', vkext)
    rst.roles.register_local_role('vkfn', vkfn)
    rst.roles.register_local_role('vktype', vktype)

# Below is only Pelican-specific functionality. If Pelican is not found, these
# do nothing.

register = register_mcss # for Pelican
| mit | d59b834740bb15cf9b960bbb82dc3af4 | 40.878378 | 113 | 0.704098 | 3.398026 | false | false | false | false |
simpleai-team/simpleai | simpleai/search/traditional.py | 5 | 5707 | # coding=utf-8
from simpleai.search.utils import FifoList, BoundedPriorityQueue, LifoList
from simpleai.search.models import (SearchNode, SearchNodeHeuristicOrdered,
SearchNodeStarOrdered,
SearchNodeCostOrdered)
def breadth_first(problem, graph_search=False, viewer=None):
    '''
    Breadth first search: expands the shallowest unexplored node first.

    If graph_search=True, will avoid exploring repeated states.
    Requires: SearchProblem.actions, SearchProblem.result, and
    SearchProblem.is_goal.
    '''
    fringe = FifoList()  # FIFO frontier -> nodes leave in generation order
    return _search(problem, fringe, graph_search=graph_search, viewer=viewer)
def depth_first(problem, graph_search=False, viewer=None):
    '''
    Depth first search: always expands the deepest unexplored node.

    If graph_search=True, will avoid exploring repeated states.
    Requires: SearchProblem.actions, SearchProblem.result, and
    SearchProblem.is_goal.
    '''
    fringe = LifoList()  # LIFO frontier -> newest node expanded first
    return _search(problem, fringe, graph_search=graph_search, viewer=viewer)
def limited_depth_first(problem, depth_limit, graph_search=False, viewer=None):
    '''
    Limited depth first search.

    Depth_limit is the maximum depth allowed, being depth 0 the initial state.
    If graph_search=True, will avoid exploring repeated states.
    Requires: SearchProblem.actions, SearchProblem.result, and
    SearchProblem.is_goal.
    '''
    fringe = LifoList()
    return _search(problem, fringe, graph_search=graph_search,
                   depth_limit=depth_limit, viewer=viewer)
def iterative_limited_depth_first(problem, graph_search=False, viewer=None):
    '''
    Iterative limited depth first search (iterative deepening).

    If graph_search=True, will avoid exploring repeated states.
    Requires: SearchProblem.actions, SearchProblem.result, and
    SearchProblem.is_goal.
    '''
    limit = 0
    solution = None
    # Repeatedly run depth-limited search with a growing depth bound until
    # a solution is found.
    while not solution:
        solution = limited_depth_first(problem, depth_limit=limit,
                                       graph_search=graph_search,
                                       viewer=viewer)
        limit += 1

    if viewer:
        viewer.event('no_more_runs', solution, 'returned after %i runs' % limit)

    return solution
def uniform_cost(problem, graph_search=False, viewer=None):
    '''
    Uniform cost search.

    Expands nodes in order of accumulated path cost.
    If graph_search=True, will avoid exploring repeated states.
    Requires: SearchProblem.actions, SearchProblem.result,
    SearchProblem.is_goal, and SearchProblem.cost.
    '''
    options = dict(graph_search=graph_search,
                   node_factory=SearchNodeCostOrdered,
                   graph_replace_when_better=True,
                   viewer=viewer)
    return _search(problem, BoundedPriorityQueue(), **options)
def greedy(problem, graph_search=False, viewer=None):
    '''
    Greedy search.

    Expands nodes in order of heuristic value alone.
    If graph_search=True, will avoid exploring repeated states.
    Requires: SearchProblem.actions, SearchProblem.result,
    SearchProblem.is_goal, SearchProblem.cost, and SearchProblem.heuristic.
    '''
    options = dict(graph_search=graph_search,
                   node_factory=SearchNodeHeuristicOrdered,
                   graph_replace_when_better=True,
                   viewer=viewer)
    return _search(problem, BoundedPriorityQueue(), **options)
def astar(problem, graph_search=False, viewer=None):
    '''
    A* search.

    Expands nodes in order of cost-so-far plus heuristic estimate.
    If graph_search=True, will avoid exploring repeated states.
    Requires: SearchProblem.actions, SearchProblem.result,
    SearchProblem.is_goal, SearchProblem.cost, and SearchProblem.heuristic.
    '''
    options = dict(graph_search=graph_search,
                   node_factory=SearchNodeStarOrdered,
                   graph_replace_when_better=True,
                   viewer=viewer)
    return _search(problem, BoundedPriorityQueue(), **options)
def _search(problem, fringe, graph_search=False, depth_limit=None,
            node_factory=SearchNode, graph_replace_when_better=False,
            viewer=None):
    '''
    Basic search algorithm, base of all the other search algorithms.

    :param problem: SearchProblem instance providing actions/result/is_goal.
    :param fringe: container that determines the exploration order (FIFO ->
                   breadth first, LIFO -> depth first, priority queue ->
                   cost/heuristic ordered).
    :param graph_search: if True, repeated states are not explored again.
    :param depth_limit: maximum node depth to expand, or None for no limit.
    :param node_factory: SearchNode subclass used to build the tree nodes
                         (controls the ordering inside priority fringes).
    :param graph_replace_when_better: in graph search, replace an equivalent
                                      fringe node when the new node compares
                                      smaller ("better").
    :param viewer: optional viewer object notified of search events.
    :returns: the goal SearchNode, or None if the fringe is exhausted.
    '''
    if viewer:
        viewer.event('started')
    # States already chosen for expansion (only consulted in graph search).
    memory = set()
    initial_node = node_factory(state=problem.initial_state,
                                problem=problem)
    fringe.append(initial_node)
    while fringe:
        if viewer:
            viewer.event('new_iteration', fringe.sorted())
        node = fringe.pop()
        if problem.is_goal(node.state):
            if viewer:
                viewer.event('chosen_node', node, True)
                viewer.event('finished', fringe.sorted(), node, 'goal found')
            return node
        else:
            if viewer:
                viewer.event('chosen_node', node, False)
        memory.add(node.state)
        if depth_limit is None or node.depth < depth_limit:
            expanded = node.expand()
            if viewer:
                viewer.event('expanded', [node], [expanded])
            for n in expanded:
                if graph_search:
                    # Graph search invariant: each state appears at most once
                    # in the fringe.
                    others = [x for x in fringe if x.state == n.state]
                    assert len(others) in (0, 1)
                    if n.state not in memory and len(others) == 0:
                        fringe.append(n)
                    elif graph_replace_when_better and len(others) > 0 and n < others[0]:
                        # A cheaper/better route to the same state was found:
                        # keep only the new node.
                        fringe.remove(others[0])
                        fringe.append(n)
                else:
                    fringe.append(n)
    if viewer:
        viewer.event('finished', fringe.sorted(), None, 'goal not found')
| mit | a6519f2d9064e533a69f55aee047c831 | 31.988439 | 89 | 0.583669 | 4.396764 | false | false | false | false |
simpleai-team/simpleai | tests/search/test_local.py | 1 | 3196 | # coding=utf-8
import unittest
from tests.search.dummies import DummyProblem, GOAL, DummyGeneticProblem
from simpleai.search.local import (beam, beam_best_first,
hill_climbing,
hill_climbing_stochastic,
simulated_annealing,
hill_climbing_random_restarts, genetic)
from simpleai.search.models import SearchNode
class TestLocalSearch(unittest.TestCase):
    """Checks that every local search method reaches the known goal state."""

    def setUp(self):
        self.problem = DummyProblem()
        self.problem.initial_state = 'i'

    def _check_finds_goal(self, method, **kwargs):
        # Helper: run a local search method and verify it ends at GOAL.
        result = method(self.problem, **kwargs)
        self.assertEqual(result.state, GOAL)

    def test_beam(self):
        self._check_finds_goal(beam)

    def test_beam_best_first(self):
        self._check_finds_goal(beam_best_first)

    def test_hill_climbing(self):
        self._check_finds_goal(hill_climbing)

    def test_hill_climbing_stochastic(self):
        self._check_finds_goal(hill_climbing_stochastic)

    def test_hill_climbing_random_restarts(self):
        self._check_finds_goal(hill_climbing_random_restarts,
                               restarts_limit=2)

    def test_simulated_annealing(self):
        # Replace the problem's actions with ones that always move "up",
        # to check that simulated_annealing walks the correct states.
        transitions = {'i': 'a', 'a': 'b', 'b': 'c', 'c': 'a'}

        def always_up(state):
            if len(state) < len(GOAL):
                return transitions[state[-1]]
            return []

        self.problem.actions = always_up
        self._check_finds_goal(simulated_annealing)
class TestGeneticSearch(unittest.TestCase):
    """Exercises genetic search via DummyGeneticProblem's arithmetic states."""

    def setUp(self):
        self.problem = DummyGeneticProblem()

    def test_solution_is_node(self):
        result = genetic(self.problem, iterations_limit=1, mutation_chance=0,
                         population_size=1)
        self.assertIsInstance(result, SearchNode)

    def test_calls_crossover(self):
        result = genetic(self.problem, iterations_limit=1, mutation_chance=0,
                         population_size=5)
        self.assertEqual(result.state, 5)

    def test_calls_mutation(self):
        result = genetic(self.problem, iterations_limit=1, mutation_chance=1,
                         population_size=5)
        self.assertEqual(result.state, 20)

    def test_count_generations(self):
        result = genetic(self.problem, iterations_limit=10, mutation_chance=0,
                         population_size=5)
        # The initial state is 4, and each of the 10 generations adds one.
        self.assertEqual(result.state, 14)

    def test_zero_fitness_get_waxed(self):
        # Feed a fixed sequence of "random" states; selection should drop
        # every zero-fitness individual and keep only the state worth 1,
        # which then gets crossed over / doubled into 2.
        sequence = [0, 0, 1, 0, 0]
        calls = []

        def fake_random_state():
            calls.append(None)
            return sequence[len(calls) - 1]

        self.problem.generate_random_state = fake_random_state
        self.problem.value = lambda state: state
        result = genetic(self.problem, iterations_limit=1, mutation_chance=0,
                         population_size=5)
        self.assertEqual(result.state, 2)
| mit | 885b40ea97dcbfda320bb306f0d6ec54 | 35.318182 | 95 | 0.613267 | 3.665138 | false | true | false | false |
simpleai-team/simpleai | tests/machine_learning/test_classifiers.py | 1 | 9496 | #!/usr/bin/env python
# coding: utf-8
"""
Tests for dtree.
"""
import os
import math
import tempfile
import unittest
from collections import defaultdict
import numpy as np
from simpleai.machine_learning import evaluation
from simpleai.machine_learning.models import VectorDataClassificationProblem
from simpleai.machine_learning.classifiers import DecisionTreeLearner, \
DecisionTreeLearner_Queued, DecisionTreeLearner_LargeData, NaiveBayes, \
KNearestNeighbors
def euclidean_vector_distance(x, y):
    """Return the Euclidean (L2) distance between two equal-length vectors."""
    squared_deltas = ((a - b) ** 2 for a, b in zip(x, y))
    return math.sqrt(sum(squared_deltas))
class BaseTestClassifier(object):
    """
    Shared tests for any classifier implementation.

    Subclasses must set ``classifier`` (the learner class under test) and
    mix in a corpus class providing ``setup_dataset``, which fills
    ``self.corpus`` and ``self.problem``.
    """

    classifier = None

    def setup_dataset(self):
        # Provided by the corpus mixin class.
        raise NotImplementedError()

    def setUp(self):
        if self.classifier is None:
            raise NotImplementedError("Choose a classifier")
        self.setup_dataset()
        # Hold out ~10% of the corpus as a test set, chosen with a
        # deterministic pseudo-random walk so runs are reproducible.
        N = int(len(self.corpus) / 10)
        self.test_set = []
        i = 1
        while len(self.test_set) != N:
            i = (i * 1223) % len(self.corpus) + 1  # "random" number generator
            self.test_set.append(self.corpus.pop(i - 1))
        self.this = self.classifier(self.corpus, self.problem)
        self.attributes = self.problem.attributes
        self.target = self.problem.target

    def test_better_than_majority(self):
        # A trained classifier should be at least as precise as a baseline
        # that always predicts the majority class.
        d = defaultdict(int)
        for example in self.corpus:
            d[self.target(example)] += 1
        majority = max(d, key=d.get)

        class MockClassifier(object):
            target = self.target

            def classify(self, example):
                return majority, 1.0

        mock = MockClassifier()
        mock_prec = evaluation.precision(mock, self.test_set)
        this_prec = evaluation.precision(self.this, self.test_set)
        try:
            self.assertGreaterEqual(this_prec, mock_prec)
        except AssertionError:
            # Dump the corpus to help diagnose the failure, but re-raise:
            # the previous bare ``except: print(...)`` swallowed the
            # assertion, so this test could never actually fail.
            print(self.corpus)
            raise

    def test_tolerates_empty_attributes(self):
        # With no attributes the classifier cannot do much, but it must not
        # crash while training or classifying.
        self.problem.attributes = []
        self.this = self.classifier(self.corpus, self.problem)
        evaluation.precision(self.this, self.test_set)

    def test_handles_empty_dataset(self):
        # Training on an empty dataset must be rejected explicitly.
        self.assertRaises(ValueError, self.classifier,
                          [], self.problem)

    def test_target_in_attributes(self):
        """
        If target in attributes precision is 1.0.
        """
        self.problem.attributes = [self.target]
        self.this = self.classifier(self.corpus, self.problem)
        prec = evaluation.precision(self.this, self.test_set)
        self.assertEqual(prec, 1.0)

    def test_save_classifier(self):
        _, tmp_filepath = tempfile.mkstemp()
        # Bad values must be rejected.
        self.assertRaises(ValueError, self.this.save, None)
        self.assertRaises(ValueError, self.this.save, "")
        self.assertRaises(ValueError, self.this.save, 42)
        # Record the classifications before persisting the classifier.
        classification_values = {}
        for test in self.test_set:
            classification_values[tuple(test)] = self.this.classify(test)
        self.this.save(tmp_filepath)
        self.assertTrue(os.path.exists(tmp_filepath))
        self.assertNotEqual(os.stat(tmp_filepath).st_size, 0)  # File not empty
        # The classification must remain equal after saving the classifier.
        for test in self.test_set:
            self.assertEqual(classification_values[tuple(test)],
                             self.this.classify(test))

    def test_load(self):
        _, tmp_filepath = tempfile.mkstemp()
        self.this.save(tmp_filepath)
        # Record the classifications before loading a fresh copy.
        classification_values = {}
        for test in self.test_set:
            classification_values[tuple(test)] = self.this.classify(test)
        classifier = self.classifier.load(tmp_filepath)
        self.assertIsInstance(classifier, self.classifier)
        # The classification must remain equal after loading the classifier.
        for test in self.test_set:
            self.assertEqual(classification_values[tuple(test)],
                             classifier.classify(test))

    def test_leave_one_out(self):
        # Leave-one-out cross validation should yield non-zero precision.
        fold = evaluation.kfold(self.corpus, self.problem,
                                self.classifier, len(self.corpus))
        self.assertNotEqual(fold, 0)
class BaseTestDtree_Pseudo(BaseTestClassifier):
    """Runs the shared suite against the pseudocode decision tree learner."""

    classifier = DecisionTreeLearner

    def test_no_target_split(self):
        # Walk the whole tree: no internal node may split on the target.
        pending = [self.this.root]
        while pending:
            current = pending.pop()
            self.assertNotEqual(self.target, current.attribute)
            pending.extend(current.branches.values())
class BaseTestDtree_LargeData(BaseTestDtree_Pseudo):
    """Runs the shared suite against the large-data decision tree learner."""

    classifier = DecisionTreeLearner_LargeData

    def test_equal_classification(self):
        """
        The tree learning methods must classify exactly alike.
        """
        reference = DecisionTreeLearner(self.corpus, self.problem)
        for example in self.test_set:
            self.assertEqual(reference.classify(example),
                             self.this.classify(example))

    def test_every_node_can_classify(self):
        # Every node in the tree must carry a usable classification result.
        pending = [self.this.root]
        while pending:
            current = pending.pop()
            self.assertNotEqual(current.result, None)
            pending.extend(current.branches.values())
class BaseTestDtree_Queued(BaseTestDtree_LargeData):
    # Re-runs the full large-data test suite against the queued tree learner.
    classifier = DecisionTreeLearner_Queued
class BaseTestNaiveBayes(BaseTestClassifier):
    # Runs the shared classifier test suite against the NaiveBayes learner.
    classifier = NaiveBayes
class BaseTestKNearestNeighbors(BaseTestClassifier):
    # Runs the shared classifier test suite against the k-NN learner.
    classifier = KNearestNeighbors
class CorpusIris(object):
    """Corpus mixin that loads the bundled iris dataset."""

    IRIS_PATH = os.path.join(os.path.dirname(__file__), "iris.txt")

    def setup_dataset(self):
        """
        Fill ``self.corpus`` and ``self.problem`` with the iris data,
        rounding every value to the nearest integer.
        """
        with open(self.IRIS_PATH) as filehandler:
            raw_lines = filehandler.read().split("\n")
        dataset = [[np.rint(float(value)) for value in line.split()]
                   for line in raw_lines if line.split()]
        problem = VectorDataClassificationProblem(dataset, target_index=4)
        problem.distance = euclidean_vector_distance
        self.corpus = dataset
        self.problem = problem
class CorpusXor(object):
    """Corpus mixin that builds a small parity (xor) dataset."""

    def setup_dataset(self):
        """
        Create n k-bit examples of the parity problem: k deterministic
        pseudo-random bits followed by 1 if an odd number of them are 1,
        else 0.
        """
        k = 2
        n = 100

        def pseudo_random_bit(i, j):
            return (((i + j) * 1223) % (n + 1)) % 2

        dataset = []
        for i in range(n):
            bits = [pseudo_random_bit(i, j) for j in range(k)]
            dataset.append(bits + [sum(bits) % 2])
        self.corpus = dataset
        self.problem = VectorDataClassificationProblem(dataset,
                                                       target_index=k)
class CorpusPrimes(object):
    """Corpus mixin that builds a primality dataset from divisibility bits."""

    def setup_dataset(self):
        """
        Fill ``self.corpus``/``self.problem`` with divisibility features
        (by 2, 3, 5, 7) and primality as the target.
        """
        # Magic number, chosen to avoid an "error" that cannot be patched
        # in Dtree Pseudo (with modifying the pseudocode).
        size = 105
        dataset = [[number % 2 == 0,
                    number % 3 == 0,
                    number % 5 == 0,
                    number % 7 == 0,
                    self.isprime(number)]
                   for number in range(size)]
        problem = VectorDataClassificationProblem(dataset, target_index=-1)
        problem.distance = euclidean_vector_distance
        self.corpus = dataset
        self.problem = problem

    def isprime(self, number):
        """
        Return whether ``number`` is prime, using trial division by the
        odd numbers up to sqrt(number).
        """
        if number < 2:
            return False
        if number == 2:
            return True
        if number % 2 == 0:
            return False
        return all(number % divisor
                   for divisor in range(3, int(number ** 0.5) + 1, 2))
def create_tstcase(classifier, corpus):
    """
    Build a concrete unittest.TestCase combining a corpus mixin with a
    classifier test base, register it in this module's globals (so test
    discovery finds it), and return it.

    Returning the class is new: previously this returned None, so the
    module-level ``TestX = create_tstcase(...)`` assignments below bound
    None instead of the generated test case.
    """
    name = "{}_{}".format(classifier.__name__, corpus.__name__)
    bases = (corpus, classifier, unittest.TestCase)
    newclass = type(name, bases, {})
    globals()[name] = newclass
    return newclass
# Build one concrete TestCase per (test base, corpus) combination, so every
# learner is exercised against every dataset. create_tstcase also registers
# each generated class in this module's globals() under its combined name
# ("<Base>_<Corpus>"), which is what the unittest loader discovers.
TestDtree_Pseudo_CorpusIris = create_tstcase(BaseTestDtree_Pseudo, CorpusIris)
TestDtree_Pseudo_CorpusXor = create_tstcase(BaseTestDtree_Pseudo, CorpusXor)
TestDtree_Pseudo_CorpusPrimes = create_tstcase(BaseTestDtree_Pseudo, CorpusPrimes)
TestDtree_Queued_CorpusIris = create_tstcase(BaseTestDtree_Queued, CorpusIris)
TestDtree_Queued_CorpusXor = create_tstcase(BaseTestDtree_Queued, CorpusXor)
TestDtree_Queued_CorpusPrimes = create_tstcase(BaseTestDtree_Queued, CorpusPrimes)
TestDtree_LargeData_CorpusIris = create_tstcase(BaseTestDtree_LargeData, CorpusIris)
TestDtree_LargeData_CorpusXor = create_tstcase(BaseTestDtree_LargeData, CorpusXor)
TestDtree_LargeData_CorpusPrimes = create_tstcase(BaseTestDtree_LargeData, CorpusPrimes)
TestNaiveBayes_CorpusIris = create_tstcase(BaseTestNaiveBayes, CorpusIris)
TestNaiveBayes_CorpusXor = create_tstcase(BaseTestNaiveBayes, CorpusXor)
TestNaiveBayes_CorpusPrimes = create_tstcase(BaseTestNaiveBayes, CorpusPrimes)
TestKNearestNeighbors_CorpusPrimes = create_tstcase(BaseTestKNearestNeighbors, CorpusPrimes)
TestKNearestNeighbors_CorpusIris = create_tstcase(BaseTestKNearestNeighbors, CorpusIris)
| mit | 984b97627d75f543bbeb5c6918f23bf0 | 32.087108 | 92 | 0.638795 | 3.883845 | false | true | false | false |
bernardopires/django-tenant-schemas | examples/tenant_tutorial/customers/views.py | 9 | 1718 | from django.contrib.auth.models import User
from django.db.utils import DatabaseError
from django.views.generic import FormView
from customers.forms import GenerateUsersForm
from customers.models import Client
from random import choice
class TenantView(FormView):
    """
    Tenant landing page: shows every tenant and every user of the current
    schema, and regenerates a fresh batch of random demo users when the
    form is submitted.
    """
    form_class = GenerateUsersForm
    template_name = "index_tenant.html"
    success_url = "/"

    def get_context_data(self, **kwargs):
        # Expose all tenants and the current schema's users to the template.
        context = super(TenantView, self).get_context_data(**kwargs)
        context['tenants_list'] = Client.objects.all()
        context['users'] = User.objects.all()
        return context

    def form_valid(self, form):
        # Wipe all existing users, then keep creating users with randomly
        # combined names until exactly USERS_TO_GENERATE exist.
        User.objects.all().delete()  # clean current users
        # generate five random users
        USERS_TO_GENERATE = 5
        first_names = ["Aiden", "Jackson", "Ethan", "Liam", "Mason", "Noah",
                       "Lucas", "Jacob", "Jayden", "Jack", "Sophia", "Emma",
                       "Olivia", "Isabella", "Ava", "Lily", "Zoe", "Chloe",
                       "Mia", "Madison"]
        last_names = ["Smith", "Brown", "Lee ", "Wilson", "Martin", "Patel",
                      "Taylor", "Wong", "Campbell", "Williams"]
        while User.objects.count() != USERS_TO_GENERATE:
            first_name = choice(first_names)
            last_name = choice(last_names)
            try:
                user = User(username=(first_name + last_name).lower(),
                            email="%s@%s.com" % (first_name, last_name),
                            first_name=first_name,
                            last_name=last_name)
                user.save()
            except DatabaseError:
                # Name collisions are simply retried -- presumably a duplicate
                # username raises an IntegrityError (a DatabaseError
                # subclass); TODO confirm this is the only expected failure.
                pass
        return super(TenantView, self).form_valid(form)
| mit | fa3498cd7662ab76e78bb35bd9a42182 | 38.045455 | 76 | 0.558789 | 3.726681 | false | false | false | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.