repo_name stringlengths 7 65 | path stringlengths 5 185 | copies stringlengths 1 4 | size stringlengths 4 6 | content stringlengths 977 990k | license stringclasses 14 values | hash stringlengths 32 32 | line_mean float64 7.18 99.4 | line_max int64 31 999 | alpha_frac float64 0.25 0.95 | ratio float64 1.5 7.84 | autogenerated bool 1 class | config_or_test bool 2 classes | has_no_keywords bool 2 classes | has_few_assignments bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
eliben/code-for-blog | 2009/csp_for_euler68/csp_sample_problems.py | 13 | 12407 | """ A collection of "worlds" suitable for solution by a CSP.
Each world has a make_XXX_CSP function that creates a new
CSP object, and some auxiliary utilities.
"""
import re, math
from collections import defaultdict
from types import StringTypes
from csplib import CSP
#----------------------------------------------------------------
#
# Map coloring
#
def unequal_vals_constraint(A, a, B, b):
    """ Binary constraint: neighboring variables A and B satisfy it
        exactly when their assigned values a and b differ.
    """
    return not (a == b)
def make_map_coloring_CSP(colors, neighbors):
    """ Build a map-coloring CSP.

        colors: the list of allowed colors (shared by every region).
        neighbors: either a dict mapping each region to its neighbor
            list, or a string in the 'X: Y Z; Y: Z' format accepted by
            parse_neighbors_graph.
    """
    # Accept the compact string syntax and normalize it to a dict.
    if isinstance(neighbors, StringTypes):
        neighbors = parse_neighbors_graph(neighbors)
    return CSP(
        vars=neighbors.keys(),
        # Every region can take any color, so a constant-factory
        # defaultdict serves as the domain map.
        domains=defaultdict(lambda: colors),
        neighbors=neighbors,
        binary_constraint=unequal_vals_constraint)
def parse_neighbors_graph(neighbors, vars=()):
    """ A utility for converting a string of the form
        'X: Y Z; Y: Z' into a dict mapping variables to their
        neighbors. The syntax is a vertex name followed by a
        ':' followed by zero or more vertex names, followed by
        ';', repeated for each vertex. Neighborhood is
        commutative.

        'vars' may contain vertices that have no neighbors.
        (The default is an immutable tuple rather than a mutable
        list, per the standard Python idiom for default arguments.)
    """
    graph = defaultdict(list)
    # Seed isolated vertices so they appear in the result even with
    # no edges.
    for var in vars:
        graph[var] = []
    specs = [spec.split(':') for spec in neighbors.split(';')]
    for (v, v_neighbors) in specs:
        v = v.strip()
        # Make sure v exists even if it has no neighbors listed.
        graph.setdefault(v, [])
        for u in v_neighbors.split():
            # Record the edge in both directions (commutativity).
            graph[v].append(u)
            graph[u].append(v)
    return graph
def make_australia_CSP():
    """ The classic 3-coloring problem for the states of Australia
        (AIMA's standard CSP example). Tasmania (T) has no neighbors.
    """
    #
    #   WA---NT---Q
    #     \  |  / \
    #      \ | /   \
    #       \|/     \
    #       SA------NSW
    #         \     /
    #          \   /
    #           \ /
    #            V
    #
    #
    #   T
    #
    return make_map_coloring_CSP(
        list('RGB'),
        'SA: WA NT Q NSW V; NT: WA Q; NSW: Q V; T: ')
def make_USA_CSP():
    """ 4-coloring of the contiguous USA map (plus DC, HI, AK).
        HI and AK are listed with no neighbors so they still appear
        as variables.
    """
    return make_map_coloring_CSP(list('RGBY'),
        """WA: OR ID; OR: ID NV CA; CA: NV AZ; NV: ID UT AZ; ID: MT WY UT;
UT: WY CO AZ; MT: ND SD WY; WY: SD NE CO; CO: NE KA OK NM; NM: OK TX;
ND: MN SD; SD: MN IA NE; NE: IA MO KA; KA: MO OK; OK: MO AR TX;
TX: AR LA; MN: WI IA; IA: WI IL MO; MO: IL KY TN AR; AR: MS TN LA;
LA: MS; WI: MI IL; IL: IN; IN: KY; MS: TN AL; AL: TN GA FL; MI: OH;
OH: PA WV KY; KY: WV VA TN; TN: VA NC GA; GA: NC SC FL;
PA: NY NJ DE MD WV; WV: MD VA; VA: MD DC NC; NC: SC; NY: VT MA CT NJ;
NJ: DE; DE: MD; MD: DC; VT: NH MA; MA: NH RI CT; CT: RI; ME: NH;
HI: ; AK: """)
#----------------------------------------------------------------
#
# N-Queens
#
# (var, val) is (column, row)
# The domain is 0..N-1
#
def queens_constraint(A, a, B, b):
    """ Non-attack constraint for queens assigned by column.
        Satisfied trivially when A and B are the same column; otherwise
        the two queens must share neither a row nor a diagonal.
    """
    if A == B:
        return True
    same_row = (a == b)
    same_diagonal = (A + a == B + b) or (A - a == B - b)
    return not (same_row or same_diagonal)
class NQueensCSP(CSP):
    """ CSP specialization that can render an N-Queens assignment as an
        ASCII board: '*' marks a queen, 'o' an empty square; one text
        row per board row.
    """
    def to_str(self, assignment):
        board_rows = []
        for row in self.domains:
            cells = ('*' if assignment[col] == row else 'o'
                     for col in self.vars)
            board_rows.append(''.join(cells) + '\n')
        return ''.join(board_rows)
def make_NQueens_CSP(n):
    """ Creates a N-Queens CSP problem for a given N.
        Note that this isn't a particularly efficient
        representation.

        Variables are columns, values are rows; every pair of columns
        is mutually constrained by queens_constraint.
    """
    # columns
    vars = list(range(n))
    # rows
    domains = list(range(n))
    # Every column is a neighbor of every other column (but not of
    # itself).
    neighbors = {}
    for v in vars:
        neighbors[v] = vars[:]
        neighbors[v].remove(v)
    return NQueensCSP(
        vars=vars,
        domains=defaultdict(lambda: domains),
        neighbors=neighbors,
        binary_constraint=queens_constraint)
#----------------------------------------------------------------
#
# Sudoku
#
# Vars are (row, col) pairs.
# Values are 1..9
#
class SudokuCSP(CSP):
    """ CSP specialization that renders a Sudoku assignment as an
        ASCII grid with 3x3 boxes separated by '|' and '+---' rules.
        Unassigned cells are shown as '_'.
    """
    def to_str(self, assignment):
        s = ''
        for row in range(9):
            # Horizontal separator before each band of 3 rows.
            if row % 3 == 0:
                s += '+-------+-------+-------+\n'
            s += '| '
            for col in range(9):
                if (row, col) in assignment:
                    s += str(assignment[(row, col)])
                else:
                    s += '_'
                # Vertical separator after each stack of 3 columns.
                if col % 3 == 2:
                    s += ' | '
                else:
                    s += ' '
            s += '\n'
        s += '+-------+-------+-------+\n'
        return s
def cross(A, B):
    """ Cartesian product of A and B, as a list of (a, b) tuples. """
    pairs = []
    for a in A:
        for b in B:
            pairs.append((a, b))
    return pairs
def parse_sudoku_assignment(grid):
    """ Given a string of 81 digits, return the assignment it
        represents as a dict mapping (row, col) -> digit.
        0 means unassigned (such cells are omitted from the result).
        Whitespace is ignored.
    """
    # Raw string avoids the invalid-escape warning for '\s'.
    digits = re.sub(r'\s', '', grid)
    assert len(digits) == 81
    digit = iter(digits)
    asg = {}
    for row in range(9):
        for col in range(9):
            # next() (instead of the Python-2-only .next() method)
            # works on both Python 2.6+ and Python 3.
            d = int(next(digit))
            if d > 0:
                asg[(row, col)] = d
    return asg
def make_sudoku_CSP():
    """ A regular 9x9 Sudoku puzzle. Note that it's an 'empty'
        Sudoku board. Solving partially filled boards is done
        by passing an initial assignment (obtained with
        parse_sudoku_assignment) to the solve_search method of
        the CSP.
    """
    # All (row, col) cells
    rows = range(9)
    cols = range(9)
    vars = cross(rows, cols)
    # Available values
    domains = defaultdict(lambda: range(1, 10))
    # The nine "units": each column, each row, and each 3x3 box.
    triples = [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
    unitlist = ([cross(rows, [c]) for c in cols] +
                [cross([r], cols) for r in rows] +
                [cross(rs, cs) for rs in triples for cs in triples])
    # Neighbors holds sets, but that's fine for CSP - it just
    # wants 'em to be iterable
    #
    # Two cells are neighbors iff they share at least one unit; a cell
    # is never its own neighbor.
    neighbors = defaultdict(lambda: set([]))
    for unit in unitlist:
        for cell in unit:
            neighbors[cell].update(unit)
            neighbors[cell].remove(cell)
    return SudokuCSP(
        vars=vars,
        domains=domains,
        neighbors=neighbors,
        binary_constraint=unequal_vals_constraint)
#----------------------------------------------------------------
#
# Magic squares
#
class MagicSquareCSP(CSP):
    """ CSP specialization that renders a magic-square assignment as a
        space-separated grid; unassigned cells show as '_'.
    """
    def to_str(self, assignment):
        s = ''
        # The board is square, so its side is sqrt(#vars).
        ns = range(int(math.sqrt(len(self.vars))))
        for row in ns:
            for col in ns:
                if (row, col) in assignment:
                    s += str(assignment[(row, col)])
                else:
                    s += '_'
                s += ' '
            s += '\n'
        return s
def make_magic_square_CSP(n):
    """ A NxN additive magic square
        A sample solution for 3x3:
        2 7 6
        9 5 1
        4 3 8
        (row, column and diagonal sum = 15)
    """
    rows = range(n)
    cols = range(n)
    vars = cross(rows, cols)
    # Each cell may hold any of 1..n^2.
    domains = defaultdict(lambda: range(1, n*n + 1))
    # Floor division keeps magic_sum an int on Python 3 as well as 2;
    # n*(n*n+1) is always even, so no precision is lost.
    magic_sum = n * (n*n + 1) // 2
    # All cells are different --> neighbors of one another.
    #
    neighbors = {}
    for v in vars:
        neighbors[v] = vars[:]
        neighbors[v].remove(v)
    def check_sum(values):
        """ A partial line is acceptable while its sum stays at or
            below magic_sum; a complete line (n values) must equal it.
        """
        s = sum(values)
        if s > magic_sum:
            return False
        return not (len(values) == n and s != magic_sum)
    def sum_constraint(new_asgn, cur_asgn):
        """ Global constraint: every row, column and diagonal touched
            by a newly assigned cell must still be consistent with the
            magic sum.
        """
        square = {}
        square.update(new_asgn)
        square.update(cur_asgn)
        # Only new assignments can cause conflicts...
        # (.keys() instead of the Python-2-only .iterkeys())
        for (vrow, vcol) in new_asgn.keys():
            if not check_sum([square[(vrow, col)] for col in cols if (vrow, col) in square]):
                return False
            if not check_sum([square[(row, vcol)] for row in rows if (row, vcol) in square]):
                return False
            # \ diagonal
            if ( vrow == vcol and
                 not check_sum([square[(row, row)] for row in rows if (row, row) in square])):
                return False
            # / diagonal
            if ( vrow == n - 1 - vcol and
                 not check_sum([square[(n - 1 - row, row)]
                                for row in rows
                                if (n - 1 - row, row) in square])):
                return False
        return True
    return MagicSquareCSP(
        vars=vars,
        domains=domains,
        neighbors=neighbors,
        binary_constraint=unequal_vals_constraint,
        global_constraint=sum_constraint)
#----------------------------------------------------------------
#
# Magic gons (Project Euler problem 68)
#
class Magic3gonCSP(CSP):
    """ CSP specialization rendering a magic 3-gon assignment.
        Unassigned slots are shown as '*'.
    """
    def to_str(self, assignment):
        # A defaultdict fills in '*' for any slot not yet assigned.
        asgn = defaultdict(lambda: '*')
        asgn.update(assignment)
        s = ''
        s += ' %s\n' % asgn[2]
        s += '\n'
        s += ' %s\n' % asgn[4]
        s += '\n'
        s += ' %s %s %s\n' % (asgn[3], asgn[5], asgn[6])
        s += '\n'
        s += '%s\n' % asgn[1]
        return s
def make_magic_3gon_CSP():
    """ Magic 3-gon ring: slots 1..6 take the values 1..6, all
        different, and each of the three lines ('groups') must sum to
        the same total.
    """
    vars = range(1, 7)
    domains = defaultdict(lambda: vars)
    # All cells are different --> neighbors of one another.
    #
    neighbors = {}
    for v in vars:
        neighbors[v] = vars[:]
        neighbors[v].remove(v)
    # The three lines of the 3-gon, each a list of slot numbers.
    groups = [[1, 3, 4], [2, 4, 5], [6, 5, 3]]
    def sum_constraint(new_asgn, cur_asgn):
        """ All *complete* groups must share the same sum. Unassigned
            slots default to 999, pushing an incomplete group's total
            past the < 1000 cutoff so it is ignored.
        """
        asgn = defaultdict(lambda: 999)
        asgn.update(new_asgn)
        asgn.update(cur_asgn)
        last_total = None
        for group in groups:
            total = sum(asgn[i] for i in group)
            if total < 1000:
                if last_total is None:
                    last_total = total
                elif last_total != total:
                    return False
        return True
    return Magic3gonCSP(
        vars=vars,
        domains=domains,
        neighbors=neighbors,
        binary_constraint=unequal_vals_constraint,
        global_constraint=sum_constraint)
class Magic5gonCSP(CSP):
    """ CSP specialization rendering a magic 5-gon assignment.
        Unassigned slots are shown as '*'.
    """
    def to_str(self, assignment):
        # A defaultdict fills in '*' for any slot not yet assigned.
        asgn = defaultdict(lambda: '*')
        asgn.update(assignment)
        s = ''
        s += ' %s %s\n' % (asgn[2], asgn[9])
        s += ' %s\n' % asgn[5]
        s += ' %s %s\n' % (asgn[3], asgn[8])
        s += '%s\n' % (asgn[1])
        s += ' %s %s %s\n' % (asgn[4], asgn[7], asgn[10])
        s += '\n'
        s += ' %s\n' % asgn[6]
        return s
def make_magic_5gon_CSP():
    """ Magic 5-gon ring (Project Euler 68): slots 1..10 take values
        1..10, all different, and each of the five lines ('groups')
        must sum to the same total.
    """
    vars = range(1, 11)
    domains = defaultdict(lambda: vars)
    # All cells are different --> neighbors of one another.
    #
    neighbors = {}
    for v in vars:
        neighbors[v] = vars[:]
        neighbors[v].remove(v)
    # The five lines of the 5-gon, each a list of slot numbers.
    groups = [[1, 3, 5], [2, 5, 8], [9, 8, 7], [10, 7, 4], [6, 4, 3]]
    def sum_constraint(new_asgn, cur_asgn):
        """ All *complete* groups must share the same sum. Unassigned
            slots default to 999, pushing an incomplete group's total
            past the < 1000 cutoff so it is ignored.
        """
        asgn = defaultdict(lambda: 999)
        asgn.update(new_asgn)
        asgn.update(cur_asgn)
        last_total = None
        for group in groups:
            total = sum(asgn[i] for i in group)
            if total < 1000:
                if last_total is None:
                    last_total = total
                elif last_total != total:
                    return False
        return True
    return Magic5gonCSP(
        vars=vars,
        domains=domains,
        neighbors=neighbors,
        binary_constraint=unequal_vals_constraint,
        global_constraint=sum_constraint)
| unlicense | fdf932c29891fed5b4b327c3abfaecfb | 25.683742 | 102 | 0.457725 | 3.681602 | false | false | false | false |
eliben/code-for-blog | 2012/plugins_python/htmlize/iplugin.py | 1 | 2327 | #-------------------------------------------------------------------------------
# htmlize: htmlize/iplugin.py
#
# The plugin interface. Plugins that want to register with htmlize must inherit
# IPlugin.
#
# Eli Bendersky (eliben@gmail.com)
# This code is in the public domain
#-------------------------------------------------------------------------------
import imp
import os
class IPluginRegistry(type):
    """ Metaclass that records every class created with it in the shared
        `plugins` list, except the abstract base named 'IPlugin' itself.
    """
    plugins = []
    def __init__(cls, name, bases, attrs):
        is_abstract_base = (name == 'IPlugin')
        if not is_abstract_base:
            IPluginRegistry.plugins.append(cls)
class IPlugin(object, metaclass=IPluginRegistry):
    def __init__(self, post=None, db=None):
        """ Initialize the plugin. Optionally provide the db.Post that is
            being processed and the db.DB it belongs to.
        """
        self.post = post
        self.db = db
    """ Plugin classes inherit from IPlugin. The methods below can be
        implemented to provide services.
    """
    def get_role_hook(self, role_name):
        """ Return a function accepting role contents.
            The function will be called with a single argument - the role
            contents, and should return what the role gets replaced with.
            None if the plugin doesn't provide a hook for this role.
        """
        return None
    def get_contents_hook(self):
        """ Return a function accepting full document contents.
            The function will be called with a single argument - the document
            contents (after paragraph splitting and role processing), and
            should return the transformed contents.
            None if the plugin doesn't provide a hook for this role.
        """
        return None
def discover_plugins(dirs):
    """ Discover the plugin classes contained in Python files, given a
        list of directory names to scan. Return a list of plugin classes.

        NOTE: the `imp` module is deprecated in Python 3 (importlib is
        the modern replacement); it is kept here to preserve behavior.
    """
    for dir in dirs:
        for filename in os.listdir(dir):
            modname, ext = os.path.splitext(filename)
            if ext == '.py':
                file, path, descr = imp.find_module(modname, [dir])
                if file:
                    try:
                        # Loading the module registers the plugin in
                        # IPluginRegistry
                        mod = imp.load_module(modname, file, path, descr)
                    finally:
                        # imp.find_module returns an *open* file object;
                        # the caller is responsible for closing it, so
                        # close it even if loading raises.
                        file.close()
    return IPluginRegistry.plugins
| unlicense | ce6e2ade599d0a51e65c55f68f4f9e4f | 35.936508 | 80 | 0.578427 | 4.654 | false | false | false | false |
cfpb/owning-a-home-api | ratechecker/tests/test_views.py | 1 | 14027 | import json
from datetime import datetime, timedelta
from django.test import override_settings
from django.utils import timezone
from model_mommy import mommy
from rest_framework import status
from rest_framework.test import APITestCase
from ratechecker.models import Adjustment, Product, Rate, Region
from ratechecker.views import set_lock_max_min
try:
from django.urls import reverse
except ImportError:
from django.core.urlresolvers import reverse
class RateCheckerTestCase(APITestCase):
def setUp(self):
self.url = "/oah-api/rates/rate-checker"
REGIONS = [[1, "DC"], [2, "VA"]]
PRODUCTS = [
# plan_id, institution, loan_purpose, pmt_type, loan_type, loan_term, int_adj_term, _, io, _, _, _, _, _, _, # noqa
# min_ltv, max_ltv, minfico, maxfico, min_loan_amt, max_loan_amt, single_family, condo, coop # noqa
[
11,
"Institution 1",
"PURCH",
"FIXED",
"CONF",
30,
None,
None,
0,
None,
None,
None,
None,
None,
None,
1,
95,
680,
700,
90000,
750000,
1,
0,
0,
],
[
22,
"Institution 2",
"PURCH",
"FIXED",
"CONF",
30,
None,
None,
0,
None,
None,
None,
None,
None,
None,
1,
87,
680,
740,
90000,
550000,
1,
0,
0,
],
[
33,
"Institution 3",
"PURCH",
"ARM",
"CONF",
15,
5,
None,
0,
None,
None,
None,
None,
None,
None,
1,
95,
680,
740,
90000,
550000,
1,
0,
0,
],
[
44,
"Institution 4",
"PURCH",
"FIXED",
"CONF",
30,
None,
None,
0,
None,
None,
None,
None,
None,
None,
1,
87,
680,
740,
90000,
550000,
1,
0,
0,
],
[
55,
"Institution 5",
"PURCH",
"ARM",
"CONF",
30,
5,
None,
0,
None,
None,
None,
None,
None,
None,
1,
87,
680,
740,
90000,
550000,
1,
0,
0,
],
[
66,
"Institution 6",
"PURCH",
"FIXED",
"CONF",
30,
None,
None,
0,
None,
None,
None,
None,
None,
None,
1,
87,
680,
740,
90000,
550000,
1,
0,
0,
],
[
77,
"Institution 7",
"PURCH",
"FIXED",
"FHA-HB",
15,
None,
None,
0,
None,
None,
None,
None,
None,
None,
1,
87,
680,
740,
90000,
550000,
1,
0,
0,
],
[
88,
"Institution 8",
"PURCH",
"FIXED",
"FHA",
30,
None,
None,
0,
None,
None,
None,
None,
None,
None,
1,
87,
680,
740,
90000,
550000,
1,
0,
0,
],
]
RATES = [
# rate_id, product_id, region_id, lock, base_rate, total_points
[111, 11, 1, 50, "3.150", "0.5"],
[112, 11, 2, 60, "4.350", "-0.5"],
[113, 11, 1, 60, "2.125", "0.125"],
[221, 22, 1, 60, "3.555", "0.125"],
[331, 33, 1, 60, "3.250", "0.125"],
[332, 33, 2, 60, "4.650", "-0.5"],
[441, 44, 1, 50, "3.125", "1.25"],
[551, 55, 1, 50, "0.125", "0.125"],
[661, 66, 1, 60, "3.705", "0.5"],
[771, 77, 2, 60, "1.705", "0.25"],
[772, 77, 2, 60, "2.705", "1.25"],
[881, 88, 1, 60, "3.000", "0.5"],
[882, 88, 1, 60, "2.005", "0.25"],
[883, 88, 1, 60, "1.005", "-0.25"],
]
ADJUSTMENTS = [
# rule_id, product_id, affect_rate_type, adj_value, min_loan_amt,
# max_loan_amt, prop_type, minfico, maxfico, minltv, maxltv, state
[
1,
11,
"P",
"-0.35",
100000,
500000,
"CONDO",
660,
780,
30,
95,
"DC",
],
[
2,
11,
"P",
"0.25",
100000,
500000,
"CONDO",
660,
780,
30,
95,
"DC",
],
[
3,
11,
"R",
"0.15",
100000,
500000,
"CONDO",
660,
780,
30,
95,
"DC",
],
[
4,
22,
"R",
"0.25",
100000,
500000,
"CONDO",
660,
780,
30,
95,
"VA",
],
[
5,
22,
"R",
"0.15",
100000,
500000,
"CONDO",
660,
780,
30,
95,
"DC",
],
[
6,
33,
"R",
"0.25",
100000,
500000,
"CONDO",
660,
780,
30,
95,
"DC",
],
[
7,
77,
"P",
"0.125",
100000,
500000,
"CONDO",
660,
780,
30,
95,
"VA",
],
]
NOW = timezone.now()
for region in REGIONS:
reg = Region(
region_id=region[0], state_id=region[1], data_timestamp=NOW
)
reg.save()
for p in PRODUCTS:
product = Product(
plan_id=p[0],
institution=p[1],
loan_purpose=p[2],
pmt_type=p[3],
loan_type=p[4],
loan_term=p[5],
int_adj_term=p[6],
adj_period=p[7],
io=p[8],
arm_index=p[9],
int_adj_cap=p[10],
annual_cap=p[11],
loan_cap=p[12],
arm_margin=p[13],
ai_value=p[14],
min_ltv=p[15],
max_ltv=p[16],
min_fico=p[17],
max_fico=p[18],
min_loan_amt=p[19],
max_loan_amt=p[20],
single_family=p[21],
condo=p[22],
coop=p[23],
data_timestamp=NOW,
)
product.save()
for r in RATES:
rate = Rate(
rate_id=r[0],
product_id=r[1],
region_id=r[2],
lock=r[3],
base_rate=r[4],
total_points=r[5],
data_timestamp=NOW,
)
rate.save()
for a in ADJUSTMENTS:
adjustment = Adjustment(
rule_id=a[0],
product_id=a[1],
affect_rate_type=a[2],
adj_value=a[3],
min_loan_amt=a[4],
max_loan_amt=a[5],
prop_type=a[6],
min_fico=a[7],
max_fico=a[8],
min_ltv=a[9],
max_ltv=a[10],
state=a[11],
data_timestamp=NOW,
)
adjustment.save()
    def test_set_lock_max_min(self):
        """Make sure max and min are set"""
        # Keys are the expected max_lock; each value carries the input
        # lock (as a string) and the expected min_lock.
        locks = {
            60: {"lock": "60", "minval": 46},
            45: {"lock": "45", "minval": 31},
            30: {"lock": "30", "minval": 0},
        }
        for key in locks.keys():
            mock_data = set_lock_max_min(locks[key])
            self.assertEqual(mock_data["max_lock"], key)
            self.assertEqual(mock_data["min_lock"], locks[key]["minval"])
    def test_rate_checker__no_args(self):
        """... when no parameters provided"""
        # The endpoint requires query parameters; an empty query must be
        # rejected with HTTP 400.
        response = self.client.get(self.url, {})
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_rate_checker__valid(self):
"""... when valid parameters are provided"""
params = {
"state": "DC",
"loan_purpose": "PURCH",
"rate_structure": "FIXED",
"loan_type": "CONF",
"max_ltv": 50,
"min_ltv": 50,
"loan_term": 30,
"loan_amount": 160000,
"price": 320000,
"maxfico": 700,
"minfico": 700,
"max_lock": 60,
"min_lock": 45,
"property_type": "CONDO",
"arm_type": "5-1",
"io": 0,
}
response = self.client.get(self.url, params)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data.get("data")), 2)
self.assertEqual(response.data.get("data").get("2.275"), 1)
self.assertEqual(response.data.get("data").get("3.705"), 2)
# self.assertTrue(result)
# self.assertEqual(len(result), 2)
# self.assertEqual(len(result['data']), 2)
# self.assertEqual(result['data']['2.275'], 1)
# self.assertEqual(result['data']['3.705'], 2)
# self.assertFalse(response_fixed.data.get('data') is None)
# self.assertEqual(response_fixed.data.get('data').get('monthly'), 1.5)
# self.assertTrue(response_fixed.data.get('data').get('upfront') is None) # noqa
@override_settings(URLCONF="ratechecker.urls")
class RateCheckerStatusTest(APITestCase):
    # NOTE(review): Django's setting is ROOT_URLCONF, not URLCONF; this
    # override is likely a no-op and the tests presumably rely on the
    # project urlconf already routing "rate-checker-status" -- confirm.
    def get(self):
        # Issue a GET against the status endpoint, asking for JSON.
        return self.client.get(
            reverse("rate-checker-status"),
            headers={"Accepts": "application/json"},
        )
    def test_no_data_returns_200(self):
        response = self.get()
        self.assertEqual(response.status_code, 200)
    def test_no_data_returns_json(self):
        response = self.get()
        self.assertEqual(response["Content-type"], "application/json")
    def test_no_data_returns_none(self):
        # With no Region rows there is no load timestamp to report.
        response = self.get()
        self.assertEqual(json.loads(response.content), {"load": None})
    def test_data_returns_200(self):
        mommy.make(Region)
        response = self.get()
        self.assertEqual(response.status_code, 200)
    def test_data_returns_json(self):
        mommy.make(Region)
        response = self.get()
        self.assertEqual(response["Content-type"], "application/json")
    def test_data_returns_timestamp(self):
        region = mommy.make(Region)
        response = self.get()
        ts = datetime.strptime(
            json.loads(response.content)["load"], "%Y-%m-%dT%H:%M:%S.%fZ"
        )
        ts = timezone.make_aware(ts, timezone=timezone.utc)
        # These might not match exactly due to ISO8601 JSON formatting.
        self.assertTrue(abs(ts - region.data_timestamp) < timedelta(seconds=1))
    def test_data_format_iso8601(self):
        timestamp = datetime(2017, 1, 2, 3, 4, 56, tzinfo=timezone.utc)
        mommy.make(Region, data_timestamp=timestamp)
        response = self.get()
        self.assertContains(response, "2017-01-02T03:04:56Z")
| cc0-1.0 | 6de36e24864bf0c61ebf93e17eeb3121 | 25.871648 | 128 | 0.337563 | 4.296172 | false | false | false | false |
mozilla-services/tokenserver | tokenserver/tests/test_remote_browserid_verifier.py | 1 | 7579 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import json
import contextlib
import unittest
from pyramid.config import Configurator
from tokenserver.verifiers import RemoteBrowserIdVerifier, IBrowserIdVerifier
from browserid.tests.support import make_assertion
import browserid.errors
class mockobj(object):
    # Bare attribute container used to fake HTTP response objects in the
    # tests (attributes are assigned ad hoc).
    pass
class TestRemoteBrowserIdVerifier(unittest.TestCase):
    # Baseline tokenserver configuration shared by the tests below;
    # individual tests layer overrides on top via _make_config().
    # NOTE: the original literal defined "browserid.backend" twice with
    # identical values; the redundant duplicate key has been removed.
    DEFAULT_SETTINGS = {
        "tokenserver.backend":
            "tokenserver.assignment.memorynode.MemoryNodeAssignmentBackend",
        "browserid.backend":
            "tokenserver.verifiers.RemoteBrowserIdVerifier",
        "tokenserver.secrets.backend":
            "mozsvc.secrets.FixedSecrets",
        "tokenserver.secrets.secrets":
            "steve-let-the-dogs-out",
    }
def _make_config(self, settings={}):
all_settings = self.DEFAULT_SETTINGS.copy()
all_settings.update(settings)
config = Configurator(settings=all_settings)
config.include("tokenserver")
config.commit()
return config
    @contextlib.contextmanager
    def _mock_verifier(self, verifier, exc=None, **response_attrs):
        """ Temporarily replace verifier.session.post with a stub.

            If `exc` is given, the stub raises it; otherwise it returns
            a fake response with status_code=200 and text="" unless
            overridden via `response_attrs`.
        """
        def replacement_post_method(*args, **kwds):
            if exc is not None:
                raise exc
            response = mockobj()
            response.status_code = 200
            response.text = ""
            response.__dict__.update(response_attrs)
            return response
        orig_post_method = verifier.session.post
        verifier.session.post = replacement_post_method
        try:
            yield None
        finally:
            # Always restore the real post method, even if the body raises.
            verifier.session.post = orig_post_method
    def test_verifier_config_loading_defaults(self):
        # With no overrides, the verifier points at the production FxA
        # verifier and applies no audience/issuer restrictions.
        config = self._make_config()
        verifier = config.registry.getUtility(IBrowserIdVerifier)
        self.assertTrue(isinstance(verifier, RemoteBrowserIdVerifier))
        self.assertEquals(verifier.verifier_url,
                          "https://verifier.accounts.firefox.com/v2")
        self.assertEquals(verifier.audiences, None)
        self.assertEquals(verifier.trusted_issuers, None)
        self.assertEquals(verifier.allowed_issuers, None)
    def test_verifier_config_loading_values(self):
        # Issuer lists are parsed from whitespace-separated settings
        # strings (spaces and newlines both act as separators).
        config = self._make_config({ # noqa; indentation below is non-standard
            "browserid.verifier_url":
                "https://trustyverifier.notascam.com/endpoint/path",
            "browserid.audiences":
                "https://testmytoken.com",
            "browserid.trusted_issuers":
                "example.com trustyidp.org",
            "browserid.allowed_issuers":
                "example.com trustyidp.org\nmockmyid.com",
        })
        verifier = config.registry.getUtility(IBrowserIdVerifier)
        self.assertTrue(isinstance(verifier, RemoteBrowserIdVerifier))
        self.assertEquals(verifier.verifier_url,
                          "https://trustyverifier.notascam.com/endpoint/path")
        self.assertEquals(verifier.audiences, "https://testmytoken.com")
        self.assertEquals(verifier.trusted_issuers,
                          ["example.com", "trustyidp.org"])
        self.assertEquals(verifier.allowed_issuers,
                          ["example.com", "trustyidp.org", "mockmyid.com"])
    def test_verifier_failure_cases(self):
        config = self._make_config({ # noqa; indentation below is non-standard
            "browserid.audiences":
                "https://testmytoken.com",
        })
        verifier = config.registry.getUtility(IBrowserIdVerifier)
        assertion = make_assertion(email="test@example.com",
                                   audience="https://testmytoken.com")
        # HTTP 500 from the remote verifier -> ConnectionError.
        with self._mock_verifier(verifier, status_code=500):
            with self.assertRaises(browserid.errors.ConnectionError):
                verifier.verify(assertion)
        # Non-JSON body -> ConnectionError.
        with self._mock_verifier(verifier, text="<h1>Server Error</h1>"):
            with self.assertRaises(browserid.errors.ConnectionError):
                verifier.verify(assertion)
        # JSON with an explicit error status -> InvalidSignatureError.
        with self._mock_verifier(verifier, text='{"status": "error"}'):
            with self.assertRaises(browserid.errors.InvalidSignatureError):
                verifier.verify(assertion)
        # JSON with an unrecognized status -> InvalidSignatureError.
        with self._mock_verifier(verifier, text='{"status": "potato"}'):
            with self.assertRaises(browserid.errors.InvalidSignatureError):
                verifier.verify(assertion)
    def test_verifier_rejects_unallowed_issuers(self):
        # The issuer reported by the remote verifier must be an *exact*
        # string match against the allowed_issuers whitelist.
        config = self._make_config({ # noqa; indentation below is non-standard
            "browserid.audiences":
                "https://testmytoken.com",
            "browserid.allowed_issuers":
                "accounts.firefox.com mockmyid.com",
        })
        verifier = config.registry.getUtility(IBrowserIdVerifier)
        assertion = make_assertion(email="test@example.com",
                                   audience="https://testmytoken.com")
        mock_response = {
            "status": "okay",
            "principal": {
                "email": "test@example.com",
            },
            "audience": "https://testmytoken.com",
            "issuer": "login.persona.org",
        }
        # Issuer not on the whitelist -> rejected.
        with self._mock_verifier(verifier, text=json.dumps(mock_response)):
            with self.assertRaises(browserid.errors.InvalidIssuerError):
                verifier.verify(assertion)
        # Both whitelisted issuers are accepted.
        mock_response["issuer"] = "mockmyid.com"
        with self._mock_verifier(verifier, text=json.dumps(mock_response)):
            self.assertEquals(verifier.verify(assertion)["principal"]["email"],
                              "test@example.com")
        mock_response["issuer"] = "accounts.firefox.com"
        with self._mock_verifier(verifier, text=json.dumps(mock_response)):
            self.assertEquals(verifier.verify(assertion)["principal"]["email"],
                              "test@example.com")
        # Near-miss strings (wrong TLD, scheme prefix, truncation) and
        # non-string values must all be rejected.
        mock_response["issuer"] = "mockmyid.org"
        with self._mock_verifier(verifier, text=json.dumps(mock_response)):
            with self.assertRaises(browserid.errors.InvalidIssuerError):
                verifier.verify(assertion)
        mock_response["issuer"] = "http://mockmyid.com"
        with self._mock_verifier(verifier, text=json.dumps(mock_response)):
            with self.assertRaises(browserid.errors.InvalidIssuerError):
                verifier.verify(assertion)
        mock_response["issuer"] = "mockmyid.co"
        with self._mock_verifier(verifier, text=json.dumps(mock_response)):
            with self.assertRaises(browserid.errors.InvalidIssuerError):
                verifier.verify(assertion)
        mock_response["issuer"] = 42
        with self._mock_verifier(verifier, text=json.dumps(mock_response)):
            with self.assertRaises(browserid.errors.InvalidIssuerError):
                verifier.verify(assertion)
        mock_response["issuer"] = None
        with self._mock_verifier(verifier, text=json.dumps(mock_response)):
            with self.assertRaises(browserid.errors.InvalidIssuerError):
                verifier.verify(assertion)
        # A missing issuer field is also rejected.
        del mock_response["issuer"]
        with self._mock_verifier(verifier, text=json.dumps(mock_response)):
            with self.assertRaises(browserid.errors.InvalidIssuerError):
                verifier.verify(assertion)
| mpl-2.0 | a1e616ea06cbc163187eaf27ab392136 | 45.213415 | 79 | 0.628315 | 4.255474 | false | true | false | false |
mozilla-services/tokenserver | tokenserver/scripts/count_users.py | 1 | 3191 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Script to emit total-user-count metrics for exec dashboard.
This script takes a tokenserver config file, uses it to load the assignment
backend, and then outputs the reported user count.
"""
import os
import sys
import time
import json
import socket
import optparse
from datetime import datetime, timedelta, tzinfo
from tokenserver.assignment import INodeAssignment
import tokenserver.scripts
import logging
logger = logging.getLogger("tokenserver.scripts.count_users")
ZERO = timedelta(0)


class UTC(tzinfo):
    """ Minimal fixed-offset tzinfo implementing UTC (kept for
        Python 2 compatibility, where datetime.timezone.utc does not
        exist).
    """

    def dst(self, dt):
        # UTC never observes daylight saving time.
        return ZERO

    def tzname(self, dt):
        return "UTC"

    def utcoffset(self, dt):
        # UTC is, by definition, zero offset from itself.
        return ZERO


# Module-level singleton used when formatting timestamps below.
utc = UTC()
def count_users(config_file, outfile, timestamp=None):
    """ Count users created before `timestamp` (milliseconds since the
        epoch; defaults to the most recent midnight) and write a
        heka-compatible JSON record to `outfile`.
    """
    logger.debug("Using config file %r", config_file)
    config = tokenserver.scripts.load_configurator(config_file)
    config.begin()
    try:
        if timestamp is None:
            # NOTE(review): midnight is taken from gmtime() but converted
            # back with mktime(), which interprets the tuple as *local*
            # time -- confirm whether this timezone mix is intentional.
            ts = time.gmtime()
            midnight = (ts[0], ts[1], ts[2], 0, 0, 0, ts[6], ts[7], ts[8])
            timestamp = int(time.mktime(midnight)) * 1000
        backend = config.registry.getUtility(INodeAssignment)
        logger.debug("Counting users created before %i", timestamp)
        count = backend.count_users(timestamp)
        logger.debug("Found %d users", count)
        # Output has heka-filter-compatible JSON object.
        ts_sec = timestamp / 1000
        output = {
            "hostname": socket.gethostname(),
            "pid": os.getpid(),
            "op": "sync_count_users",
            "total_users": count,
            "time": datetime.fromtimestamp(ts_sec, utc).isoformat(),
            "v": 0
        }
        json.dump(output, outfile)
        outfile.write("\n")
    finally:
        # Always unwind the pyramid config, even on error.
        config.end()
def main(args=None):
    """Main entry-point for running this script.
    This function parses command-line arguments and passes them on
    to the count_users() function.
    """
    usage = "usage: %prog [options] config_file"
    descr = "Count total users in the tokenserver database"
    parser = optparse.OptionParser(usage=usage, description=descr)
    parser.add_option("-t", "--timestamp", type="int",
                      help="Max creation timestamp; default previous midnight")
    parser.add_option("-o", "--output",
                      help="Output file; default stderr")
    parser.add_option("-v", "--verbose", action="count", dest="verbosity",
                      help="Control verbosity of log messages")
    opts, args = parser.parse_args(args)
    if len(args) != 1:
        parser.print_usage()
        return 1
    tokenserver.scripts.configure_script_logging(opts)
    config_file = os.path.abspath(args[0])
    # "-" (or no -o flag) means write to stdout instead of a file.
    if opts.output in (None, "-"):
        count_users(config_file, sys.stdout, opts.timestamp)
    else:
        # Append mode so repeated runs accumulate one JSON line each.
        with open(opts.output, "a") as outfile:
            count_users(config_file, outfile, opts.timestamp)
    return 0
if __name__ == "__main__":
    tokenserver.scripts.run_script(main)
| mpl-2.0 | 9d711a04ba8ad0911e62cee24f80b858 | 28.546296 | 79 | 0.633657 | 3.915337 | false | true | false | false |
mozilla-services/tokenserver | tokenserver/assignment/sqlnode/migrations/versions/6569dd9a060_populate_nodeid_column_and_index.py | 1 | 1476 | # flake8: noqa
"""populate nodeid column and index
Revision ID: 6569dd9a060
Revises: 846f28d1b6f
Create Date: 2014-04-14 05:26:44.146236
This updates the values in the "nodeid" column to ensure that they match
the value in the string-based "node" column, then indexes the column for fast
node-based lookup. It should only be applied *after* all servers have bee
upgraded to properly write the value of the "nodeid" column; it's part 2 of 2
in getting to the desired state without downtime.
See https://bugzilla.mozilla.org/show_bug.cgi?id=988643
"""
# revision identifiers, used by Alembic.
revision = '6569dd9a060'
down_revision = '846f28d1b6f'
from alembic import op
import sqlalchemy as sa
def upgrade():
    # Populate nodeid with the proper id for each existing row.
    # XXX NOTE: MySQL-specific!
    op.execute("""
    UPDATE users, nodes
    SET users.nodeid = nodes.id
    WHERE users.node = nodes.node
    """.strip())
    # Set the column non-nullable so it doesn't mask bugs in the future.
    op.alter_column(
        'users', 'nodeid',
        nullable=False,
        existing_type=sa.BigInteger(),
        existing_server_default=None,
    )
    # Index the nodeid column (enables fast node-based lookups).
    op.create_index('node_idx', 'users', ['nodeid'])
def downgrade():
    # Reverse of upgrade(): drop the index and make the column nullable
    # again. The populated values themselves are left in place.
    op.drop_index('node_idx', 'users')
    op.alter_column(
        'users', 'nodeid',
        nullable=True,
        existing_type=sa.BigInteger(),
        existing_server_default=None,
    )
| mpl-2.0 | 6cfb5c119188fa22187982e801ab46ba | 27.384615 | 77 | 0.678184 | 3.522673 | false | false | false | false |
onecodex/onecodex | onecodex/exceptions.py | 1 | 3218 | class OneCodexException(Exception):
pass
class MethodNotSupported(OneCodexException):
"""The object does not support this operation."""
pass
class PermissionDenied(OneCodexException):
pass
class ServerError(OneCodexException):
pass
class UnboundObject(OneCodexException):
"""To use against the One Codex server, all classes must be derived from an Api() instance."""
pass
class ValidationError(OneCodexException):
pass
class UploadException(Exception):
"""An exception for when things go wrong with uploading."""
pass
class RetryableUploadException(UploadException):
pass
class PlottingException(OneCodexException):
"""User-facing plotting error for cases when the user may take action to resolve the issue."""
pass
class PlottingWarning(UserWarning):
"""User-facing plotting warning for cases when the user may take action to resolve the issue."""
pass
def raise_api_error(resp, state=None):
    """Raise an exception with a pretty message in various states of upload."""
    # TODO: Refactor into an Exception class
    error_code = resp.status_code
    if error_code == 402:
        error_message = (
            "Please add a payment method to upload more samples. If you continue to "
            "experience problems, contact us at support@onecodex.com for assistance."
        )
    elif error_code == 403:
        error_message = "Please login to your One Codex account or pass the appropriate API key."
    else:
        # Prefer a server-supplied message from the JSON body, checking
        # both key spellings the API has used.
        try:
            error_json = resp.json()
        except ValueError:
            error_json = {}
        if "msg" in error_json:
            error_message = error_json["msg"].rstrip(".")
        elif "message" in error_json:
            error_message = error_json["message"].rstrip(".")
        else:
            error_message = None
        # Fall back to a state-specific generic message when the server
        # gave us nothing usable.
        if state == "init" and not error_message:
            error_message = (
                "Could not initialize upload. Are you logged in? If this problem "
                "continues, please contact support@onecodex.com for assistance."
            )
        elif state == "upload" and not error_message:
            error_message = (
                "File could not be uploaded. If this problem continues, please contact "
                "support@onecodex.com for assistance."
            )
        elif state == "callback" and not error_message:
            error_message = (
                "Callback could not be completed. If this problem continues, please "
                "contact support@onecodex.com for assistance."
            )
    # Last-resort generic message if nothing above applied.
    if error_message is None:
        error_message = "Upload failed. Please contact support@onecodex.com for assistance."
    raise UploadException(error_message)
def raise_connectivity_error(file_name):
    """Raise an UploadException indicating a (possibly transient) connectivity failure.

    :param file_name: name of the file whose upload could not be completed
    """
    # TODO: This is really a general NonRetryableUploadError
    # with a default msg. Refactor.
    message = (
        "The command line client is experiencing connectivity issues and "
        "cannot complete the upload of {} at this time. Please try again "
        "later. If the problem persists, contact us at support@onecodex.com "
        "for assistance."
    ).format(file_name)
    raise UploadException(message)
| mit | 032a57055ee2c3df5056122066d6c2a6 | 29.358491 | 100 | 0.646053 | 4.408219 | false | false | false | false |
pycket/pycket | pycket/hidden_classes.py | 4 | 7665 |
from rpython.rlib import jit, unroll, rweakref
from rpython.rlib.objectmodel import specialize
def make_map_type(getter, keyclass):
    """Build a hidden-class style Map type (RPython).

    `getter` is the *name* of the method invoked on a storage object to read a
    slot (looked up with getattr in Map.lookup); `keyclass` is the RPython key
    type for the weak dictionary of child maps.
    """
    class Map(object):
        """ A basic implementation of a map which assigns Racket values to an index
        based on the identity of the Racket value. A Map consists of
        * indexes: a map from objects to indicies for object described by the current map
        * other_maps: sub maps which are extensions the current map
        """
        _attrs_ = _immutable_fields_ = ['indexes', 'other_maps', 'parent']
        def __init__(self, parent):
            self.indexes = {}
            self.other_maps = rweakref.RWeakValueDictionary(keyclass, Map)
            # NB: The parent pointer is needed to prevent the GC from collecting
            # the chain of parent maps which produced this one.
            self.parent = parent
        def __iter__(self):
            return self.indexes.iteritems()
        def iterkeys(self):
            return self.indexes.iterkeys()
        def itervalues(self):
            return self.indexes.itervalues()
        def iteritems(self):
            return self.indexes.iteritems()
        @jit.elidable_promote('all')
        def get_index(self, name):
            # -1 signals "attribute not present in this map".
            return self.indexes.get(name, -1)
        @specialize.argtype(2)
        def lookup(self, name, storage, default=None, offset=0):
            # Read the slot for `name` out of `storage` via the configured
            # getter method, or return `default` if the map has no such slot.
            idx = self.get_index(name)
            if idx == -1:
                return default
            assert storage is not None
            return getattr(storage, getter)(idx+offset)
        @jit.elidable_promote('all')
        def add_attribute(self, name):
            # Return (creating and caching if necessary) the child map that
            # extends this one with `name` at the next free index.
            newmap = self.other_maps.get(name)
            if newmap is None:
                newmap = Map(self)
                newmap.indexes.update(self.indexes)
                newmap.indexes[name] = len(self.indexes)
                # NOTE(review): redundant — Map(self) already set parent.
                newmap.parent = self
                self.other_maps.set(name, newmap)
            return newmap
        @jit.elidable
        def has_attribute(self, name):
            return name in self.indexes
        @jit.elidable
        def storage_size(self):
            # Number of storage slots a structure described by this map needs.
            return len(self.indexes)
    Map.EMPTY = Map(None)
    return Map
# TODO Find a beter name for this
def make_caching_map_type(getter, keyclass):
    """Build a CachingMap type (RPython).

    Like make_map_type, but each map also carries *static* data shared by all
    structures with this layout. `getter` is the name of the storage accessor
    method; `keyclass` is the RPython key type for the weak dynamic-submap
    dictionary.
    """
    class CachingMap(object):
        """ A map implementation which partitions its data into two groups, a collection
        of static data stored in the map itself, and a collection of indexes used to
        index into a corresponding data array.
        This partitioning allows structures such as impersonators to share not just
        their layout but common data as well.
        """
        _attrs_ = _immutable_fields_ = [
            'indexes', 'static_data', 'static_submaps',
            'dynamic_submaps', 'parent']
        def __init__(self, parent):
            self.indexes = {}
            self.static_data = {}
            self.dynamic_submaps = rweakref.RWeakValueDictionary(keyclass, CachingMap)
            self.static_submaps = {}
            # Strong reference keeps the chain of ancestor maps alive, since
            # dynamic_submaps only holds weak references.
            self.parent = parent
        def iterkeys(self):
            # Iterate dynamic keys first, then static keys.
            for key in self.indexes.iterkeys():
                yield key
            for key in self.static_data.iterkeys():
                yield key
        def iteritems(self):
            for item in self.indexes.iteritems():
                yield item
            for item in self.static_data.iteritems():
                yield item
        def itervalues(self):
            for val in self.indexes.itervalues():
                yield val
            for val in self.static_data.itervalues():
                yield val
        @jit.elidable
        def storage_size(self):
            # Only dynamic attributes occupy storage slots.
            return len(self.indexes)
        @jit.elidable_promote('all')
        def get_dynamic_index(self, name):
            # -1 signals "not a dynamic attribute of this map".
            return self.indexes.get(name, -1)
        @jit.elidable_promote('all')
        def get_static_data(self, name, default):
            if name not in self.static_data:
                return default
            return self.static_data[name]
        @specialize.argtype(2)
        def lookup(self, name, storage, default=None, offset=0):
            # Dynamic attributes read from `storage`; otherwise fall back to
            # the map's shared static data.
            idx = self.get_dynamic_index(name)
            if idx == -1:
                return self.get_static_data(name, default)
            assert storage is not None
            return getattr(storage, getter)(idx+offset)
        @jit.elidable_promote('all')
        def add_static_attribute(self, name, value):
            # Child maps are cached per (name, value) pair so layouts sharing
            # the same static datum share the same map.
            assert name not in self.indexes and name not in self.static_data
            key = (name, value)
            newmap = self.static_submaps.get(key, None)
            if newmap is None:
                newmap = CachingMap(self)
                newmap.indexes.update(self.indexes)
                newmap.static_data.update(self.static_data)
                newmap.static_data[name] = value
                self.static_submaps[key] = newmap
            return newmap
        @jit.elidable_promote('all')
        def add_dynamic_attribute(self, name):
            assert name not in self.indexes and name not in self.static_data
            newmap = self.dynamic_submaps.get(name)
            if newmap is None:
                newmap = CachingMap(self)
                newmap.indexes.update(self.indexes)
                newmap.static_data.update(self.static_data)
                newmap.indexes[name] = len(self.indexes)
                self.dynamic_submaps.set(name, newmap)
            return newmap
        @jit.elidable
        def is_dynamic_attribute(self, name):
            # BUG FIX: was `name in seld.indexes` (NameError on `seld`).
            return name in self.indexes
        @jit.elidable
        def is_static_attribute(self, name):
            return name in self.static_data
    CachingMap.EMPTY = CachingMap(None)
    return CachingMap
# These maps are simply unique products of various other map types.
# They are unique based on their component maps.
def make_composite_map_type():
    """Build a CompositeMap type: a unique, cached pairing of a handler map
    and a property map (used by impersonators). Instances are interned in
    CompositeMap.CACHE keyed on the component maps."""
    class CompositeMap(object):
        _attrs_ = _immutable_fields_ = ['handlers', 'properties']
        @staticmethod
        @jit.elidable
        def instantiate(handlers, properties):
            # Interning constructor: one CompositeMap per (handlers, properties).
            key = (handlers, properties)
            result = CompositeMap.CACHE.get(key, None)
            if result is None:
                result = CompositeMap(handlers, properties)
                CompositeMap.CACHE[key] = result
            return result
        def __init__(self, handlers, properties):
            self.handlers = handlers
            self.properties = properties
        @specialize.argtype(2)
        def lookup_handler(self, key, storage, default=None):
            jit.promote(self)
            return self.handlers.lookup(key, storage, default=default)
        @specialize.argtype(2)
        def lookup_property(self, key, storage, default=None):
            """ We make the assumption that data for the handlers are laid out
            in the form [handler_0, handler_1, ..., property_0, property_1, ...]"""
            jit.promote(self)
            # NOTE(review): offset=0 despite the stated layout — presumably the
            # properties map's own indices already account for the handler
            # prefix; confirm against the callers that build these maps.
            return self.properties.lookup(key, storage, default=default, offset=0)
    # We would really like to use an RWeakValueDictionary here, but tuple keys are
    # not supported, as far as I can tell, and neither are custom hash/equality
    # functions, so we are stuck using a regular dictionary for now.
    #
    # A dictionary of (key1, key2) -> weakref<CompositeMap> may avoid holding onto
    # some bits of memory for too long.
    # Another option is to use two layers of dictionaries
    # key1 -> (key2 -> CompositeMap)
    CompositeMap.CACHE = {} # rweakref.RWeakValueDictionary(tuple, CompositeMap)
    return CompositeMap
| mit | 028a412064fdc174014f3ae690b597e3 | 35.15566 | 89 | 0.589954 | 4.246537 | false | false | false | false |
onecodex/onecodex | onecodex/models/analysis.py | 1 | 2826 | from onecodex.models import OneCodexBase
class Analyses(OneCodexBase):
    """Base model for all One Codex analysis resources."""

    _resource_path = "/api/v1/analyses"
    _cached_result = None

    def results(self, json=True):
        """Fetch the results of an Analyses resource.

        Parameters
        ----------
        json : bool, optional
            Return a JSON result (raw API result)? Default True.

        Returns
        -------
        Return type varies by Analyses resource sub-type. See, e.g., Classifications or Panels for
        documentation.
        """
        if json is not True:
            raise NotImplementedError("No non-JSON result format implemented.")
        return self._results()

    def _results(self):
        # Cache the raw API payload on the underlying resource so repeated
        # calls do not hit the server again.
        try:
            cached = getattr(self._resource, "_cached_result", None)
            if not cached:
                cached = self._resource.results()
                self._resource._cached_result = cached
            return cached
        except AttributeError:
            raise NotImplementedError(".results() not implemented for this Analyses resource.")
class Alignments(Analyses):
    """Analyses sub-type for alignment results; inherits results() from Analyses."""
    _resource_path = "/api/v1/alignments"
class Classifications(Analyses):
    """Analyses sub-type for metagenomic classification results."""

    _resource_path = "/api/v1/classifications"
    _cached_table = None

    def results(self, json=True):
        """Return the complete results table for a classification.

        Parameters
        ----------
        json : `bool`, optional
            Return result as JSON? Default True.

        Returns
        -------
        table : `dict` or `pd.DataFrame`
            Return a JSON object with the classification results or a `pd.DataFrame` if json=False.
        """
        return self._results() if json is True else self._table()

    def _readlevel(self):
        # Raw read-level results from the underlying resource.
        return self._resource.readlevel()

    def _table(self):
        # Lazily build and memoize a DataFrame view of the JSON results.
        import pandas as pd
        if self._cached_table is None:
            self._cached_table = pd.DataFrame(self._results()["table"])
        return self._cached_table

    def table(self):
        """Return the complete results table for the classification.

        Returns
        -------
        table : `pd.DataFrame`
            A Pandas DataFrame of the classification results.
        """
        return self.results(json=False)

    @classmethod
    def where(cls, *filters, **keyword_filters):
        # Wrap query results in a SampleCollection rather than a plain list.
        from onecodex.models.collection import SampleCollection
        matches = super(Classifications, cls).where(*filters, **keyword_filters)
        return SampleCollection([m._resource for m in matches], Classifications)
class Panels(Analyses):
    """Analyses sub-type for gene panel results (JSON-only for now)."""

    _resource_path = "/api/v1/panels"

    def results(self, json=True):
        """Return the raw JSON results for this panel."""
        if json is not True:
            raise NotImplementedError("Panel results only available as JSON at this time.")
        return self._results()
| mit | 2edbee16baacc7d296318991ca00111c | 28.4375 | 99 | 0.601203 | 4.558065 | false | false | false | false |
pycket/pycket | pycket/prims/continuation_marks.py | 1 | 6193 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from pycket import impersonators as imp
from pycket import values
from pycket import vector
from pycket.cont import call_cont, Cont
from pycket.error import SchemeException
from pycket.prims.expose import default, expose, make_callable_label, procedure
# Can use this to promote a get_cmk operation to a callable function.
CMKSetToListHandler = make_callable_label([values.W_Object])
@expose("current-continuation-marks",
        [default(values.W_ContinuationPromptTag, values.w_default_continuation_prompt_tag)],
        simple=False)
def current_cont_marks(prompt_tag, env, cont):
    # Capture the current continuation `cont` (up to prompt_tag) as a mark set.
    from pycket.interpreter import return_value
    return return_value(values.W_ContinuationMarkSet(cont, prompt_tag), env, cont)
@expose("continuation-marks",
        [values.W_Continuation,
         default(values.W_ContinuationPromptTag, values.w_default_continuation_prompt_tag)])
def continuation_marks(cont, prompt_tag):
    # Build a mark set from an explicit (reified) continuation object.
    # TODO Prompt tag
    return values.W_ContinuationMarkSet(cont.cont, prompt_tag)
@expose("continuation-mark-set->list",
        [values.W_ContinuationMarkSet, values.W_Object,
         default(values.W_ContinuationPromptTag, None)],
        simple=False)
def cms_list(cms, mark, prompt_tag, env, cont):
    # Collect all marks for `mark` in the captured continuation, stopping at
    # either the optional prompt_tag or the mark set's own prompt tag.
    from pycket.interpreter import return_value
    from pycket.prims.general import map_loop
    if isinstance(mark, values.W_ContinuationMarkKey):
        # Mark keys may interpose on reads: map get_cmk over the raw marks.
        func = CMKSetToListHandler(mark.get_cmk)
        marks = cms.cont.get_marks(imp.get_base_object(mark),
                                   upto=[prompt_tag, cms.prompt_tag])
        return map_loop(func, [marks], env, cont)
    marks = cms.cont.get_marks(mark, upto=[prompt_tag, cms.prompt_tag])
    return return_value(marks, env, cont)
def get_marks_all(cont, keys, not_found, upto=[]):
    """Walk the continuation chain collecting, per frame-with-marks, a vector
    of values for every key in `keys` (using `not_found` for absent keys).
    Returns a Racket list of such vectors; frames with no matching marks are
    skipped. `upto` is only read here — the mutable default is never mutated."""
    results = vector.W_Vector.fromelement(not_found, len(keys))
    while True:
        if cont is None:
            return values.w_null
        found = False
        next = None  # NOTE(review): unused local
        for i, key in enumerate(keys):
            value, _ = cont.find_cm(key)
            if value is not None:
                found = True
            else:
                value = not_found
            results.set(i, value)
        cont = cont.get_previous_continuation(upto=upto)
        if found:
            break
    # Recurse for the remaining continuation frames.
    rest = get_marks_all(cont, keys, not_found, upto=upto)
    return values.W_Cons.make(results, rest)
@expose("continuation-mark-set->list*",
        [values.W_ContinuationMarkSet,
         values.W_List,
         default(values.W_Object, values.w_false),
         default(values.W_ContinuationPromptTag, values.w_default_continuation_prompt_tag)])
def continuation_mark_set_to_list_star(mark_set, key_list, none_v, prompt_tag):
    # Multi-key variant: delegates to get_marks_all with the unpacked key list.
    cont = mark_set.cont
    keys = values.from_list(key_list)
    return get_marks_all(cont, keys, none_v, upto=[prompt_tag])
def is_ast_cont_with_surrounding_lambda(k):
    """Return True if `k` is one of the AST continuation kinds and its AST node
    has a surrounding lambda (i.e. it can contribute a context frame)."""
    from pycket import interpreter as i
    cs = [i.LetCont,
          i.LetrecCont,
          i.BeginCont,
          i.Begin0Cont,
          i.Begin0BodyCont,
          i.WCMKeyCont,
          i.WCMValCont]
    # the ones having the method "get_next_executed_ast"
    for c in cs:
        if isinstance(k, c):
            a = k.get_ast()
            if isinstance(a, i.AST) and a.surrounding_lambda:
                return True
    return False
@expose("continuation-mark-set->context", [values.W_ContinuationMarkSet])
def cms_context(marks):
    # Approximate a context (stack trace) by collecting the surrounding-lambda
    # strings of each qualifying continuation frame.
    from pycket.values_string import W_String
    # TODO: Pycket does not have a mark to denote context. We need to fix that.
    k = marks.cont
    n = 0
    # find out the length
    while isinstance(k, Cont):
        if is_ast_cont_with_surrounding_lambda(k):
            n += 1
        k = k.get_previous_continuation()
    # second traversal saves us from reversing it later
    ls = [None]*n
    k = marks.cont
    i = n-1
    while isinstance(k, Cont):
        if is_ast_cont_with_surrounding_lambda(k):
            surrounding_lam = k.get_ast().surrounding_lambda
            lam_str = W_String.make(surrounding_lam.tostring())
            # Each context entry is a pair of (name, #f) — no source location.
            ls[i] = values.W_Cons.make(lam_str, values.w_false)
            i -= 1
        k = k.get_previous_continuation()
    return values.to_list(ls)
@expose("continuation-mark-set-first",
        [values.W_Object,
         values.W_Object,
         default(values.W_Object, values.w_false),
         default(values.W_Object, values.w_default_continuation_prompt_tag)],
        simple=False)
def cms_first(cms, key, missing, prompt_tag, env, cont):
    # Return the first (innermost) mark for `key`, or `missing` if none.
    # cms may be #f (use the current continuation) or a mark set.
    from pycket.interpreter import return_value
    is_cmk = isinstance(key, values.W_ContinuationMarkKey)
    m = imp.get_base_object(key) if is_cmk else key
    # break-enabled and parameterization marks are searched all the way up to
    # the root prompt, regardless of the requested tag.
    if prompt_tag is values.w_default_continuation_prompt_tag and \
       (key is values.break_enabled_key or key is values.parameterization_key):
        prompt_tag = values.w_root_continuation_prompt_tag
    if cms is values.w_false:
        the_cont = cont
        v = cont.get_mark_first(m, upto=[prompt_tag])
    elif isinstance(cms, values.W_ContinuationMarkSet):
        the_cont = cms.cont
        v = cont.get_mark_first(m, upto=[prompt_tag, cms.prompt_tag])
    else:
        raise SchemeException("Expected #f or a continuation-mark-set")
    val = v if v is not None else missing
    if is_cmk:
        # Chaperoned/impersonated mark keys interpose on the read.
        return key.get_cmk(val, env, cont)
    return return_value(val, env, cont)
@expose("make-continuation-mark-key", [default(values.W_Symbol, None)])
def mk_cmk(s):
    # Create a fresh mark key; gensym a name when none is given.
    from pycket.interpreter import Gensym
    s = Gensym.gensym("cm") if s is None else s
    return values.W_ContinuationMarkKey(s)
@expose("call-with-immediate-continuation-mark",
        [values.W_Object, procedure, default(values.W_Object, values.w_false)],
        simple=False,
        extra_info=True)
def cwicm(key, proc, default, env, cont, extra_call_info):
    # Look up `key` only in the *immediate* continuation frame, then tail-call
    # proc with the found value (or `default`).
    lup, _ = cont.find_cm(key)
    val = default if lup is None else lup
    if isinstance(key, values.W_ContinuationMarkKey):
        return key.get_cmk(val, env, call_cont(proc, env, cont))
    return proc.call_with_extra_info([val], env, cont, extra_call_info)
| mit | c5bff62c44675d9902975060a923101d | 37.70625 | 92 | 0.647021 | 3.397148 | false | false | false | false |
onecodex/onecodex | onecodex/vendored/potion_client/links.py | 2 | 4737 | # flake8: noqa
try:
import simplejson as json
except ImportError:
import json
import re
from requests import Request
from requests.exceptions import HTTPError
from .collection import PaginatedList
from .converter import PotionJSONEncoder, PotionJSONDecoder
from .schema import Schema
class Link(object):
    """Description of a single Potion API link: HTTP method, href template,
    relation name, and request/response schemas."""

    def __init__(self, client, method, href, rel, schema=None, target_schema=None):
        # NOTE: `client` is accepted for interface compatibility but not stored.
        self.method = method
        self.href = href
        self.rel = rel
        # Names appearing as {placeholder} segments in the href template.
        self.href_placeholders = re.findall(r"{(\w+)}", href)
        self.schema = Schema(schema)
        self.target_schema = Schema(target_schema)

    @property
    def requires_instance(self):
        """True when the href template needs an object id filled in."""
        return '{id}' in self.href

    def returns_pagination(self):
        """True for GET links whose schema declares page/per_page parameters."""
        if self.method != 'GET' or self.schema is None:
            return False
        props = self.schema.get('properties', {})
        return 'page' in props and 'per_page' in props
class LinkBinding(object):
    """A Link bound to an instance (or owner class); calling the binding
    builds, sends, and decodes the corresponding HTTP request."""

    def __init__(self, link, instance, owner):
        self.link = link
        self.instance = instance
        self.owner = owner

    def request_factory(self, data, params):
        """Build an (unprepared) requests.Request for this link invocation.

        `params` fills href placeholders and (for GET) becomes the query
        string; `data` becomes the JSON body for non-GET methods.
        """
        if self.instance is None:
            request_url = self.owner._client._root_url + self.link.href.format(**params)
        else:
            request_url = self.owner._client._root_url + self.link.href.format(id=self.instance.id, **self.instance)
        request_data = data
        # Query params: drop href placeholders and anything the schema rejects.
        request_params = {name: value for name, value in params.items()
                          if name not in self.link.href_placeholders and self.link.schema.can_include_property(name)}
        if data is None:
            request_data = request_params
        elif isinstance(data, dict):
            request_params = data
        if self.link.method == 'GET':
            # GET: each query value is JSON-encoded individually.
            req = Request(self.link.method,
                          request_url,
                          params={k: json.dumps(v, cls=PotionJSONEncoder)
                                  for k, v in request_params.items()})
        else:
            req = Request(self.link.method,
                          request_url,
                          headers={'content-type': 'application/json'},
                          data=json.dumps(request_data, cls=PotionJSONEncoder))
        return req

    def raise_for_status(self, response):
        """Raise HTTPError for 4xx/5xx responses; no-op for anything else."""
        http_error_msg = ''
        if 400 <= response.status_code < 500:
            try:
                # Prefer the server-supplied JSON error body as the message.
                http_error_msg = response.json()
            except ValueError:
                # BUG FIX: was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit. response.json() raises
                # ValueError (JSONDecodeError) when the body is not JSON.
                http_error_msg = ('{code} Client Error: {reason} for url: {url}'.format(
                    code=response.status_code, reason=response.reason, url=response.url)
                )
        elif 500 <= response.status_code < 600:
            http_error_msg = ('{code} Server Error: {reason} for url: {url}'.format(
                code=response.status_code, reason=response.reason, url=response.url)
            )
        # NOTE(review): a 4xx whose JSON body is falsy (e.g. {}) raises
        # nothing here — presumably never happens in practice; confirm.
        if http_error_msg:
            raise HTTPError(http_error_msg, response=response)

    def make_request(self, data, params):
        """Send the request and return (response, decoded_json_or_None)."""
        req = self.request_factory(data, params)
        prepared_request = self.owner._client.session.prepare_request(req)
        # Update client session so that it will properly load from env vars
        # See https://requests.readthedocs.io/en/master/user/advanced/#prepared-requests
        # for more details
        settings = self.owner._client.session.merge_environment_settings(
            url=prepared_request.url, proxies={}, stream=None, verify=None, cert=None
        )
        response = self.owner._client.session.send(prepared_request, **settings)
        # return error for some error conditions
        self.raise_for_status(response)
        if response.status_code == 204:
            # No Content: nothing to decode.
            return response, None
        return response, response.json(cls=PotionJSONDecoder,
                                       client=self.owner._client,
                                       default_instance=self.instance)

    def __getattr__(self, item):
        # Delegate unknown attributes (method, schema, ...) to the wrapped Link.
        return getattr(self.link, item)

    def __call__(self, *arg, **params):
        data = None
        # Need to pass positional argument as *arg so that properties of the same name are not overridden in **params.
        if len(arg) > 1:
            raise TypeError('Link must be called with no more than one positional argument')
        elif len(arg) == 1:
            data = arg[0]
        if self.link.returns_pagination():
            # Paginated GET endpoints return a lazy list instead of one page.
            return PaginatedList(self, params)
        response, response_data = self.make_request(data, params)
        return response_data
| mit | 3a2b9c1a07d5c0e67360ee797a1494a4 | 35.160305 | 118 | 0.594469 | 4.283002 | false | false | false | false |
pycket/pycket | pycket/test/test_impersonators.py | 4 | 23872 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from pycket.test.testhelper import *
from pycket.values import *
from pycket.impersonators import *
from pycket.values_struct import *
import pytest
import sys
sys.setrecursionlimit(10000)
def test_impersonator_properties():
    """Impersonator properties attached via chaperone-struct are visible via their predicate/accessor."""
    m = run_mod(
    """
    #lang pycket
    (define-values (prop:blue blue? blue-ref) (make-impersonator-property 'blue))
    (define-values (prop:green green? green-ref) (make-impersonator-property 'green))
    (define-struct point (x y))
    (define mystruct (point 1 2))
    (define mystruct^ (chaperone-struct mystruct point-x #f prop:blue 7))
    (define is-blue (blue? mystruct^))
    (define is-green (green? mystruct^))
    (define blue-val (blue-ref mystruct^))
    """)
    is_blue = m.defs[W_Symbol.make("is-blue")]
    is_green = m.defs[W_Symbol.make("is-green")]
    blue_val = m.defs[W_Symbol.make("blue-val")]
    assert is_blue is w_true
    assert is_green is w_false
    assert isinstance(blue_val, W_Fixnum) and blue_val.value == 7
# This test ensures the new property based on this change to Racket:
# http://git.racket-lang.org/plt/commit/0b71b8481dcf0c8eb99edf5fef9bfdfeb4f92465
def test_chaperone_struct_self_arg():
    """Accessor interposition receives the chaperone itself as the self argument."""
    m = run_mod(
    """
    #lang pycket
    (struct point (x y))
    (define p (point 1 2))
    (define cell #f)
    (define p-chap
      (chaperone-struct p
        point-x (lambda (self val) (set! cell self) val)))
    (point-x p-chap)
    """)
    prox = m.defs[W_Symbol.make("p")]
    chap = m.defs[W_Symbol.make("p-chap")]
    cell = m.defs[W_Symbol.make("cell")]
    assert isinstance(prox, W_Struct)
    assert isinstance(cell, W_Cell)
    assert isinstance(chap, W_ChpStruct)
    self = cell.get_val()
    #assert self is not prox
    assert self is chap
def test_impersonate_struct_self_arg():
    """Same as the chaperone variant, but for impersonate-struct (mutable struct)."""
    m = run_mod(
    """
    #lang pycket
    (struct point (x y) #:mutable)
    (define p (point 1 2))
    (define cell #f)
    (define p-chap
      (impersonate-struct p
        point-x (lambda (self val) (set! cell self) val)))
    (point-x p-chap)
    """)
    prox = m.defs[W_Symbol.make("p")]
    chap = m.defs[W_Symbol.make("p-chap")]
    cell = m.defs[W_Symbol.make("cell")]
    assert isinstance(prox, W_Struct)
    assert isinstance(cell, W_Cell)
    assert isinstance(chap, W_ImpStruct)
    self = cell.get_val()
    #assert self is not prox
    assert self is chap
def test_noninterposing_chaperone():
    """chaperone-of? behavior for struct chaperones, including non-interposing ones (#f handlers)."""
    sym = W_Symbol.make
    m = run_mod(
    """
    #lang pycket
    (define-values (prop:blue blue? blue-ref) (make-impersonator-property 'blue))
    (define-values (prop:green green? green-ref) (make-struct-type-property 'green 'can-impersonate))
    (define a-equal+hash (list
                          (lambda (v1 v2 equal?)
                            (equal? (aa-y v1) (aa-y v2)))
                          (lambda (v1 hash)
                            (hash (aa-y v1)))
                          (lambda (v2 hash)
                            (hash (aa-y v2)))))
    (define (a-impersonator-of v) (a-x v))
    (define (aa-y v) (if (a? v) (a-y v) (pre-a-y v)))
    (define-struct pre-a (x y)
      #:property prop:equal+hash a-equal+hash
      #:property prop:green 'color)
    (define-struct a (x y)
      #:property prop:impersonator-of a-impersonator-of
      #:property prop:equal+hash a-equal+hash)
    (define-struct (a-more a) (z))
    (define-struct (a-new-impersonator a) ()
      #:property prop:impersonator-of a-impersonator-of)
    (define-struct (a-new-equal a) ()
      #:property prop:equal+hash a-equal+hash)
    (define a-pre-a (chaperone-struct (make-pre-a 17 1) pre-a-y (lambda (a v) v)))
    (define t1 (chaperone-of? a-pre-a a-pre-a))
    (define t2
      (chaperone-of?
       (make-pre-a 17 1)
       (chaperone-struct (make-pre-a 17 1) pre-a-y #f prop:blue 'color)))
    (define t3
      (chaperone-of?
       (make-pre-a 17 1)
       (chaperone-struct a-pre-a pre-a-y #f prop:blue 'color)))
    (define t4
      (chaperone-of? a-pre-a
                     (chaperone-struct a-pre-a pre-a-y #f prop:blue 'color)))
    (define t5
      (chaperone-of?
       (chaperone-struct a-pre-a pre-a-y #f prop:blue 'color)
       a-pre-a))
    (define t6
      (chaperone-of?
       a-pre-a
       (chaperone-struct a-pre-a pre-a-y (lambda (a v) v) prop:blue 'color)))
    (define t7
      (chaperone-of? a-pre-a
                     (chaperone-struct a-pre-a green-ref (lambda (a v) v))))
    (define non-interposing (chaperone-struct (make-pre-a 17 1) pre-a-y #f prop:blue 'color))
    """)
    assert m.defs[sym("t1")] is w_true
    assert m.defs[sym("t2")] is w_true
    assert m.defs[sym("t3")] is w_false
    assert m.defs[sym("t4")] is w_true
    assert m.defs[sym("t5")] is w_true
    assert m.defs[sym("t6")] is w_false
    assert m.defs[sym("t7")] is w_false
    a_pre_a = m.defs[sym("a-pre-a")]
    assert not a_pre_a.is_non_interposing_chaperone()
    # A chaperone whose handlers are all #f is "non-interposing": it only
    # attaches properties but still exposes them.
    interp = m.defs[sym("non-interposing")]
    assert interp.is_non_interposing_chaperone()
    blue = m.defs[sym("prop:blue")]
    assert isinstance(interp, W_InterposeStructBase)
    assert interp.get_property(blue) == sym("color")
def test_noninterposing_impersonator():
    """impersonator-of? behavior for struct impersonators, including non-interposing ones."""
    sym = W_Symbol.make
    m = run_mod(
    """
    #lang pycket
    (define-values (prop:blue blue? blue-ref) (make-impersonator-property 'blue))
    (define-values (prop:green green? green-ref) (make-struct-type-property 'green 'can-impersonate))
    (define a-equal+hash (list
                          (lambda (v1 v2 equal?)
                            (equal? (aa-y v1) (aa-y v2)))
                          (lambda (v1 hash)
                            (hash (aa-y v1)))
                          (lambda (v2 hash)
                            (hash (aa-y v2)))))
    (define (a-impersonator-of v) (a-x v))
    (define (aa-y v) (if (a? v) (a-y v) (pre-a-y v)))
    (define-struct pre-a (x y)
      #:mutable
      #:property prop:equal+hash a-equal+hash
      #:property prop:green 'color)
    (define-struct a (x y)
      #:mutable
      #:property prop:impersonator-of a-impersonator-of
      #:property prop:equal+hash a-equal+hash)
    (define-struct (a-more a) (z) #:mutable)
    (define-struct (a-new-impersonator a) ()
      #:mutable
      #:property prop:impersonator-of a-impersonator-of)
    (define-struct (a-new-equal a) ()
      #:mutable
      #:property prop:equal+hash a-equal+hash)
    (define a-pre-a (impersonate-struct (make-pre-a 17 1) pre-a-y (lambda (a v) v)))
    (define t1 (impersonator-of? a-pre-a a-pre-a))
    (define t2
      (impersonator-of?
       (make-pre-a 17 1)
       (impersonate-struct (make-pre-a 17 1) pre-a-y #f prop:blue 'color)))
    (define t3
      (impersonator-of?
       (make-pre-a 17 1)
       (impersonate-struct a-pre-a pre-a-y #f prop:blue 'color)))
    (define t4
      (impersonator-of? a-pre-a
                        (impersonate-struct a-pre-a pre-a-y #f prop:blue 'color)))
    (define t5
      (impersonator-of?
       (impersonate-struct a-pre-a pre-a-y #f prop:blue 'color)
       a-pre-a))
    (define t6
      (impersonator-of?
       a-pre-a
       (impersonate-struct a-pre-a pre-a-y (lambda (a v) v) prop:blue 'color)))
    (define t7
      (impersonator-of? a-pre-a
                        (impersonate-struct a-pre-a green-ref (lambda (a v) v))))
    (define non-interposing (impersonate-struct (make-pre-a 17 1) pre-a-y #f prop:blue 'color))
    """)
    assert m.defs[sym("t1")] is w_true
    assert m.defs[sym("t2")] is w_true
    assert m.defs[sym("t3")] is w_false
    assert m.defs[sym("t4")] is w_true
    assert m.defs[sym("t5")] is w_true
    assert m.defs[sym("t6")] is w_false
    assert m.defs[sym("t7")] is w_false
    a_pre_a = m.defs[sym("a-pre-a")]
    assert not a_pre_a.is_non_interposing_chaperone()
    interp = m.defs[sym("non-interposing")]
    assert interp.is_non_interposing_chaperone()
    blue = m.defs[sym("prop:blue")]
    assert isinstance(interp, W_InterposeStructBase)
    assert interp.get_property(blue) == sym("color")
def test_noninterposing_chaperone_procedure():
    """chaperone-of? through procedure chaperones with #f (non-interposing) wrappers."""
    m = run_mod(
    """
    #lang racket/base
    (define-values (prop:blue blue? blue-ref) (make-impersonator-property 'blue))
    (define wrapper (lambda (x) x))
    (define f1 (lambda (a) a))
    (define f2 (lambda (b) b))
    (define f3 (lambda (c) c))
    (define g1 (chaperone-procedure f1 wrapper))
    (define g2 (chaperone-procedure f2 wrapper))
    (define g3 (chaperone-procedure f2 wrapper))
    (define t1 (chaperone-of? g1 (chaperone-procedure g1 #f prop:blue 'color)))
    (define t2 (chaperone-of? g2 (chaperone-procedure g2 #f prop:blue 'color)))
    (define t3 (chaperone-of? g3 (chaperone-procedure g3 #f prop:blue 'color)))
    (define t4 (chaperone-of? f3 (chaperone-procedure f3 #f prop:blue 'color)))
    (define t5 (chaperone-of? f3 (chaperone-procedure g3 #f prop:blue 'color)))
    """)
    assert m.defs[W_Symbol.make("t1")] is values.w_true
    assert m.defs[W_Symbol.make("t2")] is values.w_true
    assert m.defs[W_Symbol.make("t3")] is values.w_true
    assert m.defs[W_Symbol.make("t4")] is values.w_true
    assert m.defs[W_Symbol.make("t5")] is values.w_false
def test_noninterposing_impersonate_procedure():
    """impersonator-of? through procedure impersonators with #f (non-interposing) wrappers."""
    m = run_mod(
    """
    #lang racket/base
    (define-values (prop:blue blue? blue-ref) (make-impersonator-property 'blue))
    (define wrapper (lambda (x) x))
    (define f1 (lambda (a) a))
    (define f2 (lambda (b) b))
    (define f3 (lambda (c) c))
    (define g1 (impersonate-procedure f1 wrapper))
    (define g2 (impersonate-procedure f2 wrapper))
    (define g3 (impersonate-procedure f2 wrapper))
    (define t1 (impersonator-of? g1 (impersonate-procedure g1 #f prop:blue 'color)))
    (define t2 (impersonator-of? g2 (impersonate-procedure g2 #f prop:blue 'color)))
    (define t3 (impersonator-of? g3 (impersonate-procedure g3 #f prop:blue 'color)))
    (define t4 (impersonator-of? f3 (impersonate-procedure f3 #f prop:blue 'color)))
    (define t5 (impersonator-of? f3 (impersonate-procedure g3 #f prop:blue 'color)))
    """)
    assert m.defs[W_Symbol.make("t1")] is values.w_true
    assert m.defs[W_Symbol.make("t2")] is values.w_true
    assert m.defs[W_Symbol.make("t3")] is values.w_true
    assert m.defs[W_Symbol.make("t4")] is values.w_true
    assert m.defs[W_Symbol.make("t5")] is values.w_false
def test_chaperone_procedure_star():
    """chaperone-procedure* passes the outermost chaperone itself to the wrapper."""
    m = run_mod(
    """
    #lang racket/base
    (define val #f)
    (define proc (lambda (x) x))
    (define g
      (chaperone-procedure* proc (lambda (p x) (set! val p) x)))
    (g 1)
    """)
    proc = m.defs[W_Symbol.make("g")]
    val = m.defs[W_Symbol.make("val")]
    assert isinstance(val, W_Cell)
    assert proc is val.get_val()
def test_chaperone_vector_stack_exhaustion():
    """vector-ref through 1000 nested chaperones must not exhaust the stack."""
    m = run_mod(
    """
    #lang racket/base
    (define d
      (for/fold ([v (vector 1 2 3)])
                ([i 1000])
        (chaperone-vector v (lambda (x i val) val) (lambda (x i val) val))))
    (vector-ref d 0)
    """)
def test_impersonate_vector_stack_exhaustion():
    """vector-ref through 1000 nested impersonators must not exhaust the stack."""
    m = run_mod(
    """
    #lang racket/base
    (define d
      (for/fold ([v (vector 1 2 3)])
                ([i 1000])
        (impersonate-vector v (lambda (x i val) val) (lambda (x i val) val))))
    (vector-ref d 0)
    """)
def test_chaperone_vector_to_list():
    """vector->list on a chaperoned vector triggers the ref handler once per element."""
    m = run_mod(
    """
    #lang pycket
    (define v (vector 1 2 3 4 5))
    (define cell 0)
    (define imp
      (chaperone-vector v
        (lambda (self i v) (set! cell (+ cell 1)) v)
        (lambda (self i v) v)))
    (define chp
      (chaperone-vector v
        (lambda (self i v) (set! cell (+ cell 1)) v)
        (lambda (self i v) v)))
    (define base (vector->list v))
    (define lst1 (vector->list imp))
    (define lst2 (vector->list chp))
    (define cmp1 (equal? base lst1))
    (define cmp2 (equal? base lst2))
    """)
    cmp1 = m.defs[W_Symbol.make("cmp1")]
    cmp2 = m.defs[W_Symbol.make("cmp2")]
    cell = m.defs[W_Symbol.make("cell")]
    assert cmp1 is values.w_true
    assert cmp2 is values.w_true
    assert isinstance(cell, values.W_Cell)
    # Two wrapped vectors of 5 elements each -> 10 handler invocations.
    count = cell.get_val()
    assert isinstance(count, values.W_Fixnum) and count.value == 10
def test_impersonate_vector_to_list():
    """vector->list on an impersonated vector triggers the ref handler once per element."""
    m = run_mod(
    """
    #lang pycket
    (define v (vector 1 2 3 4 5))
    (define cell 0)
    (define imp
      (impersonate-vector v
        (lambda (self i v) (set! cell (+ cell 1)) v)
        (lambda (self i v) v)))
    (define chp
      (impersonate-vector v
        (lambda (self i v) (set! cell (+ cell 1)) v)
        (lambda (self i v) v)))
    (define base (vector->list v))
    (define lst1 (vector->list imp))
    (define lst2 (vector->list chp))
    (define cmp1 (equal? base lst1))
    (define cmp2 (equal? base lst2))
    """)
    cmp1 = m.defs[W_Symbol.make("cmp1")]
    cmp2 = m.defs[W_Symbol.make("cmp2")]
    cell = m.defs[W_Symbol.make("cell")]
    assert cmp1 is values.w_true
    assert cmp2 is values.w_true
    assert isinstance(cell, values.W_Cell)
    # Two wrapped vectors of 5 elements each -> 10 handler invocations.
    count = cell.get_val()
    assert isinstance(count, values.W_Fixnum) and count.value == 10
def test_impersonate_procedure_application_mark():
    """impersonator-prop:application-mark installs a continuation mark around the call."""
    m = run_mod(
    """
    #lang racket/base
    (define saved '())
    (define (f x)
      (call-with-immediate-continuation-mark
       'z
       (lambda (val)
         (list val
               (continuation-mark-set->list (current-continuation-marks) 'z)))))
    (define g (impersonate-procedure
               f
               (lambda (a)
                 (set! saved (cons (continuation-mark-set-first #f 'z)
                                   saved))
                 (values (lambda (r) r)
                         a))
               impersonator-prop:application-mark
               (cons 'z 12)))
    (define h (impersonate-procedure
               g
               (lambda (a)
                 (values (lambda (r) r)
                         a))
               impersonator-prop:application-mark
               (cons 'z 9)))
    (define i (impersonate-procedure
               f
               (lambda (a)
                 (set! saved (cons (continuation-mark-set-first #f 'z)
                                   saved))
                 a)
               impersonator-prop:application-mark
               (cons 'z 11)))
    (define j (impersonate-procedure
               i
               (lambda (a) a)
               impersonator-prop:application-mark
               (cons 'z 12)))
    (define valid1 (equal? (g 10) '(12 (12))))
    (define valid2 (equal? (h 10) '(12 (12 9))))
    (define valid3 (equal? (i 10) '(11 (11))))
    (define valid4 (equal? (j 10) '(11 (11))))
    (define valid5 (equal? saved '(12 #f 9 #f)))
    """)
    valid1 = m.defs[W_Symbol.make("valid1")]
    valid2 = m.defs[W_Symbol.make("valid2")]
    valid3 = m.defs[W_Symbol.make("valid3")]
    valid4 = m.defs[W_Symbol.make("valid4")]
    valid5 = m.defs[W_Symbol.make("valid5")]
    assert valid1 is w_true
    assert valid2 is w_true
    assert valid3 is w_true
    assert valid4 is w_true
    assert valid5 is w_true
def test_impersonator_of_basic(doctest):  # doctest fixture: the body below IS the test program
    r"""
    ! (require racket/base)
    ! (define assert (lambda (v) (unless v (error 'imp-predicates "Assertion violation"))))
    ! (define (sym a b) (or (chaperone-of? a b) (chaperone-of? b a)))
    > (chaperone-of? 10 10)
    #t
    > (chaperone-of? '(10) '(10))
    #t
    > (impersonator-of? '(10) '(10))
    #t
    > (chaperone-of? '#(1 2 3) '#(1 2 3))
    #t
    > (impersonator-of? '#(1 2 3) '#(1 2 3))
    #t
    > (chaperone-of? '#&(1 2 3) '#&(1 2 3))
    #t
    > (impersonator-of? '#&(1 2 3) '#&(1 2 3))
    #t
    > (chaperone-of? (make-string 1 #\x) (make-string 1 #\x))
    #f
    > (impersonator-of? (make-string 1 #\x) (make-string 1 #\x))
    #t
    > (sym (string->immutable-string "x") (make-string 1 #\x))
    #f
    > (sym '#(1 2 3) (vector 1 2 3))
    #f
    > (sym '#&17 (box 17))
    #f
    > (equal? (chaperone-procedure add1 void) (chaperone-procedure add1 void))
    #t
    > (equal? (impersonate-procedure add1 void) (chaperone-procedure add1 void))
    #t
    > (equal? (chaperone-procedure add1 void) (impersonate-procedure add1 void))
    #t
    """
def test_impersonator_of_structs():
m = run_mod(
"""
#lang pycket
(define assert
(lambda (v)
(unless v
(error 'imp-predicates "Assertion violation"))))
(define (chaperone-of?/impersonator a b)
(assert (impersonator-of? a b))
(chaperone-of? a b))
(define-struct o (a b))
(define-struct p (x y) #:transparent)
(define-struct (p2 p) (z) #:transparent)
(define-struct q (u [w #:mutable]) #:transparent)
(define-struct (q2 q) (v) #:transparent)
(define r1 (chaperone-of? (make-o 1 2) (make-o 1 2)))
(define r2 (impersonator-of? (make-o 1 2) (make-o 1 2)))
(define r3 (chaperone-of?/impersonator (make-p 1 2) (make-p 1 2)))
(define r4 (chaperone-of?/impersonator (make-p 1 (box 2)) (make-p 1 (box 2))))
(define r5 (chaperone-of?/impersonator (make-p2 1 2 3) (make-p2 1 2 3)))
(define r6 (chaperone-of?/impersonator (make-q 1 2) (make-q 1 2)))
(define r7 (chaperone-of?/impersonator (make-q2 1 2 3) (make-q2 1 2 3)))
""")
r1 = m.defs[W_Symbol.make("r1")]
r2 = m.defs[W_Symbol.make("r2")]
r3 = m.defs[W_Symbol.make("r3")]
r4 = m.defs[W_Symbol.make("r4")]
r5 = m.defs[W_Symbol.make("r5")]
r6 = m.defs[W_Symbol.make("r6")]
r7 = m.defs[W_Symbol.make("r7")]
assert r1 is w_false
assert r2 is w_false
assert r3 is w_true
assert r4 is w_false
assert r5 is w_true
assert r6 is w_false
assert r7 is w_false
def test_chaperone_procedure_application_mark():
m = run_mod(
"""
#lang racket/base
(define saved '())
(define (f x)
(call-with-immediate-continuation-mark
'z
(lambda (val)
(list val
(continuation-mark-set->list (current-continuation-marks) 'z)))))
(define g (chaperone-procedure
f
(lambda (a)
(set! saved (cons (continuation-mark-set-first #f 'z)
saved))
(values (lambda (r) r)
a))
impersonator-prop:application-mark
(cons 'z 12)))
(define h (chaperone-procedure
g
(lambda (a)
(values (lambda (r) r)
a))
impersonator-prop:application-mark
(cons 'z 9)))
(define i (chaperone-procedure
f
(lambda (a)
(set! saved (cons (continuation-mark-set-first #f 'z)
saved))
a)
impersonator-prop:application-mark
(cons 'z 11)))
(define j (chaperone-procedure
i
(lambda (a) a)
impersonator-prop:application-mark
(cons 'z 12)))
(define valid1 (equal? (g 10) '(12 (12))))
(define valid2 (equal? (h 10) '(12 (12 9))))
(define valid3 (equal? (i 10) '(11 (11))))
(define valid4 (equal? (j 10) '(11 (11))))
(define valid5 (equal? saved '(12 #f 9 #f)))
""")
valid1 = m.defs[W_Symbol.make("valid1")]
valid2 = m.defs[W_Symbol.make("valid2")]
valid3 = m.defs[W_Symbol.make("valid3")]
valid4 = m.defs[W_Symbol.make("valid4")]
valid5 = m.defs[W_Symbol.make("valid5")]
assert valid1 is w_true
assert valid2 is w_true
assert valid3 is w_true
assert valid4 is w_true
assert valid5 is w_true
def test_application_mark_propagation():
m = run_mod(
u"""
#lang racket/base
(define msgs '())
(define f
(chaperone-procedure
(λ (x) 'wrong)
(λ (x)
(call-with-immediate-continuation-mark
'key
(λ (m)
(set! msgs (cons m msgs))
(values x))))
impersonator-prop:application-mark
(cons 'key 'skip-this-check)))
(void ((chaperone-procedure f (lambda (x) x)) 42)
(f 42))
(define r (equal? msgs '(#f #f)))
""")
r = m.defs[W_Symbol.make("r")]
assert r is w_true
def test_chaperone_vector_to_immutable_vector(doctest):
u"""
! (define v (vector 1 2 3 4 5))
! (define cell '())
! (define v^ (chaperone-vector v (λ (self i val) (set! cell (append cell (list i))) val) (λ (self i val) val)))
> (vector->immutable-vector v^)
#(1 2 3 4 5)
> cell
'(0 1 2 3 4)
"""
def test_rfindler_impersonator_examples(doctest):
ur"""
! (require racket/base)
! (define (add15 x) (+ x 15))
! (define store '())
! (define (clear) (let ([v store]) (begin (set! store '()) v)))
! (define (printf^ fmt . args) (set! store (append store (list (apply format fmt args)))))
! (define add15+print (impersonate-procedure add15 (λ (x) (printf^ "called with ~s" x) (values (λ (res) (printf^ "returned ~s" res) res) x))))
! (define-values (imp-prop:p1 imp-prop:p1? imp-prop:p1-get) (make-impersonator-property 'imp-prop:p1))
! (define-values (imp-prop:p2 imp-prop:p2? imp-prop:p2-get) (make-impersonator-property 'imp-prop:p2))
! (define add15.2 (impersonate-procedure add15 #f imp-prop:p1 11))
! (define add15.3 (impersonate-procedure add15.2 #f imp-prop:p2 13))
! (define add15.4 (impersonate-procedure add15.3 #f imp-prop:p1 101))
> (add15 27)
42
> (add15+print 27)
42
> (clear)
'("called with 27" "returned 42")
> (add15.2 2)
17
> (imp-prop:p1? add15.2)
#t
> (imp-prop:p1-get add15.2)
11
> (imp-prop:p2? add15.2)
#f
> (add15.3 3)
18
> (imp-prop:p1? add15.3)
#t
> (imp-prop:p1-get add15.3)
11
> (imp-prop:p2? add15.3)
#t
> (imp-prop:p2-get add15.3)
13
> (add15.4 4)
19
> (imp-prop:p1? add15.4)
#t
> (imp-prop:p1-get add15.4)
101
> (imp-prop:p2? add15.4)
#t
> (imp-prop:p2-get add15.4)
13
"""
def test_impersonate_procedure_callable_struct():
m = run_mod(
"""
#lang racket/kernel
(#%require racket/private/define
racket/private/small-scheme
racket/private/more-scheme)
(define-values (struct:keyword-procedure mk-kw-proc keyword-procedure?
keyword-procedure-ref keyword-procedure-set!)
(make-struct-type 'keyword-procedure #f 4 0 #f
(list (cons prop:checked-procedure #t))
(current-inspector)
#f
'(0 1 2 3)))
(define-values (struct:okp make-optional-keyword-procedure okp? okp-ref okp-set!)
(make-struct-type 'procedure
struct:keyword-procedure
1 0 #f
(list)
(current-inspector) 0))
(define v5 (make-optional-keyword-procedure #f #f null '(#:x) #f))
(define cv2 (chaperone-procedure v5 void))
(define result (keyword-procedure? cv2))
""")
assert m.defs[W_Symbol.make("result")] is w_true
def test_impersonate_procedure_callable_struct2(doctest):
u"""
! (require racket/private/kw)
! (struct annotated-proc (base note) #:property prop:procedure (struct-field-index base))
! (define example (annotated-proc (λ (x) x) "The identity function"))
! (define imp (impersonate-procedure example add1))
! (define imp2 (chaperone-struct example annotated-proc-note (λ (self x) x)))
> (imp 5)
6
> (annotated-proc-note example)
"The identity function"
> (annotated-proc-note imp)
"The identity function"
> (eq? (annotated-proc-note example) (annotated-proc-note imp))
#t
> (eq? (annotated-proc-note example) (annotated-proc-note imp2))
#t
"""
| mit | 2493efe3d6212f16fe9f6bc38fbfcc86 | 33.887427 | 146 | 0.568747 | 3.036005 | false | false | false | false |
pycket/pycket | pycket/values_parameter.py | 1 | 7530 |
from pycket import values
from pycket.arity import Arity
from pycket.base import W_Object
from pycket.cont import call_cont, continuation, BaseCont
from pycket.error import SchemeException
from pycket.hash.persistent_hash_map import make_persistent_hash_type
from rpython.rlib import jit, objectmodel
from rpython.rlib.rarithmetic import r_uint
@objectmodel.always_inline
def compute_hash(x):
assert objectmodel.we_are_translated() or type(x) is ParamKey
return r_uint(objectmodel.compute_hash(x))
@objectmodel.always_inline
def equal(a, b):
assert objectmodel.we_are_translated() or type(a) is ParamKey
assert objectmodel.we_are_translated() or type(b) is ParamKey
return a is b
ParameterizationHashTable = make_persistent_hash_type(
super = W_Object,
name = "ParameterizationHashTable",
hashfun = compute_hash,
equal = equal)
# This is a Scheme_Parameterization in Racket
class RootParameterization(object):
_attrs_ = ["table"]
def __init__(self):
# This table maps ParamKey -> W_ThreadCell
self.table = {}
@continuation
def extend_cont(paramz, i, eventual_vals, vals, params, env, cont, _vals):
from pycket.interpreter import check_one_val, return_value
new_val = check_one_val(_vals)
eventual_vals[i] = new_val
i += 1
if i == len(vals):
new_paramz = paramz._extend(params, vals)
return return_value(new_paramz, env, cont)
else:
return step_extend(paramz, i, eventual_vals, vals, params, env, cont)
def step_extend(paramz, i, eventual_vals, vals, params, env, cont):
from pycket.interpreter import check_one_val, return_value
this_param = params[i]
assert isinstance(this_param, W_BaseParameter)
new_cont = extend_cont(paramz, i, eventual_vals, vals, params, env, cont)
guard = this_param.guard
if guard is None:
return return_value(vals[i], env, new_cont)
else:
return guard.call([vals[i]], env, new_cont)
# This is a Scheme_Config in Racket
# Except that Scheme_Config uses a functional hash table and this uses a list that we copy
class W_Parameterization(W_Object):
_immutable_fields_ = ["root", "map"]
_attrs_ = ["root", "map"]
errorname = "parameterization"
def __init__(self, root, map):
self.root = root
self.map = map
@jit.look_inside_iff(lambda self, params, vals: \
jit.loop_unrolling_heuristic(params, len(params), values.UNROLLING_CUTOFF) and
jit.loop_unrolling_heuristic(vals, len(vals), values.UNROLLING_CUTOFF))
def _extend(self, params, vals):
assert len(params) == len(vals)
map = self.map
for i, param in enumerate(params):
cell = values.W_ThreadCell(vals[i], True)
map = map.assoc(param.get_key(), cell)
return W_Parameterization(self.root, map)
def extend(self, params, vals, env, cont):
from pycket.interpreter import check_one_val, return_value
assert len(params) == len(vals)
if len(params) == 0:
return return_value(self, env, cont)
eventual_vals = [None] * len(vals)
return step_extend(self, 0, eventual_vals, vals, params, env, cont)
@jit.elidable
def get(self, param):
key = param.key
result = self.map.val_at(key, None)
if result is not None:
return result
result = self.root.table[key]
assert result is not None
return result
def tostring(self):
return "#<parameterization>"
# This will need to be thread-specific
top_level_config = W_Parameterization(RootParameterization(), ParameterizationHashTable.EMPTY())
def find_param_cell(cont, param):
assert isinstance(cont, BaseCont)
p = cont.get_mark_first(values.parameterization_key)
assert isinstance(p, W_Parameterization)
assert isinstance(param, W_Parameter)
v = p.get(param)
assert isinstance(v, values.W_ThreadCell)
return v
@continuation
def param_set_cont(cell, env, cont, vals):
from pycket.interpreter import check_one_val, return_value
v = check_one_val(vals)
cell.set(v)
return return_value(values.w_void, env, cont)
# a token
# Must be a subclass of W_Object to fit into immutable hash tables
class ParamKey(W_Object):
_attrs_ = []
def __init__(self):
pass
OBJ_NAME = "parameter-procedure"
class W_BaseParameter(W_Object):
errorname = "parameter"
_attrs_ = _immutable_fields_ = ["guard", "name"]
ARITY = Arity.oneof(0, 1)
def __init__(self, guard=None, name=OBJ_NAME):
self.name = name
self.guard = None if guard is values.w_false else guard
def iscallable(self):
return True
def get_key(self):
raise NotImplementedError("abstract base class")
def get_arity(self, promote=False):
return W_BaseParameter.ARITY
def tostring(self):
return "#<procedure:%s>" % self.name
class W_Parameter(W_BaseParameter):
_immutable_fields_ = ["key"]
_attrs_ = ["key"]
def __init__(self, val, guard=None, name=OBJ_NAME):
W_BaseParameter.__init__(self, guard, name)
self.key = ParamKey()
cell = values.W_ThreadCell(val, True)
top_level_config.root.table[self.key] = cell
def get(self, cont):
return self.get_cell(cont).get()
def get_cell_value(self, cont):
# same with the get right above.
# To be called internally without confusing rpython
# (other classes have different get methods as well)
return self.get_cell(cont).get()
def get_cell(self, cont):
cell = find_param_cell(cont, self)
assert isinstance(cell, values.W_ThreadCell)
return cell
def get_key(self):
return self.key
def call(self, args, env, cont):
from pycket.interpreter import return_value
if len(args) == 0:
return return_value(self.get(cont), env, cont)
elif len(args) == 1:
cell = find_param_cell(cont, self)
assert isinstance(cell, values.W_ThreadCell)
if self.guard:
return self.guard.call([args[0]], env, param_set_cont(cell, env, cont))
else:
cell.set(args[0])
return return_value(values.w_void, env, cont)
else:
raise SchemeException("wrong number of arguments to parameter")
class W_DerivedParameter(W_BaseParameter):
_immutable_fields_ = ["parameter", "wrap"]
_attrs_ = ["parameter", "wrap"]
def __init__(self, param, guard, wrap):
W_BaseParameter.__init__(self, guard)
self.parameter = param
self.wrap = wrap
def get(self, cont):
return self.parameter.get(cont)
def get_cell(self, cont):
return self.parameter.get_cell(cont)
def get_key(self):
return self.parameter.get_key()
def call(self, args, env, cont):
from pycket.interpreter import return_value
if len(args) == 0:
return self.parameter.call(args, env, call_cont(self.wrap, env, cont))
elif len(args) == 1:
if self.guard:
return self.guard.call(args, env,
call_cont(self.parameter, env, cont))
return self.parameter.call(args, env, cont)
else:
raise SchemeException("wrong number of arguments to parameter")
| mit | 87d4d46d7412e91b7ffe254d536e446c | 32.918919 | 96 | 0.62656 | 3.594272 | false | false | false | false |
pycket/pycket | pycket/hash/equal.py | 2 | 19602 |
from pycket import config
from pycket import values, values_string
from pycket.base import SingletonMeta, UnhashableType
from pycket.hash.base import W_HashTable, get_dict_item, next_valid_index, w_missing
from pycket.error import SchemeException
from pycket.cont import continuation, loop_label
from rpython.rlib import rerased, jit
from rpython.rlib.rarithmetic import r_uint, intmask
from rpython.rlib.objectmodel import compute_hash, import_from_mixin, r_dict, specialize
import sys
def elidable_iff(pred):
def wrapper(func):
@jit.elidable
def elidable(*args):
return func(*args)
def inner(*args):
if jit.we_are_jitted() and pred(*args):
return elidable(*args)
return func(*args)
return inner
return wrapper
@loop_label
def equal_hash_ref_loop(data, idx, key, env, cont):
from pycket.interpreter import return_value
from pycket.prims.equal import equal_func_unroll_n, EqualInfo
if idx >= len(data):
return return_value(w_missing, env, cont)
k, v = data[idx]
info = EqualInfo.BASIC_SINGLETON
cont = catch_ref_is_equal_cont(data, idx, key, v, env, cont)
return equal_func_unroll_n(k, key, info, env, cont, 5)
@continuation
def catch_ref_is_equal_cont(data, idx, key, v, env, cont, _vals):
from pycket.interpreter import check_one_val, return_value
val = check_one_val(_vals)
if val is not values.w_false:
return return_value(v, env, cont)
return equal_hash_ref_loop(data, idx + 1, key, env, cont)
def equal_hash_set_loop(data, idx, key, val, env, cont):
from pycket.interpreter import check_one_val, return_value
from pycket.prims.equal import equal_func, EqualInfo
if idx >= len(data):
data.append((key, val))
return return_value(values.w_void, env, cont)
k, _ = data[idx]
info = EqualInfo.BASIC_SINGLETON
return equal_func(k, key, info, env,
catch_set_is_equal_cont(data, idx, key, val, env, cont))
@continuation
def catch_set_is_equal_cont(data, idx, key, val, env, cont, _vals):
from pycket.interpreter import check_one_val, return_value
cmp = check_one_val(_vals)
if cmp is not values.w_false:
data[idx] = (key, val)
return return_value(values.w_void, env, cont)
return equal_hash_set_loop(data, idx + 1, key, val, env, cont)
class HashmapStrategy(object):
__metaclass__ = SingletonMeta
def get(self, w_dict, w_key, env, cont):
raise NotImplementedError("abstract base class")
def set(self, w_dict, w_key, w_val, env, cont):
raise NotImplementedError("abstract base class")
def rem(self, w_dict, w_key, env, cont):
raise NotImplementedError("abstract base class")
def rem_inplace(self, w_dict, w_key, env, cont):
raise NotImplementedError("abstract base class")
def items(self, w_dict):
raise NotImplementedError("abstract base class")
def get_item(self, w_dict, i):
raise NotImplementedError("abstract base class")
def hash_iterate_next(self, w_dict, i):
index = i.value
if index >= self.length(w_dict) - 1:
return values.w_false
return values.wrap(index + 1)
def hash_iterate_first(self, w_dict):
return 0
def length(self, w_dict):
raise NotImplementedError("abstract base class")
def create_storage(self, keys, vals):
raise NotImplementedError("abstract base class")
@jit.look_inside_iff(lambda keys:
jit.loop_unrolling_heuristic(
keys, len(keys), values.UNROLLING_CUTOFF))
def _find_strategy_class(keys):
if not config.strategies:
return ObjectHashmapStrategy.singleton
if len(keys) == 0:
return EmptyHashmapStrategy.singleton
# An empty vector stays empty forever. Don't implement special EmptyVectorStrategy.
single_class = type(keys[0])
for elem in keys:
if not isinstance(elem, single_class):
return ObjectHashmapStrategy.singleton
if single_class is values.W_Fixnum:
return FixnumHashmapStrategy.singleton
if single_class is values.W_Symbol:
return SymbolHashmapStrategy.singleton
if single_class is values_string.W_String:
return StringHashmapStrategy.singleton
if single_class is values.W_ImmutableBytes:
return ImmutableByteHashmapStrategy.singleton
if single_class is values.W_MutableBytes:
return MutableByteHashmapStrategy.singleton
return ObjectHashmapStrategy.singleton
class UnwrappedHashmapStrategyMixin(object):
# the concrete class needs to implement:
# erase, unerase, is_correct_type, wrap, unwrap
# create_storage needs to be overwritten if an r_dict is needed
@staticmethod
@elidable_iff(
lambda w_dict: jit.isconstant(w_dict) and w_dict.is_immutable)
def get_hstorage(w_dict):
return w_dict.hstorage
def get_storage(self, w_dict):
return self.unerase(self.get_hstorage(w_dict))
def get(self, w_dict, w_key, env, cont):
from pycket.interpreter import return_value
if self.is_correct_type(w_key):
storage = self.get_storage(w_dict)
w_res = storage.get(self.unwrap(w_key), w_missing)
return return_value(w_res, env, cont)
# XXX should not dehomogenize always
self.switch_to_object_strategy(w_dict)
return w_dict.hash_ref(w_key, env, cont)
def set(self, w_dict, w_key, w_val, env, cont):
from pycket.interpreter import return_value
if self.is_correct_type(w_key):
storage = self.get_storage(w_dict)
storage[self.unwrap(w_key)] = w_val
return return_value(values.w_void, env, cont)
self.switch_to_object_strategy(w_dict)
return w_dict.hash_set(w_key, w_val, env, cont)
def _set(self, w_dict, w_key, w_val):
if not self.is_correct_type(w_key):
raise KeyError
storage = self.unerase(w_dict.hstorage)
key = self.unwrap(w_key)
storage[key] = w_val
def rem_inplace(self, w_dict, w_key, env, cont):
from pycket.interpreter import return_value
if not self.is_correct_type(w_key):
raise KeyError
storage = self.unerase(w_dict.hstorage)
key = self.unwrap(w_key)
if key in storage:
del storage[key]
return return_value(values.w_void, env, cont)
def items(self, w_dict):
return [(self.wrap(key), w_val) for key, w_val in self.unerase(w_dict.hstorage).iteritems()]
def get_item(self, w_dict, i):
key, w_val = get_dict_item(self.unerase(w_dict.hstorage), i)
return self.wrap(key), w_val
def length(self, w_dict):
return len(self.unerase(w_dict.hstorage))
def create_storage(self, keys, vals):
d = self._create_empty_dict()
if not keys:
return self.erase(d)
for i, w_key in enumerate(keys):
d[self.unwrap(w_key)] = vals[i]
return self.erase(d)
def _create_empty_dict(self):
return {}
def switch_to_object_strategy(self, w_dict):
d = self.unerase(w_dict.hstorage)
keys = [self.wrap(key) for key in d.keys()]
values = d.values()
strategy = ObjectHashmapStrategy.singleton
storage = strategy.create_storage(keys, values)
w_dict.strategy = strategy
w_dict.hstorage = storage
class EmptyHashmapStrategy(HashmapStrategy):
erase, unerase = rerased.new_static_erasing_pair("object-hashmap-strategy")
def get(self, w_dict, w_key, env, cont):
from pycket.interpreter import return_value
return return_value(w_missing, env, cont) # contains nothing
def set(self, w_dict, w_key, w_val, env, cont):
self.switch_to_correct_strategy(w_dict, w_key)
return w_dict.hash_set(w_key, w_val, env, cont)
def rem(self, w_dict, w_key, env, cont):
from pycket.interpreter import return_value
return return_value(w_dict, env, cont) # there's nothing to remove
def _set(self, w_dict, w_key, w_val):
self.switch_to_correct_strategy(w_dict, w_key)
return w_dict._set(w_key, w_val)
def rem_inplace(self, w_dict, w_key, env, cont):
from pycket.interpreter import return_value
return return_value(values.w_void, env, cont) # there's nothing to remove
def items(self, w_dict):
return []
def get_item(self, w_dict, i):
raise IndexError
def length(self, w_dict):
return 0
def create_storage(self, keys, vals):
assert not keys
assert not vals
return self.erase(None)
def switch_to_correct_strategy(self, w_dict, w_key):
if type(w_key) is values.W_Fixnum:
strategy = FixnumHashmapStrategy.singleton
elif type(w_key) is values.W_Symbol:
strategy = SymbolHashmapStrategy.singleton
elif isinstance(w_key, values_string.W_String):
strategy = StringHashmapStrategy.singleton
elif isinstance(w_key, values.W_ImmutableBytes):
strategy = ImmutableByteHashmapStrategy.singleton
elif isinstance(w_key, values.W_MutableBytes):
strategy = MutableByteHashmapStrategy.singleton
else:
strategy = ObjectHashmapStrategy.singleton
storage = strategy.create_storage([], [])
w_dict.strategy = strategy
w_dict.hstorage = storage
UNHASHABLE_TAG = 0b0001
def tagged_hash(w_object):
try:
return w_object.hash_equal() << 1
except UnhashableType:
return UNHASHABLE_TAG
class ObjectHashmapStrategy(HashmapStrategy):
erase, unerase = rerased.new_static_erasing_pair("object-hashmap-strategy")
import_from_mixin(UnwrappedHashmapStrategyMixin)
def get_bucket(self, w_dict, w_key, nonull=False):
hash = tagged_hash(w_key)
storage = self.get_storage(w_dict)
bucket = storage.get(hash, None)
if nonull and bucket is None:
storage[hash] = bucket = []
return bucket
def get(self, w_dict, w_key, env, cont):
from pycket.interpreter import return_value
bucket = self.get_bucket(w_dict, w_key)
if not bucket:
return return_value(w_missing, env, cont)
return equal_hash_ref_loop(bucket, 0, w_key, env, cont)
def set(self, w_dict, w_key, w_val, env, cont):
bucket = self.get_bucket(w_dict, w_key, nonull=True)
return equal_hash_set_loop(bucket, 0, w_key, w_val, env, cont)
def rem_inplace(self, w_dict, w_key, env, cont):
raise NotImplementedError("hash-remove! not supported for ObjectHashmapStrategy")
def rem(self, w_dict, w_key, env, cont):
from pycket.interpreter import return_value
if not w_dict.immutable():
raise SchemeException("Expected an immutable hash table")
new_keys = []
new_vals = []
for (k, v) in w_dict.hash_items():
if k is w_key:
continue
new_keys.append(k)
new_vals.append(v)
assert isinstance(w_dict, W_EqualHashTable)
new_table = W_EqualHashTable(new_keys, new_vals, True)
return return_value(new_table, env, cont)
def _set(self, w_dict, w_key, w_val):
raise NotImplementedError("Unsafe set not supported for ObjectHashmapStrategy")
def items(self, w_dict):
items = []
storage = self.unerase(w_dict.hstorage)
for bucket in storage.itervalues():
for item in bucket:
items.append(item)
return items
if sys.maxint == 2147483647:
def get_item(self, w_dict, i):
storage = self.unerase(w_dict.hstorage)
for bucket in storage.itervalues():
size = len(bucket)
if size > i:
return bucket[i]
i -= size
raise IndexError
else:
@staticmethod
def _valid_bucket(v):
return bool(v[1])
def get_item(self, w_dict, i):
from pycket.hash.persistent_hash_map import MASK_32
storage = self.unerase(w_dict.hstorage)
assert i >= 0
i = r_uint(i)
index = i & MASK_32
subindex = (i >> 32) & MASK_32
bucket = get_dict_item(storage, index)[1]
if bucket is None:
raise IndexError
return bucket[subindex]
def hash_iterate_next(self, w_dict, pos):
from pycket.hash.persistent_hash_map import MASK_32
storage = self.unerase(w_dict.hstorage)
i = r_uint(pos.value)
assert i >= 0
index = r_uint(i & MASK_32)
subindex = r_uint((i >> 32) & MASK_32)
bucket = get_dict_item(storage, index)[1]
subindex += 1
if subindex == r_uint(len(bucket)):
subindex = r_uint(0)
try:
next = next_valid_index(storage, intmask(index),
valid=self._valid_bucket)
except IndexError:
return values.w_false
index = r_uint(next)
next = intmask((subindex << r_uint(32)) | index)
return values.wrap(next)
def hash_iterate_first(self, w_dict):
return next_valid_index(w_dict, 0, valid=self._valid_bucket)
def length(self, w_dict):
storage = self.unerase(w_dict.hstorage)
size = 0
for bucket in storage.itervalues():
size += len(bucket)
return size
def create_storage(self, keys, vals):
storage = {}
for i, key in enumerate(keys):
val = vals[i]
hash = tagged_hash(key)
bucket = storage.get(hash, None)
if bucket is None:
storage[hash] = bucket = []
bucket.append((key, val))
return self.erase(storage)
class FixnumHashmapStrategy(HashmapStrategy):
import_from_mixin(UnwrappedHashmapStrategyMixin)
erase, unerase = rerased.new_static_erasing_pair("fixnum-hashmap-strategy")
def is_correct_type(self, w_obj):
return isinstance(w_obj, values.W_Fixnum)
def wrap(self, val):
assert isinstance(val, int)
return values.W_Fixnum(val)
def unwrap(self, w_val):
assert isinstance(w_val, values.W_Fixnum)
return w_val.value
class SymbolHashmapStrategy(HashmapStrategy):
import_from_mixin(UnwrappedHashmapStrategyMixin)
erase, unerase = rerased.new_static_erasing_pair("symbol-hashmap-strategy")
def is_correct_type(self, w_obj):
return isinstance(w_obj, values.W_Symbol)
def wrap(self, val):
assert isinstance(val, values.W_Symbol)
return val
def unwrap(self, w_val):
assert isinstance(w_val, values.W_Symbol)
return w_val
def rem(self, w_dict, w_key, env, cont):
from pycket.interpreter import return_value
if not w_dict.immutable():
raise Exception("Expected an immutable hash table")
new_keys = []
new_vals = []
for (k, v) in w_dict.hash_items():
if k is w_key:
continue
new_keys.append(k)
new_vals.append(v)
assert isinstance(w_dict, W_EqualHashTable)
new_table = W_EqualHashTable(new_keys, new_vals, True)
return return_value(new_table, env, cont)
def hash_strings(w_b):
assert isinstance(w_b, values_string.W_String)
return w_b.hash_equal()
def cmp_strings(w_a, w_b):
assert isinstance(w_a, values_string.W_String)
assert isinstance(w_b, values_string.W_String)
return w_a.equal(w_b)
class StringHashmapStrategy(HashmapStrategy):
import_from_mixin(UnwrappedHashmapStrategyMixin)
erase, unerase = rerased.new_static_erasing_pair("string-hashmap-strategy")
def is_correct_type(self, w_obj):
return isinstance(w_obj, values_string.W_String)
def wrap(self, w_val):
return w_val
def unwrap(self, w_val):
return w_val
def _create_empty_dict(self):
return r_dict(cmp_strings, hash_strings)
def hash_mutable_bytes(w_b):
assert isinstance(w_b, values.W_MutableBytes)
return w_b.hash_equal()
def hash_immutable_bytes(w_b):
assert isinstance(w_b, values.W_ImmutableBytes)
return w_b.hash_equal()
def cmp_mutable_bytes(w_a, w_b):
assert isinstance(w_a, values.W_MutableBytes)
assert isinstance(w_b, values.W_MutableBytes)
return w_a.value == w_b.value
def cmp_immutable_bytes(w_a, w_b):
assert isinstance(w_a, values.W_ImmutableBytes)
assert isinstance(w_b, values.W_ImmutableBytes)
return w_a.value == w_b.value
class MutableByteHashmapStrategy(HashmapStrategy):
import_from_mixin(UnwrappedHashmapStrategyMixin)
erase, unerase = rerased.new_static_erasing_pair("byte-hashmap-strategy")
def is_correct_type(self, w_obj):
return isinstance(w_obj, values.W_MutableBytes)
def wrap(self, val):
return val
def unwrap(self, w_val):
assert isinstance(w_val, values.W_MutableBytes)
return w_val
def _create_empty_dict(self):
return r_dict(cmp_mutable_bytes, hash_mutable_bytes)
class ImmutableByteHashmapStrategy(HashmapStrategy):
import_from_mixin(UnwrappedHashmapStrategyMixin)
erase, unerase = rerased.new_static_erasing_pair("byte-hashmap-strategy")
def is_correct_type(self, w_obj):
return isinstance(w_obj, values.W_ImmutableBytes)
def wrap(self, val):
return val
def unwrap(self, w_val):
assert isinstance(w_val, values.W_ImmutableBytes)
return w_val
def _create_empty_dict(self):
return r_dict(cmp_immutable_bytes, hash_immutable_bytes)
class W_EqualHashTable(W_HashTable):
_attrs_ = ['strategy', 'hstorage', 'is_immutable']
_immutable_fields_ = ['is_immutable']
def __init__(self, keys, vals, immutable=False):
self.is_immutable = immutable
self.strategy = _find_strategy_class(keys)
self.hstorage = self.strategy.create_storage(keys, vals)
def immutable(self):
return self.is_immutable
def hash_items(self):
return self.strategy.items(self)
def _set(self, key, val):
return self.strategy._set(self, key, val)
def hash_set(self, key, val, env, cont):
return self.strategy.set(self, key, val, env, cont)
def hash_equal(self, info=None):
return self.length()
def hash_ref(self, key, env, cont):
return self.strategy.get(self, key, env, cont)
def hash_remove(self, key, env, cont):
return self.strategy.rem(self, key, env, cont)
def hash_remove_inplace(self, key, env, cont):
return self.strategy.rem_inplace(self, key, env, cont)
def get_item(self, i):
return self.strategy.get_item(self, i)
def hash_iterate_next(self, pos):
return self.strategy.hash_iterate_next(self, pos)
def hash_iterate_first(self):
return self.strategy.hash_iterate_first(self)
def length(self):
return self.strategy.length(self)
def make_empty(self):
return W_EqualHashTable([], [], immutable=self.is_immutable)
def tostring(self):
lst = [values.W_Cons.make(k, v).tostring() for k, v in self.hash_items()]
return "#hash(%s)" % " ".join(lst)
| mit | 2d6ccdf6292434a5e34c8e209e945d17 | 32.97227 | 100 | 0.624477 | 3.531256 | false | false | false | false |
onecodex/onecodex | tests/test_dataframes.py | 2 | 3650 | import pytest
pytest.importorskip("pandas") # noqa
import pandas as pd
from onecodex.analyses import AnalysisMixin
from onecodex.dataframes import ClassificationsDataFrame, ClassificationsSeries, OneCodexAccessor
def test_pandas_subclass():
inner_df = pd.DataFrame({"datum1": [7, 4, 21], "datum2": [8, 16, 24]})
ocx_data = {
"ocx_rank": "jedi",
"ocx_metric": "bantha poodoo",
"ocx_taxonomy": inner_df.copy(),
"ocx_metadata": inner_df.copy(),
}
df = ClassificationsDataFrame({"1279": [1, 2, 3], "1280": [4, 5, 6]}, **ocx_data)
# we want to be sure that slices of our df are returned as ResultsSeries
assert type(df["1279"]) is ClassificationsSeries
# We're mostly interested in whether our metadata is transferred between copies. Here we're just
# testing that the metadata carries over between slicing. There's another test below that tests
# other mathematical operations. There is currently a bug upstream in pandas where subclass
# metadata doesn't carry through certain operations, but does on others.
new_df = df[["1279"]]
# rank is explicitly /not/ passed on, since we don't know what the user has done to the df and
# we therefore can't trust the rank to be correct
assert new_df.ocx_rank == "jedi"
assert new_df.ocx_metric == "bantha poodoo"
assert (new_df.ocx_taxonomy == inner_df).all().all()
assert (new_df.ocx_metadata == inner_df).all().all()
def test_pandas_extension(ocx, api_data):
samples = ocx.Samples.where(project="4b53797444f846c4")
samples._collate_results(metric="readcount_w_children")
results = samples.to_df()
# extension should be in ocx namespace of ClassificationsDataFrame
assert getattr(results, "ocx", False)
assert isinstance(results.ocx, OneCodexAccessor)
assert type(results.ocx).__base__ == AnalysisMixin
# changes to contents of results df should affect contents of taxonomy df, by keeping only
# tax_ids in the results df and their parents
results = samples.to_df(top_n=2, rank="genus")
assert sorted(results.ocx.taxonomy.index.tolist(), key=int) == [
"1",
"2",
"191",
"815",
"816",
"976",
"1224",
"28211",
"41295",
"68336",
"131567",
"171549",
"200643",
"204441",
"1783270",
]
# Current, there's a bug in pandas where these types of operations don't carryover
# the metadata on the subclass.
# https://github.com/pandas-dev/pandas/issues/34177
@pytest.mark.xfail
def test_pandas_subclass_math():
inner_df = pd.DataFrame({"datum1": [7, 4, 21], "datum2": [8, 16, 24]})
ocx_data = {
"ocx_rank": "jedi",
"ocx_metric": "bantha poodoo",
"ocx_taxonomy": inner_df.copy(),
"ocx_metadata": inner_df.copy(),
}
df = ClassificationsDataFrame({"1279": [1, 2, 3], "1280": [4, 5, 6]}, **ocx_data)
# we're mostly interested in whether our metadata is transferred between copies. some operations
# split the df into series and concat back to df, so by doing all this stuff to it we're actually
# testing several consecutive copy operations using different parts of the pandas API
new_df = (((df * 10) / 2.2).round(5)) ** 0.5
# rank is explicitly /not/ passed on, since we don't know what the user has done to the df and
# we therefore can't trust the rank to be correct
assert new_df.ocx_rank == "jedi"
assert new_df.ocx_metric == "bantha poodoo"
assert (new_df.ocx_taxonomy == inner_df).all().all()
assert (new_df.ocx_metadata == inner_df).all().all()
| mit | 4b2f8518150c8aff4303e7aa88df5e40 | 36.628866 | 101 | 0.650137 | 3.456439 | false | true | false | false |
pycket/pycket | pycket/values_struct.py | 1 | 55181 | import itertools, sys
from pycket import config
from pycket import values
from pycket import vector as values_vector
from pycket.arity import Arity
from pycket.base import SingleResultMixin, UnhashableType
from pycket.cont import continuation, label
from pycket.error import SchemeException
from pycket.prims.expose import default, make_call_method
from pycket.small_list import inline_small_list
from pycket.util import strip_immutable_field_name
from pycket.values_parameter import W_Parameter
from rpython.rlib import jit
from rpython.rlib.objectmodel import import_from_mixin, not_rpython
from rpython.rlib.unroll import unrolling_iterable
w_prefab_symbol = values.W_Symbol.make("prefab")
class W_StructInspector(values.W_Object):
    """Struct inspector: gates access to the internals of opaque structs.

    Inspectors form a chain through ``w_super``; ``has_control`` decides
    whether this inspector may look inside a given struct type.
    """
    errorname = "struct-inspector"
    _immutable_fields_ = ["w_super"]
    _attrs_ = ["w_super"]
    @staticmethod
    def make(w_inspector, issibling = False):
        # Create a child of w_inspector, or -- when issibling is true -- a
        # sibling, i.e. a child of w_inspector's own parent.
        assert isinstance(w_inspector, W_StructInspector)
        w_super = w_inspector
        if issibling:
            w_super = w_inspector.w_super if w_inspector is not None else None
        return W_StructInspector(w_super)
    def __init__(self, w_super):
        self.w_super = w_super
    @jit.elidable
    def has_control(self, struct_type):
        # True iff this inspector controls struct_type: either the type has
        # no real inspector, or self is a strict ancestor of the type's own
        # inspector in the w_super chain.
        w_inspector = struct_type.w_inspector
        if not isinstance(w_inspector, W_StructInspector):
            return True
        else:
            w_inspector = w_inspector.w_super
            while isinstance(w_inspector, W_StructInspector):
                if w_inspector is self:
                    return True
                w_inspector = w_inspector.w_super
        return False
# The root inspector, and the parameter object backing Racket's
# `current-inspector`; new struct types capture the current inspector.
current_inspector = W_StructInspector(None)
current_inspector_param = W_Parameter(current_inspector)
class W_StructType(values.W_Object):
    """Struct type descriptor: the first result of ``make-struct-type``.

    Stores the field layout (per-type init/auto counts plus totals over the
    super chain, and per-super-type storage offsets), the attached
    struct-type properties, the guard procedure, and the generated
    constructor/predicate/accessor/mutator objects.  Prefab types are
    interned in ``unbound_prefab_types`` keyed by W_PrefabKey.
    """
    errorname = "struct-type-descriptor"
    _immutable_fields_ = [
        "name", "constructor_name", "w_super",
        "init_field_count", "auto_field_count", "total_field_count",
        "total_auto_field_count", "total_init_field_count",
        "w_auto_value", "properties", "w_inspector", "immutables[*]",
        "immutable_fields[*]", "w_guard", "auto_values_w[*]", "offsets[*]",
        "constructor", "predicate", "accessor", "mutator", "prop_procedure",
        "constructor_arity", "procedure_source", "isprefab", "isopaque",
        "prop_sealed"]
    _attrs_ = map(strip_immutable_field_name, _immutable_fields_)
    unbound_prefab_types = {}
    @jit.unroll_safe
    def __init__(self, w_name, w_super_type, init_field_count, auto_field_count,
                 w_auto_value, w_inspector, w_proc_spec, immutables, w_guard,
                 w_constructor_name):
        self.name = w_name
        self.constructor_name = w_constructor_name
        self.w_super = w_super_type
        self.init_field_count = init_field_count
        self.total_init_field_count = init_field_count
        self.auto_field_count = auto_field_count
        self.total_auto_field_count = auto_field_count
        self.total_field_count = init_field_count + auto_field_count
        # Fold the super type's field counts into the totals.
        if isinstance(w_super_type, W_StructType):
            self.total_field_count += w_super_type.total_field_count
            self.total_init_field_count += w_super_type.total_init_field_count
            self.total_auto_field_count += w_super_type.total_auto_field_count
        self.w_auto_value = w_auto_value
        self.properties = []
        self.prop_procedure = None
        self.prop_sealed = False
        self.procedure_source = None
        self.w_inspector = w_inspector
        # A fixnum proc-spec names the field holding the struct's procedure;
        # that field is implicitly immutable.
        if isinstance(w_proc_spec, values.W_Fixnum):
            immutables = [w_proc_spec.value] + immutables
        self.immutables = immutables
        self.w_guard = w_guard
        self.auto_values_w = [self.w_auto_value] * self.auto_field_count
        self.setup_prefab()
        self._calculate_offsets()
        self._generate_methods()
    def setup_prefab(self):
        # Prefab types are never opaque; otherwise opacity follows from
        # having a (non-#f) inspector.
        self.isprefab = self.w_inspector is w_prefab_symbol
        if self.isprefab:
            self.isopaque = False
        else:
            self.isopaque = self.w_inspector is not values.w_false
    @jit.unroll_safe
    def _generate_methods(self):
        """ Generate constructor, predicate, mutator, and accessor """
        count = self.total_init_field_count
        self.constructor_arity = Arity([count], -1)
        self.constructor = W_StructConstructor(self)
        self.predicate = W_StructPredicate(self)
        self.accessor = W_StructAccessor(self)
        self.mutator = W_StructMutator(self)
    @jit.unroll_safe
    def _calculate_offsets(self):
        # For every type in the super chain, record the index at which its
        # own fields start in the flattened storage, and collect the
        # absolute indices of all immutable fields.
        offsets = []
        immutable_fields = [] # absolute indices
        w_struct_type = self
        while isinstance(w_struct_type, W_StructType):
            offset = (w_struct_type.total_field_count -
                      w_struct_type.init_field_count -
                      w_struct_type.auto_field_count)
            offsets.append((w_struct_type, offset))
            for immutable_field in w_struct_type.immutables:
                immutable_fields.append(immutable_field + offset)
            w_struct_type = w_struct_type.w_super
        self.offsets = offsets[:]
        self.immutable_fields = immutable_fields[:]
    @staticmethod
    def make(w_name, w_super_type, init_field_count, auto_field_count,
             w_auto_value=values.w_false, w_properties=values.w_null,
             w_inspector=values.w_false, w_proc_spec=values.w_false,
             immutables=[], w_guard=values.w_false,
             w_constructor_name=values.w_false, env=None, cont=None):
        """
        This method returns five instances:
            W_StructType
            W_StructConstructor
            W_StructPredicate
            W_StructAccessor
            W_StructMutator
        """
        w_struct_type = W_StructType.make_simple(
            w_name=w_name,
            w_super_type=w_super_type,
            init_field_count=init_field_count,
            auto_field_count=auto_field_count,
            w_auto_value=w_auto_value,
            w_inspector=w_inspector,
            w_proc_spec=w_proc_spec,
            immutables=immutables,
            w_guard=w_guard,
            w_constructor_name=w_constructor_name)
        return w_struct_type.initialize_properties(w_properties, w_proc_spec, env, cont)
    @staticmethod
    @jit.elidable
    def make_prefab(prefab_key):
        # Intern prefab types: one W_StructType per distinct prefab key.
        if prefab_key in W_StructType.unbound_prefab_types:
            return W_StructType.unbound_prefab_types[prefab_key]
        if prefab_key.super_key:
            w_super_type = W_StructType.make_prefab(prefab_key.super_key)
        else:
            w_super_type = values.w_false
        # Every field not listed as mutable in the key is immutable.
        immutables = [i for i in range(prefab_key.init_field_count) \
                      if i not in prefab_key.mutables]
        w_struct_type = W_StructType(
            w_name=prefab_key.w_name,
            w_super_type=w_super_type,
            init_field_count=prefab_key.init_field_count,
            auto_field_count=prefab_key.auto_field_count,
            w_auto_value=prefab_key.w_auto_value,
            w_inspector=w_prefab_symbol,
            w_proc_spec=values.w_false,
            immutables=immutables,
            w_guard=values.w_false,
            w_constructor_name=values.w_false)
        W_StructType.unbound_prefab_types[prefab_key] = w_struct_type
        return w_struct_type
    @staticmethod
    def make_simple(w_name, w_super_type, init_field_count,
                    auto_field_count, w_auto_value=values.w_false,
                    w_inspector=values.w_false,
                    w_proc_spec=values.w_false, immutables=[],
                    w_guard=values.w_false, w_constructor_name=values.w_false):
        """
        This method returns an instance of W_StructType only.
        It does not support properties.
        """
        w_struct_type = W_StructType(
            w_name=w_name,
            w_super_type=w_super_type,
            init_field_count=init_field_count,
            auto_field_count=auto_field_count,
            w_auto_value=w_auto_value,
            w_inspector=w_inspector,
            w_proc_spec=w_proc_spec,
            immutables=immutables,
            w_guard=w_guard,
            w_constructor_name=w_constructor_name)
        # Keep prefab types interned even on this code path.
        if w_inspector is w_prefab_symbol:
            prefab_key = W_PrefabKey.from_raw_params(w_name, init_field_count,
                auto_field_count, w_auto_value, immutables, w_super_type)
            if prefab_key in W_StructType.unbound_prefab_types:
                return W_StructType.unbound_prefab_types[prefab_key]
            W_StructType.unbound_prefab_types[prefab_key] = w_struct_type
        return w_struct_type
    @continuation
    def save_property_value(self, properties, idx, is_checked, env, cont, _vals):
        # Continuation: store the (possibly guarded/derived) property value
        # back into the working list and resume attach_property at idx.
        from pycket.interpreter import check_one_val
        property = properties[idx][0]
        property_val = check_one_val(_vals)
        properties[idx] = (property, property_val, None)
        return self.attach_property(properties, idx, is_checked, env, cont)
    @label
    def attach_property(self, properties, idx, is_checked, env, cont):
        # CPS loop over the flattened (property, value, sub_property)
        # triples: derive sub-property values, run guards, record the
        # procedure/sealed markers, then inherit properties from supers.
        from pycket.interpreter import return_multi_vals
        if idx < len(properties):
            (property, property_val, sub_property) = properties[idx]
            if sub_property is not None:
                for p in properties:
                    if p[0] is sub_property:
                        return property_val.call([p[1]], env,
                            self.save_property_value(properties, idx, False, env, cont))
            assert isinstance(property, W_StructProperty)
            if not is_checked and property.w_guard.iscallable():
                return property.w_guard.call([property_val, values.to_list(self.struct_type_info(cont))],
                    env, self.save_property_value(properties, idx, True, env, cont))
            if property.isinstance(w_prop_procedure):
                self.prop_procedure = property_val
            if property.isinstance(w_prop_sealed):
                # The value associated with the property is ignored; the
                # presence of the property itself makes the structure
                # type sealed.
                self.prop_sealed = True
            self.properties.append((property, property_val))
            return self.attach_property(properties, idx + 1, False, env, cont)
        # at this point all properties are saved, next step is to copy
        # properties from super types
        w_struct_type = self.w_super
        while isinstance(w_struct_type, W_StructType):
            self.properties = self.properties + w_struct_type.properties
            if not self.prop_procedure and w_struct_type.prop_procedure:
                self.prop_procedure = w_struct_type.prop_procedure
                self.procedure_source = w_struct_type.procedure_source
            w_struct_type = w_struct_type.w_super
        struct_tuple = [self, self.constructor, self.predicate, self.accessor, self.mutator]
        return return_multi_vals(values.Values.make(struct_tuple), env, cont)
    @jit.unroll_safe
    def initialize_property(self, properties, p, sub_property=None):
        # Flatten one (property . value) pair -- plus, recursively, its
        # super-properties -- into the working `properties` list.
        property = p.car()
        property_val = p.cdr()
        if sub_property is None:
            if property.isinstance(w_prop_procedure):
                if self.prop_procedure is not None and\
                    self.prop_procedure is not property_val:
                    raise SchemeException(
                        "make-struct-type: duplicate property binding\nproperty: %s" %
                        property.tostring())
                self.prop_procedure = property_val
                self.procedure_source = self
            elif property.isinstance(w_prop_checked_procedure):
                if self.total_field_count < 2:
                    raise SchemeException("need at least two fields in the structure type")
        properties.append((property, property_val, sub_property))
        assert isinstance(property, W_StructProperty)
        for super_p in property.supers:
            self.initialize_property(properties, super_p, property)
    @jit.unroll_safe
    def initialize_properties(self, properties, proc_spec, env, cont):
        """
        Properties initialization contains few steps:
            1. call initialize_property for each property from the input list,
               it extracts all super values and stores them into properties array
               with a flat structure
            2. recursively call attach_property for each property from properties and
               prepare the value:
               * if the current property has a subproperty, the value is the result
                 of calling value procedure with a sub value as an argument
               * if the current property has a guard, the value is the result of
                 calling guard with a value and struct type info as arguments
               * in other case, just keep the current value
        """
        proplist = values.from_list(properties)
        properties = []
        for p in proplist:
            self.initialize_property(properties, p)
        if proc_spec is not values.w_false:
            self.initialize_property(properties, values.wrap(w_prop_procedure, proc_spec))
        return self.attach_property(properties, 0, False, env, cont)
    @jit.elidable
    def get_offset(self, type):
        # Return the storage offset of `type`'s fields within structs of
        # this type, or -1 when `type` is not in the super chain.  Prefab
        # types are compared structurally since equal prefabs may not be
        # the same object.
        for t, v in self.offsets:
            if t is type:
                return v
            elif t.isprefab and type.isprefab:
                ## They might not be the same object but they
                ## might be the same prefab struct, so
                ## check if their fields are the same
                if (t.name is type.name and
                    t.init_field_count is type.init_field_count and
                    t.total_init_field_count is type.total_init_field_count and
                    t.auto_field_count is type.auto_field_count and
                    t.total_auto_field_count is type.total_auto_field_count and
                    t.total_field_count is type.total_field_count and
                    t.w_auto_value is type.w_auto_value and
                    t.immutables == type.immutables and
                    t.auto_values_w == type.auto_values_w and
                    t.isopaque is type.isopaque and
                    t.immutable_fields == type.immutable_fields and
                    t.constructor_arity.get_arity_list() == type.constructor_arity.get_arity_list()
                    #t.constructor_name is type.constructor_name and
                    #t.super is type.super and
                    #t.props == type.props and
                    #t.prop_procedure is type.prop_procedure and
                    #t.procedure_source is type.procedure_source and
                    #t.inspector is type.inspector and
                    #t.guard is type.guard and
                    #t.isprefab is type.isprefab and
                    #t.offsets is type.offsets and
                    ):
                    return v
        return -1
    @jit.elidable
    def is_immutable_field_index(self, i):
        # `i` is an absolute index into the flattened field storage.
        return i in self.immutable_fields
    def all_fields_immutable(self):
        self = jit.promote(self)
        return self.total_field_count == len(self.immutable_fields)
    def struct_type_info(self, cont):
        # Build the struct-type-info list; the reported super type is the
        # nearest super the current inspector has control over.
        w_name = self.name
        w_init_field_count = values.wrap(self.init_field_count)
        w_auto_field_count = values.wrap(self.auto_field_count)
        w_immutable_k_list = values.wrap_list(self.immutables)
        current_inspector = current_inspector_param.get(cont)
        w_super = values.w_false
        w_struct_type = self.w_super
        while isinstance(w_struct_type, W_StructType):
            if current_inspector.has_control(w_struct_type):
                w_super = w_struct_type
            w_struct_type = w_struct_type.w_super
        # skipped? = a super exists but is hidden from the current inspector
        w_skipped = values.W_Bool.make(w_super is values.w_false and
                                       isinstance(self.w_super, W_StructType))
        return [w_name, w_init_field_count, w_auto_field_count, self.accessor,
                self.mutator, w_immutable_k_list, w_super, w_skipped]
    def make_struct_tuple(self):
        return [self, self.constructor, self.predicate, self.accessor, self.mutator]
    @jit.elidable_promote('all')
    def read_property_precise(self, property):
        # Lookup by identity of the property object itself.
        for p, val in self.properties:
            if p is property:
                return val
        return None
    @jit.elidable_promote('all')
    def read_property(self, property):
        # Lookup including property inheritance (p.isinstance walks supers).
        for p, val in self.properties:
            if p.isinstance(property):
                return val
        return None
    @jit.elidable
    def all_opaque(self):
        # True iff this type and every super type is opaque.
        if not self.isopaque:
            return False
        elif isinstance(self.w_super, W_StructType):
            return self.w_super.all_opaque()
        return True
    @jit.elidable
    def is_transparent(self):
        # True iff no type in the chain has an inspector.  Note: `self` is
        # deliberately rebound to walk up the super chain.
        while self is not None and self is not values.w_false:
            if self.get_inspector() is not values.w_false:
                return False
            self = self.get_super()
        return True
    def get_inspector(self):
        return self.w_inspector
    def get_super(self):
        return self.w_super
    @jit.elidable
    def has_subtype(self, type):
        # True iff `type` is this type or a (transitive) subtype of it.
        while isinstance(type, W_StructType):
            if type is self:
                return True
            type = type.w_super
        return False
    def hash_value(self):
        # NOTE(review): intentionally returns None -- confirm this matches
        # the W_Object.hash_value contract for struct type descriptors.
        pass
    def tostring(self):
        return "#<struct-type:%s>" % self.name.variable_name()
class W_PrefabKey(values.W_Object):
    """Interned key identifying a prefab struct type.

    A key is (name, init field count, auto field count, auto value,
    mutable-field indices, super type's key); ``make`` interns keys so
    structurally-equal keys are the same object.
    """
    _attrs_ = _immutable_fields_ = ["w_name", "init_field_count", "auto_field_count",
        "w_auto_value", "mutables", "super_key"]
    all_keys = []
    @staticmethod
    @jit.elidable
    def make(w_name, init_field_count, auto_field_count, w_auto_value, mutables, super_key):
        # Linear-scan interning over all keys created so far.
        assert isinstance(w_name, values.W_Symbol)
        for key in W_PrefabKey.all_keys:
            if key.equal_tuple((w_name, init_field_count, auto_field_count,
                                w_auto_value, mutables, super_key)):
                return key
        key = W_PrefabKey(w_name, init_field_count, auto_field_count,
                          w_auto_value, mutables, super_key)
        W_PrefabKey.all_keys.append(key)
        return key
    @staticmethod
    def from_struct_type(struct_type):
        # Mutable indices are the gaps between the sorted immutable indices.
        # NOTE(review): prev_idx starts at 1, so field 0 is never recorded
        # as mutable -- confirm that this is intentional.
        assert isinstance(struct_type, W_StructType)
        w_name = struct_type.name
        init_field_count = struct_type.init_field_count
        auto_field_count = struct_type.auto_field_count
        w_auto_value = struct_type.w_auto_value
        super_key = None
        mutables = []
        prev_idx = 1
        for i in struct_type.immutables:
            for j in range(prev_idx, i):
                mutables.append(j)
            prev_idx = i + 1
        if struct_type.w_super is not values.w_false:
            super_key = W_PrefabKey.from_struct_type(struct_type.w_super)
        return W_PrefabKey.make(w_name, init_field_count, auto_field_count,
                                w_auto_value, mutables, super_key)
    @staticmethod
    def from_raw_params(w_name, init_field_count, auto_field_count,
                        w_auto_value, immutables, super_type):
        # Same gap-based mutables computation as from_struct_type, but from
        # raw make-struct-type parameters.
        mutables = []
        super_key = None
        prev_idx = 1
        for i in immutables:
            for j in range(prev_idx, i):
                mutables.append(j)
            prev_idx = i + 1
        if super_type is not values.w_false:
            super_key = W_PrefabKey.from_struct_type(super_type)
        return W_PrefabKey.make(w_name, init_field_count, auto_field_count,
                                w_auto_value, mutables, super_key)
    @staticmethod
    @jit.unroll_safe
    def parse_key(w_key, total_count, is_super):
        # Parse a raw prefab key (symbol, or list of: name, optional init
        # count, optional (auto-count auto-value), optional mutable-index
        # vector, optional embedded super key).
        init_count = -1
        auto_count = 0
        w_auto_value = values.w_false
        super_key = None
        mutables = []
        w_name = None
        name_seen = init_seen = auto_seen = mutable_seen = False
        if isinstance(w_key, values.W_Symbol):
            # A bare symbol: all fields are init fields.
            w_name = w_key
            init_count = total_count
            w_key = values.w_null
            name_seen = True
        while w_key is not values.w_null:
            w_val, w_rest = w_key.to_tuple()
            if isinstance(w_val, values.W_Symbol):
                if name_seen:
                    # super key
                    if init_seen:
                        super_total = total_count - init_count
                    else:
                        super_total = total_count
                    super_key = W_PrefabKey.from_raw_key(
                        w_key, super_total, True)
                    # there's nothing after a properly parsed super key
                    w_rest = values.w_null
                else:
                    # prefab name
                    assert not (init_seen or auto_seen or mutable_seen)
                    w_name = w_val
                    name_seen = True
            elif isinstance(w_val, values.W_Fixnum):
                # init field count
                assert name_seen and not init_seen
                init_count = w_val.value
                init_seen = True
            elif isinstance(w_val, values.W_List):
                # auto fields
                assert name_seen and not auto_seen
                if is_super: assert init_seen
                w_auto_count, w_val = w_val.to_tuple()
                w_auto_value, w_val = w_val.to_tuple()
                assert isinstance(w_auto_count, values.W_Fixnum)
                auto_count = w_auto_count.value
                auto_seen = True
            elif isinstance(w_val, values_vector.W_Vector):
                # mutable fields
                assert name_seen and not mutable_seen
                if is_super: assert init_seen
                for i in range(w_val.len):
                    mutable = w_val.ref(i)
                    assert isinstance(mutable, values.W_Fixnum)
                    mutables.append(mutable.value)
                mutable_seen = True
            w_key = w_rest
        assert name_seen
        if is_super: assert init_seen
        return (w_name, init_count, auto_count,
                w_auto_value, super_key, mutables)
    @staticmethod
    @jit.elidable
    def from_raw_key(w_key, total_count=0, is_super=False):
        # Build an interned key from a raw key; an unspecified init count
        # is inferred from total_count minus auto and super fields.
        w_name, \
        init_count, \
        auto_count, \
        w_auto_value, \
        super_key, \
        mutables = W_PrefabKey.parse_key(w_key, total_count, is_super)
        if init_count == -1:
            init_count = total_count - auto_count
            s_key = super_key
            while s_key:
                init_count -= (s_key.init_field_count + s_key.auto_field_count)
                # TODO: originally this on shapes, but master has just that one:
                # init_count -= s_key.init_field_count
                s_key = s_key.super_key
        return W_PrefabKey.make(w_name, init_count, auto_count,
                                w_auto_value, mutables, super_key)
    @staticmethod
    def is_prefab_key(v):
        # Structural validity check for raw prefab keys (symbol or list).
        # NOTE(review): `key[idx]` with idx == 1 assumes a cons key has at
        # least two elements -- a one-element list would raise IndexError;
        # confirm callers never pass such a key.
        if isinstance(v, values.W_Symbol):
            return values.w_true
        elif isinstance(v, values.W_Cons):
            key = values.from_list(v)
            if not isinstance(key[0], values.W_Symbol):
                return values.w_false
            idx = 1
            if isinstance(key[idx], values.W_Fixnum):
                idx += 1
            if len(key) > idx:
                if isinstance(key[idx], values.W_Cons):
                    idx += 1
            if len(key) > idx:
                if isinstance(key[idx], values_vector.W_Vector):
                    idx += 1
            if len(key) > idx:
                w_super_key = values.to_list(key[idx:])
                return W_PrefabKey.is_prefab_key(w_super_key)
            return values.W_Bool.make(len(key) == idx)
        else:
            return values.w_false
    def __init__(self, w_name, init_field_count, auto_field_count,
                 w_auto_value, mutables, super_key):
        self.w_name = w_name
        self.init_field_count = init_field_count
        self.auto_field_count = auto_field_count
        self.w_auto_value = w_auto_value
        self.mutables = mutables
        self.super_key = super_key
    def equal(self, other):
        if isinstance(other, W_PrefabKey):
            return self.make_key_tuple() == other.make_key_tuple()
        return False
    def equal_tuple(self, other):
        return self.make_key_tuple() == other
    def key(self):
        # Serialize back into the list-of-elements raw key form.
        key = []
        key.append(self.w_name)
        key.append(values.wrap(self.init_field_count))
        if self.auto_field_count > 0:
            lst = values.wrap(self.w_auto_value, values.w_null)
            lst = values.wrap(self.auto_field_count, lst)
            key.append(lst)
        if self.mutables:
            vector = values_vector.wrap_vector(self.mutables)
            key.append(vector)
        if self.super_key:
            key.extend(self.super_key.key())
        return key
    def short_key(self):
        # The key without the init-count element; collapses to the bare
        # name symbol when nothing else remains.
        key = self.key()
        short_key = key[:1] + key[2:]
        return values.to_list(short_key) if len(short_key) > 1 else key[0]
    def make_key_tuple(self):
        return self.w_name, self.init_field_count, self.auto_field_count,\
            self.w_auto_value, self.mutables, self.super_key
class W_RootStruct(values.W_Object):
    """Abstract base class for structure instances.

    Implements the callable-struct protocol (prop:procedure dispatch with
    arity checking) and equal?-hashing on top of the primitives that
    subclasses must provide: ``ref``/``set``/``struct_type``.
    """
    errorname = "root-struct"
    _attrs_ = []
    _settled_ = True
    def __init__(self):
        raise NotImplementedError("abstract base class")
    def iscallable(self):
        # A struct is applicable iff its type carries prop:procedure.
        return self.struct_type().prop_procedure is not None
    @continuation
    def arity_error_cont(self, env, cont, _vals):
        # Continuation: raise using the prop:arity-string-produced message.
        from pycket.interpreter import check_one_val
        msg = check_one_val(_vals)
        raise SchemeException("expected: " + msg.tostring())
    @continuation
    def receive_proc_cont(self, args, app, env, cont, _vals):
        # Continuation: the field holding the procedure has been read;
        # perform the (arity-checked) call.
        from pycket.interpreter import check_one_val
        proc = check_one_val(_vals)
        return self.checked_call(proc, args, env, cont, app)
    def checked_call(self, proc, args, env, cont, app):
        # On arity mismatch, prefer prop:arity-string for the error message
        # (falling through to proc's own error otherwise).
        args_len = len(args)
        arity = proc.get_arity(promote=True)
        if not arity.arity_includes(args_len):
            w_property_val = self.struct_type().read_property(w_prop_arity_string)
            if w_property_val:
                return w_property_val.call_with_extra_info([self], env,
                    self.arity_error_cont(env, cont), app)
        return proc.call_with_extra_info(args, env, cont, app)
    def call_with_extra_info(self, args, env, cont, app):
        # prop:procedure is either a field index (fixnum) naming the field
        # that holds the procedure, or a procedure that receives `self`
        # prepended to the arguments.
        type = self.struct_type()
        proc = type.prop_procedure
        if isinstance(proc, values.W_Fixnum):
            return type.procedure_source.accessor.access(self, proc.value,
                env, self.receive_proc_cont(args, app, env, cont), app)
        args = [self] + args
        return self.checked_call(proc, args, env, cont, app)
    # For all subclasses, it should be sufficient to implement ref, set, and
    # struct_type for call and iscallable to work properly.
    @label
    @make_call_method(simple=False)
    def call(self, args, env, cont):
        return self.call_with_extra_info(args, env, cont, None)
    def get_arity(self, promote=False):
        raise NotImplementedError("abstract base class")
    def struct_type(self):
        raise NotImplementedError("abstract base class")
    def ref(self, field, env, cont):
        return self.ref_with_extra_info(field, None, env, cont)
    def ref_with_extra_info(self, field, app, env, cont):
        raise NotImplementedError("abstract base class")
    def set(self, field, val, env, cont):
        return self.set_with_extra_info(field, val, None, env, cont)
    def set_with_extra_info(self, field, val, app, env, cont):
        raise NotImplementedError("abstract base class")
    # unsafe versions
    def _ref(self, k):
        raise NotImplementedError("abstract base class")
    def _set(self, k, val):
        raise NotImplementedError("abstract base class")
    @label
    def get_prop(self, property, env, cont):
        raise NotImplementedError("abstract base class")
    def get_struct_info(self, env, cont):
        raise NotImplementedError("abstract base class")
    def vals(self):
        raise NotImplementedError("abstract base class")
    def get_hash_proc(self, prop):
        # prop:equal+hash values appear either as a list or a vector; the
        # hash procedure is the second element in both representations.
        if isinstance(prop, values.W_Cons):
            return prop.cdr().car()
        elif isinstance(prop, values_vector.W_Vector):
            return prop.ref(1)
        else:
            raise SchemeException("unexpected property value for prop:equal+hash: %s"%prop.tostring())
    def hash_equal(self, info=None):
        # equal?-hash: honor prop:equal+hash when present; otherwise
        # transparent structs hash structurally over name + fields, and
        # opaque ones fall back to the default (identity-like) hash.
        struct_type = self.struct_type()
        prop_equal_hash = struct_type.read_property(w_prop_equal_hash)
        if not struct_type.is_transparent():
            # if not transparent, eqv?
            if prop_equal_hash:
                from pycket.prims.hash import equal_hash_code
                w_hash_proc = self.get_hash_proc(prop_equal_hash)
                w_hash_proc_recur = equal_hash_code.w_prim # equal-hash-code to recur
                h = w_hash_proc.call_interpret([self, w_hash_proc_recur])
                assert isinstance(h, values.W_Fixnum)
                return h.value
            return values.W_Object.hash_equal(self, info)
        else:
            if not prop_equal_hash:
                # if transparent, equal?
                size = self._get_size_list()
                struct_name = struct_type.name
                total_hash_val = struct_name.hash_equal()
                for n in range(0, size):
                    try:
                        field_hash = self._get_list(n).hash_equal()
                        total_hash_val = int((field_hash*total_hash_val)%sys.maxint)
                    except UnhashableType:
                        # unhashable fields simply don't contribute
                        continue
                return total_hash_val
            else:
                from pycket.prims.hash import equal_hash_code
                w_hash_proc = prop_equal_hash.cdr().car()
                w_hash_proc_recur = equal_hash_code.w_prim
                h = w_hash_proc.call_interpret([self, w_hash_proc_recur])
                assert isinstance(h, values.W_Fixnum)
                return h.value
@inline_small_list(immutable=True, attrname="storage", unbox_num=True)
class W_Struct(W_RootStruct):
    """Concrete struct instance.

    Fields live in the inline small list ``storage``; immutable fields are
    stored directly, mutable ones are boxed in a W_Cell so the storage
    itself can stay immutable.
    """
    errorname = "struct"
    _attrs_ = _immutable_fields_ = ["_type"]
    @staticmethod
    @jit.unroll_safe
    def make_prefab(w_key, w_values):
        # Instantiate a prefab struct: box mutable fields, and pick a
        # storage-reduced class when immutable #f fields can be elided.
        w_struct_type = W_StructType.make_prefab(
            W_PrefabKey.from_raw_key(w_key, len(w_values)))
        constant_false = []
        for i, value in enumerate(w_values):
            if not w_struct_type.is_immutable_field_index(i):
                w_values[i] = values.W_Cell(value)
            elif value is values.w_false:
                constant_false.append(i)
        cls = lookup_struct_class(constant_false)
        if cls is not W_Struct:
            w_values = reduce_field_values(w_values, constant_false)
        return cls.make(w_values, w_struct_type)
    def __init__(self, type):
        self._type = type
    def struct_type(self):
        return jit.promote(self._type)
    @jit.unroll_safe
    def vals(self):
        # All field values, unboxed (cells dereferenced by _ref).
        size = self._get_size_list()
        values = [None] * size
        for i in range(size):
            values[i] = self._ref(i)
        return values
    # Rather than reference functions, we store the continuations. This is
    # necessary to get constant stack usage without adding extra preamble
    # continuations.
    def ref_with_extra_info(self, field, app, env, cont):
        from pycket.interpreter import return_value
        value = self._ref(field)
        return return_value(value, env, cont)
    def set_with_extra_info(self, field, val, app, env, cont):
        from pycket.interpreter import return_value
        w_cell = self._set(field, val)
        return return_value(values.w_void, env, cont)
    # unsafe versions
    def _ref(self, i):
        # Mutable fields are stored boxed; unwrap the cell.
        w_res = self._get_list(i)
        immutable = self.struct_type().is_immutable_field_index(i)
        if not immutable:
            assert isinstance(w_res, values.W_Cell)
            w_res = w_res.get_val()
        return w_res
    def _set(self, k, val):
        # Only mutable (cell-boxed) fields may be written.
        w_cell = self._get_list(k)
        assert isinstance(w_cell, values.W_Cell)
        w_cell.set_val(val)
    # We provide a method to get properties from a struct rather than a struct_type,
    # since impersonators can override struct properties.
    def get_prop(self, property, env, cont):
        from pycket.interpreter import return_value
        val = self.struct_type().read_property_precise(property)
        if val is not None:
            return return_value(val, env, cont)
        raise SchemeException("%s-accessor: expected %s? but got %s" %
            (property.name, property.name, self.tostring()))
    def get_arity(self, promote=False):
        # XXX: --> struct type?
        # Arity of an applicable struct: either the arity of the procedure
        # stored in the prop:procedure field, or the procedure's arity
        # shifted by one for the implicit `self` argument.
        if self.iscallable():
            typ = self.struct_type()
            proc = typ.prop_procedure
            if isinstance(proc, values.W_Fixnum):
                offset = typ.get_offset(typ.procedure_source)
                proc = self._ref(proc.value + offset)
                return proc.get_arity(promote)
            else:
                # -1 for the self argument
                arity = proc.get_arity(promote)
                return arity.shift_arity(-1)
        else:
            raise SchemeException("%s does not have arity" % self.tostring())
    def get_struct_info(self, env, cont):
        from pycket.interpreter import return_multi_vals
        vals = values.Values._make2(self.struct_type(), values.w_false)
        return return_multi_vals(vals, env, cont)
    # # TODO: currently unused
    # def tostring_proc(self, env, cont):
    #     w_val = self.struct_type().read_property(w_prop_custom_write)
    #     if w_val is not None:
    #         assert isinstance(w_val, values_vector.W_Vector)
    #         w_write_proc = w_val.ref(0)
    #         port = values.W_StringOutputPort()
    #         # TODO: #t for write mode, #f for display mode,
    #         # or 0 or 1 indicating the current quoting depth for print mode
    #         mode = values.w_false
    #         return w_write_proc.call([self, port, mode], env, cont)
    #     return self.tostring()
    def tostring_prefab(self):
        # e.g. #s(point 1 2)
        prefab_key = W_PrefabKey.from_struct_type(self.struct_type())
        key_and_values_all_str = [prefab_key.short_key().tostring()] + [val.tostring() for val in self.vals()]
        return ("#s(%s)" % (' '.join(key_and_values_all_str)))
    def write_prefab(self, port, env):
        # NOTE(review): writes "#s( " with a space after the paren, while
        # tostring_prefab produces "#s(" -- confirm whether the extra space
        # here is intentional.
        from pycket.prims.input_output import write_loop
        prefab_key = W_PrefabKey.from_struct_type(self.struct_type())
        port.write("#s( ")
        sk = prefab_key.short_key().tostring()
        port.write(sk)
        for val in self.vals():
            port.write(" ")
            write_loop(val, port, env)
        port.write(")")
    @jit.unroll_safe
    def tostring_values(self, fields, w_type, is_super=False):
        " fill fields with tostring() version of field if applicable "
        assert isinstance(w_type, W_StructType)
        w_super = w_type.w_super
        has_super = isinstance(w_super, W_StructType)
        if has_super:
            self.tostring_values(fields=fields,w_type=w_super,is_super=True)
        offset = self.struct_type().get_offset(w_type)
        count = w_type.total_field_count
        if has_super:
            count -= w_super.total_field_count
        assert len(fields) >= count + offset
        # fields of an opaque (sub)type are collapsed into a single "..."
        if w_type.isopaque and offset < len(fields):
            fields[offset] = "..."
        else:
            for i in range(offset, offset + count):
                fields[i] = self._ref(i).tostring()
    @jit.unroll_safe
    def _string_from_list(self, l):
        return ' '.join([s for s in l if s is not None])
    def write_values(self, port, w_type, env):
        # Recursively write super-type fields first, then this type's own.
        from pycket.prims.input_output import write_loop
        assert isinstance(w_type, W_StructType)
        w_super = w_type.w_super
        has_super = isinstance(w_super, W_StructType)
        if has_super:
            self.write_values(port, w_super, env)
        offset = self.struct_type().get_offset(w_type)
        count = w_type.total_field_count
        if has_super:
            count -= w_super.total_field_count
        for i in range(offset, offset + count):
            write_loop(self._ref(i), port, env)
            port.write(" ")
    def write(self, port, env):
        # write-mode printing: prefab form, opaque form, prop:custom-write
        # hook, or the default "(name field ...)" form.
        w_type = self.struct_type()
        typename = w_type.name.utf8value
        if w_type.isprefab:
            self.write_prefab(port, env)
        elif w_type.all_opaque():
            port.write("#<%s>" % typename)
        else:
            w_val = w_type.read_property(w_prop_custom_write)
            if w_val is not None:
                assert isinstance(w_val, values_vector.W_Vector)
                w_write_proc = w_val.ref(0)
                # #t for write mode, #f for display mode,
                # or 0 or 1 indicating the current quoting depth for print mode
                mode = values.w_true
                w_write_proc.call_interpret([self, port, mode])
            else:
                port.write("(%s " % typename)
                self.write_values(port, w_type, env)
                port.write(")")
    def tostring(self):
        w_type = self.struct_type()
        typename = w_type.name.utf8value
        if w_type.isprefab:
            return self.tostring_prefab()
        elif w_type.all_opaque():
            # import pdb;pdb.set_trace()
            # ret_str = "#<%s" % typename
            # for i in range(0, self._get_size_list()):
            #     ret_str += ":%s" % self._ref(i).tostring()
            # ret_str += ">"
            #return ret_str
            return "#<%s>" % typename
        else:
            fields = [None] * w_type.total_field_count
            self.tostring_values(fields=fields, w_type=w_type, is_super=False)
            custom_huh = w_type.read_property(w_prop_custom_write)
            return "(%s %s)" % (typename, self._string_from_list(fields))
"""
This method generates a new structure class with inline stored immutable #f
values on positions from constant_false array. If a new structure instance get
immutable #f fields on the same positions, this class will be used, thereby
reducing its size.
"""
def generate_struct_class(constant_false):
if not len(constant_false):
return W_Struct
unrolling_constant_false = unrolling_iterable(constant_false)
clsname = 'W_ImmutableBooleanStruct_' + \
'_'.join([str(i) for i in constant_false])
@jit.unroll_safe
def _ref(self, i):
pos = i
for j in unrolling_constant_false:
if i > j:
pos -= 1
elif i == j:
return values.w_false
# original index
immutable = self.struct_type().is_immutable_field_index(i)
# altered index
w_res = self._get_list(pos)
if not immutable:
assert isinstance(w_res, values.W_Cell)
w_res = w_res.get_val()
return w_res
@jit.unroll_safe
def _set(self, i, val):
pos = i
for j in unrolling_constant_false:
if i > j:
pos -= 1
# altered index
w_cell = self._get_list(pos)
assert isinstance(w_cell, values.W_Cell)
w_cell.set_val(val)
cls = type(clsname, (W_Struct,), {'_ref':_ref, '_set': _set})
cls = inline_small_list(sizemax=min(11,CONST_FALSE_SIZE),
immutable=True,
attrname="storage",
unbox_num=True)(cls)
return cls
# Immutable-#f field elision: pre-generate one specialized W_Struct subclass
# per non-empty combination of elidable field positions.  The number of
# classes grows exponentially in CONST_FALSE_SIZE, hence the small bound;
# setting it to 0 disables the optimization entirely.
if config.immutable_boolean_field_elision:
    CONST_FALSE_SIZE = 5 # the complexity grows exponentially
else:
    CONST_FALSE_SIZE = 0 # disabled
struct_classes = []
for i in range(0, CONST_FALSE_SIZE):
    for comb in itertools.combinations(range(CONST_FALSE_SIZE), i+1):
        struct_classes.append(generate_struct_class(comb))
# unrolled (index, class) pairs so lookup_struct_class can be jitted flat
struct_class_iter = unrolling_iterable(enumerate(struct_classes))
@jit.elidable
def fac(n):
    # Factorial of n; any value below 2 yields 1.  Written iteratively so
    # the elidable call never recurses.
    result = 1
    while n > 1:
        result *= n
        n -= 1
    return result
@jit.elidable
def ncr(n, r):
    # Binomial coefficient "n choose r", with the special case that n == 0
    # yields 0.  Uses integer (truncating) division throughout.
    if n == 0:
        return 0
    numerator = fac(n)
    return numerator / fac(r) / fac(n - r)
@jit.unroll_safe
def lookup_struct_class(constant_false):
    # Find the pre-generated struct class eliding exactly the immutable #f
    # field positions in `constant_false`, by computing the combination's
    # rank in the same order used to fill `struct_classes` above.  Falls
    # back to W_Struct when elision is disabled, the list is empty, or a
    # position lies outside the pre-generated range.
    if CONST_FALSE_SIZE and constant_false and constant_false[-1] < CONST_FALSE_SIZE:
        n = CONST_FALSE_SIZE
        pos = 0
        # offset of combinations with smaller amount of fields
        for r in range(1, len(constant_false)):
            pos += ncr(n, r)
        # and the precise position
        r = len(constant_false)
        last_idx = 0
        for idx in constant_false:
            pos += ncr(n, r) - ncr(n-idx+last_idx, r)
            n -= idx - last_idx + 1
            r -= 1
            last_idx = idx + 1
        # lookup class by its position
        for i, cls in struct_class_iter:
            if i == pos:
                return cls
    return W_Struct
@jit.unroll_safe
def reduce_field_values(field_values, constant_false):
    """Drop the entries of `field_values` whose indices occur in `constant_false`.

    `constant_false` holds the sorted indices of immutable fields known to be
    #f; those values are reconstructed by the specialized struct class and
    need not be stored.  Returns a fresh list of the remaining values.
    """
    reduced_field_values = [None] * (len(field_values) - len(constant_false))
    k = 0
    for i, val in enumerate(field_values):
        found = False
        for j in constant_false:
            if j == i:
                found = True
                break  # indices are unique; no point scanning the rest
        if not found:
            reduced_field_values[k] = val
            k += 1
    return reduced_field_values
@jit.unroll_safe
def splice_array(array, index, insertion):
    """Return a new list: `array` with `insertion` spliced in at `index`."""
    n = len(array)
    m = len(insertion)
    result = [None] * (n + m)
    # prefix of the original, up to the splice point
    for i in range(index):
        result[i] = array[i]
    # the inserted segment
    for i in range(m):
        result[index + i] = insertion[i]
    # remainder of the original, shifted right by the insertion length
    for i in range(index, n):
        result[i + m] = array[i]
    return result
@jit.unroll_safe
def construct_struct_final(struct_type, field_values, env, cont):
    """Allocate the struct after all guards ran and auto fields were spliced in.

    Mutable fields are boxed in W_Cell so they can be updated in place.
    When boolean-field elision is enabled, immutable fields holding #f are
    dropped from storage and a specialized class reconstructs them on access.
    """
    from pycket.interpreter import return_value
    assert len(field_values) == struct_type.total_field_count
    # None disables the constant-#f bookkeeping entirely when elision is off
    constant_false = [] if CONST_FALSE_SIZE else None
    for i, value in enumerate(field_values):
        if not struct_type.is_immutable_field_index(i):
            value = values.W_Cell(value)
            field_values[i] = value
        elif CONST_FALSE_SIZE and value is values.w_false:
            constant_false.append(i)
    cls = lookup_struct_class(constant_false)
    if cls is not W_Struct:
        # a specialized class was found: strip the elided #f values
        field_values = reduce_field_values(field_values, constant_false)
    result = cls.make(field_values, struct_type)
    return return_value(result, env, cont)
def construct_struct_loop(init_type, struct_type, field_values, env, cont):
    """Walk up the struct-type inheritance chain, running each type's guard.

    `init_type` is the type actually being instantiated; `struct_type` is the
    level currently being processed.  Recursion bottoms out when w_super is
    no longer a W_StructType (i.e. #f), at which point the struct is built.
    """
    struct_type = jit.promote(struct_type)
    if not isinstance(struct_type, W_StructType):
        return construct_struct_final(init_type, field_values, env, cont)
    auto_field_start = struct_type.total_init_field_count
    w_guard = struct_type.w_guard
    if w_guard is values.w_false:
        # no guard at this level: proceed directly to splicing auto values
        return construct_struct_loop_body(init_type, struct_type, field_values,
                                          auto_field_start, env, cont)
    assert auto_field_start >= 0
    # the guard receives the init values plus the instantiated type's name
    typename = init_type.name
    args = field_values[:auto_field_start] + [typename]
    cont = receive_guard_values_cont(init_type, struct_type, field_values,
                                     auto_field_start, env, cont)
    return w_guard.call(args, env, cont)
def construct_struct_loop_body(init_type, struct_type, field_values,
                               auto_field_start, env, cont):
    """Splice this level's auto-field values in, then recurse to the super type."""
    # Figure out where in the array the auto values start for this struct type.
    # Recall, the struct is built from the bottom up in the inheritance heirarchy.
    auto_values_w = struct_type.auto_values_w
    field_values = splice_array(field_values, auto_field_start, auto_values_w)
    w_super_type = struct_type.w_super
    return construct_struct_loop(init_type, w_super_type, field_values, env, cont)
@continuation
def receive_guard_values_cont(init_type, struct_type, field_values,
                              auto_field_start, env, cont, _vals):
    """Continuation receiving the (possibly rewritten) init values from a guard.

    A guard must return exactly as many values as it was passed init fields;
    a mismatch is now reported as a Scheme-level error instead of tripping an
    interpreter assert (this resolves the original "XXX Turn me into an
    exception" marker).
    """
    if _vals.num_values() != auto_field_start:
        raise SchemeException(
            "struct guard for %s returned wrong number of values: expected %s, got %s" % (
                init_type.name.variable_name(), auto_field_start, _vals.num_values()))
    for i in range(auto_field_start):
        field_values[i] = _vals.get_value(i)
    return construct_struct_loop_body(init_type, struct_type, field_values,
                                      auto_field_start, env, cont)
#
class W_StructTypeProcedure(values.W_Procedure):
    "Base for all Structure type related procedures"
    _attrs_ = _immutable_fields_ = ["_struct_type"]
    import_from_mixin(SingleResultMixin)
    def __init__(self, struct_type):
        self._struct_type = struct_type
    def struct_type_name(self):
        # human-readable name of the underlying struct type
        return self._struct_type.name.variable_name()
    def struct_type(self):
        return self._struct_type
    def struct_type_promote(self):
        # promote to a JIT constant: the type never changes per instance
        return jit.promote(self.struct_type())
class W_StructConstructor(W_StructTypeProcedure):
    """Constructor procedure for a struct type (e.g. `make-posn`)."""
    @make_call_method(simple=False)
    def call_with_extra_info(self, args, env, cont, app):
        type = self.struct_type_promote()
        arity = type.constructor_arity
        if not arity.arity_includes(len(args)):
            raise SchemeException("%s: wrong number of arguments; expected %s but got %s" % (self.tostring(),arity.tostring(), len(args)))
        # run guards up the inheritance chain, then allocate
        return construct_struct_loop(type, type, args, env, cont)
    def get_arity(self, promote=False):
        if promote:
            self = jit.promote(self)
        return self.struct_type().constructor_arity
    def tostring(self):
        return "#<procedure:%s>" % self.struct_type_name()
class W_StructPredicate(W_StructTypeProcedure):
    """Predicate procedure for a struct type (e.g. `posn?`).

    Strips impersonators/chaperones, then checks whether the value's struct
    type or any of its super types is this predicate's type.
    """
    errorname = "struct-predicate"
    @make_call_method([values.W_Object])
    @jit.unroll_safe
    def call(self, struct):
        from pycket.impersonators import get_base_object
        struct = get_base_object(struct)
        if isinstance(struct, W_RootStruct):
            struct_type = struct.struct_type()
            while isinstance(struct_type, W_StructType):
                if struct_type is self.struct_type():
                    return values.w_true
                struct_type = struct_type.w_super
        return values.w_false
    def get_arity(self, promote=False):
        return Arity.ONE
    def tostring(self):
        return "#<procedure:%s?>" % self.struct_type_name()
class W_StructFieldAccessor(values.W_Procedure):
    """Accessor fixed to one field (e.g. `posn-x`): wraps a W_StructAccessor
    together with a field index relative to that accessor's type."""
    errorname = "struct-field-accessor"
    _attrs_ = _immutable_fields_ = ["accessor", "field", "field_name"]
    import_from_mixin(SingleResultMixin)
    def __init__(self, accessor, field, field_name):
        assert isinstance(accessor, W_StructAccessor)
        self.accessor = accessor
        self.field = field
        self.field_name = field_name
    def get_absolute_index(self, type):
        # field index relative to the full (inherited) field layout of `type`
        return type.get_offset(self.accessor.struct_type()) + self.field
    def get_arity(self, promote=False):
        return Arity.ONE
    @make_call_method([values.W_Object], simple=False,
                      name="<struct-field-accessor-method>")
    def call_with_extra_info(self, struct, env, cont, app):
        jit.promote(self)
        return self.accessor.access(struct, self.field, env, cont, app)
    def tostring(self):
        name = self.accessor.struct_type_name()
        return "#<procedure:%s-%s>" % (name, self.field_name.variable_name())
class W_StructAccessor(W_StructTypeProcedure):
    """Generic accessor for a struct type: takes a struct and a field index."""
    errorname = "struct-accessor"
    def get_arity(self, promote=False):
        return Arity.TWO
    def access(self, struct, field, env, cont, app=None):
        self = jit.promote(self)
        st = jit.promote(struct.struct_type())
        if st is None:
            # value is not a struct at all
            raise SchemeException("%s got %s" % (self.tostring(), struct.tostring()))
        offset = st.get_offset(self.struct_type())
        if offset == -1:
            # struct exists but is of an unrelated type
            raise SchemeException("%s: expected a %s but got a %s" % (self.tostring(), self.struct_type_name(), st.name.variable_name()))
        return struct.ref_with_extra_info(field + offset, app, env, cont)
    @make_call_method([values.W_Object, values.W_Fixnum], simple=False,
                      name="<struct-accessor-method>")
    def call_with_extra_info(self, struct, field, env, cont, app):
        return self.access(struct, field.value, env, cont, app)
    def tostring(self):
        return "#<procedure:%s-ref>" % self.struct_type_name()
class W_StructFieldMutator(values.W_Procedure):
    """Mutator fixed to one field (e.g. `set-posn-x!`): wraps a
    W_StructMutator together with a field index relative to its type."""
    errorname = "struct-field-mutator"
    _attrs_ = _immutable_fields_ = ["mutator", "field", "field_name"]
    import_from_mixin(SingleResultMixin)
    def __init__ (self, mutator, field, field_name):
        assert isinstance(mutator, W_StructMutator)
        self.mutator = mutator
        self.field = field
        self.field_name = field_name
    def get_arity(self, promote=False):
        return Arity.TWO
    def get_absolute_index(self, type):
        # field index relative to the full (inherited) field layout of `type`
        return type.get_offset(self.mutator.struct_type()) + self.field
    @make_call_method([values.W_Object, values.W_Object], simple=False,
                      name="<struct-field-mutator-method>")
    def call_with_extra_info(self, struct, val, env, cont, app):
        return self.mutator.mutate(struct, self.field, val, env, cont, app)
    def tostring(self):
        return "#<procedure:%s-%s!>" % (self.mutator.struct_type_name(), self.field_name.variable_name())
class W_StructMutator(W_StructTypeProcedure):
    """Generic mutator for a struct type: takes a struct, field index, value."""
    errorname = "struct-mutator"
    def get_arity(self, promote=False):
        return Arity.THREE
    def mutate(self, struct, field, val, env, cont, app=None):
        self = jit.promote(self)
        st = jit.promote(struct.struct_type())
        if st is None:
            raise SchemeException("%s got %s" % (self.tostring(), struct.tostring()))
        offset = st.get_offset(self.struct_type())
        if offset == -1:
            # NOTE(review): this message looks unrelated to the failure (the
            # accessor's counterpart raises "expected a <T> but got a <T>").
            # Possibly copy-pasted from the use-before-definition path --
            # confirm against Racket behavior before changing.
            raise SchemeException("cannot reference an identifier before its definition")
        return struct.set_with_extra_info(field + offset, val, app, env, cont)
    @make_call_method([values.W_Object, values.W_Fixnum, values.W_Object],
                      simple=False, name="<struct-mutator-method>")
    def call_with_extra_info(self, struct, field, val, env, cont, app):
        return self.mutate(struct, field.value, val, env, cont, app)
    def tostring(self):
        return "#<procedure:%s-set!>" % self.struct_type_name()
class W_StructProperty(values.W_Object):
    """A struct type property (as from make-struct-type-property)."""
    errorname = "struct-type-property"
    _attrs_ = _immutable_fields_ = ["name", "w_guard", "supers", "can_imp"]
    def __init__(self, w_name, w_guard, supers=values.w_null, can_imp=False):
        self.name = w_name.utf8value
        self.w_guard = w_guard
        # presumably a list of (super-property . conversion) pairs -- only
        # the car is consulted below; verify against the constructor's caller
        self.supers = values.from_list(supers)
        self.can_imp = can_imp
    @jit.elidable
    def isinstance(self, property):
        # deliberately shadows the builtin name: "is self (or one of its
        # super properties) the given property?"
        if self is property:
            return True
        for super in self.supers:
            if super.car().isinstance(property):
                return True
        return False
    def tostring(self):
        return "#<struct-type-property:%s>"%self.name
# Predefined struct type properties used throughout the runtime.
sym = values.W_Symbol.make
w_prop_object_name = W_StructProperty(sym("prop:object-name"), values.w_false)
w_prop_authentic = W_StructProperty(sym("prop:authentic"), values.w_false)
w_prop_sealed = W_StructProperty(sym("prop:sealed"), values.w_false)
#FIXME: check if these properties need guards or not
w_prop_procedure = W_StructProperty(sym("prop:procedure"), values.w_false)
w_prop_checked_procedure = W_StructProperty(sym("prop:checked-procedure"), values.w_false)
w_prop_arity_string = W_StructProperty(sym("prop:arity-string"), values.w_false)
w_prop_incomplete_arity = W_StructProperty(sym("prop:incomplete-arity"), values.w_false)
w_prop_custom_write = W_StructProperty(sym("prop:custom-write"), values.w_false)
w_prop_equal_hash = W_StructProperty(sym("prop:equal+hash"), values.w_false)
w_prop_chaperone_unsafe_undefined = W_StructProperty(sym("prop:chaperone-unsafe-undefined"), values.w_false)
w_prop_set_bang_transformer = W_StructProperty(sym("prop:set!-transformer"), values.w_false)
w_prop_rename_transformer = W_StructProperty(sym("prop:rename-transformer"), values.w_false)
w_prop_expansion_contexts = W_StructProperty(sym("prop:expansion-contexts"), values.w_false)
#FIXME: add guards for these checking for immutable
w_prop_output_port = W_StructProperty(sym("prop:output-port"), values.w_false)
w_prop_input_port = W_StructProperty(sym("prop:input-port"), values.w_false)
del sym
class W_StructPropertyPredicate(values.W_Procedure):
    """Predicate for a struct type property: does the value's struct type
    carry this property (directly or via a super property)?"""
    errorname = "struct-property-predicate"
    _attrs_ = _immutable_fields_ = ["property"]
    import_from_mixin(SingleResultMixin)
    def __init__(self, prop):
        self.property = prop
    def get_arity(self, promote=False):
        return Arity.ONE
    @make_call_method([values.W_Object])
    @jit.unroll_safe
    def call(self, arg):
        if not isinstance(arg, W_RootStruct):
            return values.w_false
        w_val = arg.struct_type().read_property_precise(self.property)
        if w_val is not None:
            return values.w_true
        return values.w_false
class W_StructPropertyAccessor(values.W_Procedure):
    """Accessor for a struct type property's value.

    Accepts either a struct type (read the property directly) or a struct
    instance (delegate to get_prop); the optional `fail` argument is used
    only when neither path yields a value: called if it is a procedure,
    returned as-is otherwise.
    """
    errorname = "struct-property-accessor"
    _attrs_ = _immutable_fields_ = ["property"]
    import_from_mixin(SingleResultMixin)
    def __init__(self, prop):
        self.property = prop
    def get_arity(self, promote=False):
        return Arity.ONE
    @make_call_method([values.W_Object, default(values.W_Object, None)], simple=False)
    def call_with_extra_info(self, arg, fail, env, cont, app):
        from pycket.interpreter import return_value
        if isinstance(arg, W_StructType):
            w_val = arg.read_property_precise(self.property)
            if w_val is not None:
                return return_value(w_val, env, cont)
        elif arg.struct_type() is not None:
            return arg.get_prop(self.property, env, cont)
        elif fail is not None:
            if fail.iscallable():
                return fail.call_with_extra_info([], env, cont, app)
            return return_value(fail, env, cont)
        raise SchemeException("%s-accessor: expected %s? but got %s" %
                (self.property.name, self.property.name, arg.tostring()))
def struct2vector(struct, immutable=False):
    """Convert a struct to a vector of the form #(struct:name field ...)."""
    type_name = struct.struct_type().name.utf8value
    tag = values.W_Symbol.make("struct:" + type_name)
    elements = [tag] + struct.vals()
    return values_vector.W_Vector.fromelements(elements, immutable=immutable)
| mit | b728a9604d4dd93aa25bfd261dec5bde | 38.841877 | 138 | 0.596238 | 3.692025 | false | false | false | false |
pycket/pycket | pycket/prims/logging.py | 2 | 4642 |
from pycket import values, values_parameter, values_string
from pycket.arity import Arity
from pycket.argument_parser import ArgParser, EndOfInput
from pycket.prims.expose import default, expose, expose_val
from rpython.rlib import jit
DEBUG = values.W_Symbol.make("debug")
# Ordered from least to most verbose; numeric comparisons use list positions.
LOG_LEVEL_STR = ['none', 'fatal', 'error', 'warning', 'info', 'debug']
# Symbols for each level, plus #f as the final entry (meaning "no level").
LOG_LEVEL = tuple(map(values.W_Symbol.make, LOG_LEVEL_STR) + [values.w_false])
logger_max_wanted_level = values.W_Symbol.make("debug")
def get_level_from_index(lvl_index):
    """Return the W_Symbol for the numeric log level `lvl_index`.

    Valid indices are 0..len(LOG_LEVEL_STR)-1; the final LOG_LEVEL entry
    (#f) is not a symbol and is deliberately excluded.  The bound is now
    derived from LOG_LEVEL_STR instead of a duplicated magic constant.
    """
    if not (0 <= lvl_index < len(LOG_LEVEL_STR)):
        raise Exception("invalid level index : %s" % lvl_index)
    return LOG_LEVEL[lvl_index] # W_Symbol
def level_value(lvl_sym):
    """Return the numeric rank of a log-level symbol (0 = 'none .. 5 = 'debug)."""
    lvl_str = lvl_sym.tostring()
    try:
        return LOG_LEVEL_STR.index(lvl_str)
    except ValueError:
        raise Exception("invalid level type : %s" % lvl_str)
def level_geq(lvl_sym_1, lvl_sym_2):
    """True when the first level's numeric rank is >= the second's."""
    lhs = level_value(lvl_sym_1)
    rhs = level_value(lvl_sym_2)
    return lhs >= rhs
def check_level(lvl_sym):
    """Raise a SchemeException unless `lvl_sym` is a valid log level (or #f)."""
    from pycket.error import SchemeException
    if lvl_sym not in LOG_LEVEL:
        raise SchemeException("Invalid log level : %s" % lvl_sym.tostring())
# Initial backend levels for the main logger (mirrors Racket's defaults).
init_syslog_level = values.W_Symbol.make("none") # INIT_SYSLOG_LEVEL
init_stderr_level = values.W_Symbol.make("error") # SCHEME_LOG_ERROR
init_stdout_level = values.W_Symbol.make("none")
# The root logger: no topic, no parent, empty propagate-topic list.
w_main_logger = values.W_Logger(values.w_false,
                                values.w_false,
                                values.w_false,
                                [],
                                init_syslog_level,
                                init_stderr_level,
                                init_stdout_level)
@expose("make-logger", arity=Arity.geq(0))
@jit.unroll_safe
def make_logger(args):
    """Racket's make-logger: (make-logger [topic parent propagate-level
    propagate-topic ...]).  All arguments are optional."""
    parser = ArgParser("make-logger", args)
    topic = values.w_false
    parent = values.w_false
    propagate_level = DEBUG # propagate everything
    try:
        topic = parser.expect(values.W_Symbol, values.w_false)
        parent = parser.expect(values.W_Logger, values.w_false)
        propagate_level = parser.expect(*LOG_LEVEL)
    except EndOfInput:
        pass
    # Any remaining arguments are propagate topics
    propagate_topic = parser.expect_many(values.W_Symbol, values.w_false)
    # new loggers inherit the main logger's backend levels (locals below
    # shadow the module-level names of the same spelling on purpose)
    init_syslog_level = w_main_logger.get_syslog_level()
    init_stderr_level = w_main_logger.get_stderr_level()
    init_stdout_level = w_main_logger.get_stdout_level()
    return values.W_Logger(topic, parent, propagate_level, propagate_topic, init_syslog_level, init_stderr_level, init_stdout_level)
@expose("log-level?", [values.W_Object, values.W_Object, default(values.W_Object, values.w_false)])
def log_level(logger, level, topic):
    """Racket log-level?: would an event at `level`/`topic` reach any receiver?"""
    interested = logger.is_anyone_interested(level, topic)
    return values.w_true if interested else values.w_false
@expose("log-message", arity=Arity.oneof(4, 5, 6))
def log_message(args):
    """Racket's log-message, simplified: prints the (optionally topic-prefixed)
    message directly instead of dispatching an event to log receivers."""
    # currently relying on this information about log-level? :
    # Use this function to avoid work generating an event for
    # log-message if no receiver is interested in the information;
    # this shortcut is built into log-fatal, log-error, log-warning,
    # log-info, log-debug, and forms bound by define-logger
    # The complete solution is to :
    # FIXME : implement log_receivers, create an event and distribute it
    parser = ArgParser("log-message", args)
    # logger : logger?
    logger = parser.expect(values.W_Logger)
    # level : log-level/c
    level = parser.expect(*LOG_LEVEL)
    # topic : (or/c symbol? #f) = (logger-name logger)
    try:
        topic = parser.expect(values.W_Symbol, values.w_false)
    except Exception, e:
        # NOTE(review): broad catch -- presumably intended to cover both
        # EndOfInput AND a type mismatch in the 4-argument form (where the
        # message string occupies this position); confirm ArgParser's
        # behavior before narrowing this to EndOfInput.
        topic = logger.get_name()
    # message : string?
    message = parser.expect(values_string.W_String)
    # data : any/c
    data = parser.expect(values.W_Object)
    try:
        # prefix-message? : any/c = #t
        prefix_message_huh = parser.expect(values.W_Object, values.w_true)
    except EndOfInput:
        prefix_message_huh = values.w_true
    print_str = message.tostring()
    if (prefix_message_huh is not values.w_false) and (topic is not values.w_false):
        print_str = "%s: %s" % (topic.tostring(), print_str)
    print(print_str)
    return values.w_void
@expose("logger-name", [values.W_Logger])
def logger_name(logger):
    # the logger's topic doubles as its name (or #f if it has none)
    return logger.topic
# The current-logger parameter, initially the main (root) logger.
w_current_logger = values_parameter.W_Parameter(w_main_logger)
expose_val("current-logger", w_current_logger)
@expose("make-log-receiver")
def make_log_reciever(args):
    # FIXME: stub -- ignores its logger/level arguments entirely.
    # (The "reciever" spelling matches the existing W_LogReciever class.)
    return values.W_LogReciever()
| mit | d0d247617ee0dd0f4a27ac5b5ed20794 | 33.132353 | 132 | 0.649289 | 3.393275 | false | false | false | false |
pycket/pycket | pycket/values.py | 1 | 67304 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from pycket import config
from pycket.base import W_Object, W_ProtoObject, UnhashableType
from pycket.cont import continuation, label, NilCont
from pycket.env import ConsEnv
from pycket.error import SchemeException
from pycket.prims.expose import make_call_method
from pycket.small_list import inline_small_list
from pycket.util import add_copy_method, memoize_constructor
from rpython.tool.pairtype import extendabletype
from rpython.rlib import jit, runicode, rarithmetic, rweaklist
from rpython.rlib.rstring import StringBuilder
from rpython.rlib.objectmodel import always_inline, r_dict, compute_hash, we_are_translated
from rpython.rlib.objectmodel import specialize, try_inline, import_from_mixin
from rpython.rlib.rarithmetic import r_longlong, intmask
import rpython.rlib.rweakref as weakref
from rpython.rlib.rbigint import rbigint, NULLRBIGINT
from rpython.rlib.debug import check_list_of_chars, make_sure_not_resized, check_regular_int
UNROLLING_CUTOFF = 5
@inline_small_list(immutable=True, attrname="vals", factoryname="_make")
class Values(W_ProtoObject):
    """Multiple-values result object.

    inline_small_list specializes storage for small counts.  A single value
    is NOT wrapped: make1 returns the value itself, so Values instances
    always carry 0 or >= 2 values.
    """
    _attrs_ = []
    _immutable_ = True
    def __init__(self):
        pass
    @staticmethod
    def make(values_w):
        if len(values_w) == 1:
            return Values.make1(values_w[0])
        return Values._make(values_w)
    @staticmethod
    def make1(w_value):
        # single values are represented unwrapped
        assert w_value is not None
        return w_value
    @staticmethod
    def make2(w_value1, w_value2):
        return Values._make2(w_value1, w_value2)
    def num_values(self):
        return self._get_size_list()
    def get_value(self, index):
        return self._get_list(index)
    def get_all_values(self):
        return self._get_full_list()
    def tostring(self):
        vals = self._get_full_list()
        if len(vals) == 1:
            return vals[0].tostring()
        if len(vals) == 0:
            return "(values)"
        else: # This shouldn't be called in real code
            return "\n".join([v.tostring() for v in vals])
class W_Cell(W_Object): # not the same as Racket's box
    """Mutable storage cell with unboxing strategies for fixnums and flonums.

    Storing a W_Fixnum/W_Flonum replaces it with a mutable strategy object,
    so repeated stores of the same kind mutate in place instead of
    allocating; get_val re-boxes transparently.
    """
    _attrs_ = ["w_value"]
    def __init__(self, v):
        assert not isinstance(v, W_Cell)
        if isinstance(v, W_Fixnum):
            v = W_CellIntegerStrategy(v.value)
        elif isinstance(v, W_Flonum):
            v = W_CellFloatStrategy(v.value, v.is_single_prec)
        self.w_value = v
    def get_val(self):
        w_value = self.w_value
        if isinstance(w_value, W_CellIntegerStrategy):
            return W_Fixnum(w_value.value)
        elif isinstance(w_value, W_CellFloatStrategy):
            return W_Flonum(w_value.value, w_value.is_single)
        return w_value
    def set_val(self, w_value):
        from pycket import config
        if not config.strategies:
            self.w_value = w_value
            return
        if isinstance(w_value, W_Fixnum):
            w_v = self.w_value
            if isinstance(w_v, W_CellIntegerStrategy):
                w_v.value = w_value.value
            else:
                self.w_value = W_CellIntegerStrategy(w_value.value)
        elif isinstance(w_value, W_Flonum):
            w_v = self.w_value
            if isinstance(w_v, W_CellFloatStrategy):
                w_v.value = w_value.value
                # bug fix: keep the single-precision flag in sync, otherwise
                # a single flonum stored here reads back as double precision
                w_v.is_single = w_value.is_single_prec
            else:
                self.w_value = W_CellFloatStrategy(w_value.value,
                                                   w_value.is_single_prec)
        else:
            self.w_value = w_value
class W_CellIntegerStrategy(W_Object):
    """Unboxed integer storage for W_Cell; never escapes a cell."""
    _attrs_ = ["value"]
    # can be stored in cells only, is mutated when a W_Fixnum is stored
    def __init__(self, value):
        self.value = value
class W_CellFloatStrategy(W_Object):
    """Unboxed float storage for W_Cell; never escapes a cell."""
    _attrs_ = ["value", "is_single"]
    # can be stored in cells only, is mutated when a W_Flonum is stored
    def __init__(self, value, is_single=False):
        self.value = value
        self.is_single = is_single  # single-precision flonum flag
class W_Undefined(W_Object):
    """The unsafe-undefined marker (use-before-definition sentinel)."""
    errorname = "unsafe-undefined"
    _attrs_ = []
    def __init__(self):
        pass
    def tostring(self):
        return "#<unsafe-undefined>"
# singleton instance; compared by identity throughout the runtime
w_unsafe_undefined = W_Undefined()
class W_ModulePathIndex(W_Object):
    """Opaque module-path-index placeholder (carries no data yet)."""
    errorname = "module-path-index"
    _attrs_ = []
    def __init__(self):
        pass
    def tostring(self):
        return "#<module-path-index>"
class W_ResolvedModulePath(W_Object):
    """A resolved module path, wrapping its name."""
    errorname = "resolved-module-path"
    _attrs_ = _immutable_fields_ = ["name"]
    def __init__(self, name):
        self.name = name
    def tostring(self):
        return "#<resolved-module-path:%s>" % self.name
class W_LogReciever(W_Object):
    """Stub log receiver (sic: "reciever" spelling is part of the API)."""
    errorname = "log-reciever"
    # FIXME
    def __init__(self):
        pass
class W_Logger(W_Object):
    """Racket logger: a topic, an optional parent, a propagation policy, and
    per-backend (syslog/stderr/stdout) maximum wanted levels."""
    errorname = "logger"
    _immutable_fields_ = ['topic', 'parent', 'propagate_level', 'propagate_topic[*]']
    _attrs_ = ['topic', 'parent', 'propagate_level', 'propagate_topic', 'syslog_level', 'stderr_level', 'stdout_level']
    def __init__(self, topic, parent, propagate_level, propagate_topic, syslog_level, stderr_level, stdout_level):
        self.topic = topic # (or/c symbol? #f) = #f performance
        self.parent = parent # (or/c symbol? #f) = #f
        self.propagate_level = propagate_level # log-level/c = 'debug
        self.propagate_topic = propagate_topic # (or/c #f symbol?) = #f
        self.syslog_level = syslog_level
        self.stderr_level = stderr_level
        self.stdout_level = stdout_level
    def get_name(self):
        return self.topic # io/logger/logger.rkt
    def get_syslog_level(self):
        return self.syslog_level
    def get_stderr_level(self):
        # bug fix: previously returned self.syslog_level (copy-paste error)
        return self.stderr_level
    def get_stdout_level(self):
        # bug fix: previously returned self.syslog_level (copy-paste error)
        return self.stdout_level
    def set_syslog_level(self, lvl_str):
        from pycket.prims.logging import check_level
        lvl = W_Symbol.make(lvl_str)
        check_level(lvl)
        self.syslog_level = lvl
    def set_stderr_level(self, lvl_str):
        from pycket.prims.logging import check_level
        lvl = W_Symbol.make(lvl_str)
        check_level(lvl)
        self.stderr_level = lvl
    def set_stdout_level(self, lvl_str):
        from pycket.prims.logging import check_level
        lvl = W_Symbol.make(lvl_str)
        check_level(lvl)
        self.stdout_level = lvl
    def is_anyone_interested(self, level, topic):
        """Would an event at `level`/`topic` reach any backend, here or via
        a parent logger (subject to the propagation level)?"""
        from pycket.prims.logging import level_geq
        if self.topic is w_false or self.topic is topic:
            # self.topic #f : we're interested in events at level for any topic
            if level_geq(self.syslog_level, level):
                return True
            # cheating : any of these three types are enough to trigger logging
            if level_geq(self.stderr_level, level):
                return True
            if level_geq(self.stdout_level, level):
                return True
        if self.parent is w_false or level_geq(level, self.propagate_level):
            return False
        return self.parent.is_anyone_interested(level, topic)
    def tostring(self):
        return "#<logger>"
class W_ContinuationPromptTag(W_Object):
    """A prompt tag; identity distinguishes tags, `name` is for printing only."""
    errorname = "continuation-prompt-tag"
    _attrs_ = _immutable_fields_ = ["name"]
    def __init__(self, name):
        self.name = name
    def tostring(self):
        if self.name is None:
            return "#<continuation-prompt-tag>"
        name = self.name.utf8value
        return "#<continuation-prompt-tag:%s>" % name
# distinguished singleton tags (compared by identity)
w_default_continuation_prompt_tag = W_ContinuationPromptTag(None)
w_root_continuation_prompt_tag = W_ContinuationPromptTag(None)
class W_ContinuationMarkSet(W_Object):
    """Snapshot of the continuation marks: a continuation plus a prompt tag."""
    errorname = "continuation-mark-set"
    _attrs_ = _immutable_fields_ = ["cont", "prompt_tag"]
    def __init__(self, cont, prompt_tag):
        self.cont = cont
        self.prompt_tag = prompt_tag
    def tostring(self):
        return "#<continuation-mark-set>"
class W_ContinuationMarkKey(W_Object):
    """A continuation-mark key; the base class passes values through unguarded
    (impersonated keys override get_cmk/set_cmk)."""
    errorname = "continuation-mark-key"
    _attrs_ = _immutable_fields_ = ["name"]
    def __init__(self, name):
        self.name = name
    def get_cmk(self, value, env, cont):
        from pycket.interpreter import return_value
        return return_value(value, env, cont)
    def set_cmk(self, body, value, update, env, cont):
        # install the mark on `update`'s frame, then run `body`
        update.update_cm(self, value)
        return body.call([], env, cont)
    def tostring(self):
        return "#<continuation-mark-name>"
class W_VariableReference(W_Object):
    """Runtime #%variable-reference: wraps the AST node and, optionally, the
    linklet instance it was created in."""
    errorname = "variable-reference"
    _attrs_ = _immutable_fields_ = ['varref', 'linklet_instance']
    def __init__(self, varref, l_instance=None):
        self.varref = varref
        self.linklet_instance = l_instance
    def is_unsafe(self):
        if self.varref.unsafe:
            return w_true
        return w_false
    def get_instance(self):
        return self.linklet_instance
    def tostring(self):
        return "#<#%variable-reference>"
# A super class for both fl/fx/regular vectors
class W_VectorSuper(W_Object):
    """Abstract base for all vector flavors (plain, fl, fx, impersonated)."""
    errorname = "vector"
    _attrs_ = []
    def __init__(self):
        raise NotImplementedError("abstract base class")
    def vector_set(self, i, new, env, cont, app=None):
        raise NotImplementedError("abstract base class")
    def vector_ref(self, i, env, cont, app=None):
        raise NotImplementedError("abstract base class")
    def length(self):
        raise NotImplementedError("abstract base class")
    def immutable(self):
        raise NotImplementedError("abstract base class")
    # abstract methods for vector implementations that use strategies
    # we would really not like to have them here, but would need multiple
    # inheritance to express that
    # impersonators can just not implement them
    def get_storage(self):
        raise NotImplementedError
    def set_storage(self, storage):
        raise NotImplementedError
    def get_strategy(self):
        raise NotImplementedError
    def set_strategy(self, strategy):
        raise NotImplementedError
# Things that are vector?
class W_MVector(W_VectorSuper):
    """Marker base for values that answer #t to vector?."""
    errorname = "vector"
    _attrs_ = []
class W_List(W_Object):
    """Abstract base for list values (conses and null)."""
    errorname = "list"
    _attrs_ = []
    def __init__(self):
        raise NotImplementedError("abstract base class")
class W_Cons(W_List):
    "Abstract for specialized conses. Concrete general in W_WrappedCons"
    errorname = "pair"
    _attrs_ = []
    @staticmethod
    @specialize.arg(2)
    def make(car, cdr, force_proper=False):
        """Pick a storage-specialized cons class based on the car's type and
        whether the resulting pair heads a proper list.  `force_proper`
        asserts properness without inspecting cdr (a cheap fast path)."""
        from pycket import config
        if not config.type_size_specialization:
            if cdr.is_proper_list():
                return W_WrappedConsProper(car, cdr)
            return W_WrappedCons(car, cdr)
        elif isinstance(car, W_Fixnum):
            # unbox the fixnum car into the cons cell itself
            if force_proper or cdr.is_proper_list():
                return W_UnwrappedFixnumConsProper(car.value, cdr)
            return W_UnwrappedFixnumCons(car.value, cdr)
        elif isinstance(car, W_Flonum):
            # unbox the flonum car (keeping its single-precision flag)
            if force_proper or cdr.is_proper_list():
                return W_UnwrappedFlonumConsProper(car.value, car.is_single_prec, cdr)
            return W_UnwrappedFlonumCons(car.value, car.is_single_prec, cdr)
        else:
            if force_proper or cdr.is_proper_list():
                return W_WrappedConsProper(car, cdr)
            return W_WrappedCons(car, cdr)
    def car(self):
        raise NotImplementedError("abstract base class")
    def cdr(self):
        raise NotImplementedError("abstract base class")
    def to_tuple(self):
        "convenience accessor"
        return (self.car(), self.cdr())
    def tostring(self):
        cur = self
        acc = []
        while isinstance(cur, W_Cons):
            acc.append(cur.car().tostring())
            cur = cur.cdr()
        # Are we a dealing with a proper list?
        if cur is w_null:
            return "(%s)" % " ".join(acc)
        # Must be an improper list
        return "(%s . %s)" % (" ".join(acc), cur.tostring())
    def immutable(self):
        return True
    def hash_equal(self, info=None):
        # iterative equal-hash over the cars; note: a non-null tail of an
        # improper list is NOT mixed in (collisions are acceptable)
        x = 0x345678
        while isinstance(self, W_Cons):
            car, self = self.car(), self.cdr()
            y = car.hash_equal(info=info)
            x = rarithmetic.intmask((1000003 * x) ^ y)
        return x
    def equal(self, other):
        # iterative pairwise comparison of cars, then compare the tails
        if not isinstance(other, W_Cons):
            return False
        if self is other:
            return True
        w_curr1 = self
        w_curr2 = other
        while isinstance(w_curr1, W_Cons) and isinstance(w_curr2, W_Cons):
            if not w_curr1.car().equal(w_curr2.car()):
                return False
            w_curr1 = w_curr1.cdr()
            w_curr2 = w_curr2.cdr()
        return w_curr1.equal(w_curr2)
    def _unsafe_set_cdr(self, val):
        raise NotImplementedError("abstract base class")
    def clone(self):
        raise NotImplementedError("abstract base class")
@add_copy_method(copy_method="clone")
class W_UnwrappedFixnumCons(W_Cons):
    """Cons cell storing an unboxed fixnum car."""
    # NOTE(review): _cdr is listed as immutable yet _unsafe_set_cdr mutates
    # it -- presumably only used before the cell escapes; confirm.
    _attrs_ = _immutable_fields_ = ["_car", "_cdr"]
    def __init__(self, a, d):
        self._car = a
        self._cdr = d
    def car(self):
        # re-box the stored int as a W_Fixnum on access
        return W_Fixnum(self._car)
    def cdr(self):
        return self._cdr
    def _unsafe_set_cdr(self, val):
        self._cdr = val
@add_copy_method(copy_method="clone")
class W_UnwrappedFixnumConsProper(W_UnwrappedFixnumCons):
    """Fixnum-car cons known to head a proper list."""
    def is_proper_list(self, seen=[]):
        return True
@add_copy_method(copy_method="clone")
class W_UnwrappedFlonumCons(W_Cons):
    """Cons cell storing an unboxed flonum car plus its single-precision flag."""
    # consistency fix: declare _attrs_ alongside _immutable_fields_, as every
    # other specialized cons class in this file does
    _attrs_ = _immutable_fields_ = ["_car", "_car_is_single", "_cdr"]
    def __init__(self, a, is_single, d):
        self._car = a
        self._car_is_single = is_single
        self._cdr = d
    def car(self):
        # re-box the stored float as a W_Flonum on access
        return W_Flonum(self._car, self._car_is_single)
    def cdr(self):
        return self._cdr
    def _unsafe_set_cdr(self, val):
        self._cdr = val
@add_copy_method(copy_method="clone")
class W_UnwrappedFlonumConsProper(W_UnwrappedFlonumCons):
    """Flonum-car cons known to head a proper list."""
    def is_proper_list(self, seen=[]):
        return True
@add_copy_method(copy_method="clone")
class W_WrappedCons(W_Cons):
    """General cons cell holding boxed car and cdr."""
    _attrs_ = _immutable_fields_ = ["_car", "_cdr"]
    def __init__(self, a, d):
        self._car = a
        self._cdr = d
    def car(self):
        return self._car
    def cdr(self):
        return self._cdr
    def _unsafe_set_cdr(self, val):
        self._cdr = val
@add_copy_method(copy_method="clone")
class W_WrappedConsProper(W_WrappedCons):
    """General cons known to head a proper list."""
    def is_proper_list(self, seen=[]):
        return True
class W_WrappedConsMaybe(W_WrappedCons):
    """Cons whose properness is not known statically; checked dynamically
    with cycle detection (`seen` is never mutated, so the mutable default
    list is safe here)."""
    def is_proper_list(self, seen=[]):
        if self in seen:
            return False # contains a cycle
        return self._cdr.is_proper_list(seen + [self])
class W_Box(W_Object):
    """Abstract base for boxes (mutable and immutable)."""
    errorname = "box"
    _attrs_ = []
    def __init__(self):
        raise NotImplementedError("abstract base class")
    def hash_equal(self, info=None):
        # boxes are not equal-hashable
        raise UnhashableType
    def unbox(self, env, cont):
        raise NotImplementedError("abstract base class")
    def set_box(self, val, env, cont):
        raise NotImplementedError("abstract base class")
class W_MBox(W_Box):
    """Mutable box."""
    errorname = "mbox"
    _attrs_ = ['value']
    def __init__(self, value):
        self.value = value
    def unbox(self, env, cont):
        from pycket.interpreter import return_value
        return return_value(self.value, env, cont)
    def set_box(self, val, env, cont):
        from pycket.interpreter import return_value
        self.value = val
        return return_value(w_void, env, cont)
    def tostring(self):
        return "'#&%s" % self.value.tostring()
class W_IBox(W_Box):
    """Immutable box: unbox works, set-box! raises."""
    errorname = "ibox"
    _attrs_ = _immutable_fields_ = ["value"]
    def __init__(self, value):
        self.value = value
    def immutable(self):
        return True
    def unbox(self, env, cont):
        from pycket.interpreter import return_value
        return return_value(self.value, env, cont)
    def set_box(self, val, env, cont):
        raise SchemeException("set-box!: not supported on immutable boxes")
    def tostring(self):
        return "'#&%s" % self.value.tostring()
# A weak box does not test as a box for most operations and cannot be
# chaperoned/impersonated, so we start it from W_Object rather than W_Box.
class W_WeakBox(W_Object):
    """Weak box: holds its value via a weak reference; get() may return None
    once the value has been collected."""
    errorname = "weak-box"
    _attrs_ = _immutable_fields_ = ["value"]
    def __init__(self, value):
        assert isinstance(value, W_Object)
        self.value = weakref.ref(value)
    def get(self):
        return self.value()
    def tostring(self):
        return "#<weak-box>"
class W_Ephemeron(W_Object):
    """Ephemeron: the value stays alive only while the key does.

    Implemented as a weak key reference plus a weak-key dictionary mapping
    the key to the value, so collecting the key releases the value too.
    """
    errorname = "ephemeron"
    _attrs_ = _immutable_fields_ = ["key", "mapping"]
    def __init__(self, key, value):
        assert isinstance(key, W_Object)
        assert isinstance(value, W_Object)
        self.key = weakref.ref(key)
        self.mapping = weakref.RWeakKeyDictionary(W_Object, W_Object)
        self.mapping.set(key, value)
    def get(self):
        # None once the key has been collected
        return self.mapping.get(self.key())
    def tostring(self):
        return "#<ephemeron>"
class W_Placeholder(W_Object):
    """Placeholder used by make-reader-graph-style cyclic construction."""
    errorname = "placeholder"
    _attrs_ = ['value']
    def __init__(self, value):
        self.value = value
    def tostring(self):
        return "#<placeholder>"
class W_HashTablePlaceholder(W_Object):
    """Hash-table placeholder stub; currently ignores its keys and values."""
    errorname = "hash-table-placeholder"
    _attrs_ = []
    def __init__(self, keys, vals):
        pass
    def tostring(self):
        return "#<hash-table-placeholder>"
class W_MList(W_Object):
    """Abstract base for mutable lists."""
    errorname = "mlist"
    _attrs_ = []
    def __init__(self):
        raise NotImplementedError("abstract base class")
class W_MCons(W_MList):
    """Mutable cons cell: both car and cdr can be reassigned."""
    errorname = "mpair"
    _attrs_ = ["_car", "_cdr"]
    def __init__(self, a, d):
        self._car = a
        self._cdr = d
    def tostring(self):
        return "(mcons %s %s)" % (self.car().tostring(), self.cdr().tostring())
    def car(self):
        return self._car
    def cdr(self):
        return self._cdr
    def set_car(self, a):
        self._car = a
    def set_cdr(self, d):
        self._cdr = d
class W_Number(W_Object):
    """Abstract base for all numbers; numbers are immutable and eqv? falls
    back to equal?."""
    _attrs_ = []
    errorname = "number"
    def __init__(self):
        raise NotImplementedError("abstract base class")
    def immutable(self):
        return True
    def eqv(self, other):
        return self.equal(other)
    def hash_eqv(self):
        return self.hash_equal(info=None)
class W_Real(W_Number):
    """Abstract base for real numbers."""
    errorname = "real"
    _attrs_ = []
class W_Rational(W_Real):
    """Exact rational: a pair of rbigints in lowest terms, denominator > 0."""
    _attrs_ = _immutable_fields_ = ["_numerator", "_denominator"]
    errorname = "rational"
    def __init__(self, num, den):
        assert isinstance(num, rbigint)
        assert isinstance(den, rbigint)
        self._numerator = num
        self._denominator = den
        if not we_are_translated():
            # invariant: denominator is kept strictly positive
            assert den.gt(NULLRBIGINT)
    @staticmethod
    def make(num, den):
        # accepts W_Fixnum or W_Bignum for both parts
        if isinstance(num, W_Fixnum):
            num = rbigint.fromint(num.value)
        else:
            assert isinstance(num, W_Bignum)
            num = num.value
        if isinstance(den, W_Fixnum):
            den = rbigint.fromint(den.value)
        else:
            assert isinstance(den, W_Bignum)
            den = den.value
        return W_Rational.frombigint(num, den)
    @staticmethod
    def fromint(n, d=1, need_to_check=True):
        assert isinstance(n, int)
        assert isinstance(d, int)
        from fractions import gcd  # Python 2 stdlib location
        g = gcd(n, d)
        n = n // g
        d = d // g
        if need_to_check and d == 1:
            # integral result normalizes down to a fixnum
            return W_Fixnum(n)
        return W_Rational(rbigint.fromint(n), rbigint.fromint(d))
    @staticmethod
    def frombigint(n, d=rbigint.fromint(1), need_to_check=True):
        from pycket.arithmetic import gcd
        g = gcd(n, d)
        n = n.floordiv(g)
        d = d.floordiv(g)
        if need_to_check and d.eq(rbigint.fromint(1)):
            # integral result normalizes down to a fixnum/bignum
            return W_Bignum.frombigint(n)
        return W_Rational(n, d)
    @staticmethod
    def fromfloat(f):
        # FIXME: this is the temporary not exact implementation
        # (approximates to 6 decimal places instead of exact conversion)
        assert isinstance(f, float)
        d = 1000000
        n = int(f * d)
        from fractions import gcd
        _gcd = gcd(n, d)
        return W_Rational.fromint(n/_gcd, d/_gcd)
    def get_numerator(self):
        return self._numerator
    def get_denominator(self):
        return self._denominator
    def tostring(self):
        return "%s/%s" % (self._numerator.str(), self._denominator.str())
    def equal(self, other):
        # components are kept normalized, so fieldwise comparison suffices
        if not isinstance(other, W_Rational):
            return False
        return (self._numerator.eq(other._numerator) and
                self._denominator.eq(other._denominator))
    def hash_equal(self, info=None):
        hash1 = self._numerator.hash()
        hash2 = self._denominator.hash()
        return rarithmetic.intmask(hash1 + 1000003 * hash2)
class W_Integer(W_Real):
    """Abstract base class for integers (fixnum or bignum)."""
    errorname = "integer"
    _attrs_ = []
    def toint(self):
        raise NotImplementedError("abstract base class")
    @staticmethod
    def frombigint(value):
        # Prefer the unboxed fixnum representation when the value fits.
        try:
            num = value.toint()
        except OverflowError:
            pass
        else:
            return W_Fixnum(num)
        return W_Bignum(value)
    @staticmethod
    def fromfloat(value):
        # Truncating conversion; falls back to a bignum on overflow.
        try:
            val = rarithmetic.ovfcheck_float_to_int(value)
        except OverflowError:
            return W_Bignum(rbigint.fromfloat(value))
        return W_Fixnum(val)
@memoize_constructor
class W_Fixnum(W_Integer):
    """Machine-word-sized integer.

    The constructor is memoized (memoize_constructor presumably supplies the
    `make` classmethod used below -- TODO confirm), and small values in
    [MIN_INTERNED, MAX_INTERNED) are additionally interned in `cache`.
    """
    _immutable_ = True
    _attrs_ = _immutable_fields_ = ["value"]
    errorname = "fixnum"
    MIN_INTERNED = -5
    MAX_INTERNED = 256
    INTERNED_RANGE = (MIN_INTERNED, MAX_INTERNED)
    cache = []
    def tostring(self):
        return str(self.value)
    def __init__(self, val):
        if not we_are_translated():
            # this is not safe during translation
            assert isinstance(val, int)
            check_regular_int(val)
        self.value = val
    def toint(self):
        return self.value
    def equal(self, other):
        if not isinstance(other, W_Fixnum):
            return False
        return self.value == other.value
    def hash_equal(self, info=None):
        return self.value
    @staticmethod
    @try_inline
    def make_or_interned(val):
        # Fast path for small integers: reuse the preallocated instance.
        from rpython.rlib.rarithmetic import int_between
        if int_between(W_Fixnum.MIN_INTERNED, val, W_Fixnum.MAX_INTERNED):
            return W_Fixnum.cache[val - W_Fixnum.MIN_INTERNED]
        return W_Fixnum(val)
# Commonly used fixnum singletons and the interning cache (half-open range).
W_Fixnum.ZERO = W_Fixnum.make(0)
W_Fixnum.ONE = W_Fixnum.make(1)
W_Fixnum.TWO = W_Fixnum.make(2)
W_Fixnum.cache = map(W_Fixnum.make, range(*W_Fixnum.INTERNED_RANGE))
class W_Flonum(W_Real):
    """Floating-point number; `is_single_prec` only affects printing."""
    _immutable_ = True
    _attrs_ = _immutable_fields_ = ["value", "is_single_prec"]
    errorname = "flonum"
    def __init__(self, val, is_single_prec=False):
        self.value = val
        self.is_single_prec = is_single_prec
    @staticmethod
    def make(val, is_single=False):
        return W_Flonum(val, is_single)
    def tostring(self):
        from rpython.rlib.rfloat import formatd, DTSF_STR_PRECISION, DTSF_ADD_DOT_0
        # Precisions chosen to round-trip Racket single/double floats.
        RACKET_SINGLE_STR_PREC = 7
        RACKET_DOUBLE_STR_PREC = 17
        if self.is_single_prec:
            rpython_str = formatd(self.value, 'g', RACKET_SINGLE_STR_PREC, DTSF_ADD_DOT_0)
            # Single-precision literals carry an "f0" suffix.
            return "%sf0" % rpython_str
        else:
            return formatd(self.value, 'g', RACKET_DOUBLE_STR_PREC, DTSF_ADD_DOT_0)
    def hash_equal(self, info=None):
        return compute_hash(self.value)
    def equal(self, other):
        from rpython.rlib.longlong2float import float2longlong
        import math
        if not isinstance(other, W_Flonum):
            return False
        v1 = self.value
        v2 = other.value
        # Bitwise comparison distinguishes 0.0 from -0.0; all NaNs compare equal.
        ll1 = float2longlong(v1)
        ll2 = float2longlong(v2)
        # Assumes that all non-NaN values are canonical
        return ll1 == ll2 or (math.isnan(v1) and math.isnan(v2))
# Flonum singletons for frequently used values.
W_Flonum.ZERO = W_Flonum(0.0)
W_Flonum.ONE = W_Flonum(1.0)
W_Flonum.INF = W_Flonum(float("inf"))
W_Flonum.NEGINF = W_Flonum(-float("inf"))
W_Flonum.NAN = W_Flonum(float("nan"))
class W_ExtFlonum(W_Object):
    """80-bit extended flonum, kept only as its source string."""
    _immutable_ = True
    _attrs_ = _immutable_fields_ = ["value_str"]
    errorname = "extflonum"
    def __init__(self, val_str):
        self.value_str = val_str
    def tostring(self):
        return self.value_str
class W_Bignum(W_Integer):
    """Arbitrary-precision integer backed by an rbigint."""
    _immutable_ = True
    _attrs_ = _immutable_fields_ = ["value"]
    def tostring(self):
        return self.value.str()
    def __init__(self, val):
        self.value = val
    def toint(self):
        """ raises OverflowError on failure """
        return self.value.toint()
    def toflonum(self):
        # Convert to a float, saturating to +/-inf on overflow.
        bignum = self.value
        try:
            floatval = bignum.tofloat()
        except OverflowError:
            return W_Flonum.NEGINF if bignum.sign < 0 else W_Flonum.INF
        return W_Flonum(floatval)
    def equal(self, other):
        if not isinstance(other, W_Bignum):
            return False
        return self.value.eq(other.value)
    def hash_equal(self, info=None):
        return self.value.hash()
@memoize_constructor
class W_Complex(W_Number):
    """Complex number with real and imaginary parts (both W_Real)."""
    _immutable_ = True
    _attrs_ = _immutable_fields_ = ["real", "imag"]
    def __init__(self, re, im):
        assert isinstance(re, W_Real)
        assert isinstance(im, W_Real)
        self.real = re
        self.imag = im
    @staticmethod
    def from_real_pair(real, imag):
        # An exact-zero imaginary part collapses the result to a real.
        if W_Fixnum.ZERO.eqv(imag):
            return real
        return W_Complex(real, imag)
    def eqv(self, other):
        if not isinstance(other, W_Complex):
            return False
        return self.real.eqv(other.real) and self.imag.eqv(other.imag)
    def hash_equal(self, info=None):
        # Combine the component hashes (same scheme as W_Rational).
        hash1 = self.real.hash_equal()
        hash2 = self.imag.hash_equal()
        return rarithmetic.intmask(hash1 + 1000003 * hash2)
    def tostring(self):
        return "%s+%si" % (self.real.tostring(), self.imag.tostring())
@memoize_constructor
class W_Character(W_Object):
    """Character; stored as a unicode string.

    hash_eqv applies ord() directly, so the value is presumably always a
    single code point -- TODO confirm at the construction sites.
    """
    _attrs_ = _immutable_fields_ = ["value"]
    errorname = "char"
    def __init__(self, val):
        # for now
        assert isinstance(val, unicode)
        self.value = val
    def tostring(self):
        from pypy.objspace.std.bytesobject import string_escape_encode
        return "#\%s" % string_escape_encode(self.value.encode('utf-8'), '')
    def get_value_utf8(self):
        return self.value.encode('utf-8')
    def immutable(self):
        return True
    def eqv(self, other):
        if not isinstance(other, W_Character):
            return False
        return self.value == other.value
    def hash_eqv(self):
        return ord(self.value)
    def hash_equal(self, info=None):
        return self.hash_eqv()
class W_Thread(W_Object):
    """Thread object; carries no state beyond its identity."""
    errorname = "thread"
    _attrs_ = []
    def __init__(self):
        pass
    def tostring(self):
        return "#<thread>"
class W_Semaphore(W_Object):
    """Counting semaphore with a simple non-blocking approximation.

    NOTE(review): wait() never decrements `n`, and only raises when the
    count is zero (a true wait could never be satisfied here); post() only
    increments.  This looks like a deliberate single-threaded approximation
    rather than real semaphore semantics -- confirm before changing.
    """
    errorname = "semaphore"
    _attrs_ = ['n']
    def __init__(self, n):
        self.n = n
    def post(self):
        self.n += 1
    def wait(self):
        if self.n >= 1:
            return
        else:
            raise SchemeException("Waiting for a semaphore will never finish")
    def tostring(self):
        return "#<semaphore>"
class W_Evt(W_Object):
    """Abstract base class for synchronizable events."""
    errorname = "evt"
    _attrs_ = []
class W_SemaphorePeekEvt(W_Evt):
    """Event wrapping a semaphore without consuming a post."""
    errorname = "semaphore-peek-evt"
    _attrs_ = _immutable_fields_ = ["sema"]
    def __init__(self, sema):
        self.sema = sema
    def tostring(self):
        return "#<semaphore-peek-evt>"
class W_PseudoRandomGenerator(W_Object):
    """Pseudo-random generator object; holds no state here."""
    errorname = "pseudo-random-generator"
    _attrs_ = []
    def __init__(self):
        pass
class W_Path(W_Object):
    """Filesystem path wrapping its string representation."""
    errorname = "path"
    _attrs_ = _immutable_fields_ = ["path"]
    def __init__(self, p):
        self.path = p
    def equal(self, other):
        if not isinstance(other, W_Path):
            return False
        return self.path == other.path
    def write(self, port, env):
        port.write("(p+ %s)" % self.path)
    def tostring(self):
        return "#<path:%s>" % self.path
class W_Void(W_Object):
    """The void value; use the `w_void` singleton below."""
    _attrs_ = []
    def __init__(self):
        pass
    def tostring(self):
        return "#<void>"
class W_Null(W_List):
    """The empty list; use the `w_null` singleton below."""
    _attrs_ = []
    def __init__(self):
        pass
    def tostring(self):
        return "()"
    def is_proper_list(self, seen=[]):
        # `seen` (cycle-detection accumulator in other W_List classes,
        # presumably) is unused here; the empty list is always proper.
        return True
# Canonical singletons.
w_void = W_Void()
w_null = W_Null()
class W_Bool(W_Object):
    """Boolean; exactly two instances exist (w_true/w_false below)."""
    errorname = "boolean"
    _attrs_ = []
    @staticmethod
    def make(b):
        if b: return w_true
        else: return w_false
    def __init__(self):
        """ NOT_RPYTHON """
        # the previous line produces an error if somebody makes new bool
        # objects from primitives
        pass
    def tostring(self):
        return "#t" if self is w_true else "#f"
# The two boolean singletons; compare with `is`.
w_false = W_Bool()
w_true = W_Bool()
class ThreadCellTable(rweaklist.RWeakListMixin):
    """Weak list of all live thread cells."""
    def __init__(self):
        self.initialize()
    def __iter__(self):
        # Yields only cells whose weak references are still alive.
        # (Generator -- presumably only used from interpreter-level loops
        # that RPython can handle; TODO confirm.)
        handles = self.get_all_handles()
        for ref in handles:
            val = ref()
            if val is not None:
                yield val
class W_ThreadCellValues(W_Object):
    """Snapshot of the current values of all preserved thread cells."""
    errorname = "thread-cell-values"
    _immutable_fields_ = ["assoc"]
    _attrs_ = ["assoc", "value"]
    def __init__(self):
        self.assoc = {}
        for threadcell in W_ThreadCell._table:
            if threadcell.preserved:
                self.assoc[threadcell] = threadcell.value
class W_ThreadCell(W_Object):
    """Thread cell; currently holds a single value for all threads."""
    errorname = "thread-cell"
    _immutable_fields_ = ["initial", "preserved"]
    _attrs_ = ["initial", "preserved", "value"]
    # All the thread cells in the system
    # TODO: Use a weak list to store the existing thread cells
    _table = ThreadCellTable()
    def __init__(self, val, preserved):
        # TODO: This should eventually be a mapping from thread ids to values
        self.value = val
        self.initial = val
        self.preserved = preserved
        W_ThreadCell._table.add_handle(self)
    def set(self, val):
        self.value = val
    def get(self):
        return self.value
class BytesMixin(object):
    """Shared behavior for byte strings; expects `self.value` to be a
    list of characters (one byte each)."""
    def tostring(self):
        # TODO: No printable byte values should be rendered as base 8
        return "#\"%s\"" % "".join(["\\%o" % ord(i) for i in self.value])
    def as_bytes_list(self):
        return self.value
    def equal(self, other):
        if not isinstance(other, W_Bytes):
            return False
        b1 = self.as_bytes_list()
        b2 = other.as_bytes_list()
        return b1 == b2
    def hash_equal(self, info=None):
        from rpython.rlib.rarithmetic import intmask
        # like CPython's string hash
        s = self.value
        length = len(s)
        if length == 0:
            return -1
        x = ord(s[0]) << 7
        i = 0
        while i < length:
            x = intmask((1000003*x) ^ ord(s[i]))
            i += 1
        x ^= length
        return intmask(x)
    def ref(self, n):
        # Bounds-checked byte access, boxed as a fixnum.
        l = len(self.value)
        if n < 0 or n >= l:
            raise SchemeException("bytes-ref: index %s out of bounds for length %s"% (n, l))
        return W_Fixnum(ord(self.value[n]))
    def ref_char(self, n):
        # Bounds-checked byte access, returned as a raw character.
        l = len(self.value)
        if n < 0 or n >= l:
            raise SchemeException("bytes-ref: index %s out of bounds for length %s"% (n, l))
        return self.value[n]
    def as_str(self):
        return "".join(self.value)
    def getslice(self, start, end):
        assert start >= 0 and end >= 0
        bytes = self.value
        return bytes[start:end]
    def length(self):
        return len(self.value)
class W_Bytes(W_Object):
    """Abstract base class for byte strings (mutable or immutable)."""
    errorname = "bytes"
    _immutable_fields_ = []
    _attrs_ = []
    def __init__(self, bs):
        raise NotImplementedError("abstract base class")
    def as_bytes_list(self):
        raise NotImplementedError("abstract base class")
    def length(self):
        raise NotImplementedError("abstract base class")
    @staticmethod
    def from_string(str, immutable=True):
        # Splits the string into a per-byte character list.
        if immutable:
            return W_ImmutableBytes(list(str))
        else:
            return W_MutableBytes(list(str))
    @staticmethod
    def from_charlist(chars, immutable=True):
        if immutable:
            return W_ImmutableBytes(chars)
        else:
            return W_MutableBytes(chars)
    def tostring(self):
        raise NotImplementedError("abstract base class")
    def equal(self, other):
        raise NotImplementedError("abstract base class")
    def hash_equal(self, info=None):
        raise NotImplementedError("abstract base class")
    def immutable(self):
        raise NotImplementedError("abstract base class")
    def ref(self, n):
        raise NotImplementedError("abstract base class")
    def ref_char(self, n):
        raise NotImplementedError("abstract base class")
    def set(self, n, v):
        raise NotImplementedError("abstract base class")
    def set_char(self, n, v):
        raise NotImplementedError("abstract base class")
    def as_str(self):
        raise NotImplementedError("abstract base class")
    def getslice(self, start, end):
        raise NotImplementedError("abstract base class")
class W_MutableBytes(W_Bytes):
    """Mutable byte string; most behavior comes from BytesMixin."""
    errorname = "bytes"
    _attrs_ = ['value']
    _immutable_fields_ = ['value']
    import_from_mixin(BytesMixin)
    def __init__(self, bs):
        assert bs is not None
        self.value = check_list_of_chars(bs)
        make_sure_not_resized(self.value)
    def as_bytes_list(self):
        return self.value
    def immutable(self):
        return False
    def set(self, n, v):
        # Bounds-checked write of an integer byte value.
        l = len(self.value)
        if n < 0 or n >= l:
            raise SchemeException("bytes-set!: index %s out of bounds for length %s"% (n, l))
        self.value[n] = chr(v)
    def set_char(self, n, v):
        # Write of a raw character; bounds enforced by assertion only.
        l = len(self.value)
        assert n >= 0 and n < len(self.value)
        self.value[n] = v
class W_ImmutableBytes(W_Bytes):
    """Immutable byte string; mutation raises (or asserts)."""
    errorname = "bytes"
    _attrs_ = ['value']
    _immutable_fields_ = ['value[*]']
    import_from_mixin(BytesMixin)
    def __init__(self, bs):
        assert bs is not None
        self.value = check_list_of_chars(bs)
        make_sure_not_resized(self.value)
    def as_bytes_list(self):
        return self.value
    def immutable(self):
        return True
    def set(self, n, v):
        raise SchemeException("bytes-set!: can't mutate immutable bytes")
    def set_char(self, n, v):
        assert False
# Three-valued flag for the lazily computed ASCII property of symbols.
DEFINITELY_NO, MAYBE, DEFINITELY_YES = (-1, 0, 1)
class W_Symbol(W_Object):
    """Symbol; stored as a UTF-8 byte string, interned via `make`."""
    errorname = "symbol"
    _attrs_ = ["unreadable", "_isascii", "_unicodevalue", "utf8value", "bar_quoted"]
    _immutable_fields_ = ["unreadable", "utf8value", "bar_quoted"]
    def __init__(self, val, unreadable=False):
        assert isinstance(val, str)
        self._unicodevalue = None
        self.utf8value = val
        self.unreadable = unreadable
        self._isascii = MAYBE
        self.bar_quoted = False
        # Symbols that cannot be read back literally need |...| quoting.
        if val == "" or val == ".":
            self.bar_quoted = True
        else:
            for q in " ()[]{}|\\,`'":
                if q in val:
                    self.bar_quoted = True
                    break
    def is_bar_quoted(self):
        return self.bar_quoted
    def is_unreadable(self):
        return self.unreadable
    @staticmethod
    def _cache_is_ascii(self):
        # Slow path for asciivalue(); written as a staticmethod taking
        # `self` so it can serve as the function argument of
        # jit.conditional_call_elidable below.
        from pycket.values_string import _is_ascii
        if not we_are_translated():
            assert self._isascii == MAYBE
        if _is_ascii(self.utf8value):
            self._isascii = DEFINITELY_YES
        else:
            self._isascii = DEFINITELY_NO
        return self._isascii
    def asciivalue(self):
        # Returns the UTF-8 bytes when pure ASCII, otherwise None.
        isascii = jit.conditional_call_elidable(
            self._isascii, W_Symbol._cache_is_ascii, self)
        if isascii == DEFINITELY_NO:
            return None
        return self.utf8value
    @jit.elidable
    def unicodevalue(self):
        # Decoded lazily and cached.
        if self._unicodevalue is None:
            self._unicodevalue = self.utf8value.decode("utf-8")
        return self._unicodevalue
    @staticmethod
    @jit.elidable
    def make(string):
        # Interning constructor: equal strings yield the identical symbol.
        # (A historical note here said this assert broke rpython lowering,
        # but it is currently active.)
        assert isinstance(string, str)
        w_result = W_Symbol.all_symbols.get(string, None)
        if w_result is None:
            w_result = W_Symbol(string)
            W_Symbol.all_symbols[string] = w_result
        return w_result
    @staticmethod
    @jit.elidable
    def make_unreadable(string):
        # Unreadable symbols are interned in their own table.
        w_result = W_Symbol.unreadable_symbols.get(string, None)
        if w_result is None:
            w_result = W_Symbol(string, unreadable=True)
            W_Symbol.unreadable_symbols[string] = w_result
        return w_result
    def __repr__(self):
        return self.utf8value
    @jit.elidable
    def is_interned(self):
        if self.unreadable:
            return False
        string = self.utf8value
        symbol = W_Symbol.all_symbols.get(string, None)
        if symbol is self:
            return True
        return False
    def tostring(self):
        return "%s" % self.utf8value
    def variable_name(self):
        return self.utf8value
# According to samth, its not safe to use a weak table for symbols
W_Symbol.all_symbols = {}
W_Symbol.unreadable_symbols = {}
# Well-known symbols used as continuation-mark keys.
break_enabled_key = W_Symbol("break-enabled-key")
exn_handler_key = W_Symbol("exnh")
parameterization_key = W_Symbol("parameterization")
class W_Keyword(W_Object):
    """Keyword; interned via `make` (the table is named `all_symbols`
    for historical symmetry with W_Symbol, but holds keywords)."""
    errorname = "keyword"
    _attrs_= _immutable_fields_ = ["value"]
    all_symbols = {}
    @staticmethod
    def make(string):
        # This assert statement makes the lowering phase of rpython break...
        # Maybe comment back in and check for bug.
        #assert isinstance(string, str)
        w_result = W_Keyword.all_symbols.get(string, None)
        if w_result is None:
            W_Keyword.all_symbols[string] = w_result = W_Keyword(string)
        return w_result
    def __repr__(self):
        return self.value
    def __init__(self, val):
        self.value = val
    def tostring(self):
        return "#:%s" % self.value
class W_Procedure(W_Object):
    """Abstract base class for all callables.

    Subclasses must override either `call` or `call_with_extra_info`;
    each default delegates to the other.
    """
    _attrs_ = []
    def __init__(self):
        raise NotImplementedError("Abstract base class")
    def iscallable(self):
        return True
    def immutable(self):
        return True
    def set_arity(self, arity):
        raise SchemeException("%s is not a procedure" % self.tostring())
    def call(self, args, env, cont):
        return self.call_with_extra_info(args, env, cont, None)
    def call_with_extra_info(self, args, env, cont, app):
        return self.call(args, env, cont)
    def tostring(self):
        return "#<procedure>"
class W_AssignmentTransformer(W_Object):
    """Abstract base class for assignment transformers."""
    _attrs_ = []
    def __init__(self):
        raise NotImplementedError("Abstract base class")
# These next two classes allow for a uniform input to the `set_cmk` operation.
# They are procedures which do the appropriate processing after `set_cmk` is done
# computing.
# This is needed because with-continuation-mark operates over the AST while
# W_InterposeProcedure can do a `set_cmk` with a closure.
class W_ThunkBodyCMK(W_Procedure):
    """Thunk that resumes evaluating an AST body in a captured env
    (uniform continuation for set_cmk; see comment above)."""
    _attrs_ = _immutable_fields_ = ["body", "env"]
    def __init__(self, body, env):
        self.body = body
        self.env = env
    @make_call_method([], simple=False)
    def call(self, env, cont):
        # Ignores the calling env; evaluates the body in the captured one.
        return self.body, self.env, cont
class W_ThunkProcCMK(W_Procedure):
    """Thunk that applies a captured procedure to captured args
    (uniform continuation for set_cmk; see comment above)."""
    _attrs_ = _immutable_fields_ = ["proc", "args"]
    def __init__(self, proc, args):
        self.proc = proc
        self.args = args
    @label
    @make_call_method([], simple=False)
    def call(self, env, cont):
        return self.proc.call(self.args, env, cont)
class W_Prim(W_Procedure):
    """Primitive procedure wrapping an interpreter-level code function."""
    from pycket.arity import Arity
    _attrs_ = _immutable_fields_ = ["name", "code", "arity", "result_arity", "is_nyi"]
    def __init__ (self, name, code, arity=Arity.unknown, result_arity=None, is_nyi=False):
        from pycket.arity import Arity
        self.name = W_Symbol.make(name)
        self.code = code
        assert isinstance(arity, Arity)
        self.arity = arity
        self.result_arity = result_arity
        # is_nyi marks primitives that are declared but not yet implemented.
        self.is_nyi = is_nyi
    def is_implemented(self):
        return not self.is_nyi
    def get_arity(self, promote=False):
        if promote:
            self = jit.promote(self)
        return self.arity
    def set_arity(self, arity):
        self.arity = arity
    def get_result_arity(self):
        return self.result_arity
    def call_with_extra_info(self, args, env, cont, extra_call_info):
        # from pycket.util import active_log
        ## logging here is useful for debugging, but it's very expensive to keep it uncommented
        # active_log("%s is called with" % self.name.variable_name(), keyword="prims")
        jit.promote(self)
        return self.code(args, env, cont, extra_call_info)
    def tostring(self):
        return "#<procedure:%s>" % self.name.variable_name()
class W_PrimSimple1(W_Prim):
    """Primitive with a direct one-argument entry point."""
    from pycket.arity import Arity
    def simple1(self, arg1):
        """ overridden by the generated subclasses in expose.py"""
        raise NotImplementedError("abstract base class")
class W_PrimSimple2(W_Prim):
    """Primitive with a direct two-argument entry point."""
    from pycket.arity import Arity
    def simple2(self, arg1, arg2):
        """ overridden by the generated subclasses in expose.py"""
        raise NotImplementedError("abstract base class")
# Helpers that build (im)proper cons lists from interpreter-level lists.
@always_inline
def to_list(l, start=0):
    # Proper list from l[start:], terminated with w_null.
    return to_improper(l, w_null, start=start)
def to_improper(l, curr, start=0):
    return to_improper_impl(l, curr, start)
@jit.look_inside_iff(
    lambda l, curr, start: jit.loop_unrolling_heuristic(l, len(l) - start, UNROLLING_CUTOFF))
def to_improper_impl(l, curr, start):
    # Conses l[start:] onto `curr`, right to left.
    assert start >= 0
    for i in range(len(l) - 1, start - 1, -1):
        curr = W_Cons.make(l[i], curr)
    return curr
@jit.look_inside_iff(lambda v, curr: v.unrolling_heuristic())
def vector_to_improper(v, curr):
    # Same as to_improper but sourcing elements from a vector.
    for i in range(v.len - 1, -1, -1):
        curr = W_Cons.make(v.ref(i), curr)
    return curr
def to_mlist(l):
    # Proper *mutable* list from l.
    return to_mimproper(l, w_null)
@jit.look_inside_iff(
    lambda l, curr: jit.loop_unrolling_heuristic(l, len(l), UNROLLING_CUTOFF))
def to_mimproper(l, curr):
    for i in range(len(l) - 1, -1, -1):
        curr = W_MCons(l[i], curr)
    return curr
def from_list_unroll_pred(lst, idx, unroll_to=0, force=False):
    # Decides when from_list should stop unrolling in jitted code and
    # fall back to the elidable bulk conversion.
    if not jit.we_are_jitted():
        return False
    if unroll_to == -1:
        # -1 disables the fallback entirely.
        return False
    if force:
        return idx > unroll_to
    else:
        return not jit.isvirtual(lst) and idx > unroll_to
@jit.elidable
def from_list_elidable(w_curr):
    # Bulk cons-list -> interpreter list conversion; elidable, so the
    # argument must not be mutated.
    is_improper = not w_curr.is_proper_list()
    result = []
    while isinstance(w_curr, W_Cons):
        result.append(w_curr.car())
        w_curr = w_curr.cdr()
    if is_improper:
        # Keep the non-null tail as the last element.
        result.append(w_curr)
    if is_improper or (w_curr is w_null):
        return result[:] # copy to make result non-resizable
    else:
        raise SchemeException("Expected list, but got something else")
@jit.unroll_safe
@specialize.arg(2)
def from_list(w_curr, unroll_to=0, force=False):
    # Like from_list_elidable, but unrolls the first `unroll_to` links
    # when jitted before handing off to the elidable version.
    is_improper = not w_curr.is_proper_list()
    result = []
    n = 0
    while isinstance(w_curr, W_Cons):
        if from_list_unroll_pred(w_curr, n, unroll_to=unroll_to, force=force):
            return result + from_list_elidable(w_curr)
        result.append(w_curr.car())
        w_curr = w_curr.cdr()
        n += 1
    if is_improper:
        result.append(w_curr)
    if is_improper or (w_curr is w_null):
        return result[:] # copy to make result non-resizable
    else:
        raise SchemeException("Expected list, but got something else")
def from_list_iter(lst):
    # Generator over a proper list (not for translated hot paths).
    if not lst.is_proper_list():
        raise SchemeException("Expected a list")
    while isinstance(lst, W_Cons):
        val, lst = lst.car(), lst.cdr()
        yield val
    assert lst is w_null, "is_proper_list lied"
class W_Continuation(W_Procedure):
    """First-class continuation captured up to a prompt tag."""
    errorname = "continuation"
    _attrs_ = _immutable_fields_ = ["cont", "prompt_tag"]
    # Overridden to True in W_EscapeContinuation.
    escape = False
    def __init__(self, cont, prompt_tag=None):
        self.cont = cont
        self.prompt_tag = prompt_tag
    def get_arity(self, promote=False):
        from pycket.arity import Arity
        # FIXME: see if Racket ever does better than this
        return Arity.unknown
    def call(self, args, env, cont):
        from pycket.prims.control import install_continuation
        return install_continuation(self.cont, self.prompt_tag, args, env, cont,
                                    escape=self.escape)
    def tostring(self):
        return "#<continuation>"
class W_EscapeContinuation(W_Continuation):
    """Escape-only continuation (one-shot, upward jumps)."""
    _attrs_ = []
    escape = True
class W_ComposableContinuation(W_Procedure):
    """Composable (delimited) continuation: invoking it extends the
    current continuation instead of replacing it."""
    errorname = "composable-continuation"
    _attrs_ = _immutable_fields_ = ["cont", "prompt_tag"]
    def __init__(self, cont, prompt_tag=None):
        self.cont = cont
        self.prompt_tag = prompt_tag
    def get_arity(self, promote=False):
        from pycket.arity import Arity
        return Arity.unknown
    def call(self, args, env, cont):
        from pycket.prims.control import install_continuation
        return install_continuation(
            self.cont, self.prompt_tag, args, env, cont, extend=True)
    def tostring(self):
        return "#<continuation>"
@inline_small_list(immutable=True, attrname="envs", factoryname="_make")
class W_Closure(W_Procedure):
    """Closure over a CaseLambda: one captured environment per lambda case.

    The per-case environments are stored via inline_small_list under the
    attribute name "envs" and accessed through _set_list/_get_list.
    """
    _immutable_ = True
    _immutable_fields_ = ["caselam"]
    _attrs_ = ["caselam"]
    @jit.unroll_safe
    def __init__(self, caselam, env):
        self.caselam = caselam
        # Capture the free variables of every case as a ConsEnv chained
        # onto the toplevel environment.
        for (i,lam) in enumerate(caselam.lams):
            vals = lam.collect_frees(caselam.recursive_sym, env, self)
            self._set_list(i, ConsEnv.make(vals, env.toplevel_env()))
    def enable_jitting(self):
        self.caselam.enable_jitting()
    def tostring(self):
        return self.caselam.tostring_as_closure()
    @staticmethod
    @jit.unroll_safe
    def make(caselam, env):
        from pycket.interpreter import CaseLambda
        assert isinstance(caselam, CaseLambda)
        num_lams = len(caselam.lams)
        # Single-case closures with free variables use the more compact
        # W_Closure1AsEnv representation (closure doubles as its env).
        if num_lams == 1 and caselam.any_frees:
            vals = caselam.lams[0].collect_frees_without_recursive(
                    caselam.recursive_sym, env)
            return W_Closure1AsEnv.make(vals, caselam, env.toplevel_env())
        envs = [None] * num_lams
        return W_Closure._make(envs, caselam, env)
    def get_arity(self, promote=False):
        caselam = self.caselam
        if promote:
            caselam = jit.promote(caselam)
        # Fixed: call through the (possibly promoted) local so the
        # promotion hint actually takes effect; this also matches
        # W_Closure1AsEnv.get_arity.
        return caselam.get_arity()
    @jit.unroll_safe
    def _find_lam(self, args):
        # Select the first case whose formals match `args`; returns the
        # matched actuals, the captured frees, and the lambda.
        jit.promote(self.caselam)
        for i, lam in enumerate(self.caselam.lams):
            actuals = lam.match_args(args)
            if actuals is not None:
                frees = self._get_list(i)
                return actuals, frees, lam
        # No case matched: single-case closures get a precise error.
        if len(self.caselam.lams) == 1:
            single_lambda = self.caselam.lams[0]
            single_lambda.raise_nice_error(args)
        raise SchemeException("No matching arity in case-lambda")
    def call_with_extra_info(self, args, env, cont, calling_app):
        from pycket.env import w_global_config
        env_structure = None
        if calling_app is not None:
            env_structure = calling_app.env_structure
        jit.promote(self.caselam)
        jit.promote(env_structure)
        (actuals, frees, lam) = self._find_lam(args)
        if not jit.we_are_jitted() and env.pycketconfig().callgraph:
            w_global_config.callgraph.register_call(lam, calling_app, cont, env)
        # specialize on the fact that often we end up executing in the
        # same environment.
        prev = lam.env_structure.prev.find_env_in_chain_speculate(
                frees, env_structure, env)
        return lam.make_begin_cont(
            ConsEnv.make(actuals, prev),
            cont)
    def call(self, args, env, cont):
        return self.call_with_extra_info(args, env, cont, None)
@inline_small_list(immutable=True, attrname="vals", factoryname="_make", unbox_num=True, nonull=True)
class W_Closure1AsEnv(ConsEnv):
    """Compact single-case closure that doubles as its own environment:
    the captured free-variable values are inlined into the object."""
    _immutable_ = True
    _attrs_ = _immutable_fields_ = ['caselam']
    def __init__(self, caselam, prev):
        ConsEnv.__init__(self, prev)
        self.caselam = caselam
    @staticmethod
    @jit.unroll_safe
    def make(vals, caselam, prev):
        recursive_sym = caselam.recursive_sym
        # Untranslated sanity check: an empty vals list is only valid when
        # every free variable is the recursive self-reference.
        if not we_are_translated() and not vals:
            for s in caselam.lams[0].frees.elems:
                assert s is recursive_sym
        return W_Closure1AsEnv._make(vals, caselam, prev)
    def iscallable(self):
        return True
    def enable_jitting(self):
        self.caselam.enable_jitting()
    def immutable(self):
        return True
    def tostring(self):
        return self.caselam.tostring_as_closure()
    def get_arity(self, promote=False):
        caselam = self.caselam
        if promote:
            caselam = jit.promote(caselam)
        return caselam.get_arity()
    def call_with_extra_info(self, args, env, cont, calling_app):
        from pycket.env import w_global_config
        env_structure = None
        if calling_app is not None:
            env_structure = calling_app.env_structure
        jit.promote(self.caselam)
        jit.promote(env_structure)
        lam = self.caselam.lams[0]
        if not jit.we_are_jitted() and env.pycketconfig().callgraph:
            w_global_config.callgraph.register_call(lam, calling_app, cont, env)
        actuals = lam.match_args(args)
        if actuals is None:
            lam.raise_nice_error(args)
        # specialize on the fact that often we end up executing in the
        # same environment.
        prev = lam.env_structure.prev.find_env_in_chain_speculate(
                self, env_structure, env)
        return lam.make_begin_cont(
            ConsEnv.make(actuals, prev),
            cont)
    def call(self, args, env, cont):
        return self.call_with_extra_info(args, env, cont, None)
    # ____________________________________________________________
    # methods as a ConsEnv
    @jit.unroll_safe
    def consenv_get_size(self):
        # Inlined values plus one slot per recursive self-reference.
        result = self._get_size_list()
        for s in self.caselam.lams[0].frees.elems:
            result += s is self.caselam.recursive_sym
        return result
    @jit.unroll_safe
    def lookup(self, sym, env_structure):
        jit.promote(env_structure)
        if len(env_structure.elems) == self._get_size_list():
            return ConsEnv.lookup(self, sym, env_structure)
        # The recursive self-reference resolves to the closure itself and
        # occupies no slot in the inlined values.
        recursive_sym = jit.promote(self.caselam).recursive_sym
        if sym is recursive_sym:
            return self
        i = 0
        for s in env_structure.elems:
            if s is recursive_sym:
                continue
            if s is sym:
                v = self._get_list(i)
                assert v is not None
                return v
            i += 1 # only count non-self references
        prev = self.get_prev(env_structure)
        return prev.lookup(sym, env_structure.prev)
class W_PromotableClosure(W_Procedure):
    """ A W_Closure that is promotable, ie that is cached in some place and
    unlikely to change. """
    _attrs_ = _immutable_fields_ = ["closure", "arity"]
    def __init__(self, caselam, toplevel_env):
        # All cases close over the toplevel environment only.
        envs = [toplevel_env] * len(caselam.lams)
        self.closure = W_Closure._make(envs, caselam, toplevel_env)
        self.arity = caselam._arity
    def enable_jitting(self):
        self.closure.enable_jitting()
    def call(self, args, env, cont):
        # Promoting self lets the JIT constant-fold the wrapped closure.
        jit.promote(self)
        return self.closure.call(args, env, cont)
    def call_with_extra_info(self, args, env, cont, calling_app):
        jit.promote(self)
        return self.closure.call_with_extra_info(args, env, cont, calling_app)
    def get_arity(self, promote=False):
        if promote:
            self = jit.promote(self)
        return self.arity
    def tostring(self):
        return self.closure.tostring()
class W_EnvVarSet(W_Object):
    """Environment-variable set; either a view of os.environ (is_system)
    or a plain dict."""
    errorname = "environment-variable-set"
    _attrs_ = ["table", "is_system"]
    def __init__(self, t, is_system):
        self.table = t
        self.is_system = is_system
    def get(self, s):
        import os
        if self.is_system:
            return os.environ.get(s)
        else:
            return self.table.get(s, None)
    def set(self, s, val):
        import os
        if self.is_system:
            os.environ[s] = val
        # NOTE(review): unlike get()/get_names() there is no `else` here,
        # so a system set also records the value in `table` -- confirm
        # whether that shadow copy is intentional.
        self.table[s] = val
    def get_names(self):
        import os
        if self.is_system:
            return os.environ.keys()
        else:
            return self.table.keys()
class W_EOF(W_Object):
    """The end-of-file value; use the `eof_object` singleton."""
    errorname = "eof"
    _attrs_ = []
    def __init__(self):
        pass
    def tostring(self):
        return "#<eof>"
eof_object = W_EOF()
class W_ReadTable(W_Object):
    """Readtable extending `parent` with an action for character `key`."""
    errorname = "readtable"
    _attrs_ = _immutable_fields_ = ["parent", "key", "mode", "action"]
    def __init__(self, parent, key, mode, action):
        self.parent = parent
        self.key = key
        self.mode = mode
        self.action = action
class W_Port(W_Object):
    """Abstract base class for ports; tracks only an open/closed flag.

    Subclasses override the position/line/column accessors below.
    """
    errorname = "port"
    _attrs_ = ['closed']
    def __init__(self):
        self.closed = False
    def tostring(self):
        # fixed typo in the message ("classe" -> "class")
        raise NotImplementedError("abstract base class")
    def close(self):
        self.closed = True
    def is_stdin(self):
        return False
    def get_line(self):
        raise NotImplementedError("abstract base class")
    def get_column(self):
        raise NotImplementedError("abstract base class")
    def get_position(self):
        raise NotImplementedError("abstract base class")
    def seek(self, offset, end=False):
        raise NotImplementedError("abstract base class")
    def tell(self):
        raise NotImplementedError("abstract base class")
    def obj_name(self):
        raise NotImplementedError("abstract base class")
class W_OutputPort(W_Port):
    """Abstract base class for output ports.

    Note: __init__ does not call W_Port.__init__, so subclasses are
    responsible for initializing `closed` themselves.
    """
    errorname = "output-port"
    _attrs_ = []
    def __init__(self):
        pass
    def write(self, str):
        raise NotImplementedError("abstract base class")
    def flush(self):
        raise NotImplementedError("abstract base class")
    def tostring(self):
        return "#<output-port>"
class W_StringOutputPort(W_OutputPort):
    """In-memory output port accumulating into a StringBuilder."""
    errorname = "output-port"
    _attrs_ = ['closed', 'str']
    def __init__(self):
        self.closed = False
        self.str = StringBuilder()
    def obj_name(self):
        return W_Symbol.make("string")
    def get_line(self):
        return w_false
    def get_column(self):
        # FIXME
        return w_false
    def get_position(self):
        # Racket positions are 1-based.
        return W_Fixnum(self.tell() + 1)
    def flush(self):
        pass
    def write(self, s):
        self.str.append(s)
    def contents(self, reset=False):
        # Returns everything written so far; with reset=True the buffer is
        # replaced afterwards (build() finalizes a StringBuilder).
        ret_val = self.str.build()
        if reset:
            # CAUTION : eq?
            self.str = StringBuilder()
        return ret_val
    def seek(self, offset, end=False):
        if end or offset == self.str.getlength():
            return
        if offset > self.str.getlength():
            # Pad with NUL bytes up to the requested offset (Racket pads
            # string ports with zeros when positioned past the end).
            # Fixed: the pad length was computed as getlength() - offset,
            # which is negative here and produced an empty pad.
            self.str.append("\0" * (offset - self.str.getlength()))
        else:
            # FIXME: this is potentially slow.
            content = self.contents()
            self.str = StringBuilder(offset)
            self.str.append_slice(content, 0, offset)
    def tell(self):
        return self.str.getlength()
class W_InputPort(W_Port):
    """Abstract base class for input ports."""
    errorname = "input-port"
    _attrs_ = []
    def read(self, n):
        raise NotImplementedError("abstract class")
    def peek(self):
        raise NotImplementedError("abstract class")
    def readline(self):
        raise NotImplementedError("abstract class")
    def get_read_handler(self):
        raise NotImplementedError("abstract class")
    def set_read_handler(self, handler):
        raise NotImplementedError("abstract class")
    def tostring(self):
        return "#<input-port>"
    def _length_up_to_end(self):
        raise NotImplementedError("abstract class")
class W_CustomInputPort(W_InputPort):
    """Input port driven by user-supplied callbacks (read-in, peek, close,
    ...), mirroring Racket's make-input-port.  Callbacks that were not
    supplied are w_false.  The inherited read/peek/readline entry points
    raise: callers are expected to invoke the _call_* helpers instead.
    """
    errorname = "input-port"
    _immutable_fields_ = ["name", 'w_read_in', 'w_peek', 'w_close', 'w_get_progress_evt', 'w_commit', 'w_get_location', 'w_count_lines_bang', 'w_init_position', 'w_buffer_mode']
    _attrs_ = ['closed', 'name', 'line', 'column', 'read_handler', 'w_read_in', 'w_peek', 'w_close', 'w_get_progress_evt', 'w_commit', 'w_get_location', 'w_count_lines_bang', 'w_init_position', 'w_buffer_mode']
    def __init__(self, name, w_read_in, w_peek, w_close,
                 w_get_progress_evt=w_false, w_commit=w_false,
                 w_get_location=w_false,
                 w_count_lines_bang=w_false, # count-lines! (-> any)
                 w_init_position=W_Fixnum.ONE, w_buffer_mode=w_false):
        # Fixed: `closed` was never initialized (the line was commented out
        # and W_Port.__init__ is not called), so reading self.closed before
        # close() would fail.
        self.closed = False
        self.name = name
        self.w_read_in = w_read_in
        self.w_peek = w_peek
        self.w_close = w_close
        self.read_handler = None
        self.line = 1
        self.column = 0
        self.w_get_progress_evt = w_get_progress_evt
        self.w_commit = w_commit
        self.w_get_location = w_get_location
        self.w_count_lines_bang = w_count_lines_bang
        self.w_init_position = w_init_position
        self.w_buffer_mode = w_buffer_mode
    def w_read_is_port(self):
        # If read-in is itself a port, input is redirected from it.
        if isinstance(self.w_read_in, W_InputPort):
            return self.w_read_in
        else:
            return w_false
    def close(self):
        self.closed = True
    def _call_read_in(self, bstr, env, cont):
        # Invoke the user read-in callback with a destination byte string.
        assert self.w_read_in.iscallable()
        # (make-bytes 1)
        return self.w_read_in.call([bstr], env, cont)
    def _call_close(self, env, cont):
        close_func = self.w_close
        if not close_func or close_func is w_false:
            raise SchemeException("CustomInputPort - no close procedure")
        assert close_func.iscallable()
        return close_func.call([], env, cont)
    def _call_peek(self, dest_bstr, skip_bytes_amt, progress, env, cont):
        peek_func = self.w_peek
        if not peek_func or peek_func is w_false:
            raise SchemeException("CustomInputPort - automatic peek through read_in is currently NYI")
        assert peek_func.iscallable()
        return peek_func.call([dest_bstr, skip_bytes_amt, progress], env, cont)
    def obj_name(self):
        return W_Symbol.make("string")
    def get_read_handler(self):
        return self.read_handler
    def set_read_handler(self, handler):
        self.read_handler = handler
    def get_line(self):
        return w_false
    def get_column(self):
        return w_false
    def get_position(self):
        return w_false
    def read(self, n):
        raise SchemeException("custom port - read_in should've been called")
    def peek(self):
        raise SchemeException("custom port - peek should've been called")
    def readline(self):
        raise SchemeException("custom port readline")
    def _length_up_to_end(self):
        raise SchemeException("custom port length up to end")
class W_StringInputPort(W_InputPort):
    """In-memory input port backed by an immutable Python string.

    `ptr` is the current read offset into `str`; all read operations advance
    it and return "" at EOF.
    """
    errorname = "input-port"
    _immutable_fields_ = ["str"]
    _attrs_ = ['closed', 'str', 'ptr', 'line', 'column', 'read_handler']

    def __init__(self, str):
        # Parameter intentionally named `str` (shadows the builtin) to match
        # the _immutable_fields_ declaration / existing callers.
        self.closed = False
        self.str = str
        self.ptr = 0
        self.read_handler = None
        self.line = 1
        self.column = 0

    def obj_name(self):
        return W_Symbol.make("string")

    def get_read_handler(self):
        return self.read_handler

    def set_read_handler(self, handler):
        self.read_handler = handler

    def get_line(self):
        return W_Fixnum(self.line)

    def get_column(self):
        # FIXME: column is never advanced by the read methods.
        return W_Fixnum(self.column)

    def get_position(self):
        # Racket positions are 1-based.
        return W_Fixnum(self.ptr + 1)

    def readline(self):
        """Return characters up to and including the next newline (or to EOF)."""
        from rpython.rlib.rstring import find
        start = self.ptr
        assert start >= 0
        pos = find(self.str, "\n", start, len(self.str))
        if pos < 0:
            # No newline left: the remainder of the buffer is the last line.
            return self.read()
        # Include the newline itself in the returned line.
        pos += 1
        stop = self.ptr = pos
        return self.str[start:stop]
        # (Removed dead `return line` that followed here: it was unreachable
        # and referenced an undefined local.)

    def peek(self):
        """Return the next character without consuming it, or "" at EOF."""
        if self.ptr >= len(self.str):
            return ""
        return self.str[self.ptr]

    def read(self, n=-1):
        """Read up to `n` characters (all remaining when n == -1)."""
        if self.ptr >= len(self.str):
            return ""
        p = self.ptr
        assert p >= 0
        if n == -1 or n >= (len(self.str) - self.ptr):
            self.ptr = len(self.str)
            assert self.ptr >= 0
            return self.str[p:]
        else:
            self.ptr += n
            stop = self.ptr
            assert stop < len(self.str)
            assert stop >= 0
            return self.str[p:stop]

    def seek(self, offset, end=False):
        # NOTE(review): seeking to the *current* position jumps to EOF, same
        # as end=True — looks suspicious but is preserved as-is; confirm.
        if end or offset == self.ptr:
            self.ptr = len(self.str)
            return
        if offset > len(self.str):
            raise SchemeException("index out of bounds")
        else:
            self.ptr = offset

    def tell(self):
        return self.ptr

    def _length_up_to_end(self):
        return len(self.str) - self.ptr
class W_FileInputPort(W_InputPort):
    """Input port wrapping an RPython buffered stream opened on a file."""
    errorname = "input-port"
    _immutable_fields_ = ["file", "path"]
    _attrs_ = ['closed', 'file', 'line', 'column', 'read_handler', 'stdin', 'path']

    def __init__(self, f, path, stdin=False):
        self.closed = False
        self.file = f          # underlying (buffered) stream object
        self.read_handler = None
        self.stdin = stdin     # True when wrapping standard input (non-seekable)
        self.line = 1
        self.column = 0
        self.path = path

    def is_stdin(self):
        return self.stdin

    def close(self):
        # Idempotent: closing twice must not touch the stream again.
        if not self.closed:
            self.closed = True
            self.file.close()

    def read(self, n):
        return self.file.read(n)

    def get_read_handler(self):
        return self.read_handler

    def set_read_handler(self, handler):
        self.read_handler = handler

    def get_path(self):
        return W_Path(self.path)

    def obj_name(self):
        # File ports are named by their path.
        return self.get_path()

    def readline(self):
        return self.file.readline()

    def get_line(self):
        return W_Fixnum(self.line)

    def get_column(self):
        # FIXME: column is never advanced by the read methods.
        return W_Fixnum(self.column)

    def get_position(self):
        # Racket positions are 1-based.
        return W_Fixnum(self.file.pos + 1)

    def peek(self):
        """Return the next character without (normally) consuming it.

        NOTE(review): on stdin the fallback path below *does* consume the
        peeked character, since stdin cannot be seeked back — confirm callers
        account for that.
        """
        offset, string = self.file.peek()
        if offset < len(string):
            # fast path: the byte is already in the stream's buffer
            return string[offset]
        if self.is_stdin():
            res = self.file.read(1)
        else:
            # Slow path: read one char, then restore the file position.
            pos = self.file.tell()
            res = self.file.read(1)
            self.file.seek(pos, 0)
        return res

    def seek(self, offset, end=False):
        if end:
            self.file.seek(0, 2)   # whence=2: relative to EOF
        else:
            self.file.seek(offset, 0)

    def tell(self):
        # XXX this means we can only deal with 4GiB files on 32bit systems
        return int(intmask(self.file.tell()))

    def _length_up_to_end(self):
        # Measure remaining bytes by seeking to EOF and back.
        old_ptr = self.tell()
        self.seek(0, end=True)
        new_ptr = self.tell()
        self.seek(old_ptr)
        return new_ptr - old_ptr
class W_FileOutputPort(W_OutputPort):
    """Output port wrapping an RPython buffered stream opened for writing."""
    errorname = "output-port"
    _immutable_fields_ = ["file", "path"]
    _attrs_ = ['closed', 'file', 'stdout', 'path']

    def __init__(self, f, path, stdout=False):
        self.closed = False
        self.file = f            # underlying (buffered) stream object
        self.stdout = stdout     # True when wrapping standard output
        self.path = path

    def obj_name(self):
        # File ports are named by their path.
        return self.get_path()

    def get_path(self):
        return W_Path(self.path)

    def get_line(self):
        # Location tracking is not implemented for output ports.
        return w_false

    def get_column(self):
        # FIXME
        return w_false

    def get_position(self):
        # Racket positions are 1-based.
        return W_Fixnum(self.file.pos + 1)

    def is_stdout(self):
        return self.stdout

    def write(self, str):
        self.file.write(str)

    def flush(self):
        self.file.flush()

    def close(self):
        # Idempotent: closing twice must not touch the stream again.
        if not self.closed:
            self.closed = True
            self.file.close()
            #self.file = None

    def seek(self, offset, end=False):
        if end:
            self.file.seek(0, 2)   # whence=2: relative to EOF
        else:
            self.file.seek(offset, 0)

    def tell(self):
        # XXX this means we can only deal with 4GiB files on 32bit systems
        return int(intmask(self.file.tell()))
@specialize.call_location()
def wrap_list(pyval):
    """Convert a Python list into a Racket proper list (chain of cons cells)."""
    assert isinstance(pyval, list)
    # Build the cons chain back-to-front so each element is prepended in O(1).
    result = w_null
    idx = len(pyval) - 1
    while idx >= 0:
        result = wrap(pyval[idx], result)
        idx -= 1
    return result
@specialize.ll()
def wrap(*_pyval):
    # Smart constructor for converting Python values to Racket values.
    # One argument: box the value. Two arguments: build a cons cell
    # (car, already-wrapped cdr), picking an unboxed representation when
    # the car is a primitive and a "proper" variant when the cdr is a
    # proper list.
    if len(_pyval) == 1:
        pyval = _pyval[0]
        # bool must be tested before int: bool is a subclass of int.
        if isinstance(pyval, bool):
            return w_true if pyval else w_false
        if isinstance(pyval, int):
            return W_Fixnum(pyval)
        if isinstance(pyval, float):
            return W_Flonum(pyval)
        if isinstance(pyval, W_Object):
            return pyval
    elif len(_pyval) == 2:
        car = _pyval[0]
        cdr = wrap(_pyval[1])
        # Same bool-before-int ordering as above.
        if isinstance(car, bool):
            if cdr.is_proper_list():
                return W_WrappedConsProper(wrap(car), cdr)
            return W_WrappedCons(wrap(car), cdr)
        if isinstance(car, int):
            if cdr.is_proper_list():
                return W_UnwrappedFixnumConsProper(car, cdr)
            return W_UnwrappedFixnumCons(car, cdr)
        if isinstance(car, float):
            # NOTE(review): the middle `False` argument's meaning is not
            # visible here (flag on the flonum cons constructor) — confirm.
            if cdr.is_proper_list():
                return W_UnwrappedFlonumConsProper(car, False, cdr)
            return W_UnwrappedFlonumCons(car, False, cdr)
        if isinstance(car, W_Object):
            return W_Cons.make(car, cdr)
    # Unsupported Python type or arity: programming error.
    assert False
class W_UnquotedPrintingString(W_Object):
    """A string that prints without quotes (Racket's unquoted-printing-string)."""
    errorname = "unquoted-printing-string"
    _immutable_fields_ = ["string"]

    def __init__(self, s):
        # `s` is a wrapped Racket string object (it has a tostring method).
        self.string = s

    def tostring(self):
        return self.string.tostring()
class W_SecurityGuard(W_Object):
    """Opaque placeholder for Racket security guards (no behavior yet)."""
    errorname = "security-guard"
    def __init__(self):
        pass

class W_Channel(W_Object):
    """Opaque placeholder for Racket channels (no behavior yet)."""
    errorname = "channel"
    def __init__(self):
        pass

# for things we don't implement yet
class W_Impossible(W_Object):
    errorname = "impossible"
    def __init__(self):
        pass

class W_WillExecutor(W_Object):
    """Opaque placeholder for Racket will executors (no behavior yet)."""
    errorname = "will-executor"
    def __init__(self):
        pass
| mit | e08a3f0d7a4911481cee9ad1962468b7 | 28.173819 | 210 | 0.592669 | 3.537475 | false | false | false | false |
houtianze/bypy | bypy/const.py | 1 | 8705 | #!/usr/bin/env python
# encoding: utf-8
# PYTHON_ARGCOMPLETE_OK
# from __future__ imports must occur at the beginning of the file
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
import sys
import os
# https://packaging.python.org/single_source_version/
__title__ = 'bypy'
__version__ = '1.8'
__author__ = 'Hou Tianze'
__license__ = 'MIT'
__desc__ = 'Python client for Baidu Yun (Personal Cloud Storage) 百度云/百度网盘 Python 客户端'
__url__ = 'https://github.com/houtianze/bypy'
### return (error) codes
# they are put at the top because:
# 1. they have zero dependencies
# 2. can be referred in any abort later, e.g. return error on import failures
ENoError = 0 # plain old OK, fine, no error.
EIncorrectPythonVersion = 1
#EApiNotConfigured = 10 # Deprecated: ApiKey, SecretKey and AppPcsPath not properly configured
EArgument = 10 # invalid program command argument
EAbort = 20 # aborted
EException = 30 # unhandled exception occurred
EParameter = 40 # invalid parameter passed to ByPy
EInvalidJson = 50
EHashMismatch = 60 # MD5 hashes of the local file and remote file don't match each other
EFileWrite = 70
EFileTooBig = 80 # file too big to upload
EFailToCreateLocalDir = 90
EFailToCreateLocalFile = 100
EFailToDeleteDir = 110
EFailToDeleteFile = 120
EFileNotFound = 130
EMaxRetry = 140
ERequestFailed = 150 # request failed
ECacheNotLoaded = 160
EMigrationFailed = 170
EDownloadCerts = 180
EUserRejected = 190 # user's decision
EUpdateNeeded = 200
ESkipped = 210
EFatal = -1 # No way to continue
# internal errors
IEMD5NotFound = 31079 # File md5 not found, you should use upload API to upload the whole file.
IESuperfileCreationFailed = 31081 # superfile create failed (HTTP 404)
# Undocumented, see #308 , https://paste.ubuntu.com/23672323/
IEBlockMissInSuperFile2 = 31363 # block miss in superfile2 (HTTP 403)
IETaskNotFound = 36016 # Task was not found
IEFileAlreadyExists = 31061 # {"error_code":31061,"error_msg":"file already exists","request_id":2939656146461714799}
IEAppIDIsEmpty = 31024 # {'error_code': 31024, 'error_msg': 'app id is empty', 'request_id': 5638555241104030586} (HTTP 400)
# TODO: Should have use an enum or some sort of data structure for this,
# but now changing this is too time consuming and error-prone
ErrorExplanations = {
ENoError: "Everything went fine.",
EIncorrectPythonVersion: "Incorrect Python version",
EArgument: "Invalid program argument passed in",
EAbort: "Abort due to unrecovrable errors",
EException: "Unhandled exception occurred",
EParameter: "Some or all the parameters passed to the function are invalid",
EInvalidJson: "Invalid JSON received",
EHashMismatch: "MD5 hashes of the local file and remote file don't match each other",
EFileWrite: "Error writing file",
EFileTooBig: "File too big to upload",
EFailToCreateLocalDir: "Unable to create some directory(ies)",
EFailToCreateLocalFile: "Unable to create some local file(s)",
EFailToDeleteDir:" Unable to delete some directory(ies)",
EFailToDeleteFile: "Unable to delete some file(s)",
EFileNotFound: "File not found",
EMaxRetry: "Maximum retries reached",
ERequestFailed: "Request failed",
ECacheNotLoaded: "Failed to load file caches",
EMigrationFailed: "Failed to migrate from the old cache format",
EDownloadCerts: "Failed to download certificats", # no long in use
EUserRejected: "User chose to not to proceed",
EUpdateNeeded: "Need to update bypy",
ESkipped: "Some files/directores are skipped",
EFatal: "Fatal error, unable to continue",
IEMD5NotFound: "File md5 not found, you should use upload API to upload the whole file.",
IESuperfileCreationFailed: "superfile create failed (HTTP 404)",
# Undocumented, see #308 , https://paste.ubuntu.com/23672323/
IEBlockMissInSuperFile2: "Block miss in superfile2 (HTTP 403)",
IETaskNotFound: "Task was not found",
IEFileAlreadyExists: "File already exists"
}
DownloaderAria2 = 'aria2'
Downloaders = [DownloaderAria2]
DownloaderDefaultArgs = {
DownloaderAria2 : "-c -k10M -x4 -s4 --file-allocation=none"
}
DownloaderArgsEnvKey = 'DOWNLOADER_ARGUMENTS'
DownloaderArgsIsFilePrefix = '@'
PipBinaryName = 'pip' + str(sys.version_info[0])
PipInstallCommand = PipBinaryName + ' install requests'
PipUpgradeCommand = PipBinaryName + ' install -U requests'
#### Definitions that are real world constants
OneK = 1024
OneM = OneK * OneK
OneG = OneM * OneK
OneT = OneG * OneK
OneP = OneT * OneK
OneE = OneP * OneK
OneZ = OneE * OneK
OneY = OneZ * OneK
# Prefix name indexed by power of 1024: SIPrefixNames[2] -> 'M', etc.
SIPrefixNames = [ '', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y' ]
# Multiplier for each (upper-cased) prefix letter, used when parsing
# human-readable sizes such as '4G' or '20M'.
SIPrefixTimes = {
    'K' : OneK,
    'M' : OneM,
    'G' : OneG,
    'T' : OneT,
    'P' : OneP,  # fix: was missing, so sizes like '4P' raised KeyError
    'E' : OneE,
    'Z' : OneZ,
    'Y' : OneY }

# before this, you don't know me, i don't know you - Eason
TenYearInSeconds = 60 * 60 * 24 * 366 * 10

# For Python 3 only, threading.TIMEOUT_MAX is 9223372036854.0 on all *nix systems,
# but it's a little over 49 days for Windows, if we give a value larger than that,
# Python 3 on Windows will throw towel, so we cringe.
FortyNineDaysInSeconds = 60 * 60 * 24 * 49
#### Baidu PCS constants
# ==== NOTE ====
# I use server auth, because it's the only method I know that can conceal the SecretKey.
# If you want to perform local authorization using 'Device' method instead, you just need:
# - Paste your own ApiKey and SecretKey. (A non-empty SecretKey will change auth mode to device)
# - Change the AppPcsPath to your own App's directory at Baidu PCS
# Then you are good to go
# replace with your own API key and secret if you are using your own appid, or set env vars
ApiKey = os.environ['BAIDU_API_KEY'] if 'BAIDU_API_KEY' in os.environ else 'q8WE4EpCsau1oS0MplgMKNBn'
SecretKey = os.environ['BAIDU_API_SECRET'] if 'BAIDU_API_SECRET' in os.environ else 'PA4MhwB5RE7DacKtoP2i8ikCnNzAqYTD'
# NOTE: no trailing '/'
AppPcsPath = '/apps/bypy'  # change this to the App's directory you specified when creating the app
AppPcsPathLen = len(AppPcsPath)

## Baidu PCS URLs etc.
OpenApiUrl = "https://openapi.baidu.com"
OpenApiVersion = "2.0"
OAuthUrl = OpenApiUrl + "/oauth/" + OpenApiVersion
ServerAuthUrl = OAuthUrl + "/authorize"
DeviceAuthUrl = OAuthUrl + "/device/code"
TokenUrl = OAuthUrl + "/token"
PcsDomain = 'pcs.baidu.com'
RestApiPath = '/rest/2.0/pcs/'
PcsUrl = 'https://' + PcsDomain + RestApiPath
# 'c' / 'd' subdomains are used for upload / download endpoints respectively.
CPcsUrl = 'https://c.pcs.baidu.com/rest/2.0/pcs/'
DPcsUrl = 'https://d.pcs.baidu.com/rest/2.0/pcs/'

## Baidu PCS constants
MinRapidUploadFileSize = 256 * OneK
MaxSliceSize = 2 * OneG
MaxSlicePieces = 1024
MaxListEntries = 1000  # https://github.com/houtianze/bypy/issues/285

### public static properties
HelpMarker = "Usage:"

### ByPy config constants
## directories, for setting, cache, etc
HomeDir = os.path.expanduser('~')
# os.path.join() may not handle unicode well
ConfigDir = HomeDir + os.sep + '.bypy'
TokenFileName = 'bypy.json'
TokenFilePath = ConfigDir + os.sep + TokenFileName
SettingFileName = 'bypy.setting.json'
SettingFilePath = ConfigDir + os.sep + SettingFileName
HashCacheFileName = 'bypy.hashcache.json'
HashCachePath = ConfigDir + os.sep + HashCacheFileName
PickleFileName = 'bypy.pickle'
PicklePath = ConfigDir + os.sep + PickleFileName
# ProgressPath saves the MD5s of uploaded slices, for upload resuming
# format:
# {
#   abspath: [slice_size, [slice1md5, slice2md5, ...]],
# }
#
ProgressFileName = 'bypy.parts.json'
ProgressPath = ConfigDir + os.sep + ProgressFileName
ByPyCertsFileName = 'bypy.cacerts.pem'
OldByPyCertsPath = ConfigDir + os.sep + ByPyCertsFileName
# Old setting locations, should be moved to ~/.bypy to be clean
OldTokenFilePath = HomeDir + os.sep + '.bypy.json'
OldPicklePath = HomeDir + os.sep + '.bypy.pickle'
RemoteTempDir = AppPcsPath + '/.bypytemp'
SettingKey_OverwriteRemoteTempDir = 'overwriteRemoteTempDir'
SettingKey_LastUpdateCheckTime = 'lastUpdateCheck'

## default config values
PrintFlushPeriodInSec = 5.0
# TODO: Does the following User-Agent emulation help?
UserAgent = None  # According to xslidian, User-Agent affects download.
#UserAgent = 'Mozilla/5.0'
#UserAgent = "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)"
#UserAgent = 'netdisk;5.2.7.2;PC;PC-Windows;6.2.9200;WindowsBaiduYunGuanJia'
DefaultSliceInMB = 20
DefaultSliceSize = 20 * OneM
DefaultDlChunkSize = 20 * OneM
RetryDelayInSec = 10
CacheSavePeriodInSec = 10 * 60.0
DefaultTimeOutInSeconds=300
# share retries
ShareRapidUploadRetries = 3
DefaultResumeDlRevertCount = 1
DefaultProcessCount = 1

## program switches (command-line option spellings)
CleanOptionShort = '-c'
CleanOptionLong = '--clean'
DisableSslCheckOption = '--disable-ssl-check'
CaCertsOption = '--cacerts'
MultiprocessOption = '--processes'

# vim: tabstop=4 noexpandtab shiftwidth=4 softtabstop=4 ff=unix fileencoding=utf-8
| mit | 661d1ee912f5cf86c1057156863d95bf | 37.772321 | 124 | 0.744732 | 3.062412 | false | false | false | false |
kibitzr/kibitzr | kibitzr/app.py | 1 | 5995 | import logging
import signal
import time
import code
import psutil
import os
import entrypoints
from .conf import settings, SettingsParser
from .fetcher import cleanup_fetchers, persistent_firefox
from .checker import Checker
from .bootstrap import create_boilerplate
from . import timeline
logger = logging.getLogger(__name__)
__all__ = [
'Application',
]
class Application:
    """Top-level kibitzr runner: wires signals, settings, and the check loop."""

    def __init__(self):
        # All mutable run-state lives in one dict so tests can reset it;
        # 'orig' remembers the previous signal handlers for restoration.
        self.signals = {
            'reload_conf_pending': False,
            'interrupted': False,
            'open_backdoor': False,
            'orig': {
                signal.SIGINT: None,
                signal.SIGTERM: None,
            }
        }
        try:
            self.signals['orig'].update({
                signal.SIGUSR1: None,
                signal.SIGUSR2: None,
            })
        except AttributeError:
            # Unavailable on Windows
            pass

    @staticmethod
    def bootstrap():
        """Create boilerplate configuration files for a fresh installation."""
        create_boilerplate()

    def run(self, once=False, log_level=logging.INFO, names=None):
        """Main entry point.

        Runs all configured checks once (``once=True``) or forever, re-reading
        the configuration whenever a reload was signalled.  Returns a process
        exit code (0 on success, 1 on interruption / missing checks).
        """
        # Reset global state for testability:
        self.signals.update({
            'reload_conf_pending': False,
            'interrupted': False,
            'open_backdoor': False,
        })
        self.setup_logger(log_level)
        self.connect_signals()
        try:
            while True:
                if self.signals['interrupted']:
                    return 1
                if self.signals['reload_conf_pending']:
                    settings().reread()
                    self.signals['reload_conf_pending'] = False
                checkers = Checker.create_from_settings(
                    checks=settings().checks,
                    names=names
                )
                if checkers:
                    self.before_start(checkers)
                    self.execute_all(checkers)
                    if once:
                        return 0
                    else:
                        # Returns when a config reload is requested; the
                        # outer loop then re-creates the checkers.
                        self.check_forever(checkers)
                else:
                    logger.warning("No checks defined. Exiting")
                    return 1
        finally:
            cleanup_fetchers()
        # NOTE(review): unreachable — every path in the while-loop returns.
        return 0

    def disconnect_signals(self):
        """Restore the signal handlers saved in __init__."""
        signal.signal(signal.SIGINT, self.signals['orig'][signal.SIGINT])
        signal.signal(signal.SIGTERM, self.signals['orig'][signal.SIGTERM])
        try:
            signal.signal(signal.SIGUSR1, self.signals['orig'][signal.SIGUSR1])
            signal.signal(signal.SIGUSR2, self.signals['orig'][signal.SIGUSR2])
        except AttributeError:
            # Unavailable on Windows
            pass

    def connect_signals(self):
        """Install kibitzr's handlers, remembering the previous ones."""
        self.signals['orig'][signal.SIGINT] = signal.signal(signal.SIGINT, self.on_interrupt)
        self.signals['orig'][signal.SIGTERM] = signal.signal(signal.SIGTERM, self.on_interrupt)
        try:
            self.signals['orig'][signal.SIGUSR1] = signal.signal(signal.SIGUSR1,
                                                                 self.on_reload_config)
            self.signals['orig'][signal.SIGUSR2] = signal.signal(signal.SIGUSR2, self.on_backdoor)
        except AttributeError:
            # Unavailable on Windows
            pass

    @staticmethod
    def execute_conf(conf):
        """Run each check from an explicit config object once (no scheduling)."""
        logging.basicConfig(level=logging.WARNING)
        # NOTE(review): assumes basicConfig installed at least one handler.
        logging.getLogger('').handlers[0].level = logging.WARNING
        checks = SettingsParser().parse_checks(conf)
        for check in checks:
            Checker(check).check()

    def run_firefox(self):
        """Launch a persistent Firefox session for interactive scenario debugging."""
        self.setup_logger(logging.INFO)
        persistent_firefox()

    @staticmethod
    def telegram_chat():
        """Interactively resolve the Telegram chat id for the notifier."""
        from .notifier.telegram import chat_id
        chat_id()

    @staticmethod
    def setup_logger(log_level=logging.INFO):
        logging.getLogger("").setLevel(log_level)

    def check_forever(self, checkers):
        """Run scheduled checks until interrupted or a config reload is flagged."""
        timeline.schedule_checks(checkers)
        logger.info("Starting infinite loop")
        while not self.signals['reload_conf_pending']:
            if self.signals['interrupted']:
                break
            if self.signals['open_backdoor']:
                self.signals['open_backdoor'] = False
                # Blocks until the interactive shell is exited.
                code.interact(
                    banner="Kibitzr debug shell",
                    local=locals(),
                )
            timeline.run_pending()
            # Re-check after run_pending so a Ctrl+C during checks is honored
            # before sleeping.
            if self.signals['interrupted']:
                break
            time.sleep(1)

    def execute_all(self, checkers):
        """Run every checker once, stopping early on interruption."""
        for checker in checkers:
            if not self.signals['interrupted']:
                checker.check()
            else:
                break

    def on_reload_config(self, *args, **kwargs):
        logger.info("Received SIGUSR1. Flagging configuration reload")
        self.signals['reload_conf_pending'] = True

    def on_backdoor(self, *args, **kwargs):
        logger.info("Received SIGUSR2. Flagging backdoor to open")
        self.signals['open_backdoor'] = True

    def on_interrupt(self, *args, **kwargs):
        # First Ctrl+C requests a graceful stop; a later one restores the
        # default handlers so the next Ctrl+C kills the process outright.
        if not self.signals['interrupted']:
            self.signals['interrupted'] = True
        else:
            # Third Ctrl+C to hard stop:
            self.disconnect_signals()

    def before_start(self, checkers):
        """
        Loads entry points named kibitzr.before_start
        and call each one with two arguments:

        1. Application instance;
        2. List of configured checkers
        """
        for point in entrypoints.get_group_all("kibitzr.before_start"):
            entry = point.load()
            entry(self, checkers)

    @staticmethod
    def send_reload():
        """
        Sends SIGUSR1 to all processes that execute kibitzr
        """
        user_id = os.geteuid()
        for proc in psutil.process_iter(['uids', 'name', 'pid']):
            # Only signal kibitzr processes owned by the current user.
            if proc.info['name'] == "kibitzr" and proc.info['uids'][0] == user_id:
                proc.send_signal(signal.SIGUSR1)
                logger.info(f"Send singal SIGUSR1 to process: {proc.info['pid']}")
| mit | e8c3793bca75b52f5a9321fb52cac2aa | 31.231183 | 98 | 0.551626 | 4.447329 | false | false | false | false |
demisto/content | Packs/CommonScripts/Scripts/CalculateEntropy/CalculateEntropy.py | 2 | 1737 | import demistomock as demisto
from CommonServerPython import * # noqa: E402 lgtm [py/polluting-import]
from CommonServerUserPython import * # noqa: E402 lgtm [py/polluting-import]
import math
import string
def calculate_shannon_entropy(data, minimum_entropy):
    """Algorithm to determine the randomness of a given data.

    Higher is more random/complex; most English words will yield in average
    a result of 3. Only printable characters contribute to the sum, though
    non-printable characters still count toward the total length.

    Args:
        data (str): The data to calculate entropy on.
        minimum_entropy (float): The minimum entropy; results below it are
            reported as plain text without a table/context entry.

    Returns:
        (str, dict, dict). Human readable, context, raw response
    """
    entropy = 0.0
    # Guard against empty input: the original division by len(data) raised
    # ZeroDivisionError; empty data has zero entropy by definition.
    if data:
        for char in string.printable:
            # probability of event X
            p_x = float(data.count(char)) / len(data)
            if p_x > 0:
                entropy += - p_x * math.log(p_x, 2)
    if entropy >= minimum_entropy:
        human_readable = tableToMarkdown("Entropy results", {'Checked Value': data, 'Entropy': entropy},
                                         headers=['Checked Value', 'Entropy'])
        return human_readable, {'EntropyResult': {'checked_value': data, 'entropy': entropy}}, {}
    return f'Entropy for {data} is {entropy} - lower than {minimum_entropy}', {}, {}
def main():
    """Read the script arguments, compute the entropy and report the results."""
    try:
        data = demisto.args().get('data', '')
        minimum_entropy = float(demisto.args().get('minimum_entropy', 0))
        return_outputs(*calculate_shannon_entropy(data, minimum_entropy))
    except Exception as ex:
        return_error(f'Failed to execute calculate entropy script. Error: {str(ex)}')


# XSOAR scripts may run under several module names, hence the triple check.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | dc34451e286ce66ad453ad0c8eaf13cc | 38.477273 | 104 | 0.640184 | 3.974828 | false | false | false | false |
demisto/content | Packs/PANWComprehensiveInvestigation/Scripts/PanwIndicatorCreateQueries/PanwIndicatorCreateQueries_test.py | 2 | 3263 | from PanwIndicatorCreateQueries import generate_ip_queries, generate_hash_queries, generate_domain_queries
def test_generate_ip_queries():
    """Unit test
    Given
    - generate_ip_queries command
    - command args(single and multiple ips)
    When
    - executing generate_ip_queries command
    Then
    - Validate that the proper query is created
    """
    single_ip_query = "SELECT * from tms.threat where endPointHeader.agentIp='8.8.8.8'"
    two_ip_query = ("SELECT * from tms.threat where endPointHeader.agentIp='8.8.8.8' OR "
                    "endPointHeader.agentIp='1.1.1.1'")

    assert generate_ip_queries(['8.8.8.8'])['CortexTrapsIP'] == single_ip_query
    # The invalid address '12345' must be dropped from the generated query.
    assert generate_ip_queries(['8.8.8.8', '12345'])['CortexTrapsIP'] == single_ip_query
    assert generate_ip_queries(['8.8.8.8', '1.1.1.1'])['CortexTrapsIP'] == two_ip_query
def test_generate_hash_queries():
    """Unit test
    Given
    - generate_hash_queries command
    - command args(single and multiple hashes)
    When
    - executing generate_hash_queries command
    Then
    - Validate that the proper query is created
    """
    one_hash_query = "SELECT * from tms.threat where messageData.files.sha256='ababababababababab'"
    assert generate_hash_queries(['ababababababababab'])['CortexTrapsHash'] == one_hash_query

    two_hash_query = ("SELECT * from tms.threat where messageData.files.sha256='ababababababababab' OR "
                      "messageData.files.sha256='cbcbcbcbcbcbcbcbcb'")
    autofocus_query = (
        '{"operator": "any", "children": ['
        '{"field": "alias.hash_lookup", "operator": "contains", "value": "ababababababababab"}, '
        '{"field": "alias.hash_lookup", "operator": "contains", "value": "cbcbcbcbcbcbcbcbcb"}]}')

    multi_result = generate_hash_queries(['ababababababababab', 'cbcbcbcbcbcbcbcbcb'])
    assert multi_result['CortexTrapsHash'] == two_hash_query
    assert multi_result['AutofocusSessionsHash'] == autofocus_query
def test_generate_domain_queries():
    """Unit test
    Given
    - generate_domain_queries command
    - command args(single and multiple domains)
    When
    - executing generate_domain_queries command
    Then
    - Validate that the proper query is created
    """
    one_domain_query = "SELECT * from panw.threat where misc LIKE 'demisto.com'"
    two_domain_query = ("SELECT * from panw.threat where misc LIKE 'demisto.com' OR "
                        "misc LIKE 'paloaltonetworks.com'")

    assert generate_domain_queries(['demisto.com'])['CortexThreatDomain'] == one_domain_query
    assert generate_domain_queries(['demisto.com', 'paloaltonetworks.com'])['CortexThreatDomain'] == two_domain_query
| mit | f2508b05dc08c9ed15d6f0b8c42ff271 | 40.833333 | 109 | 0.667177 | 3.391892 | false | true | false | false |
demisto/content | Packs/ShiftManagement/Scripts/CreateChannelWrapper/CreateChannelWrapper.py | 2 | 2075 | from CommonServerPython import *
def main():
    """Create a messaging channel in every active integration (Teams / Slack).

    Reads channel parameters from the script arguments, attempts creation in
    each active supported integration, and reports success/partial failure.
    Fails the script only when no channel could be created at all.
    """
    args = demisto.args()
    channel_type = args.get('type')
    channel_name = args.get('name')
    channel_desc = args.get('description')
    channel_team = args.get('team')  # required for Microsoft Teams only

    errors = []
    integrations_to_create = []
    channels_created = []

    # Collect the supported integrations that are currently enabled.
    modules = demisto.getModules()
    for module_name, module in modules.items():
        brand = module.get('brand')
        if module.get('state') == 'active' and brand in {'Microsoft Teams', 'SlackV2'}:
            integrations_to_create.append(brand)

    if not integrations_to_create:
        return_error('Microsoft Teams and Slack are not available, please configure at least one of them.')

    for integration in integrations_to_create:
        res = None
        if integration == 'SlackV2':
            res = demisto.executeCommand('slack-create-channel', {'type': channel_type, 'name': channel_name})
        elif integration == 'Microsoft Teams':
            if channel_team:
                res = demisto.executeCommand('microsoft-teams-create-channel',
                                             {'channel_name': channel_name, 'description': channel_desc, 'team': channel_team})
            else:
                errors.append('Failed to create channel in Microsoft Teams: team argument is missing')
        # res stays None when no command was executed; is_error(None) is falsy.
        if is_error(res):
            errors.append(f'Failed to create channel in {integration}: {get_error(res)}')
        elif res:
            channels_created.append(integration)

    errors_str = '\n'.join(errors)
    # in case of no channel created
    if len(channels_created) == 0:
        return_error(errors_str)
    # in case of channel created in all the available brands(Microsoft Teams and Slack)
    elif len(channels_created) == len(integrations_to_create):
        return_results(f'Channel {channel_name} created successfully.')
    # in case of only one channel created
    else:
        return_results(f'Channel {channel_name} created successfully.\n{errors_str}')


if __name__ in ('__builtin__', 'builtins', '__main__'):
    main()
| mit | 31f00d97dde9ac7235a8a437ed9554d0 | 36.727273 | 127 | 0.623133 | 4.036965 | false | false | false | false |
demisto/content | Packs/Workday/Integrations/Workday_IAM/test_data/fetch_incidents_source_priority_mock_data.py | 2 | 1396 | full_report = {
"Report_Entry": [{
"Employment_Status": "Active",
"Last_Day_Of_Work": "10/05/2035",
"Last_Hire_Date": "10/05/2020",
"Emp_ID": "100122",
"Email_Address": "rrahardjo@paloaltonetworks.com"
}]
}
employee_id_to_user_profile = {
"100122": {
"employmentstatus": "Active",
"lastdayofwork": "10/05/2035",
"hiredate": "10/05/2020",
"employeeid": "100122",
"username": "rrahardjo@paloaltonetworks.com",
"email": "rrahardjo@paloaltonetworks.com",
"sourcepriority": 1,
"sourceoftruth": "Wand IAM",
"isprocessed": False
}
}
email_to_user_profile = {
"rrahardjo@paloaltonetworks.com": {
"employmentstatus": "Active",
"lastdayofwork": "10/05/2035",
"hiredate": "10/05/2020",
"employeeid": "100122",
"username": "rrahardjo@paloaltonetworks.com",
"email": "rrahardjo@paloaltonetworks.com",
"sourcepriority": 1,
"sourceoftruth": "Wand IAM",
"isprocessed": False
}
}
mapped_workday_user = {
"Employment Status": "Active",
"Last Day of Work": "10/05/2035",
"Hire Date": "10/05/2020",
"Employee ID": "100122",
"Username": "rrahardjo@paloaltonetworks.com",
"Email": "rrahardjo@paloaltonetworks.com",
"Source Priority": 1,
"Source of Truth": "Wand IAM"
}
event_data = []
| mit | 6214c1a4290d998305670f9c4094d87f | 26.92 | 57 | 0.576648 | 2.902287 | false | false | true | false |
demisto/content | Packs/LogRhythmRest/Integrations/LogRhythmRest/LogRhythmRest.py | 2 | 83553 | # -*- coding: utf-8 -*-
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
''' IMPORTS '''
import json
import requests
import random
import string
from datetime import datetime, timedelta
# Disable insecure warnings
requests.packages.urllib3.disable_warnings()
''' GLOBALS/PARAMS '''
# Integration instance parameters, read once at module import time from the
# Demisto/XSOAR instance configuration.
TOKEN = demisto.params().get('token', '')
BASE_URL = demisto.params().get('url', '').strip('/')  # trailing '/' stripped so endpoint paths can be appended safely
# NOTE(review): despite the name, INSECURE is True when the 'insecure' checkbox
# is UNCHECKED (i.e. when SSL verification SHOULD happen) — presumably passed
# as requests' `verify=` argument; confirm at the call sites before renaming.
INSECURE = not demisto.params().get('insecure')
CLUSTER_ID = demisto.params().get('cluster-id')
ENTITY_ID = demisto.params().get('entity-id')

# HTTP headers sent with every request to the LogRhythm REST API;
# authentication is a static bearer token from the instance config.
HEADERS = {
    'Authorization': 'Bearer ' + TOKEN,
    'Content-Type': 'application/json',
}
# Column orderings for the various command outputs (hosts, logs, persons,
# networks, alarm summaries, users, logins, profiles).
# NOTE(review): presumably passed as the `headers` argument when rendering
# results as markdown tables — confirm at the call sites.
HOSTS_HEADERS = ["ID", "Name", "EntityId", "EntityName", "OS", "Status", "Location", "RiskLevel", "ThreatLevel",
                 "ThreatLevelComments", "DateUpdated", "HostZone"]
LOGS_HEADERS = ["Level", "Computer", "Channel", "Keywords", "EventData"]
PERSON_HEADERS = ["ID", "HostStatus", "IsAPIPerson", "FirstName", "LastName", "UserID", "UserLogin", "DateUpdated"]
NETWORK_HEADERS = ["ID", "BeganIP", "EndIP", "HostStatus", "Name", "RiskLevel", "EntityId", "EntityName", "Location",
                   "ThreatLevel", "DateUpdated", "HostZone"]
ALARM_SUMMARY_HEADERS = ["PIFType", "DrillDownSummaryLogs"]
USER_HEADERS = ["ID", "DateUpdated", "HostStatus", "LastName", "FirstName", "UserType", "Entity", "Owner", "ReadAccess",
                "WriteAccess"]
LOGIN_HEADERS = ["Login", "UserProfileId", "UserId", "DefaultEntityId", "HostStatus", "DateUpdated", "DateCreated"]
PROFILE_HEADERS = ["ID", "Name", "ShortDescription", "LongDescription", "DataProcessorAccessMode", "SecurityRole", "ProfileType",
                   "DateUpdated", "TotalAssociatedUsers"]
# PIF ("PIFType" in alarm drill-down summaries) type ID -> human-readable
# field name. Keys are numeric IDs kept as strings because that is how the
# LogRhythm API returns them. NOTE(review): the full list presumably tracks
# LogRhythm's field catalogue — verify against the vendor docs before adding
# or renaming entries.
PIF_TYPES = {
    "1": "Direction",
    "2": "Priority",
    "3": "Normal Message Date",
    "4": "First Normal Message Date",
    "5": "Last Normal Message Date",
    "6": "Count",
    "7": "MessageDate",
    "8": "Entity",
    "9": "Log Source",
    "10": "Log Source Host",
    "11": "Log Source Type",
    "12": "Log Class Type",
    "13": "Log Class",
    "14": "Common Event",
    "15": "MPE Rule",
    "16": "Source",
    "17": "Destination",
    "18": "Service",
    "19": "Known Host",
    "20": "Known Host (Origin)",
    "21": "Known Host (Impacted)",
    "22": "Known Service",
    "23": "IP",
    "24": "IP Address (Origin)",
    "25": "IP Address (Impacted)",
    "26": "Host Name",
    "27": "Host Name (Origin)",
    "28": "Host Name (Impacted)",
    "29": "Port (Origin)",
    "30": "Port (Impacted)",
    "31": "Protocol",
    "32": "User (Origin)",
    "33": "User (Impacted)",
    "34": "Sender",
    "35": "Recipient",
    "36": "Subject",
    "37": "Object",
    "38": "Vendor Message ID",
    "39": "Vendor Message Name",
    "40": "Bytes In",
    "41": "Bytes Out",
    "42": "Items In",
    "43": "Items Out",
    "44": "Duration",
    "45": "Time Start",
    "46": "Time End",
    "47": "Process",
    "48": "Amount",
    "49": "Quantity",
    "50": "Rate",
    "51": "Size",
    "52": "Domain (Impacted)",
    "53": "Group",
    "54": "URL",
    "55": "Session",
    "56": "Sequence",
    "57": "Network (Origin)",
    "58": "Network (Impacted)",
    "59": "Location (Origin)",
    "60": "Country (Origin)",
    "61": "Region (Origin)",
    "62": "City (Origin)",
    "63": "Location (Impacted)",
    "64": "Country (Impacted)",
    "65": "Region (Impacted)",
    "66": "City (Impacted)",
    "67": "Entity (Origin)",
    "68": "Entity (Impacted)",
    "69": "Zone (Origin)",
    "70": "Zone (Impacted)",
    # note: no "71" entry in the source mapping
    "72": "Zone",
    "73": "User",
    "74": "Address",
    "75": "MAC",
    "76": "NATIP",
    "77": "Interface",
    "78": "NATPort",
    "79": "Entity (Impacted or Origin)",
    "80": "RootEntity",
    "100": "Message",
    "200": "MediatorMsgID",
    "201": "MARCMsgID",
    "1040": "MAC (Origin)",
    "1041": "MAC (Impacted)",
    "1042": "NATIP (Origin)",
    "1043": "NATIP (Impacted)",
    "1044": "Interface (Origin)",
    "1045": "Interface (Impacted)",
    "1046": "PID",
    "1047": "Severity",
    "1048": "Version",
    "1049": "Command",
    "1050": "ObjectName",
    "1051": "NATPort (Origin)",
    "1052": "NATPort (Impacted)",
    "1053": "Domain (Origin)",
    "1054": "Hash",
    "1055": "Policy",
    "1056": "Vendor Info",
    "1057": "Result",
    "1058": "Object Type",
    "1059": "CVE",
    "1060": "UserAgent",
    "1061": "Parent Process Id",
    "1062": "Parent Process Name",
    "1063": "Parent Process Path",
    "1064": "Serial Number",
    "1065": "Reason",
    "1066": "Status",
    "1067": "Threat Id",
    "1068": "Threat Name",
    "1069": "Session Type",
    "1070": "Action",
    "1071": "Response Code",
    "1072": "User (Origin) Identity ID",
    "1073": "User (Impacted) Identity ID",
    "1074": "Sender Identity ID",
    "1075": "Recipient Identity ID",
    "1076": "User (Origin) Identity",
    "1077": "User (Impacted) Identity",
    "1078": "Sender Identity",
    "1079": "Recipient Identity",
    "1080": "User (Origin) Identity Domain",
    "1081": "User (Impacted) Identity Domain",
    "1082": "Sender Identity Domain",
    "1083": "Recipient Identity Domain",
    "1084": "User (Origin) Identity Company",
    "1085": "User (Impacted) Identity Company",
    "1086": "Sender Identity Company",
    "1087": "Recipient Identity Company",
    "1088": "User (Origin) Identity Department",
    "1089": "User (Impacted) Identity Department",
    "1090": "Sender Identity Department",
    "1091": "Recipient Identity Department",
    "1092": "User (Origin) Identity Title",
    "1093": "User (Impacted) Identity Title",
    "1094": "Sender Identity Title",
    "1095": "Recipient Identity Title",
    "10001": "Source Or Destination",
    "10002": "Port (Origin or Impacted)",
    "10003": "Network (Origin or Impacted)",
    "10004": "Location (Origin or Impacted)",
    "10005": "Country (Origin or Impacted)",
    "10006": "Region (Origin or Impacted)",
    "10007": "City (Origin or Impacted)",
    "10008": "Bytes In/Out",
    "10009": "Items In/Out"
}
# Alarm notification-processing status code (as a string, matching the API's
# representation) -> human-readable label.
ALARM_STATUS = {
    "0": "Waiting",
    "1": "In queue",
    "2": "Sent to SvcHost",
    "3": "Queued for retry",
    "4": "Completed",
}
# Mapping type and name fields
SOURCE_TYPE_MAP = {
"API_-_AWS_CloudTrail": 1000598,
"API_-_AWS_CloudWatch_Alarm": 1000607,
"API_-_AWS_Config_Event": 1000610,
"API_-_AWS_S3_Flat_File": 1000703,
"API_-_AWS_S3_Server_Access_Event": 1000575,
"API_-_BeyondTrust_Retina_Vulnerability_Management": 1000299,
"API_-_Box_Event": 1000633,
"API_-_Cisco_IDS/IPS": 1000025,
"API_-_Cradlepoint_ECM": 1000600,
"API_-_IP360_Vulnerability_Scanner": 1000589,
"API_-_Metasploit_Penetration_Scanner": 1000297,
"API_-_Nessus_Vulnerability_Scanner": 1000237,
"API_-_NetApp_CIFS_Security_Audit_Event_Log": 1000238,
"API_-_NeXpose_Vulnerability_Scanner": 1000296,
"API_-_Office_365_Management_Activity": 1000645,
"API_-_Office_365_Message_Tracking": 1000730,
"API_-_Okta_Event": 1000618,
"API_-_Qualys_Vulnerability_Scanner": 1000232,
"API_-_Salesforce_EventLogFile": 1000609,
"API_-_Sourcefire_eStreamer": 1000298,
"API_-_Tenable_SecurityCenter": 1000663,
"API_-_Tenable.io_Scanner": 1000624,
"Flat_File_-_ActivIdentity_CMS": 1000494,
"Flat_File_-_Airwatch_MDM": 1000337,
"Flat_File_-_Alfresco": 1000604,
"Flat_File_-_AllScripts": 1000734,
"Flat_File_-_Apache_Access_Log": 1000000001,
"Flat_File_-_Apache_Error_Log": 80,
"Flat_File_-_Apache_SSL_Access_Log": 1000000002,
"Flat_File_-_Apache_SSL_Error_Log": 82,
"Flat_File_-_Apache_Tomcat_Access_Log": 1000056,
"Flat_File_-_Apache_Tomcat_Console_Log": 1000465,
"Flat_File_-_Avaya_Secure_Access_Link_Remote_Access_Log": 1000474,
"Flat_File_-_Avaya_Voice_Mail_Log": 131,
"Flat_File_-_Axway_SFTP": 1000372,
"Flat_File_-_Beacon_Endpoint_Profiler": 1000518,
"Flat_File_-_Bind_9": 1000084,
"Flat_File_-_BlackBerry_Enterprise_Server": 164,
"Flat_File_-_Blue_Coat_Proxy_BCREPORTERMAIN_Format": 1000000006,
"Flat_File_-_Blue_Coat_Proxy_CSV_Format": 95,
"Flat_File_-_Blue_Coat_Proxy_SQUID-1_Format": 167,
"Flat_File_-_Blue_Coat_Proxy_W3C_Format": 1000003,
"Flat_File_-_Bro_IDS_Critical_Stack_Intel_Log": 1000611,
"Flat_File_-_Broadcom_SiteMinder": 1000794,
"Flat_File_-_CA_ACF2_for_z/OS_-_ACFRPTDS": 1000379,
"Flat_File_-_CA_ACF2_for_z/OS_-_ACFRPTEL": 1000386,
"Flat_File_-_CA_ACF2_for_z/OS_-_ACFRPTJL": 1000385,
"Flat_File_-_CA_ACF2_for_z/OS_-_ACFRPTLL": 1000384,
"Flat_File_-_CA_ACF2_for_z/OS_-_ACFRPTNV": 1000383,
"Flat_File_-_CA_ACF2_for_z/OS_-_ACFRPTOM": 1000371,
"Flat_File_-_CA_ACF2_for_z/OS_-_ACFRPTPW": 1000380,
"Flat_File_-_CA_ACF2_for_z/OS_-_ACFRPTRL": 1000382,
"Flat_File_-_CA_ACF2_for_z/OS_-_ACFRPTRV": 1000381,
"Flat_File_-_CA_ControlMinder": 1000345,
"Flat_File_-_Cerberus_FTP_Server": 1000417,
"Flat_File_-_Cerner": 1000422,
"Flat_File_-_Cisco_AMP_for_Endpoints": 1000744,
"Flat_File_-_Cisco_Email_Security_Appliance": 1000615,
"Flat_File_-_Cisco_LMS_(cwcli)": 1000212,
"Flat_File_-_Cisco_LMS_(Syslog)": 1000207,
"Flat_File_-_Cisco_NGFW": 1000107,
"Flat_File_-_Cisco_Secure_ACS_CSV_File": 139,
"Flat_File_-_Cisco_Security_Agent": 1000094,
"Flat_File_-_Cisco_Umbrella_DNS": 1000705,
"Flat_File_-_Cisco_Web_Security_aclog": 1000224,
"Flat_File_-_Citrix_Access_Gateway_IIS_Format": 1000024,
"Flat_File_-_Citrix_Access_Gateway_NCSA_Common_Format": 1000023,
"Flat_File_-_Citrix_Access_Gateway_W3C_Format": 1000022,
"Flat_File_-_Citrix_Presentation_Server": 1000086,
"Flat_File_-_Citrix_Secure_Gateway": 1000440,
"Flat_File_-_ClamAV_Anti-Virus": 1000052,
"Flat_File_-_ColdFusion_Application_Log": 1000357,
"Flat_File_-_ColdFusion_Exception_Log": 1000395,
"Flat_File_-_ColdFusion_Mail_Log": 1000361,
"Flat_File_-_ColdFusion_Mailsent_Log": 1000360,
"Flat_File_-_ColdFusion_Server_Log": 1000355,
"Flat_File_-_Cornerstone_Managed_File_Transfer": 1000374,
"Flat_File_-_Coyote_Point_Equalizer": 1000214,
"Flat_File_-_DB2_Audit_Log": 1000035,
"Flat_File_-_DB2_via_BMC_Log_Master": 1000290,
"Flat_File_-_Defender_Server": 1000151,
"Flat_File_-_DocWorks": 1000424,
"Flat_File_-_eClinicalWorks_Audit_Log": 1000748,
"Flat_File_-_EMC_Isilon": 1000563,
"Flat_File_-_Epicor_Coalition": 1000124,
"Flat_File_-_FairWarning_Ready-For-Healthcare": 1000269,
"Flat_File_-_FileZilla_System_Log": 1000564,
"Flat_File_-_FireEye_Web_MPS": 1000310,
"Flat_File_-_Forcepoint_Web_Security_CEF_Cloud_Format": 1000706,
"Flat_File_-_Forescout_CounterACT": 1000501,
"Flat_File_-_FoxT_BoKS_Server_Access_Control": 1000688,
"Flat_File_-_FundsXpress": 1000517,
"Flat_File_-_Gene6_FTP": 154,
"Flat_File_-_GlobalSCAPE_EFT": 1000231,
"Flat_File_-_Hadoop": 1000457,
"Flat_File_-_HMC": 1000614,
"Flat_File_-_HP-UX_Audit_Log": 115,
"Flat_File_-_IBM_4690_POS": 1000109,
"Flat_File_-_IBM_Informix_Application_Log": 1000169,
"Flat_File_-_IBM_Informix_Audit_Log": 1000170,
"Flat_File_-_IBM_Tivoli_Storage_Manager": 1000454,
"Flat_File_-_IBM_WebSphere_App_Server_v7_Audit_Log": 1000179,
"Flat_File_-_IBM_WebSphere_Cast_Iron_Cloud_Integration": 1000389,
"Flat_File_-_IBM_ZOS_Batch_Decryption_Log": 146,
"Flat_File_-_IBM_ZOS_CICS_Decryption_Log": 147,
"Flat_File_-_IBM_ZOS_RACF_Access_Log": 148,
"Flat_File_-_IBM_ZOS_RACF_SMF_Type_80": 175,
"Flat_File_-_IPSwitch_WS_FTP": 1000777,
"Flat_File_-_Irix_Audit_Logs": 1000117,
"Flat_File_-_IT-CUBE_AgileSI": 1000316,
"Flat_File_-_JBoss_Log_File": 134,
"Flat_File_-_Juniper_Steel_Belted_Radius_Server": 1000261,
"Flat_File_-_Kerio_Mail_Server": 1000115,
"Flat_File_-_KERISYS_Doors_Event_Export_Format": 1000129,
"Flat_File_-_Kippo_Honeypot": 1000522,
"Flat_File_-_Linux_Audit_ASCII": 1000154,
"Flat_File_-_Linux_Audit_Log": 1000123,
"Flat_File_-_Linux_Host_Secure_Log": 1000507,
"Flat_File_-_LOGbinder_EX": 1000623,
"Flat_File_-_LogRhythm_Alarm_Reingest": 8,
"Flat_File_-_LogRhythm_Data_Indexer_Monitor": 1000648,
"Flat_File_-_LogRhythm_Oracle_Log": 1000716,
"Flat_File_-_LogRhythm_System_Monitor": 17,
"Flat_File_-_LogRhythm_System_Monitor_Log_File": 1000858,
"Flat_File_-_LogRhythm_Trebek_Log": 1000717,
"Flat_File_-_LogRhythm_Zeus_Log": 1000715,
"Flat_File_-_Lotus_Domino_Client_Log": 1000041,
"Flat_File_-_McAfee_Cloud_Proxy_do_not_use": 1000826,
"Flat_File_-_McAfee_ePO_HIPS": 1000552,
"Flat_File_-_McAfee_Foundstone": 1000049,
"Flat_File_-_McAfee_Proxy_Cloud": 1000829,
"Flat_File_-_McAfee_SaaS_Web_Protection": 1000638,
"Flat_File_-_McAfee_Web_Gateway_Audit_Log": 1000685,
"Flat_File_-_Merak": 1000312,
"Flat_File_-_Meridian": 1000098,
"Flat_File_-_Microsoft_ActiveSync_2010": 1000404,
"Flat_File_-_Microsoft_CRM": 1000106,
"Flat_File_-_Microsoft_DHCP_Server_Log": 122,
"Flat_File_-_Microsoft_Forefront_TMG": 1000402,
"Flat_File_-_Microsoft_Forefront_TMG_Web_Proxy": 1000586,
"Flat_File_-_Microsoft_IIS_(IIS_Format)_File": 112,
"Flat_File_-_Microsoft_IIS_7.x_W3C_Extended_Format": 1000655,
"Flat_File_-_Microsoft_IIS_Error_Log_V6": 1000323,
"Flat_File_-_Microsoft_IIS_FTP_IIS_Log_File_Format": 1000150,
"Flat_File_-_Microsoft_IIS_FTP_W3C_Extended_Format": 161,
"Flat_File_-_Microsoft_IIS_NCSA_Common_Format_File": 111,
"Flat_File_-_Microsoft_IIS_SMTP_W3C_Format": 1000397,
"Flat_File_-_Microsoft_IIS_URL_Scan_Log": 1000054,
"Flat_File_-_Microsoft_IIS_W3C_File": 84,
"Flat_File_-_Microsoft_ISA_Server_2004": 187,
"Flat_File_-_Microsoft_ISA_Server_W3C_File": 21,
"Flat_File_-_Microsoft_Netlogon": 1000579,
"Flat_File_-_Microsoft_Port_Reporter_PR-PORTS_Log": 1000274,
"Flat_File_-_Microsoft_Semantic_Logging": 1000582,
"Flat_File_-_Microsoft_SQL_Server_2000_Error_Log": 40,
"Flat_File_-_Microsoft_SQL_Server_2005_Error_Log": 1000172,
"Flat_File_-_Microsoft_SQL_Server_2008_Error_Log": 1000181,
"Flat_File_-_Microsoft_SQL_Server_2012_Error_Log": 1000479,
"Flat_File_-_Microsoft_SQL_Server_2014_Error_Log": 1000637,
"Flat_File_-_Microsoft_Windows_2003_DNS": 1000506,
"Flat_File_-_Microsoft_Windows_2008_DNS": 1000276,
"Flat_File_-_Microsoft_Windows_2012_DNS": 1000619,
"Flat_File_-_Microsoft_Windows_Firewall": 119,
"Flat_File_-_MicroStrategy": 1000535,
"Flat_File_-_Mimecast_Audit": 1000721,
"Flat_File_-_Mimecast_Email": 1000726,
"Flat_File_-_Monetra": 1000288,
"Flat_File_-_MongoDB": 185,
"Flat_File_-_MS_Exchange_2003_Message_Tracking_Log": 1000000005,
"Flat_File_-_MS_Exchange_2007_Message_Tracking_Log": 1000000004,
"Flat_File_-_MS_Exchange_2010_Message_Tracking_Log": 1000000007,
"Flat_File_-_MS_Exchange_2013_Message_Tracking_Log": 1000561,
"Flat_File_-_MS_Exchange_2016_Message_Tracking_Log": 1000805,
"Flat_File_-_MS_Exchange_RPC_Client_Access": 1000433,
"Flat_File_-_MS_IAS/RAS_Server_NPS_DB_Log_Format": 121,
"Flat_File_-_MS_IAS/RAS_Server_Standard_Log_Format": 1000168,
"Flat_File_-_MS_ISA_Server_2006_ISA_All_Fields": 157,
"Flat_File_-_MS_ISA_Server_2006_W3C_All_Fields": 156,
"Flat_File_-_MS_SQL_Server_Reporting_Services_2008": 1000066,
"Flat_File_-_MySQL": 1000247,
"Flat_File_-_MySQL_error.log": 1000252,
"Flat_File_-_MySQL_mysql.log": 1000256,
"Flat_File_-_MySQL_mysql-slow.log": 1000253,
"Flat_File_-_Nessus_System_Log": 1000220,
"Flat_File_-_NetApp_Cluster": 1000593,
"Flat_File_-_Nginx_Log": 1000718,
"Flat_File_-_Novell_Audit": 1000110,
"Flat_File_-_Novell_GroupWise": 1000429,
"Flat_File_-_Novell_LDAP": 1000307,
"Flat_File_-_ObserveIT_Enterprise": 1000363,
"Flat_File_-_Office_365_Message_Tracking": 1000720,
"Flat_File_-_OpenDJ": 1000455,
"Flat_File_-_OpenVMS": 1000127,
"Flat_File_-_OpenVPN": 1000311,
"Flat_File_-_Oracle_11g_Fine_Grained_Audit_Trail": 1000227,
"Flat_File_-_Oracle_9i": 1000007,
"Flat_File_-_Oracle_BRM_CM_Log": 1000515,
"Flat_File_-_Oracle_BRM_DM_Log": 1000514,
"Flat_File_-_Oracle_Listener_Audit_Trail": 1000346,
"Flat_File_-_Oracle_SunOne_Directory_Server": 1000278,
"Flat_File_-_Oracle_SunOne_Web_Server_Access_Log": 1000277,
"Flat_File_-_Oracle_Virtual_Directory": 1000315,
"Flat_File_-_Oracle_WebLogic_11g_Access_Log": 1000471,
"Flat_File_-_Other": 127,
"Flat_File_-_PeopleSoft": 1000822,
"Flat_File_-_PhpMyAdmin_Honeypot": 1000523,
"Flat_File_-_Postfix": 1000294,
"Flat_File_-_PowerBroker_Servers": 1000528,
"Flat_File_-_Princeton_Card_Secure": 1000136,
"Flat_File_-_ProFTPD": 1000087,
"Flat_File_-_PureMessage_For_Exchange_SMTP_Log": 1000180,
"Flat_File_-_PureMessage_For_UNIX_Blocklist_Log": 1000176,
"Flat_File_-_PureMessage_For_UNIX_Message_Log": 1000177,
"Flat_File_-_RACF_(SMF)": 1000033,
"Flat_File_-_Radmin": 1000367,
"Flat_File_-_Restic_Backup_Log": 14,
"Flat_File_-_RL_Patient_Feedback": 1000349,
"Flat_File_-_RSA_Adaptive_Authentication": 1000283,
"Flat_File_-_RSA_Authentication_Manager_6.1": 1000226,
"Flat_File_-_S2_Badge_Reader": 1000630,
"Flat_File_-_Safenet": 1000714,
"Flat_File_-_Sendmail_File": 133,
"Flat_File_-_Sharepoint_ULS": 1000221,
"Flat_File_-_ShoreTel_VOIP": 1000351,
"Flat_File_-_Siemens_Radiology_Information_System": 1000091,
"Flat_File_-_Snort_Fast_Alert_File": 37,
"Flat_File_-_Solaris_-_Sulog": 1000043,
"Flat_File_-_Solaris_Audit_Log": 1000116,
"Flat_File_-_SpamAssassin": 1000047,
"Flat_File_-_Squid_Proxy": 1000070,
"Flat_File_-_Subversion": 1000516,
"Flat_File_-_Sudo.Log": 1000373,
"Flat_File_-_Swift_Alliance": 1000099,
"Flat_File_-_Symantec_Antivirus_10.x_Corporate_Edtn": 176,
"Flat_File_-_Symantec_Antivirus_12.x_Corporate_Edtn": 1000602,
"Flat_File_-_Symitar_Episys_Console_Log": 1000466,
"Flat_File_-_Symitar_Episys_Sysevent_Log": 1000450,
"Flat_File_-_Tandem_EMSOUT_Log_File": 138,
"Flat_File_-_Tandem_XYGATE": 1000306,
"Flat_File_-_Tectia_SSH_Server": 1000476,
"Flat_File_-_Trade_Innovations_CSCS": 1000114,
"Flat_File_-_Trend_Micro_IMSS": 1000219,
"Flat_File_-_Trend_Micro_Office_Scan": 1000244,
"Flat_File_-_Tumbleweed_Mailgate_Server": 1000067,
"Flat_File_-_Verint_Audit_Trail_File": 142,
"Flat_File_-_VMWare_Virtual_Machine": 109,
"Flat_File_-_Voltage_Securemail": 1000368,
"Flat_File_-_Vormetric_Log_File": 135,
"Flat_File_-_vsFTP_Daemon_Log": 1000042,
"Flat_File_-_Vyatta_Firewall_Kernel_Log": 1000456,
"Flat_File_-_WordPot_Honeypot": 1000524,
"Flat_File_-_X-NetStat_Log": 38,
"Flat_File_-_XPient_POS_CCA_Manager": 159,
"Flat_File_-_XPIENT_POS_POSLOG": 1000275,
"Flat_File_-_XPIENT_POS_Shell_Log": 1000287,
"IPFIX_-_IP_Flow_Information_Export": 1000484,
"J-Flow_-_Juniper_J-Flow_Version_5": 1000292,
"J-Flow_-_Juniper_J-Flow_Version_9": 1000293,
"LogRhythm_CloudAI": 1000678,
"LogRhythm_Data_Loss_Defender": 1000044,
"LogRhythm_Demo_File_-_Application_Server_Log": 1000186,
"LogRhythm_Demo_File_-_Content_Inspection_Log": 1000190,
"LogRhythm_Demo_File_-_Database_Audit_Log": 1000191,
"LogRhythm_Demo_File_-_Ecom_Server_Log": 1000194,
"LogRhythm_Demo_File_-_File_Server_Log": 1000184,
"LogRhythm_Demo_File_-_Firewall_Log": 1000189,
"LogRhythm_Demo_File_-_FTP_Log": 1000182,
"LogRhythm_Demo_File_-_IDS_Alarms_Log": 1000188,
"LogRhythm_Demo_File_-_Mail_Server_Log": 1000185,
"LogRhythm_Demo_File_-_Netflow_Log": 1000193,
"LogRhythm_Demo_File_-_Network_Device_Log": 1000192,
"LogRhythm_Demo_File_-_Network_Server_Log": 1000183,
"LogRhythm_Demo_File_-_VPN_Log": 1000195,
"LogRhythm_Demo_File_-_Web_Access_Log": 1000187,
"LogRhythm_File_Monitor_(AIX)": 8,
"LogRhythm_File_Monitor_(HP-UX)": 1000137,
"LogRhythm_File_Monitor_(Linux)": 2,
"LogRhythm_File_Monitor_(Solaris)": 6,
"LogRhythm_File_Monitor_(Windows)": 3,
"LogRhythm_Filter": 1000695,
"LogRhythm_Network_Connection_Monitor_(AIX)": 1000163,
"LogRhythm_Network_Connection_Monitor_(HP-UX)": 1000164,
"LogRhythm_Network_Connection_Monitor_(Linux)": 1000165,
"LogRhythm_Network_Connection_Monitor_(Solaris)": 1000166,
"LogRhythm_Network_Connection_Monitor_(Windows)": 1000162,
"LogRhythm_Process_Monitor_(AIX)": 1000159,
"LogRhythm_Process_Monitor_(HP-UX)": 1000160,
"LogRhythm_Process_Monitor_(Linux)": 1000167,
"LogRhythm_Process_Monitor_(Solaris)": 1000161,
"LogRhythm_Process_Monitor_(Windows)": 1000158,
"LogRhythm_Registry_Integrity_Monitor": 1000539,
"LogRhythm_SQL_Server_2000_C2_Audit_Log": 1000202,
"LogRhythm_SQL_Server_2005_C2_Audit_Log": 1000203,
"LogRhythm_SQL_Server_2008_C2_Audit_Log": 1000204,
"LogRhythm_SQL_Server_2012+_C2_Audit_Log": 1000475,
"LogRhythm_User_Activity_Monitor_(AIX)": 1000062,
"LogRhythm_User_Activity_Monitor_(HP-UX)": 1000138,
"LogRhythm_User_Activity_Monitor_(Linux)": 1000060,
"LogRhythm_User_Activity_Monitor_(Solaris)": 1000061,
"LogRhythm_User_Activity_Monitor_(Windows)": 1000059,
"MS_Event_Log_for_XP/2000/2003_-_Application": 31,
"MS_Event_Log_for_XP/2000/2003_-_Application_-_Espaniol": 1000571,
"MS_Event_Log_for_XP/2000/2003_-_BioPassword": 151,
"MS_Event_Log_for_XP/2000/2003_-_DFS": 1000112,
"MS_Event_Log_for_XP/2000/2003_-_Directory_Service": 32,
"MS_Event_Log_for_XP/2000/2003_-_DNS": 76,
"MS_Event_Log_for_XP/2000/2003_-_DotDefender": 1000083,
"MS_Event_Log_for_XP/2000/2003_-_EMC_Celerra_NAS": 1000488,
"MS_Event_Log_for_XP/2000/2003_-_File_Rep_Service": 33,
"MS_Event_Log_for_XP/2000/2003_-_HA": 1000069,
"MS_Event_Log_for_XP/2000/2003_-_Kaspersky": 1000102,
"MS_Event_Log_for_XP/2000/2003_-_Micros_POS": 1000354,
"MS_Event_Log_for_XP/2000/2003_-_PatchLink": 1000073,
"MS_Event_Log_for_XP/2000/2003_-_SafeWord_2008": 199,
"MS_Event_Log_for_XP/2000/2003_-_SCE": 1000173,
"MS_Event_Log_for_XP/2000/2003_-_Security": 23,
"MS_Event_Log_for_XP/2000/2003_-_Security_-_Espaniol": 1000569,
"MS_Event_Log_for_XP/2000/2003_-_SMS_2003": 1000038,
"MS_Event_Log_for_XP/2000/2003_-_System": 30,
"MS_Event_Log_for_XP/2000/2003_-_System_-_Espaniol": 1000570,
"MS_Event_Log_for_XP/2000/2003_-_Virtual_Server": 1000075,
"MS_Windows_Event_Logging_-_ADFS_Admin": 1000661,
"MS_Windows_Event_Logging_-_Application": 1000032,
"MS_Windows_Event_Logging_-_AppLockerApp": 1000557,
"MS_Windows_Event_Logging_-_Backup": 1000341,
"MS_Windows_Event_Logging_-_Citrix_Delivery_Services": 1000526,
"MS_Windows_Event_Logging_-_Citrix_XenApp": 1000701,
"MS_Windows_Event_Logging_-_DFS": 1000121,
"MS_Windows_Event_Logging_-_DHCP_Admin": 1000540,
"MS_Windows_Event_Logging_-_DHCP_Operational": 1000537,
"MS_Windows_Event_Logging_-_Diagnosis-PLA": 1000280,
"MS_Windows_Event_Logging_-_Digital_Persona": 1000483,
"MS_Windows_Event_Logging_-_Dir_Service": 1000119,
"MS_Windows_Event_Logging_-_DNS": 1000120,
"MS_Windows_Event_Logging_-_Dot_Defender": 1000303,
"MS_Windows_Event_Logging_-_ESD_Data_Flow_Track": 1000583,
"MS_Windows_Event_Logging_-_Exchange_Mailbox_DB_Failures": 1000446,
"MS_Windows_Event_Logging_-_FailoverClustering/Operational": 1000447,
"MS_Windows_Event_Logging_-_Firewall_With_Advanced_Security": 1000302,
"MS_Windows_Event_Logging_-_Forefront_AV": 1000352,
"MS_Windows_Event_Logging_-_Group_Policy_Operational": 1000301,
"MS_Windows_Event_Logging_-_Hyper-V_Hvisor": 1000264,
"MS_Windows_Event_Logging_-_Hyper-V_IMS": 1000263,
"MS_Windows_Event_Logging_-_Hyper-V_Network": 1000265,
"MS_Windows_Event_Logging_-_Hyper-V_SynthSt": 1000266,
"MS_Windows_Event_Logging_-_Hyper-V_VMMS": 1000251,
"MS_Windows_Event_Logging_-_Hyper-V_Worker": 1000262,
"MS_Windows_Event_Logging_-_Kaspersky": 1000495,
"MS_Windows_Event_Logging_-_Kernel_PnP_Configuration": 1000559,
"MS_Windows_Event_Logging_-_Lync_Server": 1000628,
"MS_Windows_Event_Logging_-_MSExchange_Management": 1000338,
"MS_Windows_Event_Logging_-_Operations_Manager": 1000421,
"MS_Windows_Event_Logging_-_PowerShell": 1000627,
"MS_Windows_Event_Logging_-_Print_Services": 1000356,
"MS_Windows_Event_Logging_-_Quest_ActiveRoles_EDM_Server": 1000577,
"MS_Windows_Event_Logging_-_Replication": 1000122,
"MS_Windows_Event_Logging_-_SafeWord_2008": 1000419,
"MS_Windows_Event_Logging_-_Security": 1000030,
"MS_Windows_Event_Logging_-_Setup": 1000281,
"MS_Windows_Event_Logging_-_Sysmon": 1000558,
"MS_Windows_Event_Logging_-_System": 1000031,
"MS_Windows_Event_Logging_-_Task_Scheduler": 1000308,
"MS_Windows_Event_Logging_-_TS_Gateway": 1000532,
"MS_Windows_Event_Logging_-_TS_Licensing": 1000272,
"MS_Windows_Event_Logging_-_TS_Local_Session_Manager": 1000271,
"MS_Windows_Event_Logging_-_TS_Remote_Connection_Manager": 1000300,
"MS_Windows_Event_Logging_-_TS_Session_Broker": 1000320,
"MS_Windows_Event_Logging_-_TS_Session_Broker_Client": 1000309,
"MS_Windows_Event_Logging_-_VisualSVN": 1000578,
"MS_Windows_Event_Logging_:_Deutsch_-_Security": 1000470,
"MS_Windows_Event_Logging_:_Espaniol_-_Application": 1000566,
"MS_Windows_Event_Logging_:_Espaniol_-_Security": 1000565,
"MS_Windows_Event_Logging_:_Espaniol_-_System": 1000568,
"MS_Windows_Event_Logging_:_Francais_-_System": 1000468,
"MS_Windows_Event_Logging_:Francais_-_Security": 1000469,
"MS_Windows_Event_Logging_XML_-_ADFS": 1000868,
"MS_Windows_Event_Logging_XML_-_Application": 1000562,
"MS_Windows_Event_Logging_XML_-_Forwarded_Events": 1000746,
"MS_Windows_Event_Logging_XML_-_Generic": 1000738,
"MS_Windows_Event_Logging_XML_-_LRTracer": 1000784,
"MS_Windows_Event_Logging_XML_-_Microsoft-Windows-NTLM/Operational": 1000781,
"MS_Windows_Event_Logging_XML_-_Security": 1000639,
"MS_Windows_Event_Logging_XML_-_Sysmon": 1000862,
"MS_Windows_Event_Logging_XML_-_Sysmon_7.01": 1000724,
"MS_Windows_Event_Logging_XML_-_Sysmon_8/9/10": 1000745,
"MS_Windows_Event_Logging_XML_-_System": 1000662,
"MS_Windows_Event_Logging_XML_-_Unisys_Stealth": 1000681,
"MS_Windows_Event_Logging_XML_-_Windows_Defender": 1000856,
"Netflow_-_Cisco_Netflow_Version_1": 101,
"Netflow_-_Cisco_Netflow_Version_5": 102,
"Netflow_-_Cisco_Netflow_Version_9": 1000174,
"Netflow_-_Palo_Alto_Version_9": 191,
"Netflow_-_SonicWALL_Version_5": 1000436,
"Netflow_-_SonicWALL_Version_9": 1000437,
"OPSEC_LEA_-_Checkpoint_Firewall": 125,
"OPSEC_LEA_-_Checkpoint_Firewall_Audit_Log": 1000304,
"OPSEC_LEA_-_Checkpoint_For_LR_7.4.1+": 1000741,
"OPSEC_LEA_-_Checkpoint_Log_Server": 126,
"sFlow_-_Version_5": 1000239,
"SNMP_Trap_-_Audiolog": 1000259,
"SNMP_Trap_-_Autoregistered": 1000149,
"SNMP_Trap_-_Brocade_Switch": 1000599,
"SNMP_Trap_-_Cisco_5508_Wireless_Controller": 1000545,
"SNMP_Trap_-_Cisco_IP_SLA": 1000572,
"SNMP_Trap_-_Cisco_Prime": 1000629,
"SNMP_Trap_-_Cisco_Router-Switch": 1000327,
"SNMP_Trap_-_CyberArk": 1000240,
"SNMP_Trap_-_Dell_OpenManage": 1000322,
"SNMP_Trap_-_HP_Network_Node_Manager": 1000377,
"SNMP_Trap_-_IBM_TS3000_Series_Tape_Drive": 1000258,
"SNMP_Trap_-_Riverbed_SteelCentral_NetShark": 1000508,
"SNMP_Trap_-_RSA_Authentication_Manager": 1000248,
"SNMP_Trap_-_Swift_Alliance": 1000405,
"SNMP_Trap_-_Trend_Micro_Control_Manager": 1000413,
"Syslog_-_3Com_Switch": 1000329,
"Syslog_-_A10_Networks_AX1000_Load_Balancer": 1000268,
"Syslog_-_A10_Networks_Web_Application_Firewall": 1000785,
"Syslog_-_Accellion_Secure_File_Transfer_Application": 1000665,
"Syslog_-_Active_Scout_IPS": 128,
"Syslog_-_Adallom": 1000585,
"Syslog_-_Adtran_Switch": 1000284,
"Syslog_-_Aerohive_Access_Point": 1000467,
"Syslog_-_Aerohive_Firewall": 1000677,
"Syslog_-_AIMIA_Tomcat": 1000635,
"Syslog_-_AirDefense_Enterprise": 182,
"Syslog_-_Airmagnet_Wireless_IDS": 177,
"Syslog_-_AirTight_IDS/IPS": 145,
"Syslog_-_AirWatch_MDM": 1000594,
"Syslog_-_Airwave_Management_System_Log": 150,
"Syslog_-_AIX_Host": 90,
"Syslog_-_Alcatel-Lucent_Switch": 1000756,
"Syslog_-_Alcatel-Lucent_Wireless_Controller": 1000425,
"Syslog_-_AlertLogic": 1000742,
"Syslog_-_AMX_AV_Controller": 27,
"Syslog_-_Apache_Access_Log": 1000255,
"Syslog_-_Apache_Error_Log": 1000254,
"Syslog_-_Apache_Tomcat_Request_Parameters": 110,
"Syslog_-_Apache_Tomcat_Service_Clients_Log": 1000418,
"Syslog_-_APC_ATS": 1000400,
"Syslog_-_APC_NetBotz_Environmental_Monitoring": 1000348,
"Syslog_-_APC_PDU": 1000416,
"Syslog_-_APC_UPS": 1000200,
"Syslog_-_Apcon_Network_Monitor": 1000491,
"Syslog_-_Apex_One": 1000832,
"Syslog_-_Arbor_Networks_Peakflow": 1000477,
"Syslog_-_Arbor_Networks_Spectrum": 1000708,
"Syslog_-_Arbor_Pravail_APS": 1000464,
"Syslog_-_Arista_Switch": 1000410,
"Syslog_-_Array_TMX_Load_Balancer": 1000525,
"Syslog_-_Arris_CMTS": 1000230,
"Syslog_-_Aruba_Clear_Pass": 1000502,
"Syslog_-_Aruba_Mobility_Controller": 144,
"Syslog_-_Aruba_Wireless_Access_Point": 1000529,
"Syslog_-_AS/400_via_Powertech_Interact": 178,
"Syslog_-_Asus_WRT_Router": 1000679,
"Syslog_-_Avatier_Identity_Management_Suite_(AIMS)": 1000780,
"Syslog_-_Avaya_Communications_Manager": 1000459,
"Syslog_-_Avaya_Ethernet_Routing_Switch": 1000482,
"Syslog_-_Avaya_G450_Media_Gateway": 1000680,
"Syslog_-_Avaya_Router": 1000581,
"Syslog_-_Aventail_SSL/VPN": 1000132,
"Syslog_-_Avocent_Cyclades_Terminal_Server": 1000396,
"Syslog_-_Azul_Java_Appliance": 1000217,
"Syslog_-_Barracuda_Load_Balancer": 1000370,
"Syslog_-_Barracuda_Mail_Archiver": 1000492,
"Syslog_-_Barracuda_NG_Firewall": 1000442,
"Syslog_-_Barracuda_NG_Firewall_6.x": 1000613,
"Syslog_-_Barracuda_Spam_Firewall": 132,
"Syslog_-_Barracuda_Web_Application_Firewall": 1000342,
"Syslog_-_Barracuda_Webfilter": 140,
"Syslog_-_BeyondTrust_BeyondInsight_LEEF": 1000778,
"Syslog_-_Bind_DNS": 1000621,
"Syslog_-_Bit9_Parity_Suite": 1000215,
"Syslog_-_Bit9_Security_Platform_CEF": 1000622,
"Syslog_-_Bit9+Carbon_Black_(Deprecated)": 1000620,
"Syslog_-_BitDefender": 1000597,
"Syslog_-_Black_Diamond_Switch": 1000004,
"Syslog_-_Blue_Coat_CAS": 1000739,
"Syslog_-_Blue_Coat_Forward_Proxy": 1000509,
"Syslog_-_Blue_Coat_PacketShaper": 1000392,
"Syslog_-_Blue_Coat_ProxyAV_ISA_W3C_Format": 1000126,
"Syslog_-_Blue_Coat_ProxyAV_MS_Proxy_2.0_Format": 1000143,
"Syslog_-_Blue_Coat_ProxySG": 166,
"Syslog_-_Blue_Socket_Wireless_Controller": 1000451,
"Syslog_-_Bluecat_Adonis": 1000438,
"Syslog_-_BlueCedar": 1000753,
"Syslog_-_BluVector": 1000769,
"Syslog_-_Bomgar": 1000347,
"Syslog_-_Bradford_Networks_NAC": 1000553,
"Syslog_-_Bradford_Remediation_&_Registration_Svr": 155,
"Syslog_-_Bro_IDS": 1000723,
"Syslog_-_Brocade_Switch": 183,
"Syslog_-_Bromium_vSentry_CEF": 1000513,
"Syslog_-_BSD_Host": 117,
"Syslog_-_CA_Privileged_Access_Manager": 1000808,
"Syslog_-_Cb_Defense_CEF": 1000702,
"Syslog_-_Cb_Protection_CEF": 1000420,
"Syslog_-_Cb_Response_LEEF": 1000651,
"Syslog_-_Cell_Relay": 1000407,
"Syslog_-_Certes_Networks_CEP": 1000445,
"Syslog_-_Check_Point_Log_Exporter": 1000806,
"Syslog_-_Checkpoint_Site-to-Site_VPN": 1000376,
"Syslog_-_Cisco_ACS": 1000063,
"Syslog_-_Cisco_Aironet_WAP": 1000002,
"Syslog_-_Cisco_APIC": 1000764,
"Syslog_-_Cisco_Application_Control_Engine": 1000130,
"Syslog_-_Cisco_ASA": 5,
"Syslog_-_Cisco_Clean_Access_(CCA)_Appliance": 1000201,
"Syslog_-_Cisco_CSS_Load_Balancer": 1000064,
"Syslog_-_Cisco_Email_Security_Appliance": 1000021,
"Syslog_-_Cisco_FirePOWER": 1000683,
"Syslog_-_Cisco_Firepower_Threat_Defense": 18,
"Syslog_-_Cisco_FireSIGHT": 1000595,
"Syslog_-_Cisco_FWSM": 163,
"Syslog_-_Cisco_Global_Site_Selector": 1000068,
"Syslog_-_Cisco_ISE": 1000369,
"Syslog_-_Cisco_Meraki": 1000530,
"Syslog_-_Cisco_Nexus_Switch": 1000225,
"Syslog_-_Cisco_PIX": 1000000003,
"Syslog_-_Cisco_Prime_Infrastructure": 1000500,
"Syslog_-_Cisco_Router": 86,
"Syslog_-_Cisco_Secure_ACS_5": 1000206,
"Syslog_-_Cisco_Session_Border_Controller": 11,
"Syslog_-_Cisco_Switch": 85,
"Syslog_-_Cisco_Telepresence_Video_Communications_Server": 1000657,
"Syslog_-_Cisco_UCS": 1000391,
"Syslog_-_Cisco_Unified_Comm_Mgr_(Call_Mgr)": 1000133,
"Syslog_-_Cisco_VPN_Concentrator": 116,
"Syslog_-_Cisco_WAAS": 1000333,
"Syslog_-_Cisco_Web_Security": 1000390,
"Syslog_-_Cisco_Wireless_Access_Point": 1000394,
"Syslog_-_Cisco_Wireless_Control_System": 1000101,
"Syslog_-_CiscoWorks": 1000260,
"Syslog_-_Citrix_Access_Gateway_Server": 1000403,
"Syslog_-_Citrix_Netscaler": 25,
"Syslog_-_Citrix_XenServer": 1000257,
"Syslog_-_Claroty_CTD_CEF": 1000801,
"Syslog_-_Clearswift_Secure_Email_Gateway": 1000747,
"Syslog_-_CloudLock": 1000659,
"Syslog_-_CodeGreen_Data_Loss_Prevention": 1000097,
"Syslog_-_Cofense_Triage_CEF": 1000632,
"Syslog_-_Consentry_NAC": 165,
"Syslog_-_Corero_IPS": 1000431,
"Syslog_-_Corero_SmartWall_DDoS": 22,
"Syslog_-_CoyotePoint_Equalizer": 1000289,
"Syslog_-_Crowdstrike_Falconhost_CEF": 1000682,
"Syslog_-_CyberArk": 1000325,
"Syslog_-_CyberArk_Privileged_Threat_Analytics": 1000652,
"Syslog_-_Cylance_CEF": 1000813,
"Syslog_-_CylancePROTECT": 1000625,
"Syslog_-_DarkTrace_CEF": 1000710,
"Syslog_-_Dell_Force_10": 1000423,
"Syslog_-_Dell_PowerConnect_Switch": 1000118,
"Syslog_-_Dell_Remote_Access_Controller": 1000324,
"Syslog_-_Dell_SecureWorks_iSensor_IPS": 1000554,
"Syslog_-_Dialogic_Media_Gateway": 1000125,
"Syslog_-_Digital_Guardian_CEF": 1000800,
"Syslog_-_D-Link_Switch": 1000504,
"Syslog_-_Don_not_use": 1000827,
"Syslog_-_Dragos_Platform_CEF": 1000852,
"Syslog_-_Ecessa_ShieldLink": 1000282,
"Syslog_-_EfficientIP": 7,
"Syslog_-_EMC_Avamar": 1000556,
"Syslog_-_EMC_Centera": 1000490,
"Syslog_-_EMC_Data_Domain": 1000551,
"Syslog_-_EMC_Isilon": 20,
"Syslog_-_EMC_Unity_Array": 1000751,
"Syslog_-_EMC_VNX": 1000432,
"Syslog_-_Ensilo_NGAV": 1000830,
"Syslog_-_Enterasys_Dragon_IDS": 1000131,
"Syslog_-_Enterasys_Router": 123,
"Syslog_-_Enterasys_Switch": 124,
"Syslog_-_Entrust_Entelligence_Messaging_Server": 1000462,
"Syslog_-_Entrust_IdentityGuard": 1000234,
"Syslog_-_Epic_Hyperspace_CEF": 1000668,
"Syslog_-_EqualLogic_SAN": 189,
"Syslog_-_eSafe_Email_Security": 1000366,
"Syslog_-_ESET_Remote_Administrator_(ERA)_LEEF": 1000754,
"Syslog_-_Event_Reporter_(Win_2000/XP/2003)": 1000046,
"Syslog_-_Exabeam": 3,
"Syslog_-_Exchange_Message_Tracking": 6,
"Syslog_-_ExtraHop": 1000795,
"Syslog_-_Extreme_Wireless_LAN": 1000058,
"Syslog_-_ExtremeWare": 1000318,
"Syslog_-_ExtremeXOS": 1000317,
"Syslog_-_F5_BIG-IP_Access_Policy_Manager": 1000676,
"Syslog_-_F5_BIG-IP_AFM": 1000771,
"Syslog_-_F5_BIG-IP_ASM": 1000236,
"Syslog_-_F5_BIG-IP_ASM_Key-Value_Pairs": 1000749,
"Syslog_-_F5_BIG-IP_ASM_v12": 1000709,
"Syslog_-_F5_Big-IP_GTM_&_DNS": 188,
"Syslog_-_F5_Big-IP_LTM": 1000335,
"Syslog_-_F5_FirePass_Firewall": 179,
"Syslog_-_F5_Silverline_DDoS_Protection": 1000799,
"Syslog_-_Fargo_HDP_Card_Printer_and_Encoder": 1000358,
"Syslog_-_Fat_Pipe_Load_Balancer": 1000807,
"Syslog_-_Fidelis_XPS": 1000104,
"Syslog_-_FireEye_E-Mail_MPS": 1000542,
"Syslog_-_FireEye_EX": 1000831,
"Syslog_-_FireEye_Web_MPS/CMS/ETP/HX": 1000359,
"Syslog_-_Forcepoint_DLP": 1000321,
"Syslog_-_Forcepoint_Email_Security_Gateway": 1000591,
"Syslog_-_Forcepoint_Stonesoft_NGFW": 1000675,
"Syslog_-_Forcepoint_SureView_Insider_Threat": 1000660,
"Syslog_-_Forcepoint_Web_Security": 1000375,
"Syslog_-_Forcepoint_Web_Security_CEF_Format": 1000452,
"Syslog_-_Forescout_CounterACT_NAC": 1000157,
"Syslog_-_Fortinet_FortiAnalyzer": 1000811,
"Syslog_-_Fortinet_FortiAuthenticator": 1000846,
"Syslog_-_Fortinet_FortiDDoS": 1000782,
"Syslog_-_Fortinet_FortiGate": 130,
"Syslog_-_Fortinet_FortiGate_v4.0": 1000199,
"Syslog_-_Fortinet_FortiGate_v5.0": 1000426,
"Syslog_-_Fortinet_FortiGate_v5.2": 1000567,
"Syslog_-_Fortinet_FortiGate_v5.4/v5.6": 1000700,
"Syslog_-_Fortinet_FortiGate_v5.6_CEF": 1000722,
"Syslog_-_Fortinet_Fortigate_v6.0": 1000774,
"Syslog_-_Fortinet_FortiMail": 1000536,
"Syslog_-_Fortinet_FortiWeb": 1000493,
"Syslog_-_Foundry_Switch": 1000050,
"Syslog_-_Gene6_FTP": 153,
"Syslog_-_Generic_CEF": 1000725,
"Syslog_-_Generic_ISC_DHCP": 1000088,
"Syslog_-_Generic_LEEF": 1000728,
"Syslog_-_Guardium_Database_Activity_Monitor": 1000326,
"Syslog_-_H3C_Router": 1000243,
"Syslog_-_Hitachi_Universal_Storage_Platform": 1000398,
"Syslog_-_HP_BladeSystem": 1000439,
"Syslog_-_HP_iLO": 1000616,
"Syslog_-_HP_Procurve_Switch": 160,
"Syslog_-_HP_Router": 1000057,
"Syslog_-_HP_Switch": 1000444,
"Syslog_-_HP_Unix_Tru64": 1000096,
"Syslog_-_HP_Virtual_Connect_Switch": 1000350,
"Syslog_-_HP-UX_Host": 89,
"Syslog_-_Huawei_Access_Router": 1000541,
"Syslog_-_IBM_Blade_Center": 1000401,
"Syslog_-_IBM_Security_Network_Protection": 1000521,
"Syslog_-_IBM_Virtual_Tape_Library_Server": 1000511,
"Syslog_-_IBM_WebSphere_DataPower_Integration": 1000441,
"Syslog_-_IBM_zSecure_Alert_for_ACF2_2.1.0": 1000590,
"Syslog_-_IceWarp_Server": 1000267,
"Syslog_-_Imperva_Incapsula_CEF": 1000763,
"Syslog_-_Imperva_SecureSphere": 1000135,
"Syslog_-_Imprivata_OneSign_SSO": 1000693,
"Syslog_-_InfoBlox": 1000089,
"Syslog_-_Invincea_(LEEF)": 1000626,
"Syslog_-_iPrism_Proxy_Log": 1000095,
"Syslog_-_IPSWITCH_MOVEit_Server": 1000573,
"Syslog_-_IPTables": 1000364,
"Syslog_-_IRIX_Host": 118,
"Syslog_-_iSeries_via_Powertech_Interact": 184,
"Syslog_-_Ivanti_FileDirector": 16,
"Syslog_-_JetNexus_Load_Balancer": 1000332,
"Syslog_-_Juniper_DX_Application_Accelerator": 1000147,
"Syslog_-_Juniper_Firewall": 1000045,
"Syslog_-_Juniper_Firewall_3400": 1000601,
"Syslog_-_Juniper_Host_Checker": 1000082,
"Syslog_-_Juniper_IDP": 1000053,
"Syslog_-_Juniper_NSM": 1000242,
"Syslog_-_Juniper_Router": 1000026,
"Syslog_-_Juniper_SSL_VPN": 186,
"Syslog_-_Juniper_SSL_VPN_WELF_Format": 1000111,
"Syslog_-_Juniper_Switch": 1000037,
"Syslog_-_Juniper_Trapeze": 1000343,
"Syslog_-_Juniper_vGW_Virtual_Gateway": 1000448,
"Syslog_-_Kaspersky_Security_Center": 1000797,
"Syslog_-_Kea_DHCP_Server": 10,
"Syslog_-_Kemp_Load_Balancer": 1000412,
"Syslog_-_KFSensor_Honeypot": 1000672,
"Syslog_-_KFSensor_Honeypot_CEF": 1000691,
"Syslog_-_Lancope_StealthWatch": 1000393,
"Syslog_-_Lancope_StealthWatch_CEF": 1000698,
"Syslog_-_Layer_7_SecureSpan_SOA_Gateway": 1000427,
"Syslog_-_Legacy_Checkpoint_Firewall_(Not_Log_Exporter)": 1000434,
"Syslog_-_Legacy_Checkpoint_IPS_(Not_Log_Exporter)": 1000103,
"Syslog_-_Lieberman_Enterprise_Random_Password_Manager": 1000353,
"Syslog_-_Linux_Audit": 1000139,
"Syslog_-_Linux_Host": 13,
"Syslog_-_Linux_TACACS_Plus": 23,
"Syslog_-_LOGbinder_EX": 1000533,
"Syslog_-_LOGbinder_SP": 1000408,
"Syslog_-_LOGbinder_SQL": 1000555,
"Syslog_-_LogRhythm_Data_Indexer_Monitor": 1000653,
"Syslog_-_LogRhythm_Inter_Deployment_Data_Sharing": 1000815,
"Syslog_-_LogRhythm_Log_Distribution_Services": 1000840,
"Syslog_-_LogRhythm_Network_Monitor": 197,
"Syslog_-_LogRhythm_Syslog_Generator": 105,
"Syslog_-_Lumension": 1000608,
"Syslog_-_MacOS_X": 1000144,
"Syslog_-_Malwarebytes_Endpoint_Security_CEF": 1000773,
"Syslog_-_Mandiant_MIR": 1000489,
"Syslog_-_McAfee_Advanced_Threat_Defense": 1000617,
"Syslog_-_McAfee_Email_And_Web_Security": 1000051,
"Syslog_-_McAfee_ePO": 1000866,
"Syslog_-_McAfee_Firewall_Enterprise": 1000001,
"Syslog_-_McAfee_Network_Security_Manager": 1000036,
"Syslog_-_McAfee_Secure_Internet_Gateway": 136,
"Syslog_-_McAfee_SecureMail": 1000092,
"Syslog_-_McAfee_Skyhigh_for_Shadow_IT_LEEF": 1000644,
"Syslog_-_McAfee_Web_Gateway": 1000612,
"Syslog_-_mGuard_Firewall": 1000711,
"Syslog_-_Microsoft_Advanced_Threat_Analytics_(ATA)_CEF": 1000731,
"Syslog_-_Microsoft_Azure_Log_Integration": 1000733,
"Syslog_-_Microsoft_Azure_MFA": 1000707,
"Syslog_-_Microsoft_Forefront_UAG": 1000461,
"Syslog_-_Mirapoint": 1000228,
"Syslog_-_MobileIron": 1000497,
"Syslog_-_Motorola_Access_Point": 1000313,
"Syslog_-_MS_IIS_Web_Log_W3C_Format_(Snare)": 1000027,
"Syslog_-_MS_Windows_Event_Logging_XML_-_Application": 1000783,
"Syslog_-_MS_Windows_Event_Logging_XML_-_Security": 1000669,
"Syslog_-_MS_Windows_Event_Logging_XML_-_System": 1000671,
"Syslog_-_Nagios": 1000319,
"Syslog_-_nCircle_Configuration_Compliance_Manager": 1000430,
"Syslog_-_NetApp_Filer": 1000108,
"Syslog_-_NETASQ_Firewall": 1000485,
"Syslog_-_NetGate_Router": 1000527,
"Syslog_-_NetMotion_VPN": 1000592,
"Syslog_-_Netscout_nGenius_InfiniStream": 1000481,
"Syslog_-_NetScreen_Firewall": 107,
"Syslog_-_Netskope": 1000736,
"Syslog_-_Netskope_CEF": 1000853,
"Syslog_-_Network_Chemistry_RFprotect": 108,
"Syslog_-_Nginx_Web_Log": 1000584,
"Syslog_-_Nimble_Storage": 1000727,
"Syslog_-_Nortel_8600_Switch": 1000081,
"Syslog_-_Nortel_BayStack_Switch": 171,
"Syslog_-_Nortel_Contivity": 1000153,
"Syslog_-_Nortel_Firewall": 168,
"Syslog_-_Nortel_IP_1220": 1000205,
"Syslog_-_Nortel_Passport_Switch": 169,
"Syslog_-_Nozomi_Networks_Guardian_CEF": 1000819,
"Syslog_-_NuSecure_Gateway": 1000198,
"Syslog_-_Nutanix": 26,
"Syslog_-_Open_Collector": 1000759,
"Syslog_-_Open_Collector_-_AWS_CloudTrail": 1000786,
"Syslog_-_Open_Collector_-_AWS_CloudWatch": 1000789,
"Syslog_-_Open_Collector_-_AWS_Config_Events": 1000790,
"Syslog_-_Open_Collector_-_AWS_Guard_Duty": 1000791,
"Syslog_-_Open_Collector_-_AWS_S3": 1000802,
"Syslog_-_Open_Collector_-_Azure_Event_Hub": 1000772,
"Syslog_-_Open_Collector_-_Carbon_Black_Cloud": 1000861,
"Syslog_-_Open_Collector_-_CarbonBlackBeat_Heartbeat": 1000864,
"Syslog_-_Open_Collector_-_Cisco_AMP": 1000842,
"Syslog_-_Open_Collector_-_Cisco_Umbrella": 1000787,
"Syslog_-_Open_Collector_-_CiscoAMPBeat_Heartbeat": 1000843,
"Syslog_-_Open_Collector_-_Duo_Authentication_Security": 1000854,
"Syslog_-_Open_Collector_-_DuoBeat_Heartbeat": 1000855,
"Syslog_-_Open_Collector_-_EventHubBeat_Heartbeat": 1000833,
"Syslog_-_Open_Collector_-_GCP_Audit": 1000817,
"Syslog_-_Open_Collector_-_GCP_Cloud_Key_Management_Service": 1000820,
"Syslog_-_Open_Collector_-_GCP_Http_Load_Balancer": 1000839,
"Syslog_-_Open_Collector_-_GCP_Pub_Sub": 1000812,
"Syslog_-_Open_Collector_-_GCP_Security_Command_Center": 1000816,
"Syslog_-_Open_Collector_-_GCP_Virtual_Private_Cloud": 1000821,
"Syslog_-_Open_Collector_-_Gmail_Message_Tracking": 1000823,
"Syslog_-_Open_Collector_-_GMTBeat_Heartbeat": 1000834,
"Syslog_-_Open_Collector_-_GSuite": 1000758,
"Syslog_-_Open_Collector_-_GSuiteBeat_Heartbeat": 1000838,
"Syslog_-_Open_Collector_-_Metricbeat": 1000841,
"Syslog_-_Open_Collector_-_Okta_System_Log": 1000863,
"Syslog_-_Open_Collector_-_OktaSystemLogBeat_Heartbeat": 1000865,
"Syslog_-_Open_Collector_-_PubSubBeat_Heartbeat": 1000836,
"Syslog_-_Open_Collector_-_S3Beat_Heartbeat": 1000835,
"Syslog_-_Open_Collector_-_Sophos_Central": 1000814,
"Syslog_-_Open_Collector_-_SophosCentralBeat_Heartbeat": 1000837,
"Syslog_-_Open_Collector_-_Webhook": 1000850,
"Syslog_-_Open_Collector_-_Webhook_OneLogin": 1000848,
"Syslog_-_Open_Collector_-_Webhook_Zoom": 1000849,
"Syslog_-_Open_Collector_-_WebhookBeat_Heartbeat": 1000851,
"Syslog_-_Opengear_Console": 28,
"Syslog_-_OpenLDAP": 1000305,
"Syslog_-_Oracle_10g_Audit_Trail": 1000071,
"Syslog_-_Oracle_11g_Audit_Trail": 1000223,
"Syslog_-_OSSEC_Alerts": 1000218,
"Syslog_-_Other": 92,
"Syslog_-_Outpost24": 1000414,
"Syslog_-_Palo_Alto_Cortex_XDR": 1000867,
"Syslog_-_Palo_Alto_Custom_Pipe": 15,
"Syslog_-_Palo_Alto_Firewall": 1000134,
"Syslog_-_Palo_Alto_Traps_CEF": 1000729,
"Syslog_-_Palo_Alto_Traps_Management_Service": 1000796,
"Syslog_-_Password_Manager_Pro": 21,
"Syslog_-_pfSense_Firewall": 1000740,
"Syslog_-_PingFederate_7.2": 1000631,
"Syslog_-_PingFederate_CEF": 1000770,
"Syslog_-_Polycom": 1000362,
"Syslog_-_Postfix": 1000105,
"Syslog_-_Procera_PacketLogic": 9,
"Syslog_-_Proofpoint_Spam_Firewall": 141,
"Syslog_-_Protegrity_Defiance_DPS": 1000085,
"Syslog_-_QLogic_Infiniband_Switch": 1000449,
"Syslog_-_Quest_Defender": 1000328,
"Syslog_-_Radiator_Radius": 4,
"Syslog_-_RADiFlow_3180_Switch": 1000498,
"Syslog_-_Radware_Alteon_Load_Balancer": 1000245,
"Syslog_-_Radware_DefensePro": 1000241,
"Syslog_-_Radware_Web_Server_Director_Audit_Log": 1000344,
"Syslog_-_Raritan_KVM": 1000279,
"Syslog_-_Raz-Lee": 1000428,
"Syslog_-_RedSeal": 1000547,
"Syslog_-_Riverbed": 1000156,
"Syslog_-_RSA_ACE": 190,
"Syslog_-_RSA_Authentication_Manager_v7.1": 1000233,
"Syslog_-_RSA_Authentication_Manager_v8.x": 1000656,
"Syslog_-_RSA_Web_Threat_Detection": 1000512,
"Syslog_-_RSA_Web_Threat_Detection_5.1": 1000574,
"Syslog_-_RuggedRouter": 1000093,
"Syslog_-_Safenet": 1000074,
"Syslog_-_Sailpoint": 1000640,
"Syslog_-_Sauce_Labs": 1000704,
"Syslog_-_SecureAuth_IdP": 1000443,
"Syslog_-_SecureAuth_IdP_v9": 1000713,
"Syslog_-_SecureLink": 1000793,
"Syslog_-_SecureTrack": 1000249,
"Syslog_-_SEL_3610_Port_Switch": 1000273,
"Syslog_-_SEL_3620_Ethernet_Security_Gateway": 1000246,
"Syslog_-_Sentinel_IPS": 1000460,
"Syslog_-_SentinelOne_CEF": 1000712,
"Syslog_-_Sguil": 1000719,
"Syslog_-_Siemens_Scalance_X400": 1000473,
"Syslog_-_Smoothwall_Firewall": 1000435,
"Syslog_-_SnapGear_Firewall": 1000409,
"Syslog_-_Snare_Windows_2003_Event_Log": 1000028,
"Syslog_-_Snare_Windows_2008_Event_Log": 19,
"Syslog_-_Snort_IDS": 1000019,
"Syslog_-_Solaris_(Snare)": 120,
"Syslog_-_Solaris_Host": 91,
"Syslog_-_SonicWALL": 106,
"Syslog_-_SonicWALL_SSL-VPN": 137,
"Syslog_-_Sophos_Email_Encryption_Appliance": 1000336,
"Syslog_-_Sophos_UTM": 113,
"Syslog_-_Sophos_Web_Proxy": 1000399,
"Syslog_-_Sophos_XG_Firewall": 1000792,
"Syslog_-_Sourcefire_IDS_3D": 1000080,
"Syslog_-_Sourcefire_RNA": 1000340,
"Syslog_-_Spectracom_Network_Time_Server": 1000463,
"Syslog_-_Splunk_API_-_Checkpoint_Firewall": 1000689,
"Syslog_-_Splunk_API_-_Cisco_Netflow_V9": 1000697,
"Syslog_-_Splunk_API_-_Nessus_Vulnerability_Scanner": 1000692,
"Syslog_-_Squid_Proxy": 2,
"Syslog_-_StealthBits_Activity_Monitor": 1000844,
"Syslog_-_STEALTHbits_StealthINTERCEPT": 1000737,
"Syslog_-_StoneGate_Firewall": 1000291,
"Syslog_-_Stonesoft_IPS": 1000480,
"Syslog_-_Stormshield_Network_Security_Firewall": 1000650,
"Syslog_-_Sycamore_Networks_DNX-88": 1000588,
"Syslog_-_Sygate_Firewall": 180,
"Syslog_-_Symantec_Advanced_Threat_Protection_(ATP)_CEF": 1000798,
"Syslog_-_Symantec_DLP_CEF": 181,
"Syslog_-_Symantec_Endpoint_Server": 1000077,
"Syslog_-_Symantec_Messaging_Gateway": 1000828,
"Syslog_-_Symantec_PGP_Gateway": 1000387,
"Syslog_-_Symbol_Wireless_Access_Point": 114,
"Syslog_-_Tanium": 1000674,
"Syslog_-_Temporary_LST-2": 1000699,
"Syslog_-_Tenable_SecurityCenter": 1000534,
"Syslog_-_Thycotic_Secret_Server": 1000519,
"Syslog_-_Tipping_Point_IPS": 143,
"Syslog_-_Tipping_Point_SSL_Reverse_Proxy": 1000339,
"Syslog_-_Top_Layer_IPS": 1000048,
"Syslog_-_Townsend_Alliance_LogAgent": 1000213,
"Syslog_-_Trend_Micro_Control_Manager_CEF": 1000750,
"Syslog_-_Trend_Micro_Deep_Discovery_Inspector": 1000580,
"Syslog_-_Trend_Micro_Deep_Security_CEF": 1000388,
"Syslog_-_Trend_Micro_Deep_Security_LEEF": 1000804,
"Syslog_-_Trend_Micro_IWSVA": 1000330,
"Syslog_-_Trend_Micro_Vulnerability_Protection_Manager": 1000803,
"Syslog_-_Tripwire": 192,
"Syslog_-_Trustwave_NAC": 1000596,
"Syslog_-_Trustwave_Secure_Web_Gateway": 1000499,
"Syslog_-_Trustwave_Web_Application_Firewall": 1000065,
"Syslog_-_Tufin": 1000684,
"Syslog_-_Tumbleweed_Mailgate_Server": 1000078,
"Syslog_-_Ubiquiti_UniFi_Security_Gateway": 1000760,
"Syslog_-_Ubiquiti_UniFi_Switch": 1000757,
"Syslog_-_Ubiquiti_UniFi_WAP": 1000762,
"Syslog_-_Untangle": 1000365,
"Syslog_-_Vamsoft_ORF": 1000458,
"Syslog_-_Vanguard_Active_Alerts": 1000694,
"Syslog_-_Varonis_DatAlert": 1000544,
"Syslog_-_Vasco_Digipass_Identikey_Server": 1000503,
"Syslog_-_Vectra_Networks": 1000779,
"Syslog_-_Versa_Networks_SD-WAN": 1000824,
"Syslog_-_VMWare_ESX/ESXi_Server": 1000000,
"Syslog_-_VMware_Horizon_View": 1000603,
"Syslog_-_VMWare_NSX/NSX-T": 1000768,
"Syslog_-_VMWare_Unified_Access_Gateway": 1000871,
"Syslog_-_VMWare_vCenter_Server": 1000752,
"Syslog_-_VMWare_vShield": 1000487,
"Syslog_-_Voltage_Securemail": 1000543,
"Syslog_-_Vormetric_CoreGuard": 1000210,
"Syslog_-_Vormetric_Data_Security_Manager": 1000486,
"Syslog_-_WALLIX_Bastion": 1000765,
"Syslog_-_Watchguard_FireBox": 129,
"Syslog_-_WS2000_Wireless_Access_Point": 1000076,
"Syslog_-_Wurldtech_SmartFirewall": 198,
"Syslog_-_Xirrus_Wireless_Array": 1000197,
"Syslog_-_Zimbra_System_Log": 1000100,
"Syslog_-_Zix_E-mail_Encryption": 1000654,
"Syslog_-_Zscaler_Nano_Streaming_Service": 1000546,
"Syslog_-_ZXT_Load_Balancer": 1000411,
"Syslog_-_ZyWALL_VPN_Firewall": 1000666,
"Syslog_Avaya_G450_Media_Gateway": 1000670,
"Syslog_File_-_AIX_Host": 1000006,
"Syslog_File_-_BSD_Format": 35,
"Syslog_File_-_HP-UX_Host": 1000145,
"Syslog_File_-_IRIX_Host": 1000295,
"Syslog_File_-_Linux_Host": 103,
"Syslog_File_-_LogRhythm_Syslog_Generator": 13,
"Syslog_File_-_MS_2003_Event_Log_(Snare)": 1000039,
"Syslog_File_-_Oracle_10g_Audit_Trail": 1000072,
"Syslog_File_-_Oracle_11g_Audit_Trail": 1000222,
"Syslog_File_-_Solaris_Host": 104,
"UDLA_-_CA_Single_Sign-On": 1000636,
"UDLA_-_Deepnet_DualShield": 1000286,
"UDLA_-_Drupal": 1000496,
"UDLA_-_Finacle_Core": 1000196,
"UDLA_-_Finacle_Treasury_Logs": 1000178,
"UDLA_-_Forcepoint": 1000020,
"UDLA_-_Gallagher_Command_Centre": 1000810,
"UDLA_-_iManage_Worksite": 1000732,
"UDLA_-_ISS_Proventia_SiteProtector_-_IPS": 1000034,
"UDLA_-_LogRhythm_Enterprise_Monitoring_Solution": 1000314,
"UDLA_-_LREnhancedAudit": 1000548,
"UDLA_-_McAfee_ePolicy_Orchestrator_-_Universal_ePOEvents": 1000788,
"UDLA_-_McAfee_ePolicy_Orchestrator_3.6_-_Events": 158,
"UDLA_-_McAfee_ePolicy_Orchestrator_4.0_-_ePOEvents": 1000079,
"UDLA_-_McAfee_ePolicy_Orchestrator_4.5_-_ePOEvents": 1000175,
"UDLA_-_McAfee_ePolicy_Orchestrator_5.0_-_ePOEvents": 1000531,
"UDLA_-_McAfee_ePolicy_Orchestrator_5.1_-_ePOEvents": 1000550,
"UDLA_-_McAfee_ePolicy_Orchestrator_5.3_-_ePOEvents": 1000696,
"UDLA_-_McAfee_ePolicy_Orchestrator_5.9_-_ePOEvents": 1000761,
"UDLA_-_McAfee_Network_Access_Control": 1000055,
"UDLA_-_McAfee_Network_Security_Manager": 1000453,
"UDLA_-_Microsoft_System_Center_2012_Endpoint_Protection": 1000587,
"UDLA_-_ObserveIT": 1000605,
"UDLA_-_Oracle_10g_Audit_Trail": 152,
"UDLA_-_Oracle_11g_Audit_Trail": 1000171,
"UDLA_-_Oracle_12C_Unified_Auditing": 1000658,
"UDLA_-_Oracle_9i_Audit_Trail": 1000040,
"UDLA_-_Other": 1000576,
"UDLA_-_SEL_3530_RTAC": 1000285,
"UDLA_-_SharePoint_2007_AuditData": 1000208,
"UDLA_-_SharePoint_2010_EventData": 1000415,
"UDLA_-_SharePoint_2013_EventData": 1000606,
"UDLA_-_Siemens_Invision": 1000229,
"UDLA_-_Sophos_Anti-Virus": 1000090,
"UDLA_-_Sophos_Endpoint_Security_and_Control": 1000735,
"UDLA_-_Symantec_CSP": 1000505,
"UDLA_-_Symantec_SEP": 1000520,
"UDLA_-_Symmetry_Access_Control": 1000270,
"UDLA_-_VMWare_vCenter_Server": 1000378,
"UDLA_-_VMWare_vCloud": 1000538,
"VLS_-_Syslog_-_Infoblox_-_DNS_RPZ": 1000643,
"VLS_-_Syslog_-_Infoblox_-_Threat_Protection": 1000642
}
''' HELPER FUNCTIONS '''
def fix_date_values(item):
date_keys = ['normalDateMin', 'normalDate', 'normalMsgDateMax', 'logDate']
for key in date_keys:
if item.get(key):
item[key] = datetime.fromtimestamp(item.get(key) / 1000.0).\
strftime('%Y-%m-%d %H:%M:%S')
def fix_location_value(items):
    """Replace the API's 'no location' placeholder with the string 'NA'.

    The LogRhythm API returns ``{'id': -1}`` as the location of hosts and
    networks that have none.  The previous check compared against the
    Python 2 repr ``"{u'id': -1}"``, which never matches under Python 3;
    a structural check is used instead (the legacy repr comparison is kept
    as a fallback for safety).

    :param items: list of host/network dicts, modified in place
    :return: the same list, for call chaining
    """
    for item in items:
        location = item.get('location')
        if isinstance(location, dict) and location.get('id') == -1:
            item['location'] = 'NA'
        elif str(location) == '{u\'id\': -1}':
            # Legacy Python 2 repr form, preserved for backward compatibility.
            item['location'] = 'NA'
    return items
def get_time_frame(time_frame, start_arg, end_arg):
    """Resolve a named time frame into concrete (start, end) datetimes.

    'Custom' parses start_arg/end_arg as '%Y-%m-%d' (calling return_error
    when either is missing); the other named frames are computed relative
    to the current time.  An unrecognized frame yields (now, now).
    """
    start = datetime.now()
    end = datetime.now()
    relative_days = {'Last2Days': 2, 'LastWeek': 7, 'LastMonth': 30}
    if time_frame == 'Today':
        # Midnight of the current day.
        start = datetime(end.year, end.month, end.day)
    elif time_frame in relative_days:
        start = end - timedelta(days=relative_days[time_frame])
    elif time_frame == 'Custom':
        if not start_arg:
            return_error('start-date argument is missing')
        if not end_arg:
            return_error('end-date argument is missing')
        start = datetime.strptime(start_arg, '%Y-%m-%d')
        end = datetime.strptime(end_arg, '%Y-%m-%d')
    return start, end
def http_request(method, url_suffix, data=None, headers=HEADERS):
    """Send a request to the LogRhythm API and return the decoded JSON body.

    Calls ``return_error`` (which halts the command) on connection failures,
    non-JSON responses, 404s, and any other non-success status code.

    :param method: HTTP verb, e.g. 'GET' or 'POST'
    :param url_suffix: path appended to the configured BASE_URL
    :param data: already-serialized request body string, if any
    :param headers: headers to send; defaults to the module-level HEADERS
    :return: parsed JSON response, or {} for a 204 No Content
    """
    try:
        res = requests.request(
            method,
            urljoin(BASE_URL, url_suffix),
            headers=headers,
            # NOTE(review): requests' 'verify' expects True to validate TLS
            # certificates; confirm INSECURE holds the negated 'insecure'
            # integration parameter, otherwise this flag is inverted.
            verify=INSECURE,
            data=data
        )
    except Exception as e:
        return_error(e)
    # Handle error responses gracefully
    # A non-JSON content type (other than an empty 204) most likely means the
    # request hit something other than the API, e.g. a wrong host or port.
    if 'application/json' not in res.headers.get('Content-Type', []) and res.status_code != 204:
        LOG(f'response status code is: {res.status_code}')
        return_error('invalid url or port: ' + BASE_URL)
    if res.status_code == 404:
        # Prefer the API-provided message when one is present.
        if res.json().get('message'):
            return_error(res.json().get('message'))
        else:
            return_error('No data returned')
    if res.status_code not in {200, 201, 202, 204, 207}:
        return_error(
            'Error in API call to {}, status code: {}, reason: {}'.format(BASE_URL + '/' + url_suffix, res.status_code,
                                                                          res.json()['message']))
    if res.status_code == 204:
        # No Content -- nothing to decode.
        return {}
    return res.json()
def get_host_by_id(host_id):
    """Fetch a single host record by ID and normalize its location field."""
    host = http_request('GET', 'lr-admin-api/hosts/' + host_id)
    return fix_location_value([host])
def update_hosts_keys(hosts):
    """Map raw LogRhythm host dicts to capitalized context-output keys."""
    return [
        {
            'EntityId': host.get('entity').get('id'),
            'EntityName': host.get('entity').get('name'),
            'OS': host.get('os'),
            'ThreatLevel': host.get('threatLevel'),
            'UseEventlogCredentials': host.get('useEventlogCredentials'),
            'Name': host.get('name'),
            'DateUpdated': host.get('dateUpdated'),
            'HostZone': host.get('hostZone'),
            'RiskLevel': host.get('riskLevel'),
            'Location': host.get('location'),
            'Status': host.get('recordStatusName'),
            'ThreatLevelComments': host.get('threatLevelComments'),
            'ID': host.get('id'),
            'OSType': host.get('osType')
        }
        for host in hosts
    ]
def update_networks_keys(networks):
    """Map raw LogRhythm network dicts to capitalized context-output keys."""
    return [
        {
            'EndIP': network.get('eip'),
            'HostStatus': network.get('recordStatusName'),
            'Name': network.get('name'),
            'RiskLevel': network.get('riskLevel'),
            'EntityId': network.get('entity').get('id'),
            'EntityName': network.get('entity').get('name'),
            'Location': network.get('location'),
            'ThreatLevel': network.get('threatLevel'),
            'DateUpdated': network.get('dateUpdated'),
            'HostZone': network.get('hostZone'),
            'ID': network.get('id'),
            'BeganIP': network.get('bip')
        }
        for network in networks
    ]
def update_users_keys(users):
    """Map raw LogRhythm user dicts to capitalized context-output keys."""
    return [
        {
            'ID': user.get('id'),
            'DateUpdated': user.get('dateUpdated'),
            'HostStatus': user.get('recordStatusName'),
            'LastName': user.get('lastName'),
            'FirstName': user.get('firstName'),
            'UserType': user.get('userType'),
            'Entity': user.get('objectPermissions').get('entity'),
            'Owner': user.get('objectPermissions').get('owner'),
            'ReadAccess': user.get('objectPermissions').get('readAccess'),
            'WriteAccess': user.get('objectPermissions').get('writeAccess')
        }
        for user in users
    ]
def update_logins_keys(logins):
    """Map raw LogRhythm login dicts to capitalized context-output keys."""
    return [
        {
            'Login': login.get('login'),
            'UserProfileId': login.get('userProfileId'),
            'UserId': login.get('userId'),
            'DefaultEntityId': login.get('defaultEntityId'),
            'HostStatus': login.get('recordStatusName'),
            'DateUpdated': login.get('dateUpdated'),
            'DateCreated': login.get('dateCreated'),
            'Entities': login.get('entities')
        }
        for login in logins
    ]
def update_profiles_keys(profiles):
    """Map raw LogRhythm user-profile dicts to capitalized context-output keys."""
    return [
        {
            'ID': profile.get('id'),
            'Name': profile.get('name'),
            'ShortDescription': profile.get('shortDescription'),
            'LongDescription': profile.get('longDescription'),
            'DataProcessorAccessMode': profile.get('dataProcessorAccessMode'),
            'SecurityRole': profile.get('securityRole'),
            # NOTE(review): this lookup uses a capitalized 'ProfileType' key,
            # unlike the camelCase used by every other API field here --
            # confirm against the actual API response shape.
            'ProfileType': profile.get('ProfileType'),
            'DateUpdated': profile.get('dateUpdated'),
            'TotalAssociatedUsers': profile.get('totalAssociatedUsers'),
            'NotificationGroupsPermissions': profile.get('notificationGroupsPermissions'),
            'ADGroupsPermissions': profile.get('adGroupsPermissions'),
            'EntityPermissions': profile.get('entityPermissions'),
            'DataProcessorsPermissions': profile.get('dataProcessorsPermissions'),
            'LogsourceListPermissions': profile.get('logsourceListPermissions'),
            'LogSourcePermissions': profile.get('logSourcePermissions'),
            'Privileges': profile.get('privileges'),
            'SmartResponsePluginsPermissions': profile.get('smartResponsePluginsPermissions')
        }
        for profile in profiles
    ]
def update_persons_keys(persons):
    """Map raw LogRhythm person dicts to capitalized context-output keys."""
    return [
        {
            'ID': person.get('id'),
            'DateUpdated': person.get('dateUpdated'),
            'HostStatus': person.get('recordStatusName'),
            'LastName': person.get('lastName'),
            'FirstName': person.get('firstName'),
            'IsAPIPerson': person.get('isAPIPerson'),
            'UserID': person.get('user').get('id'),
            'UserLogin': person.get('user').get('login')
        }
        for person in persons
    ]
def generate_query_value(valueType, value):
    """Coerce *value* into the form the LogRhythm query API expects for *valueType*."""
    if valueType == 2:
        # Numeric filter values are sent as plain integers.
        return int(value)
    if valueType == 5:
        # String filter values are sent as plain strings.
        return str(value)
    # All other value types are wrapped in a match-type object.
    return {"value": value, "matchType": 2}
def generate_query_item(filterType, valueType, value):
    """Build a single filter entry for a LogRhythm query payload."""
    filter_value = {
        "filterType": filterType,
        "valueType": valueType,
        "value": generate_query_value(valueType, value)
    }
    return {
        "filterItemType": 0,
        "fieldOperator": 1,
        "filterMode": 1,
        "values": [filter_value]
    }
''' COMMANDS + REQUESTS FUNCTIONS '''
def test_module():
    """Integration health check: verify the hosts endpoint is reachable and credentials work."""
    http_request('GET', 'lr-admin-api/hosts')
    demisto.results('ok')
def add_host(data_args):
    """Create a host under the given entity and write it to the war-room context."""
    payload = {
        "id": -1,
        "entity": {
            "id": int(data_args.get('entity-id')),
            "name": data_args.get('entity-name')
        },
        "name": data_args.get('name'),
        "shortDesc": data_args.get('short-description'),
        "longDesc": data_args.get('long-description'),
        "riskLevel": data_args.get('risk-level'),
        "threatLevel": data_args.get('threat-level'),
        "threatLevelComments": data_args.get('threat-level-comments'),
        "recordStatusName": data_args.get('host-status'),
        "hostZone": data_args.get('host-zone'),
        "os": data_args.get('os'),
        "useEventlogCredentials": bool(data_args.get('use-eventlog-credentials')),
        "osType": data_args.get('os-type')
    }
    raw = fix_location_value([http_request('POST', 'lr-admin-api/hosts/', json.dumps(payload))])
    context = createContext(update_hosts_keys(raw), removeNull=True)
    readable = data_args.get('name') + " added successfully to " + data_args.get('entity-name')
    return_outputs(readable_output=readable,
                   outputs={'Logrhythm.Host(val.ID === obj.ID)': context},
                   raw_response=raw)
def get_hosts_by_entity(data_args):
    """List the hosts that belong to one entity."""
    url = 'lr-admin-api/hosts?entity=' + data_args['entity-name'] + '&count=' + data_args['count']
    hosts = update_hosts_keys(fix_location_value(http_request('GET', url)))
    context = createContext(hosts, removeNull=True)
    markdown = tableToMarkdown('Hosts for ' + data_args.get('entity-name'), hosts, HOSTS_HEADERS)
    return_outputs(readable_output=markdown,
                   outputs={'Logrhythm.Host(val.Name && val.ID === obj.ID)': context},
                   raw_response=hosts)
def get_hosts(data_args):
    """Fetch a single host (when host-id is supplied) or list hosts."""
    host_id = data_args.get('host-id')
    if host_id:
        raw = get_host_by_id(host_id)
    else:
        raw = http_request('GET', 'lr-admin-api/hosts?count=' + data_args['count'])
    hosts = update_hosts_keys(fix_location_value(raw))
    context = createContext(hosts, removeNull=True)
    markdown = tableToMarkdown('Hosts information:', hosts, HOSTS_HEADERS)
    return_outputs(readable_output=markdown,
                   outputs={'Logrhythm.Host(val.Name && val.ID === obj.ID)': context},
                   raw_response=hosts)
def change_status(data_args):
    """Update a host's record status and return the refreshed host entry."""
    payload = [{
        "hostId": int(data_args.get('host-id')),
        "status": data_args.get('status')
    }]
    raw = http_request('PUT', 'lr-admin-api/hosts/status', json.dumps(payload))
    host_info = get_host_by_id(data_args.get('host-id'))
    context = createContext(update_hosts_keys(host_info), removeNull=True)
    return_outputs(readable_output='Status updated to ' + data_args.get('status'),
                   outputs={'Logrhythm.Host(val.ID === obj.ID)': context},
                   raw_response=raw)
def execute_query(data_args):
    """Run a keyword search against the LogRhythm legacy Elasticsearch API.

    Builds one daily index name ('logs-YYYY-MM-DD') per day in the requested
    time frame, queries the 'logMessage' field for the given keyword, then
    parses each returned Windows-event XML log into a flat dict for the
    war-room table.

    :param data_args: command arguments; uses 'time-frame', 'start-date',
        'end-date', 'page-size' and 'keyword'
    """
    # generate random string for request id
    req_id = ''.join(random.choice(string.ascii_letters) for x in range(8))
    start, end = get_time_frame(data_args.get('time-frame'), data_args.get('start-date'), data_args.get('end-date'))
    delta = end - start
    dates = []
    # One index name per day in the [start, end] range, inclusive.
    for i in range(delta.days + 1):
        dates.append((start + timedelta(days=i)).strftime("logs-%Y-%m-%d"))
    data = {
        "indices": dates,
        "searchType": "DFS_QUERY_THEN_FETCH",
        "source": {
            "size": data_args.get('page-size'),
            "query": {
                "query_string": {
                    "default_field": "logMessage",
                    "query": data_args.get('keyword')
                }
            },
            "stored_fields": "logMessage",
            "sort": [
                {
                    "normalDate": {
                        "order": "asc"
                    }
                }
            ]
        }
    }
    # Copy HEADERS so the module-level default is not mutated.
    headers = dict(HEADERS)
    headers['Content-Type'] = 'application/json'
    headers['Request-Id'] = req_id
    headers['Request-Origin-Date'] = str(datetime.now())
    headers['x-gateway-route-to-tag'] = CLUSTER_ID
    res = http_request('POST', 'lr-legacy-search-api/esquery', json.dumps(data), headers)
    logs = res['hits']['hits']
    logs_response = []
    # Namespace prefix used by Windows event XML elements.
    xml_ns = './/{http://schemas.microsoft.com/win/2004/08/events/event}'
    for log in logs:
        message = str(log['fields']['logMessage'])
        # Strip the surrounding repr characters -- assumes logMessage is a
        # single-element list, e.g. ["<Event ...>"]; TODO confirm.
        message = message[3:-2]
        try:
            root = ET.fromstring(message)
            log_item = {
                "EventID": str(root.find(xml_ns + 'EventID').text),  # type: ignore
                "Level": str(root.find(xml_ns + 'Level').text),  # type: ignore
                "Task": str(root.find(xml_ns + 'Task').text),  # type: ignore
                "Opcode": str(root.find(xml_ns + 'Opcode').text),  # type: ignore
                "Keywords": str(root.find(xml_ns + 'Keywords').text),  # type: ignore
                "Channel": str(root.find(xml_ns + 'Channel').text),  # type: ignore
                "Computer": str(root.find(xml_ns + 'Computer').text),  # type: ignore
                "EventData": str(root.find(xml_ns + 'EventData').text)  # type: ignore
                .replace('\\r\\n', '\n').replace('\\t', '\t')
            }
            logs_response.append(log_item)
        except Exception:
            # Logs that are not well-formed event XML are skipped silently.
            continue
    context = createContext(logs_response, removeNull=True)
    human_readable = tableToMarkdown('logs results', logs_response, LOGS_HEADERS)
    outputs = {'Logrhythm.Log': context}
    return_outputs(readable_output=human_readable, outputs=outputs, raw_response=logs_response)
def get_persons(data_args):
    """Fetch one person by ID or list persons."""
    person_id = data_args.get('person-id')
    if person_id:
        raw = [http_request('GET', 'lr-admin-api/persons/' + person_id)]
    else:
        raw = http_request('GET', 'lr-admin-api/persons?count=' + data_args['count'])
    persons = update_persons_keys(raw)
    context = createContext(persons, removeNull=True)
    markdown = tableToMarkdown('Persons information', context, PERSON_HEADERS)
    return_outputs(readable_output=markdown,
                   outputs={'Logrhythm.Person(val.ID === obj.ID)': context},
                   raw_response=persons)
def get_users(data_args):
    """Fetch one user by ID or list users."""
    user_id = data_args.get('user_id')
    if user_id:
        raw = [http_request('GET', 'lr-admin-api/users/' + user_id)]
    else:
        raw = http_request('GET', 'lr-admin-api/users?count=' + data_args['count'])
    users = update_users_keys(raw)
    context = createContext(users, removeNull=True)
    markdown = tableToMarkdown('Users information', context, USER_HEADERS)
    return_outputs(readable_output=markdown,
                   outputs={'Logrhythm.User(val.ID === obj.ID)': context},
                   raw_response=users)
def add_user(data_args):
    """Create an 'Individual' user, deriving an abbreviation when none is supplied."""
    first_name = data_args.get("first_name")
    last_name = data_args.get("last_name")
    payload = {
        "userType": "Individual",
        "firstName": first_name,
        "lastName": last_name
    }
    supplied_abbreviation = data_args.get("abbreviation")
    if supplied_abbreviation:
        payload["abbreviation"] = supplied_abbreviation
    else:
        # Default abbreviation: first initial + last name, lower-cased.
        payload["abbreviation"] = f"{first_name[0]}{last_name}".lower()
    raw = [http_request('POST', 'lr-admin-api/users/', json.dumps(payload))]
    users = update_users_keys(raw)
    context = createContext(users, removeNull=True)
    markdown = tableToMarkdown('User added', context, USER_HEADERS)
    return_outputs(readable_output=markdown,
                   outputs={'Logrhythm.User(val.ID === obj.ID)': context},
                   raw_response=users)
def get_logins(data_args):
    """Fetch the login for one user or list all user logins."""
    user_id = data_args.get('user_id')
    if user_id:
        raw = [http_request('GET', 'lr-admin-api/users/' + user_id + '/login/')]
    else:
        raw = http_request('GET', 'lr-admin-api/users/user-logins?count=' + data_args['count'])
    logins = update_logins_keys(raw)
    context = createContext(logins, removeNull=True)
    markdown = tableToMarkdown('Logins information', context, LOGIN_HEADERS)
    return_outputs(readable_output=markdown,
                   outputs={'Logrhythm.Login(val.Login === obj.Login)': context},
                   raw_response=logins)
def add_login(data_args):
    """Attach a login (credentials) to an existing user."""
    user_id = data_args.get('user_id')
    payload = {
        "login": data_args.get("login"),
        "userProfileId": arg_to_number(data_args.get("profile_id")),
        "defaultEntityId": arg_to_number(data_args.get("entity_id")),
        "password": data_args.get("password")
    }
    raw = [http_request('POST', 'lr-admin-api/users/' + user_id + '/login/', json.dumps(payload))]
    logins = update_logins_keys(raw)
    context = createContext(logins, removeNull=True)
    # NOTE(review): context path 'Logrhythm.User' (not 'Logrhythm.Login' as in
    # get_logins) is kept as-is for backward compatibility -- confirm intent.
    markdown = tableToMarkdown('Login added', context, LOGIN_HEADERS)
    return_outputs(readable_output=markdown,
                   outputs={'Logrhythm.User(val.ID === obj.ID)': context},
                   raw_response=logins)
def get_privileges(data_args):
    """Page through one user's privilege list."""
    user_id = data_args.get('user_id')
    url = ('lr-admin-api/users/' + user_id + '/privileges?offset='
           + data_args['offset'] + '&count=' + data_args['count'])
    raw = http_request('GET', url)
    result = {"ID": user_id, "Privileges": raw}
    context = createContext(result, removeNull=True)
    markdown = tableToMarkdown('Privileges information', context, ["Privileges"])
    return_outputs(readable_output=markdown,
                   outputs={'Logrhythm.Privileges(val.ID === obj.ID)': context},
                   raw_response=result)
def get_profiles(data_args):
    """Fetch one user profile by ID or list user profiles.

    :param data_args: command arguments; optional 'profile_id', else 'count'
    """
    profile_id = data_args.get('profile_id')
    if profile_id:
        res = [http_request('GET', 'lr-admin-api/user-profiles/' + profile_id)]
    else:
        res = http_request('GET', 'lr-admin-api/user-profiles?count=' + data_args['count'])
    res = update_profiles_keys(res)
    context = createContext(res, removeNull=True)
    outputs = {'Logrhythm.Profile(val.ID === obj.ID)': context}
    # Bug fix: the table title previously read 'Users information' (copied
    # from get_users); this command renders profiles.
    human_readable = tableToMarkdown('Profiles information', context, PROFILE_HEADERS)
    return_outputs(readable_output=human_readable, outputs=outputs, raw_response=res)
def get_networks(data_args):
    """Fetch one network by ID or list networks."""
    network_id = data_args.get('network-id')
    if network_id:
        raw = [http_request('GET', 'lr-admin-api/networks/' + network_id)]
    else:
        raw = http_request('GET', 'lr-admin-api/networks?count=' + data_args['count'])
    networks = update_networks_keys(fix_location_value(raw))
    context = createContext(networks, removeNull=True)
    markdown = tableToMarkdown('Networks information', context, NETWORK_HEADERS)
    return_outputs(readable_output=markdown,
                   outputs={'Logrhythm.Network(val.ID === obj.ID)': context},
                   raw_response=networks)
def get_alarm_data(data_args):
    """Retrieve drill-down details for a single alarm and post them to the war room.

    Args:
        data_args (dict): command arguments; expects 'alarm-id'.
    """
    alarm_id = data_args.get('alarm-id')
    res = http_request('GET', 'lr-drilldown-cache-api/drilldown/' + alarm_id)
    if not res:
        return_outputs(readable_output=f"No data was found for alarm with ID {alarm_id}.")
        return  # bug fix: previously fell through and crashed indexing an empty response
    alarm_data = res['Data']['DrillDownResults']
    alarm_summaries = res['Data']['DrillDownResults']['RuleBlocks']
    del alarm_data['RuleBlocks']
    # The AIE message arrives as XML; convert to JSON and strip the '@' attribute prefixes.
    aie_message = xml2json(str(alarm_data.get('AIEMsgXml'))).replace('\"@', '\"')
    alarm_data['AIEMsgXml'] = json.loads(aie_message).get('aie')
    alarm_data['Status'] = ALARM_STATUS[str(alarm_data['Status'])]
    # Normalize the id field name for context keying.
    alarm_data['ID'] = alarm_data['AlarmID']
    del alarm_data['AlarmID']
    dds_summaries = []
    for block in alarm_summaries:
        for item in block['DDSummaries']:
            item['PIFType'] = PIF_TYPES[str(item['PIFType'])]
            # Pull the "field" values out of the raw summary-log JSON string.
            m = re.findall(r'"field": "(([^"]|\\")*)"', item['DrillDownSummaryLogs'])
            fields = [k[0] for k in m]
            item['DrillDownSummaryLogs'] = ", ".join(fields)
            del item['DefaultValue']
            dds_summaries.append(item)
    alarm_data['Summary'] = dds_summaries
    context = createContext(alarm_data, removeNull=True)
    outputs = {'Logrhythm.Alarm(val.ID === obj.ID)': context}
    # Drop the verbose fields from the human-readable table only (context keeps them).
    del alarm_data['AIEMsgXml']
    del alarm_data['Summary']
    human_readable = tableToMarkdown('Alarm information for alarm id ' + alarm_id, alarm_data) + tableToMarkdown(
        'Alarm summaries', dds_summaries, ALARM_SUMMARY_HEADERS)
    return_outputs(readable_output=human_readable, outputs=outputs, raw_response=res)
def get_alarm_events(data_args):
    """Retrieve the events behind an alarm, optionally trimmed to selected fields.

    Args:
        data_args (dict): expects 'alarm-id', 'count', optional comma-separated
            'fields' and 'get-log-message' ('True' keeps the raw log message).
    """
    alarm_id = data_args.get('alarm-id')
    count = int(data_args.get('count'))  # bug fix: removed duplicate assignment
    fields = data_args.get('fields')
    show_log_message = data_args.get('get-log-message') == 'True'
    res = http_request('GET', 'lr-drilldown-cache-api/drilldown/' + alarm_id)
    if not res:
        return_outputs(readable_output=f"No events were found for alarm with ID {alarm_id}")
        return  # bug fix: previously fell through and crashed indexing an empty response
    res = res['Data']['DrillDownResults']['RuleBlocks']
    events = []
    for block in res:
        if not block.get('DrillDownLogs'):
            continue
        logs = json.loads(block['DrillDownLogs'])
        for log in logs:
            fix_date_values(log)
            if not show_log_message:
                del log['logMessage']
            events.append(log)
    events = events[:count]
    # Render the table before field-filtering so the war room shows full rows;
    # only the context output is trimmed below.
    human_readable = tableToMarkdown('Events information for alarm ' + alarm_id, events)
    if fields:
        fields = fields.split(',')
        for event in events:
            # bug fix: iterate a snapshot of the keys -- deleting while iterating
            # a dict view raises RuntimeError in Python 3.
            for key in list(event.keys()):
                if key not in fields:
                    del event[key]
    ec = {"ID": int(alarm_id), "Event": events}
    context = createContext(ec, removeNull=True)
    outputs = {'Logrhythm.Alarm(val.ID === obj.ID)': context}
    return_outputs(readable_output=human_readable, outputs=outputs, raw_response=res)
def fetch_incidents():
    """Pull LogRhythm cases and create one Demisto incident per case.

    Persists the newest case's dateCreated in last_run['start_time'] so the
    next run only fetches cases created after it.
    """
    headers = dict(HEADERS)
    last_run = demisto.getLastRun()
    # Check if first run. If not, continue running from the last case dateCreated field.
    if last_run and 'start_time' in last_run:
        start_time = last_run.get('start_time')
        headers['createdAfter'] = start_time
    else:
        # First run: fetch everything created up to now.
        headers['createdBefore'] = datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
    # Get list of cases, scoped to the configured entity when one is set.
    if ENTITY_ID:
        cases = http_request('GET', 'lr-case-api/cases?entityNumber=' + str(ENTITY_ID), headers=headers)
    else:
        cases = http_request('GET', 'lr-case-api/cases', headers=headers)
    # Set Last Run to the last case dateCreated field.
    # NOTE(review): assumes the API returns cases ordered oldest-first -- confirm.
    if cases:
        demisto.setLastRun({
            'start_time': cases[len(cases) - 1]['dateCreated']
        })
    # Generate incidents
    incidents = []
    for case in cases:
        incident = {
            'name': 'Case #' + str(case['number']) + ' ' + str(case['name']),
            'occurred': str(case['dateCreated']),
            'rawJSON': json.dumps(case)
        }
        incidents.append(incident)
    demisto.incidents(incidents)
def lr_get_case_evidence(data_args):
    """Retrieve the evidence items attached to a LogRhythm case."""
    case_id = data_args.get('case_id')
    evidence = http_request('GET', 'lr-case-api/cases/' + case_id + '/evidence')
    readable = tableToMarkdown('Evidences for case ' + case_id, evidence, headerTransform=string_to_table_header)
    return_results(CommandResults(
        outputs_prefix='Logrhythm.Evidence',
        outputs=evidence,
        readable_output=readable,
        raw_response=evidence,
        outputs_key_field='number'
    ))
def lr_execute_search_query(data_args):
    """Build and submit a LogRhythm log-search task from the supplied filter arguments.

    Posts a search-task to lr-search-api and outputs the resulting Task ID
    (Logrhythm.Search.Task); results are fetched later via lr_get_query_result.
    """
    number_of_days = data_args.get('number_of_days')
    source_type = data_args.get('source_type')
    host_name = data_args.get('host_name')
    username = data_args.get('username')
    subject = data_args.get('subject')
    sender = data_args.get('sender')
    recipient = data_args.get('recipient')
    hash = data_args.get('hash')
    url = data_args.get('URL')
    process_name = data_args.get('process_name')
    object = data_args.get('object')
    ipaddress = data_args.get('ip_address')
    max_message = data_args.get('max_massage')  # NOTE(review): 'max_massage' matches the declared argument name
    query_timeout = data_args.get('query_timeout')
    # Create filter query. The numeric filterType/valueType codes are LogRhythm
    # search-filter identifiers -- presumably from the vendor API spec; verify there.
    query = []
    if host_name:
        query.append(generate_query_item(filterType=23, valueType=4, value=str(host_name)))
    if ENTITY_ID:
        query.append(generate_query_item(filterType=136, valueType=2, value=int(ENTITY_ID)))
    if source_type and source_type != "all":
        query.append(generate_query_item(filterType=9, valueType=2, value=SOURCE_TYPE_MAP[source_type]))
    if username:
        query.append(generate_query_item(filterType=43, valueType=4, value=str(username)))
    if subject:
        query.append(generate_query_item(filterType=33, valueType=4, value=str(subject)))
    if sender:
        query.append(generate_query_item(filterType=31, valueType=4, value=str(sender)))
    if recipient:
        query.append(generate_query_item(filterType=32, valueType=4, value=str(recipient)))
    if hash:
        query.append(generate_query_item(filterType=138, valueType=4, value=str(hash)))
    if url:
        query.append(generate_query_item(filterType=42, valueType=4, value=str(url)))
    if process_name:
        query.append(generate_query_item(filterType=41, valueType=4, value=str(process_name)))
    if object:
        query.append(generate_query_item(filterType=34, valueType=4, value=str(object)))
    if ipaddress:
        query.append(generate_query_item(filterType=17, valueType=5, value=str(ipaddress)))
    # Search and get TaskID
    querybody = {
        "maxMsgsToQuery": int(max_message),
        "logCacheSize": 10000,
        "queryTimeout": int(query_timeout),
        "queryRawLog": True,
        "queryEventManager": False,
        "dateCriteria": {
            "useInsertedDate": False,
            "lastIntervalValue": int(number_of_days),
            "lastIntervalUnit": 4
        },
        "queryLogSources": [],
        "queryFilter": {
            "msgFilterType": 2,
            "isSavedFilter": False,
            "filterGroup": {
                "filterItemType": 1,
                "fieldOperator": 1,
                "filterMode": 1,
                "filterGroupOperator": 0,
                "filterItems": query
            }
        }
    }
    headers = HEADERS
    # NOTE(review): this mutates the module-level HEADERS dict (no copy made) -- confirm intended.
    headers['Content-Type'] = 'application/json'
    search_task = http_request('POST', 'lr-search-api/actions/search-task', json.dumps(querybody), headers)
    task_id = search_task.get('TaskId')
    results = CommandResults(
        outputs={"TaskID": task_id},
        outputs_prefix="Logrhythm.Search.Task",
        outputs_key_field='taskID',
        raw_response=search_task,
        readable_output='New search query created, Task ID=' + task_id
    )
    return_results(results)
def lr_get_query_result(data_args):
    """Poll a previously submitted search task and post its status and results."""
    task_id = data_args.get('task_id')
    queryresult = json.dumps(
        {
            "data": {
                "searchGuid": task_id,
                "search": {
                    "sort": [],
                    "fields": []
                },
                "paginator": {
                    "origin": 0,
                    "page_size": 50
                }
            }
        })
    headers = HEADERS
    # NOTE(review): this mutates the module-level HEADERS dict (no copy made) -- confirm intended.
    headers['Content-Type'] = 'application/json'
    search_result = http_request('POST', 'lr-search-api/actions/search-result', queryresult, headers)
    context = {
        "TaskID": task_id,
        "TaskStatus": search_result["TaskStatus"],
        "Items": search_result["Items"]
    }
    # Map the task status to a human-readable message.
    if search_result["TaskStatus"] == "Completed: No Results":
        message = "#### No results, please modify your search"
    elif search_result["TaskStatus"] == "Searching":
        message = "#### Searching"
    elif search_result["TaskStatus"] == "Search Failed":
        message = "#### The search is timed out, please try again or modify your search"
    elif search_result["Items"]:
        # Drop the bulky raw log message from each row before rendering the table.
        for log in search_result["Items"]:
            log.pop('logMessage', None)
        message = tableToMarkdown("Search results for task " + task_id, search_result["Items"],
                                  headerTransform=string_to_table_header)
    else:
        message = "#### Please try again later"
    results = CommandResults(
        readable_output=message,
        outputs=context,
        outputs_key_field='TaskID',
        outputs_prefix="Logrhythm.Search.Results",
        raw_response=search_result
    )
    return_results(results)
''' COMMANDS MANAGER / SWITCH PANEL '''


def main():
    """Dispatch the invoked integration command to its implementation."""
    LOG('Command being called is %s' % (demisto.command()))
    # Commands whose handlers take demisto.args() as their single argument.
    arg_commands = {
        'lr-add-host': add_host,
        'lr-get-hosts-by-entity': get_hosts_by_entity,
        'lr-get-hosts': get_hosts,
        'lr-execute-query': execute_query,
        'lr-update-host-status': change_status,
        'lr-get-persons': get_persons,
        'lr-get-users': get_users,
        'lr-get-logins': get_logins,
        'lr-get-privileges': get_privileges,
        'lr-get-profiles': get_profiles,
        'lr-get-networks': get_networks,
        'lr-get-alarm-data': get_alarm_data,
        'lr-get-alarm-events': get_alarm_events,
        'lr-execute-search-query': lr_execute_search_query,
        'lr-get-query-result': lr_get_query_result,
        'lr-get-case-evidence': lr_get_case_evidence,
        'lr-add-user': add_user,
        'lr-add-login': add_login,
    }
    try:
        handle_proxy()
        command = demisto.command()
        if command == 'test-module':
            # This is the call made when pressing the integration test button.
            test_module()
        elif command == 'fetch-incidents':
            fetch_incidents()
        elif command in arg_commands:
            arg_commands[command](demisto.args())
    except Exception as e:
        return_error('error has occurred: {}'.format(str(e)))
# Run under direct invocation and the XSOAR script runners
# (python2 uses __builtin__, python3 uses builtins).
if __name__ in ('__main__', '__builtin__', 'builtins'):  # pragma: no cover
    main()
| mit | 11eb08bdce97ed2be39f3f157a197500 | 40.713929 | 129 | 0.635537 | 2.938799 | false | false | false | false |
demisto/content | Packs/CIRCL/Integrations/CIRCL/CIRCL.py | 2 | 7040 | import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
import requests
import json
# disable insecure warnings
requests.packages.urllib3.disable_warnings()
''' GLOBAL VARS '''
# Instance configuration pulled from the integration parameters.
BASE_URL = demisto.getParam('url')
USERNAME = demisto.getParam('credentials')['identifier']
PASSWORD = demisto.getParam('credentials')['password']
AUTH = (USERNAME, PASSWORD)
USE_SSL = not demisto.params().get('insecure', False)
IS_USING_PROXY = True if demisto.params().get('proxy') else False
# Key under which CIRCL pDNS records report their last-seen epoch timestamp.
LAST_TIME_KEY = 'time_last'
def http_request(method, url):
    """Perform an authenticated request against the CIRCL API.

    Returns the requests.Response on HTTP 200; any other status code is
    turned into an error entry via return_error (which aborts the script).
    """
    response = requests.request(
        method,
        url,
        auth=AUTH,
        verify=USE_SSL
    )
    if response.status_code != 200:
        return_error('Error in API call: [%d] - %s' % (response.status_code, response.reason))
    return response
def validate_sha1(sha1):
    """Abort with an error entry unless *sha1* has the 40-character SHA-1 length."""
    expected_length = 40
    if len(sha1) == expected_length:
        return
    return_error('Invalid SHA-1, expected 40 characters: %s' % (sha1))
def validate_ip_of_cidr(ip):
    """Abort with an error entry unless *ip* looks like a dotted-quad IP or CIDR.

    Only the shape is checked (1-3 digits per octet, optional /0-32 suffix);
    octet ranges above 255 are not rejected.
    """
    pattern = r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'
    if re.search(pattern, ip) is None:
        return_error('Invalid IP or CIDR: %s' % (ip))
def timestamp_to_string(timestamp):
    """Format an epoch timestamp as a 'YYYY-MM-DD HH:MM:SS' UTC string.

    None passes through unchanged.
    """
    if timestamp is not None:
        as_datetime = datetime.utcfromtimestamp(timestamp)
        return as_datetime.strftime('%Y-%m-%d %H:%M:%S')
    return None
def dns_get_command(url):
    """Run a passive-DNS lookup for *url* and post the deduplicated records."""
    response = http_dns_get(url)
    # The API streams one JSON object per line.
    results = list(map(lambda line: json.loads(line), response.text.splitlines()))
    results = merge_by_rdata(results)
    records = []
    for result in results:
        records.append(create_dns_record_context(result))
    demisto.results({
        'Type': entryTypes['note'],
        'ContentsFormat': formats['text'],
        'Contents': response.text,
        'HumanReadable': tableToMarkdown("CIRCL Dns - " + url, records),
        'EntryContext': {
            'CIRCLdns.Query(val.Value===obj.Value)': {
                'Value': url,
                'Record': records,
            }
        }
    })
def http_dns_get(url):
    """Issue the pDNS query for *url* against the CIRCL API."""
    return http_request('GET', BASE_URL + '/pdns/query/' + url)
# The results may contain several records with the same 'rdata' but different
# (not interesting) other properties; keep one record per rdata, preferring the
# later "last seen time".
def merge_by_rdata(results):
    """Deduplicate pDNS records by 'rdata', keeping the latest-seen record."""
    latest_by_rdata = {}  # type: dict
    for record in results:
        existing = latest_by_rdata.get(record['rdata'])
        if existing is None or record[LAST_TIME_KEY] >= existing[LAST_TIME_KEY]:
            latest_by_rdata[record['rdata']] = record
    return list(latest_by_rdata.values())
def create_dns_record_context(record):
    """Build the context entry for a single pDNS record."""
    return {
        'Data': record['rdata'],
        'LastTime': timestamp_to_string(record[LAST_TIME_KEY]),
    }
def list_certificates(queryValue):
    """List every certificate seen on an IP or CIDR and post the results.

    queryValue is validated first; an invalid value aborts via return_error.
    """
    validate_ip_of_cidr(queryValue)
    response = http_list_certificates(queryValue)
    data = response.json()
    # The payload maps each IP to its certificate/subject data.
    records = []
    for ip, ip_data in data.items():
        records.append(create_ip_context(ip, ip_data))
    result = {
        'Type': entryTypes['note'],
        'ContentsFormat': formats['json'],
        'Contents': data,
        'HumanReadable': tableToMarkdown('List certificates for ' + queryValue, records),
        'EntryContext': {
            'CIRCLssl.IPAddress(val.Value===obj.Value)': records
        }
    }
    demisto.results(result)
def http_list_certificates(queryValue):
    """Issue the pSSL query for an IP/CIDR against the CIRCL API."""
    query_url = BASE_URL + '/v2pssl/query/' + queryValue
    return http_request('GET', query_url)
def create_ip_context(ip, ipData):
    """Build the context entry for one IP: its value plus each certificate seen on it."""
    certificates = [
        create_list_certificate_context(sha1, ipData['subjects'].get(sha1, {}).get('values', []))
        for sha1 in ipData['certificates']
    ]
    return {
        'Value': ip,
        'Certificate': certificates
    }
def create_list_certificate_context(sha1, subjects):
    """Pair a certificate SHA-1 with the subjects it was observed with."""
    return dict(SHA1=sha1, Subjects=subjects)
def list_certificate_seen_ips(sha1, limit):
    """Post the hit count and up to *limit* IPs that served the given certificate."""
    validate_sha1(sha1)
    response = http_list_certificate_seen_ips(sha1)
    data = response.json()
    certificate = create_certificate_seen_ips_context(sha1, data, limit)
    result = {
        'Type': entryTypes['note'],
        'ContentsFormat': formats['json'],
        'Contents': data,
        'HumanReadable': 'Hits: ' + str(certificate['Hits']),
        'EntryContext': {
            'CIRCLssl.Certificate(val.SHA1===obj.SHA1)': certificate,
        }
    }
    demisto.results(result)
def http_list_certificate_seen_ips(sha1):
    """Issue the pSSL certificate query (IPs that served it) against the CIRCL API."""
    query_url = BASE_URL + '/v2pssl/cquery/' + sha1
    return http_request('GET', query_url)
def create_certificate_seen_ips_context(sha1, data, limit):
    """Build the certificate context: hit count plus up to *limit* serving IPs."""
    context = {'SHA1': sha1}
    context['Hits'] = data['hits']
    context['IPAddress'] = data['seen'][:limit]
    return context
def get_certificate_details(sha1):
    """Fetch and post the full details of a certificate by its SHA-1."""
    validate_sha1(sha1)
    response = http_get_certificate_details(sha1)
    data = response.json()
    certificate = create_certificate_details(sha1, data)
    result = {
        'Type': entryTypes['note'],
        'ContentsFormat': formats['json'],
        'Contents': data,
        'HumanReadable': tableToMarkdown("CIRCL ssl certificate - " + sha1, certificate),
        'EntryContext': {
            'CIRCLssl.Certificate(val.SHA1===obj.SHA1)': certificate,
        }
    }
    demisto.results(result)
def http_get_certificate_details(sha1):
    """Issue the pSSL certificate-fetch query against the CIRCL API."""
    query_url = BASE_URL + '/v2pssl/cfetch/' + sha1
    return http_request('GET', query_url)
def create_certificate_details(sha1, data):
    """Flatten the pSSL certificate-fetch payload into a context dict.

    For the 'Usage' field, 'extendedKeyUsage' takes precedence over 'keyUsage';
    both default to an empty string when absent.
    """
    info = data['info']
    extension = info.get('extension', {})
    usage = extension.get('extendedKeyUsage', extension.get('keyUsage', ''))
    distribution = extension.get('crlDistributionPoints', '')
    return {
        'SHA1': sha1,
        'Usage': usage,
        'Distribution': distribution,
        'Issuer': info['issuer'],
        'Time': info['not_before'],
        'Subject': info['subject'],
        'Key': info['key'],
        'Pem': data['pem'],
        'Seen': data.get('icsi', {}).get('times_seen'),
    }
''' EXECUTION CODE '''
LOG('command is %s' % (demisto.command(), ))
try:
    command = demisto.command()
    args = demisto.args()
    handle_proxy()
    if command == 'test-module':
        # Any successful pDNS call proves connectivity and credentials.
        result = http_dns_get('test.com')
        demisto.results('ok')
    elif command == 'circl-dns-get':
        dns_get_command(args.get('queryValue'))
    elif command == 'circl-ssl-list-certificates':
        list_certificates(args.get('queryValue'))
    elif command == 'circl-ssl-query-certificate':
        limit = int(args.get('limitResults', 100))
        sha1 = args.get('certificate')
        list_certificate_seen_ips(sha1, limit)
    elif command == 'circl-ssl-get-certificate':
        get_certificate_details(args.get('certificate'))
except Exception as e:
    return_error(str(e))
| mit | 857d08fdb467386c8697032edc84d60b | 24.6 | 113 | 0.613352 | 3.54303 | false | false | false | false |
demisto/content | Packs/NCSCCyberAsssessmentFramework/Scripts/NCSCReportDetailsC/NCSCReportDetailsC.py | 2 | 3754 | import json
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
def calculate_overall(data: list = None) -> str:
    """Reduce per-question assessment results to a single overall verdict.

    Args:
        data: list of dicts, each with a 'Result' key of 'Achieved',
            'Partially Achieved' or 'Not Achieved'. (Type hint fixed: the
            original annotated this as ``dict`` but it is iterated as a list.)

    Returns:
        The worst result present -- any 'Not Achieved' dominates, then
        'Partially Achieved', otherwise 'Achieved'. Empty/None input -> ''.
    """
    if not data:
        return ""
    results = [x['Result'] for x in data]
    if "Not Achieved" in results:
        return "Not Achieved"
    elif "Partially Achieved" in results:
        return "Partially Achieved"
    else:
        return "Achieved"
def main():
    """Render the NCSC CAF 'Assessment C' answers and recommendations as Markdown.

    Reads the oldest open assessment incident, cross-references its recorded
    answers against the master question list (the 'NCSC CAF Assessment' XSOAR
    list), and posts a Markdown report via demisto.results.
    """
    query = "-status:closed -category:job type:\"NCSC CAF Assessment\""
    result_field = "cafcresultraw"
    answers_field = "cafcanswers"
    questions_field = "cafcquestions"
    assessment_field = "AssessmentC"
    incidents = demisto.executeCommand("getIncidents", {"query": query})[0]['Contents']['data']
    if len(incidents) < 1:
        return ""
    # Use the oldest matching incident (lowest id).
    incidents = sorted(incidents, key=lambda x: x['id'])
    incident = incidents[0]
    original_question_data = json.loads(demisto.executeCommand("getList", {"listName": "NCSC CAF "
                                                                                       "Assessment"})[0]['Contents'])
    original_question_data = original_question_data[assessment_field]
    if incident:
        md: str = ""
        custom_fields = incident.get('CustomFields')
        assessment_questions = json.loads(custom_fields.get(questions_field))
        assessment_answers = json.loads(custom_fields.get(answers_field))
        assessment_details = json.loads(custom_fields.get(result_field))
        assessment_result = calculate_overall(assessment_details)
        answered_questions = str()
        # One table per question: each selected answer with its verdict.
        for x in range(0, len(assessment_questions)):
            table = list()
            original_answers = [a.get('answers') for a in original_question_data if a['question']
                                == assessment_questions.get(str(x))][0]
            these_answers = assessment_answers.get(str(x))
            for answer in these_answers:
                # Score mapping: 2 -> Achieved, 0 -> Not Achieved, otherwise Partially Achieved.
                verdict = [b['score'] for b in original_answers if b['answer'] == answer][0]
                verdict = "Achieved" if verdict == 2 else "Not Achieved" if verdict == 0 else "Partially Achieved"
                table.append(
                    {
                        "Answer": answer,
                        "Result": verdict
                    }
                )
            answers_markdown = tableToMarkdown(assessment_questions.get(str(x)), table, ['Answer', 'Result'])
            answered_questions += f"{answers_markdown}\n\n"
        md += f"### Provided answers\n\nBelow are the individual questions and responses provided for this " \
              f"objective:\n\n{answered_questions}\n\n"
        if assessment_result in ['Not Achieved', 'Partially Achieved']:
            md += "### Recommendations\n\nPlease review the following questions and their responses that result in " \
                  "an 'Achieved' outcome for this objective (the list only includes questions which have not " \
                  "resulted in 'Achieved'):\n\n"
            failed_questions = [x['Question'] for x in assessment_details if x['Result'] != "Achieved"]
            for question in original_question_data:
                if question.get('question') in failed_questions:
                    md += f"#### {question.get('question')}\n"
                    # List only the answers that would score 'Achieved' (score == 2).
                    for answer in [x['answer'] for x in question['answers'] if x['score'] == 2]:
                        md += f"- {answer}\n"
                    md += "\n"
        else:
            # NOTE(review): 'objectve' typo below is user-visible runtime text --
            # fix it as a deliberate text change, not silently here.
            md += "### Recommendations\n\nThere are no further recommendations to improve your result for this " \
                  "objectve. Good work!"
    else:
        md = ""
    demisto.results(md)
# Run under direct invocation and the XSOAR script runners (py2/py3 builtins).
if __name__ in ['__main__', '__builtin__', 'builtins']:
    main()
| mit | 075ac3d0ed7b97ce31bdab3e06b99c8d | 42.149425 | 118 | 0.570858 | 4.040904 | false | false | false | false |
demisto/content | Packs/CortexXDR/Scripts/CortexXDRIdentityInformationWidget/CortexXDRIdentityInformationWidget_test.py | 2 | 1049 | import io
import pytest
from CommonServerPython import *
import CortexXDRIdentityInformationWidget
def util_load_json(path):
    """Read *path* as UTF-8 text and parse it as JSON."""
    with io.open(path, mode='r', encoding='utf-8') as handle:
        raw = handle.read()
    return json.loads(raw)
@pytest.mark.parametrize('context_data, expected_result', [
    (util_load_json('test_data/context_data1.json'), util_load_json('test_data/expected_results1.json')),
    (util_load_json('test_data/context_data2.json'), util_load_json('test_data/expected_results2.json'))
])
def test_additional_info(mocker, context_data, expected_result):
    """Verify get_identity_info maps each context fixture to its expected rows.

    'Access Keys' ordering is not guaranteed, so it is compared as a set.
    """
    mocker.patch.object(demisto, 'context', return_value=context_data)
    results = CortexXDRIdentityInformationWidget.get_identity_info()
    for actual_res, expected_res in zip(results, expected_result):
        actual_access_keys = actual_res.pop('Access Keys')
        expected_access_keys = expected_res.pop('Access Keys')
        assert actual_res == expected_res
        # the sorting of the access keys doesn't matter
        assert set(actual_access_keys) == set(expected_access_keys)
demisto/content | Packs/MalwareInvestigationAndResponse/Scripts/InvestigationSummaryToTable/InvestigationSummaryToTable_test.py | 2 | 1584 | import json
from pathlib import Path
from InvestigationSummaryToTable import Result, get_findings, findings_to_command_results
# Directory holding the JSON fixtures used by these tests.
TEST_DATA_DIR = Path(__file__).parent / 'test_data'


def _load_test_file(file_name: str):
    """Load a JSON fixture from the test-data directory."""
    return json.loads((TEST_DATA_DIR / file_name).read_text())
def _dump_test_file(file_name: str, content: dict):
    """Write *content* as JSON into the test-data directory.

    Helper for regenerating fixtures; not used by the tests themselves here.
    """
    (TEST_DATA_DIR / file_name).write_text(json.dumps(content))
def test_empty_context():
    """An empty incident context yields no outputs and the 'waiting' placeholder text."""
    context = {}
    findings = get_findings(context)
    result = findings_to_command_results(findings)
    assert not result.outputs
    assert result.readable_output == '### Waiting on entries\n' \
                                     'When `InvestigationSummaryParse` is finished, its results will appear here.'
def test_context():
    """Findings are extracted from both dict- and list-shaped context entries."""
    findings = get_findings({
        'EvidenceOfCommandAndControl': {'Result': 'Suspicious', 'Tactic': 'Command and Control'},
        # List entries without a 'Result' key are skipped.
        'EvidenceOfPrivilegeEscalation': [{'should be ignored': 'should be ignored'},
                                          {'Result': 'Not Detected', 'Tactic': 'Privilege Escalation'}],
    })
    assert len(findings) == 2
    assert findings[0].result == Result.SUSPICIOUS
    assert findings[0].tactic == 'Command and Control'
    assert findings[0].context_name == 'EvidenceOfCommandAndControl'
    assert findings[1].result == Result.NOT_DETECTED
    assert findings[1].tactic == 'Privilege Escalation'
    assert findings[1].context_name == 'EvidenceOfPrivilegeEscalation'
    assert findings_to_command_results(findings).to_context() == _load_test_file('expected_context.json')
demisto/content | Packs/Stealthwatch_Cloud/Integrations/Stealthwatch_Cloud/Stealthwatch_Cloud.py | 2 | 17438 | import demistomock as demisto
from CommonServerPython import *
''' IMPORTS '''
import requests
import json
import os
from datetime import datetime, timedelta
import collections
# disable insecure warnings
requests.packages.urllib3.disable_warnings()
''' GLOBAL VARS '''
# Instance configuration pulled from the integration parameters.
SERVER = demisto.params().get('serverURL', '').strip('/')
SERVER_URL = SERVER + '/api/v3'
API_KEY = demisto.params()['APIKey']
USE_SSL = not demisto.params().get('insecure')
# Headers sent with every API request; the API key goes in 'Authorization'.
DEFAULT_HEADERS = {
    'Content-Type': 'application/json',
    'Accept': 'application/json',
    'Authorization': API_KEY
}
''' HELPER FUNCTIONS '''
# When the proxy option is disabled, scrub any proxy settings inherited from
# the environment so 'requests' connects directly.
if not demisto.params()['proxy']:
    # bug fix: pop() with a default tolerates unset variables -- the original
    # bare 'del os.environ[...]' raised KeyError on hosts without them.
    for _proxy_var in ('HTTP_PROXY', 'HTTPS_PROXY', 'http_proxy', 'https_proxy'):
        os.environ.pop(_proxy_var, None)
def http_request(method, url_suffix, params_dict=None, headers=DEFAULT_HEADERS, data=None):
    """Issue a request against the Stealthwatch Cloud API.

    Returns the parsed JSON body, or the string "Request completed" for
    empty/non-JSON responses. Transport/HTTP errors are logged and re-raised.

    NOTE(review): 'headers' defaults to the shared DEFAULT_HEADERS dict --
    callers must not mutate it; confirm none do.
    """
    req_params = {}  # type: Dict[Any,Any]
    if params_dict is not None:
        req_params.update(params_dict)
    url = SERVER_URL + url_suffix
    LOG(f'running {method} request with url={url}\tparams={json.dumps(req_params)}')
    try:
        res = requests.request(method,
                               url,
                               verify=USE_SSL,
                               params=req_params,
                               headers=headers,
                               data=data
                               )
        # Raise for 4xx/5xx so the except block below logs the failure.
        res.raise_for_status()
        try:
            return res.json()
        except ValueError:
            # in case the response doesn't have JSON
            return "Request completed"
    except Exception as e:
        LOG(e)
        raise(e)
def underscore_to_camelcase(word):
    """Turn 'some_field_name' into 'Some Field Name' (empty segments become '_')."""
    parts = []
    for segment in word.split('_'):
        parts.append(segment.capitalize() or '_')
    return ' '.join(parts)
def create_incident_data_from_alert(alert):
    """Convert a Stealthwatch alert into a Demisto incident dict.

    Strips the bulky 'comments' and 'observations' fields from the alert
    (mutating it in place) before serializing it as the incident's rawJSON.
    """
    # bug fix: pop() with a default tolerates alerts lacking these fields --
    # the original bare .pop(key) raised KeyError on such alerts.
    alert.pop('comments', None)
    alert.pop('observations', None)
    return {
        'name': 'Stealthwatch alert ' + str(alert.get('id', '')),
        'rawJSON': json.dumps(alert),
        'occurred': alert.get('created', '')
    }
def get_latest_id(alerts_data):
    """Return the highest alert 'id' in *alerts_data*; 0 when none exceed it."""
    candidate_ids = [alert.get('id') for alert in alerts_data]
    # Seed with 0 so missing/None ids (and an empty list) yield 0, as before.
    return max([0] + [i for i in candidate_ids if i is not None])
''' COMMANDS FUNCTIONS '''


def show_alert(alert_id):
    """Fetch a single Stealthwatch alert by id and return the parsed response."""
    api_endpoint = f"/alerts/alert/{alert_id}/"
    return http_request('GET', api_endpoint, {}, DEFAULT_HEADERS)
def show_alert_command():
    """
    Corresponds to the 'sw-show-alert' command: returns information about a
    specific alert. Comments are included only when addComments == 'true';
    the bulky 'observations' field is always dropped.
    """
    alert_id = demisto.args().get('alertID')
    alert_data = show_alert(alert_id)
    if not demisto.args().get('addComments', False) == 'true':
        alert_data.pop('comments')
        alert_data.pop('new_comment')
    alert_data.pop('observations')
    # Only a fixed subset of fields goes into the markdown table.
    list_for_md = ['resolved', 'id', 'last_modified', 'obj_created', 'assigned_to']
    dict_for_md = {underscore_to_camelcase(k): v for k, v in alert_data.items() if k in list_for_md}
    md = tableToMarkdown(alert_data.get('text', ''), dict_for_md)
    return {
        'Type': entryTypes['note'],
        'Contents': alert_data,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': md,
        'EntryContext': {
            "Stealthwatch.Alert(val.id==obj.id)": alert_data
        }
    }
def update_alert(alert_id, data):
    """Update an alert by id; *data* is the dict of fields to change."""
    api_endpoint = f"/alerts/alert/{alert_id}/"
    return http_request('PUT', api_endpoint, data=json.dumps(data))
def update_alert_command():
    """
    Corresponds to the 'sw-update-alert' command: updates the given fields of
    an alert and returns the refreshed alert information.
    """
    args = demisto.args()
    alert_id = args.get('alertID')
    update_params = {}
    # Collect only the update parameters the caller actually supplied.
    possible_params = ['new_comment', 'tags', 'publish_time', 'resolved', 'snooze_settings', 'merit', 'assigned_to']
    for param in possible_params:
        current_param = args.get(param, False)
        if current_param:
            update_params[param] = current_param
    # The API expects resolved_user as a nested object.
    username = args.get('resolved_user', None)
    if username is not None:
        update_params['resolved_user'] = {
            'username': username
        }
    alert_data = update_alert(alert_id, update_params)
    # Drop the bulky fields before building outputs.
    alert_data.pop('comments')
    alert_data.pop('new_comment')
    alert_data.pop('observations')
    list_for_md = ['resolved', 'id', 'last_modified', 'obj_created', 'assigned_to']
    dict_for_md = {k: v for k, v in alert_data.items() if k in list_for_md}
    md = tableToMarkdown(alert_data.get('text', ''), dict_for_md)
    return {
        'Type': entryTypes['note'],
        'Contents': alert_data,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': md,
        'EntryContext': {
            "Stealthwatch.Alert(val.id==obj.id)": alert_data
        }
    }
def list_alerts(params):
    """Retrieve alerts matching the given query parameters."""
    api_endpoint = "/alerts/alert/"
    return http_request('GET', api_endpoint, params, DEFAULT_HEADERS)
def build_alert_dic(alert):
    """Project an alert onto the fixed display fields, with prettified keys."""
    display_fields = ['id', 'last_modified', 'resolved', 'text', 'obj_created', 'assigned_to', 'description']
    dic = collections.OrderedDict()  # type: Dict[str,str]
    for field in display_fields:
        dic[underscore_to_camelcase(field)] = alert[field]
    return dic
def list_alerts_command():
    """
    Corresponds to the 'sw-list-alerts' command: returns a list of Stealthwatch
    alerts. Comments are kept only when addComments == 'true'; the bulky
    'observations' field is always dropped.
    """
    args = demisto.args()
    list_params = {}
    # Collect only the filter parameters the caller actually supplied.
    possible_params = ['status', 'tags', 'search', 'assignee', 'limit']
    for param in possible_params:
        current_param = args.get(param, False)
        if current_param:
            list_params[param] = current_param
    alerts_data = list_alerts(list_params).get('objects')
    md_dicts_list = []
    for alert in alerts_data:
        if not demisto.args().get('addComments', False) == 'true':
            alert.pop('comments')
            alert.pop('new_comment')
        alert.pop('observations')
        md_dicts_list.append(build_alert_dic(alert))
    md = tableToMarkdown("The following alerts were retrieved", md_dicts_list)
    return {
        'Type': entryTypes['note'],
        'Contents': alerts_data,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': md,
        'EntryContext': {
            "Stealthwatch.Alert(val.id==obj.id)": alerts_data
        }
    }
def domain_block(params):
    """Add an entry to the domain blacklist; *params* is a JSON-encoded body string."""
    api_endpoint = "/blacklist/domains/"
    return http_request('POST', api_endpoint, {}, DEFAULT_HEADERS, params)
def block_domain_command():
    """
    Corresponds to the 'sw-block-domain-or-ip' command: adds a domain OR an IP
    to the blacklist. Exactly one of 'domain'/'ip' must be supplied.
    """
    domain = demisto.args().get('domain')
    ip = demisto.args().get('ip')
    if not (domain or ip):
        return {
            "Type": entryTypes["error"],
            "ContentsFormat": formats["text"],
            "Contents": 'Please enter either domain or ip'
        }
    if domain and ip:
        return {
            "Type": entryTypes["error"],
            "ContentsFormat": formats["text"],
            "Contents": 'Please enter only domain or ip, not both'
        }
    identifier = domain if domain else ip
    domain_params = {
        "identifier": identifier,
        "category": "domain",
        "list_on": "blacklist"
    }
    domain_result = domain_block(json.dumps(domain_params))
    # Key the context under Domain or IP depending on what was blocked.
    if domain:
        ec = {
            "Stealthwatch.Domain(val.identifier==obj.identifier)": domain_result
        }
    else:
        ec = {
            "Stealthwatch.IP(val.identifier==obj.identifier)": domain_result
        }
    return {
        'Type': entryTypes['note'],
        'Contents': domain_result,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        # bug fix: use 'identifier' here -- concatenating 'domain' raised
        # TypeError ('str' + None) whenever only 'ip' was supplied.
        'HumanReadable': tableToMarkdown('Blacklist ' + identifier + ' result', domain_result),
        'EntryContext': ec
    }
def domain_unblock(domain_id):
    """Remove the blacklist entry with the given id."""
    api_endpoint = f"/blacklist/domains/{domain_id}/"
    return http_request('DELETE', api_endpoint, None, DEFAULT_HEADERS, None)
def unblock_domain_command():
    """
    Corresponds to the 'sw-unblock-domain' command: removes a blacklist entry
    by its id.
    """
    domain_id = demisto.args().get('id')
    domain_result = domain_unblock(domain_id)
    return {
        'Type': entryTypes['note'],
        'Contents': domain_result,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': 'Unblocked domain with id: ' + domain_id,
    }
def list_domains(list_params):
    """List blacklisted domains matching the given query parameters."""
    api_endpoint = "/blacklist/domains/"
    return http_request('GET', api_endpoint, list_params, DEFAULT_HEADERS, {})
def list_blocked_domains_command():
    """
    Corresponds to the 'sw-list-blocked-domains' command: returns the current
    blacklist, optionally filtered by 'search', 'limit' or an exact 'domain'.
    """
    args = demisto.args()
    list_params = {}
    # Collect only the filter parameters the caller actually supplied.
    possible_params = ['search', 'limit']
    for param in possible_params:
        current_param = args.get(param, False)
        if current_param:
            list_params[param] = current_param
    specific_domain = args.get('domain', None)
    if specific_domain is not None:
        list_params['identifier'] = specific_domain
    domains_data = list_domains(list_params)
    domains_result = domains_data.get('objects', {})
    # Prettify the keys for the markdown table only; context keeps raw keys.
    data_output = []
    for obs in domains_result:
        data_output.append({underscore_to_camelcase(k): v for k, v in list(obs.items())})
    return {
        'Type': entryTypes['note'],
        'Contents': domains_data,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('Current blacklisted domains are', data_output),
        'EntryContext': {
            "Stealthwatch.Domain(val.identifier==obj.identifier)": domains_result
        }
    }
def list_observations(params):
    """List observations matching the given query parameters."""
    api_endpoint = "/observations/all/"
    return http_request('GET', api_endpoint, params, DEFAULT_HEADERS)
def list_observations_command():
    """
    Corresponds to the 'sw-list-observations' command: returns a list of
    Stealthwatch observations, ordered by creation time.
    """
    args = demisto.args()
    list_params = {
        "ordering": 'creation_time'
    }
    # Collect only the filter parameters the caller actually supplied.
    possible_params = ['alert', 'id', 'search', 'limit']
    for param in possible_params:
        current_param = args.get(param, False)
        if current_param:
            list_params[param] = current_param
    observations_data = list_observations(list_params).get('objects')
    # Prettify the keys for display.
    data_output = []
    for obs in observations_data:
        data_output.append({underscore_to_camelcase(k): v for k, v in list(obs.items())})
    return {
        'Type': entryTypes['note'],
        'Contents': data_output,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('Found the following observations', data_output),
        'EntryContext': {
            "Stealthwatch.Observation(val.id==obj.id)": observations_data
        }
    }
def list_sessions(params):
    """
    Lists sessions (snapshot session data) via the Stealthwatch API.
    """
    api_endpoint = "/snapshots/session-data/"
    return http_request('GET', api_endpoint, params, DEFAULT_HEADERS)
def list_sessions_command():
    """
    corresponds to 'sw-list-sessions' command. Returns a list of Stealthwatch
    sessions
    """
    date_format = "%Y-%m-%dT%H:%M:%SZ"
    list_params = {}
    ip = demisto.args().get('ip')
    connected_ip = demisto.args().get('connectedIP')
    connected_device_id = demisto.args().get('connectedDeviceId')
    limit = demisto.args().get('limit')
    start_time = demisto.args().get('startTime', None)
    end_time = demisto.args().get('endTime', None)
    session_type = demisto.args().get('sessionType', 'all')
    if start_time and end_time:
        list_params['start_datetime'] = start_time
        list_params['end_datetime'] = end_time
    elif end_time is None:
        # Only startTime was supplied: build a 5-minute window ending at the
        # original startTime (start-5min .. start).
        # NOTE(review): if startTime is also omitted, strptime(None) raises
        # TypeError here — confirm whether the command arguments enforce startTime.
        start_time_object = datetime.strptime(start_time, date_format)
        start_time_object = start_time_object - timedelta(minutes=5)
        end_time_object = start_time_object + timedelta(minutes=5)
        start_time = start_time_object.strftime(date_format)
        end_time = end_time_object.strftime(date_format)
    list_params['ip'] = ip
    list_params['connected_ip'] = connected_ip
    list_params['limit'] = limit
    list_params['start_datetime'] = start_time
    list_params['end_datetime'] = end_time
    list_params['connected_device_id'] = connected_device_id
    # Deduplicates sessions by connected IP (despite the variable name, the
    # list holds connected_ip values, not session ids).
    unique_session_ids = []  # type: List[str]
    final_sessions_data = []
    sessions_data = list_sessions(list_params).get('objects')
    for sess in sessions_data:
        if sess['connected_ip'] not in unique_session_ids:
            unique_session_ids.append(sess['connected_ip'])
            # A connected_device_id implies the peer is an internal device.
            if demisto.get(sess, 'connected_device_id'):
                sess['connected_device_is_external'] = False
                if session_type == 'internal':
                    final_sessions_data.append(sess)
            else:
                sess['connected_device_is_external'] = True
                if session_type == 'external':
                    final_sessions_data.append(sess)
            if session_type == 'all':
                final_sessions_data.append(sess)
    data_output = []
    for sess in final_sessions_data:
        data_output.append({underscore_to_camelcase(k): v for k, v in list(sess.items())})
    return {
        'Type': entryTypes['note'],
        'Contents': data_output,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('Found the following session data', data_output),
        'EntryContext': {
            "Stealthwatch.Session(val.id==obj.id)": final_sessions_data
        }
    }
def fetch_incidents():
    """
    Fetches new Stealthwatch alerts and pushes them to the platform as incidents.
    Persists the newest alert creation time in the last-run object.
    """
    date_format = "%Y-%m-%dT%H:%M:%SZ"
    list_params = {
        "ordering": 'created',
        "limit": 100
    }
    final_alerts = []
    last_fetch_string = demisto.getLastRun().get('last_fetch_time', None)
    ids = demisto.getLastRun().get('ids', None)
    # first_time: migration path — no timestamp saved yet, but a set of
    # previously fetched ids exists; dedupe against those ids instead.
    first_time = (not last_fetch_string and ids is not None)
    if last_fetch_string is None or not last_fetch_string:
        # No last-run timestamp: look back 20 days.
        now = datetime.now()
        last_fetch = now - timedelta(days=20)
    else:
        last_fetch = parse_date_string(last_fetch_string)
    # Couldn't find a way to sort descending so looking for last offset of 100 alerts
    alerts_response = list_alerts(list_params)
    num_alerts = alerts_response.get('meta', {'total_count': 100}).get('total_count')
    offset = 0 if num_alerts < 100 else num_alerts - 100
    list_params['offset'] = offset
    alerts_response = list_alerts(list_params)
    alerts_data = alerts_response.get('objects', [])
    # `now` is only defined on the no-timestamp branch, which is exactly the
    # case where it is used here.
    max_fetch_time = last_fetch_string if last_fetch_string else now.strftime(date_format)
    for alert in alerts_data:
        created = alert.get('created')
        if parse_date_string(created) > last_fetch:
            incident_from_alert = create_incident_data_from_alert(alert)
            if first_time:
                # `ids` is guaranteed non-None when first_time is True.
                if alert.get('id') not in ids:
                    final_alerts.append(incident_from_alert)
            else:
                final_alerts.append(incident_from_alert)
            if parse_date_string(created) > parse_date_string(max_fetch_time):
                max_fetch_time = created
    demisto.setLastRun({
        'last_fetch_time': max_fetch_time
    })
    demisto.incidents(final_alerts)
''' EXECUTION CODE '''
try:
    command = demisto.command()
    if command == 'test-module':
        # This is the call made when pressing the integration test button.
        if list_alerts_command():
            demisto.results('ok')
        else:
            demisto.results('test failed')
    else:
        # Dispatch table: command name -> handler returning an entry.
        command_handlers = {
            'sw-show-alert': show_alert_command,
            'sw-update-alert': update_alert_command,
            'sw-list-alerts': list_alerts_command,
            'sw-block-domain-or-ip': block_domain_command,
            'sw-unblock-domain': unblock_domain_command,
            'sw-list-blocked-domains': list_blocked_domains_command,
            'sw-list-observations': list_observations_command,
            'sw-list-sessions': list_sessions_command,
            'fetch-incidents': fetch_incidents,
        }
        handler = command_handlers.get(command)
        if handler is not None:
            demisto.results(handler())
except Exception as e:
    LOG(e)
    LOG.print_log()
    raise
| mit | 47303f1f4fa5f60a61809808c9d20139 | 29.91844 | 116 | 0.608212 | 3.794995 | false | false | false | false |
demisto/content | Packs/ARIAPacketIntelligence/Integrations/ARIAPacketIntelligence/ARIAPacketIntelligence.py | 2 | 97247 | import demistomock as demisto
from CommonServerPython import *
import json
import requests
import time
import re
class ParameterError(Exception):
    """Signals that supplied function parameters do not meet requirements."""
"""
Remediation Configuration String (RCS) that use to select SIA.
"""
class RCS:
    """
    Parser/validator for a Remediation Configuration String (RCS).

    An RCS has the shape:
        [securityDomain@<SDN>,...$][Remediation@<ret>,...$]PIdevice@<RD>,...
    where the security-domain list (SDL) and remediation list (RET) are
    optional and the remediation-device list (RDL) is mandatory.
    Accessors return the parsed SDL / RET / RDL components.
    """
    def __init__(self, rcs=None):
        self.rcs = rcs
        if self.rcs is None:
            # Default selects every PI device.
            self.rcs = "PIdevice@all"
        """
        Used to indicate which RET types currently supported
        """
        self.RET_functions = {"drop": self._parse_RET_drop}
    """
    destructor
    """
    def __del__(self):
        # NOTE(review): the return value of __del__ is ignored by Python.
        return 0
    """
    Parse a drop command and return its representation
    for being put into a NRDO action / rule.
    """
    def _parse_RET_drop(self, rcs):
        if rcs is None:
            return None, None
        elif rcs == "":
            return None, None
        # Expects the empty argument list "()" followed by the rest of the RCS.
        rcsp = re.match("^[(][)](.+)$", rcs)
        if rcsp is None:
            return None, rcs
        elif rcsp.group(1) is None:
            return "", None
        elif rcsp.group(1) == "":
            return "", None
        RET_drop = ["drop"]
        return RET_drop, rcsp.group(1)
    """
    Parse a SIA simple name
    """
    def _parse_RDL_RD_name(self, rcs):
        if rcs is None:
            return None, None, "failed: RD name rcs none"
        elif rcs == "":
            return None, None, "failed: RD name rcs empty"
        rcsp = re.match(r"^(\w[\w-]*)(.*)$", rcs)
        if rcsp is None:
            return None, None, "failed: RD name match none"
        elif rcsp.group(1) is None:
            return None, None, "failed: RD name none"
        elif rcsp.group(1) == "":
            return None, None, "failed: RD name empty"
        RD_name = ("name", rcsp.group(1))
        rcs = rcsp.group(2)
        return RD_name, rcs, "success: {0}".format(rcsp.group(1))
    """
    Parse a FQN
    """
    def _parse_RDL_RD_FQN(self, rcs):
        if rcs is None:
            return None, None, "failed: RD fqn rcs none"
        elif rcs == "":
            return None, None, "failed: RD fqn rcs empty"
        # NOTE(review): the character class '[\w_-<>.]' contains '_-<', which
        # modern `re` treats as a reversed range and appears to raise
        # re.error at match time — confirm FQN specs are actually usable.
        rcsp = re.match(r"^([<][\w_-<>.]+[>])(.*)$", rcs)
        if rcsp is None:
            return None, None, "failed: RD fqn match none"
        elif rcsp.group(1) is None:
            return None, None, "failed: RD fqn none"
        elif rcsp.group(1) == "":
            return None, None, "failed: RD fqn empty"
        RD_fqn = ("FQN", rcsp.group(1))
        rcs = rcsp.group(2)
        return RD_fqn, rcs, "success: {0}".format(rcsp.group(1))
    """
    Parse a security domain name SDN
    """
    def _parse_RDL_RD_SDN(self, rcs):
        if rcs is None:
            return None, None, "failed: RD sd rcs none"
        elif rcs == "":
            return None, None, "failed: RD sd rcs empty"
        rcsp = re.match(r"^\^(\w[\w-]*)(.*)$", rcs)
        if rcsp is None:
            return None, None, "failed: RD sd match none"
        elif rcsp.group(1) is None:
            return None, None, "failed: RD sd none"
        elif rcsp.group(1) == "":
            return None, None, "failed: RD sd empty"
        RD_sdn = ("securityDomain", rcsp.group(1))
        rcs = rcsp.group(2)
        return RD_sdn, rcs, "success: {0}".format(rcsp.group(1))
    """
    Parse an RGN label as a name
    """
    def _parse_RDL_RD_RGN_name(self, rcs):
        if rcs is None:
            return None, None, "failed: RD rgn name rcs none"
        elif rcs == "":
            return None, None, "failed: RD rgn name rcs empty"
        rcsp = re.match(r"^(\w[\w-]*)(.*)$", rcs)
        if rcsp is None:
            return None, None, "failed: RD rgn name rcsp none"
        elif rcsp.group(1) is None:
            return None, None, "failed: RD rgn name rcsp.g1 none"
        elif rcsp.group(1) == "":
            return None, None, "failed: RD rgn name rcsp.g1 empty"
        return rcsp.group(1), rcsp.group(2), "success"
    """
    Parse an RGN label as a list of names
    """
    def _parse_RDL_RD_RGN_list(self, rcs):
        if rcs is None:
            return None, None, "failed: RD rgn list rcs none"
        elif rcs == "":
            return None, None, "failed: RD rgn list rcs empty"
        rcsp = re.match(r"^[(](.+)$", rcs)
        if rcsp is None:
            return None, None, "failed: RD rgn list rcsp none"
        elif rcsp.group(1) is None:
            return None, None, "failed: RD rgn list rcsp.g1 none"
        elif rcsp.group(1) == "":
            return None, None, "failed: RD rgn list rcsp.g1 empty"
        rcs = rcsp.group(1)
        names = ""
        # Accumulate comma-separated names until the closing ')'.
        while True:
            rcsp = re.match(r"(\w[\w-]*)(.+)$", rcs)
            if rcsp is None:
                return None, None, "failed: RD rgn list rcsp name none"
            elif rcsp.group(1) is None:
                return None, None, "failed: RD rgn list rcsp.g1 name none"
            elif rcsp.group(1) == "":
                return None, None, "failed: RD rgn list rcsp.g1 name empty"
            names = "{0}{1}".format(names, rcsp.group(1))
            rcs = rcsp.group(2)
            if rcs is None:
                return None, None, "failed: RD rgn list rcsp.g2 name none"
            elif rcs == "":
                return None, None, "failed: RD rgn list rcsp.g2 name empty"
            rcsp = re.match("^[)](.*)$", rcs)
            if rcsp is not None:
                rcs = rcsp.group(1)
                break
            rcsp = re.match("^,(.+)$", rcs)
            if rcsp is None:
                return None, None, "failed: RD rgn list rcsp comma none"
            elif rcsp.group(1) is None:
                return None, None, "failed: RD rgn list rcsp.g1 comma none"
            elif rcsp.group(1) == "":
                return None, None, "failed: RD rgn list rcsp.g1 comma empty"
            rcs = rcsp.group(1)
            names = "{},".format(names)
        if names == "":
            return None, None, "failed: RD rgn list names empty"
        names = "({})".format(names)
        return names, rcs, "success: {0}".format(names)
    """
    Parse an RGN label as asterik
    """
    def _parse_RDL_RD_RGN_asterik(self, rcs):
        if rcs is None:
            return None, None, "failed: RD rgn asterik rcs none"
        elif rcs == "":
            return None, None, "failed: RD rgn asterik rcs empty"
        rcsp = re.match(r"^\*(.*)$", rcs)
        if rcsp is None:
            return None, None, "failed: RD rgn asterik rcsp none"
        return "*", rcsp.group(1), "success"
    """
    Parse an RGN
    """
    def _parse_RDL_RD_RGN_label(self, rcs):
        # A label is one of: !(<list>), (<list>), *, or a plain name.
        if rcs is None:
            return None, None, "failed: RD label rgn rcs none"
        elif rcs == "":
            return None, None, "failed: RD label rgn rcs empty"
        while True:
            rcsp = re.match(r"^[!]([(].*)$", rcs)
            if rcsp is not None:
                if rcsp.group(1) is None:
                    return None, None, "failed: RD rgn label exclusive g1 none"
                elif rcsp.group(1) == "":
                    return None, None, "failed: RD rgn label exclusive g1 empty"
                rcs = rcsp.group(1)
                label, rcs, msg = self._parse_RDL_RD_RGN_list(rcs)
                if label is None:
                    return None, None, "failed: RD rgn label exclusive none {0}".format(msg)
                elif label == "":
                    return None, None, "failed: RD rgn label exclusive empty {0}".format(msg)
                label = "!{0}".format(label)
                break
            rcsp = re.match(r"^[(].*$", rcs)
            if rcsp is not None:
                label, rcs, msg = self._parse_RDL_RD_RGN_list(rcs)
                if label is None:
                    return None, None, "failed: RD rgn label inclusive none {0}".format(msg)
                elif label == "":
                    return None, None, "failed: RD rgn label inclusive empty {0}".format(msg)
                break
            rcsp = re.match(r"^\*.*$", rcs)
            if rcsp is not None:
                label, rcs, msg = self._parse_RDL_RD_RGN_asterik(rcs)
                if label is None:
                    return None, None, "failed: RD rgn label asterik none {0}".format(msg)
                elif label == "":
                    return None, None, "failed: RD rgn label asterik empty {0}".format(msg)
                break
            rcsp = re.match(r"^[\w].*$", rcs)
            if rcsp is not None:
                label, rcs, msg = self._parse_RDL_RD_RGN_name(rcs)
                if label is None:
                    return None, None, "failed: RD rgn label name none {0}".format(msg)
                elif label == "":
                    return None, None, "failed: RD rgn label name empty {0}".format(msg)
                break
            return None, None, "failed: RD rgn label invalid"
        return label, rcs, "success"
    """
    Parse an RGN
    """
    def _parse_RDL_RD_RGN(self, rcs):
        # An RGN is three dot-separated labels: region.group.name
        if rcs is None:
            return None, None, "failed: RD rgn rcs none"
        elif rcs == "":
            return None, None, "failed: RD rgn rcs empty"
        region, rcs, msg = self._parse_RDL_RD_RGN_label(rcs)
        if region is None:
            return None, None, "failed: RD rgn region none {0}".format(msg)
        elif region == "":
            return None, None, "failed: RD rgn region empty {0}".format(msg)
        elif rcs is None:
            return None, None, "failed: RD rgn region rcs none {0}".format(msg)
        elif rcs == "":
            return None, None, "failed: RD rgn region rcs empty {0}".format(msg)
        rcsp = re.match(r"^\.(.+)$", rcs)
        if rcsp is None:
            return None, None, "failed: RD rgn region rcsp none ."
        elif rcsp.group(1) is None:
            return None, None, "failed: RD rgn region rcsp.g1 none ."
        elif rcsp.group(1) == "":
            return None, None, "failed: RD rgn region rcsp.g1 empty ."
        rcs = rcsp.group(1)
        group, rcs, msg = self._parse_RDL_RD_RGN_label(rcs)
        if group is None:
            return None, None, "failed: RD rgn group none {0}".format(msg)
        elif group == "":
            return None, None, "failed: RD rgn group empty {0}".format(msg)
        elif rcs is None:
            return None, None, "failed: RD rgn group rcs none {0}".format(msg)
        elif rcs == "":
            return None, None, "failed: RD rgn group rcs empty {0}".format(msg)
        rcsp = re.match(r"^\.(.+)$", rcs)
        if rcsp is None:
            return None, None, "failed: RD rgn group rcsp none ."
        elif rcsp.group(1) is None:
            return None, None, "failed: RD rgn group rcsp.g1 none ."
        elif rcsp.group(1) == "":
            return None, None, "failed: RD rgn group rcsp.g1 empty ."
        rcs = rcsp.group(1)
        name, rcs, msg = self._parse_RDL_RD_RGN_label(rcs)
        if name is None:
            return None, None, "failed: RD rgn name none {0}".format(msg)
        elif name == "":
            return None, None, "failed: RD rgn name empty {0}".format(msg)
        RGN = ("RGN", "{0}.{1}.{2}".format(region, group, name))
        return RGN, rcs, "success"
    """
    parse the RDL component of the RCS:
    RDL :: PIdevice@[<RD><RD_LIST>*]
    RD :: name | SDN | RGN | FQN
    RD_LIST :: , <RD>
    """
    def _parse_RDL(self, rcs):
        if rcs is None:
            return None, None, "failed: rcs is none"
        elif rcs == "":
            return None, None, "failed: rcs is empty"
        rcsp = re.match("^PIdevice@(.*)$", rcs)
        if rcsp is None:
            return None, rcs, "failure: invalid keyword"
        RDL_all = ("RGN", "all.all.all")
        RDL = []
        if rcsp.group(1) is None:
            return RDL_all, None, "success: all (none)"
        elif rcsp.group(1) == "":
            return RDL_all, None, "success: all (empty)"
        rcs = rcsp.group(1)
        # Consume comma-separated remediation-device specs until exhausted.
        while True:
            if rcs is None:
                break
            elif rcs == "":
                break
            while True:
                rcsp = re.match(r"^all\..+$", rcs)
                if rcsp is not None:
                    if rcsp.group(0) == "":
                        return None, None, "failure: RGN (all-none)"
                    RD, rcs, msg = self._parse_RDL_RD_RGN(rcs)
                    if RD is None:
                        return None, None, "failure: RGN (all-obj) {0}".format(msg)
                    elif len(RD) != 2:
                        return None, None, "failure: RGN (all len != 2) {0}".format(msg)
                    RDL.append(RD)
                    break
                rcsp = re.match(r"^all(.*)$", rcs)
                if rcsp is not None:
                    if rcsp.group(0) == "":
                        return None, None, "failure: all (empty)"
                    RDL.append(RDL_all)
                    rcs = rcsp.group(1)
                    break
                rcsp = re.match(r"^\^.*$", rcs)
                if rcsp is not None:
                    if rcsp.group(0) == "":
                        return None, None, "failure: SD (none)"
                    RD, rcs, msg = self._parse_RDL_RD_SDN(rcs)
                    if RD is None:
                        return None, None, "failure: SD (empty) {0}".format(msg)
                    elif len(RD) != 2:
                        return None, None, "failure: SD (len != 2) {0}".format(msg)
                    RDL.append(RD)
                    break
                rcsp = re.match(r"^[<].*$", rcs)
                if rcsp is not None:
                    if rcsp.group(0) == "":
                        return None, None, "failure: FQN (empty)"
                    RD, rcs, msg = self._parse_RDL_RD_FQN(rcs)
                    if RD is None:
                        return None, None, "failure: FQN (obj) {0}".format(msg)
                    elif len(RD) != 2:
                        return None, None, "failure: FQN (len != 2) {0}".format(msg)
                    RDL.append(RD)
                    break
                rcsp = re.match(r"^[!].*$", rcs)
                if rcsp is not None:
                    if rcsp.group(0) == "":
                        return None, None, "failure: RGN (exclusive-none)"
                    RD, rcs, msg = self._parse_RDL_RD_RGN(rcs)
                    if RD is None:
                        return None, None, "failure: RGN (exclusive-obj) {0}".format(msg)
                    elif len(RD) != 2:
                        return None, None, "failure: RGN (exclusive len != 2) {0}".format(msg)
                    RDL.append(RD)
                    break
                rcsp = re.match(r"^[(].*$", rcs)
                if rcsp is not None:
                    if rcsp.group(0) == "":
                        return None, None, "failure: RGN (inclusive-none)"
                    RD, rcs, msg = self._parse_RDL_RD_RGN(rcs)
                    if RD is None:
                        return None, None, "failure: RGN (inclusive-none) {0}".format(msg)
                    elif len(RD) != 2:
                        return None, None, "failure: RGN (inclusive len != 2) {0}".format(msg)
                    RDL.append(RD)
                    break
                rcsp = re.match(r"^\*\..*$", rcs)
                if rcsp is not None:
                    if rcsp.group(0) == "":
                        return None, None, "failure: RGN (asterik-none)"
                    RD, rcs, msg = self._parse_RDL_RD_RGN(rcs)
                    if RD is None:
                        return None, None, "failure: RGN (asterik-none) {0}".format(msg)
                    elif len(RD) != 2:
                        return None, None, "failure: RGN (asterik len != 2) {0}".format(msg)
                    RDL.append(RD)
                    break
                rcsp = re.match(r"^\*(.*)$", rcs)
                if rcsp is not None:
                    if rcsp.group(0) == "":
                        return None, None, "failure: asterik (empty)"
                    RDL.append(RDL_all)
                    rcs = rcsp.group(1)
                    break
                rcsp = re.match(r"^[\w].*$", rcs)
                if rcsp is None:
                    return None, None, "failure: name should be there"
                rcsp = re.match(r"^\w[\w-]*\..*$", rcs)
                if rcsp is not None:
                    if rcsp.group(0) == "":
                        return None, None, "failure: name RGN (none)"
                    RD, rcs, msg = self._parse_RDL_RD_RGN(rcs)
                    if RD is None:
                        return None, None, "failure: RGN name (obj) {0}".format(msg)
                    elif len(RD) != 2:
                        return None, None, "failure: RGN name (len != 2) {0}".format(msg)
                    RDL.append(RD)
                    break
                RD, rcs, msg = self._parse_RDL_RD_name(rcs)
                if RD is None:
                    return None, None, "failure: NAME (obj) {0}".format(msg)
                elif len(RD) != 2:
                    return None, None, "failure: NAME (len != 2) {0}".format(msg)
                RDL.append(RD)
                break
            if rcs is None:
                break
            elif rcs == "":
                break
            rcsp = re.match("^,(.*)$", rcs)
            if rcsp is None:
                return None, None, "failure: RDL , obj (none)"
            elif rcsp.group(1) is None:
                return None, None, "failure: RDL , (none)"
            elif rcsp.group(1) == "":
                return None, None, "failure: RDL , (empty)"
            rcs = rcsp.group(1)
        if rcs is not None:
            if rcs != "":
                return None, "", "failure: RCS ended-!empty"
        if len(RDL) <= 0:
            return None, None, "failure: RDL empty"
        return RDL, None, "success: {0}".format(len(RDL))
    """
    parse the RET component of the RCS:
    RET :: Remediation@<ret><ret_list>
    ret :: drop() | alert(...) | redirect(...) | serviceChain(...)
    SDN_LIST :: , <ret>
    """
    def _parse_RET(self, rcs):
        if rcs is None:
            return None, None, "failure: RET RCS none"
        elif rcs == "":
            return None, None, "failure: RET RCS empty"
        rcsp = re.match("^Remediation@(.+)$", rcs)
        if rcsp is None:
            # RET section is optional: synthesize a default drop() action.
            rcs = "Remediation@drop()${0}".format(rcs)
            rcsp = re.match("^Remediation@(.+)$", rcs)
            if rcsp is None:
                return None, rcs, "failure: RET failed insert drop()"
        if rcsp.group(1) is None:
            return None, None, "failuure: RET obj none"
        elif rcsp.group(1) == "":
            return None, None, "failure: RET obj empty"
        rcs = rcsp.group(1)
        RET = []
        rcsp = re.match(r"^\$(.+)$", rcs)
        if rcsp is not None:
            rcs = "drop(){0}".format(rcs)
        while True:
            rcsp = re.match(r"(\w[\w]*)([(].+\$.+)$", rcs)
            if rcsp is None:
                return None, None, "failure: RET obj type none"
            elif rcsp.group(1) is None:
                return None, None, "failure: RET type none"
            elif rcsp.group(1) == "":
                return None, None, "failure: RET type empty"
            RET_parse_func = self.RET_functions.get(rcsp.group(1))
            if RET_parse_func is None:
                return None, None, "failure: RET type not found"
            rcs = rcsp.group(2)
            if rcs is None:
                return None, None, "failure: RET RCS none"
            elif rcs == "":
                return None, None, "failure: RET RCS empty"
            """
            call the RET type parser to create
            its PI object.
            """
            obj, rcs = RET_parse_func(rcs)
            if obj is None:
                return None, None, "failure: RET func obj none"
            elif len(obj) < 1:
                return None, None, "failure: RET func obj 1 or greater"
            elif rcs is None:
                return None, None, "failure: RET func RCS none"
            elif rcs == "":
                return None, None, "failure: RET func RCS empty"
            RET.append(obj)
            rcsp = re.match(r"^\$(.+)$", rcs)
            if rcsp is not None:
                break
            rcsp = re.match(r"^,(\w[\w]*[(].+\$.+)$", rcs)
            if rcsp is None:
                return None, None, "failure: RET next obj none"
            elif rcsp.group(1) is None:
                return None, None, "failure: RET next none"
            elif rcsp.group(1) == "":
                return None, None, "failure: RET next empty"
            rcs = rcsp.group(1)
        if rcs is None:
            return None, None, "failure: RET end RCS none"
        elif rcs == "":
            return None, None, "failure: RET end RCS empty"
        rcsp = re.match(r"^\$(.+)$", rcs)
        if rcsp is None:
            return None, None, "failure: RET # remove none"
        elif rcsp.group(1) is None:
            return None, None, "failure: RET # remove group none"
        elif rcsp.group(1) == "":
            return None, None, "failure: RET # remove group empty"
        rcs = rcsp.group(1)
        if len(RET) <= 0:
            return None, None, "failure: RET list empty"
        return RET, rcs, "success: {0}".format(len(RET))
    """
    parse the SDL component of the RCS:
    SDL :: securityDomain@<SDN><SDN_LIST>*
    SDN :: <a-zA-Z0-9_><a-zA-Z0-9_>*
    SDN_LIST :: , <SDN>
    """
    def _parse_SDL(self, rcs):
        if rcs is None:
            return None, None, "failure: RCS is none"
        elif rcs == "":
            return None, None, "failure: RCS is empty"
        rcsp = re.match("^securityDomain@(.+)$", rcs)
        if rcsp is None:
            # SDL section is optional: default to all security domains.
            SDL = ["all"]
            return SDL, rcs, "success: 1"
        if rcsp.group(1) is None:
            return None, None, "failure: none"
        elif rcsp.group(1) == "":
            return None, None, "failure: empty"
        rcs = rcsp.group(1)
        SDL = []
        while True:
            rcsp = re.match(r"(\w[\w-]*)(.*\$.+)$", rcs)
            if rcsp is None:
                return None, None, "failure: bad SDN"
            elif rcsp.group(1) is None:
                return None, None, "failure: SDN none"
            elif rcsp.group(1) == "":
                return None, None, "failure: SDN empty"
            SDL.append(rcsp.group(1))
            rcs = rcsp.group(2)
            if rcs is None:
                return None, None, "failure: SDN no more RCS none"
            elif rcs == "":
                return None, None, "failure: SDN no more RCS empty"
            rcsp = re.match(r"^\$(.+)$", rcs)
            if rcsp is not None:
                break
            rcsp = re.match(r"^,(\w[\w-]*.*\$.+)$", rcs)
            if rcsp is None:
                return None, None, "failure: SDN obj advance none"
            elif rcsp.group(1) is None:
                return None, None, "failure: SDN advance none"
            elif rcsp.group(1) == "":
                return None, None, "failure: SDN advance empty"
            rcs = rcsp.group(1)
        if rcs is None:
            return None, None, "failure: SDL RCS none"
        elif rcs == "":
            return None, None, "failure: SDL RCS empty"
        rcsp = re.match(r"^\$(.+)$", rcs)
        if rcsp is None:
            return None, None, "failure: SDL # remove none"
        elif rcsp.group(1) is None:
            return None, None, "failure: SDL # remove group none"
        elif rcsp.group(1) == "":
            return None, None, "failure: SDL # remove group empty"
        rcs = rcsp.group(1)
        if len(SDL) <= 0:
            return None, None, "failure: SDL list empty"
        return SDL, rcs, "success: {0}".format(len(SDL))
    """
    Parse out the components of the RCS: [SDL] | [RET] | RDL
    and return all three. If the optional component is
    not found then it returns None but if keyword is
    found nothing else then returns empty.
    The fourth result returned is if there is remaining characters
    in the original RCS then its returned.
    Returns info in fifth result
    """
    def _parse(self, rcs):
        if rcs is None:
            return None, None, None, None, "failed: RCS is none"
        elif rcs == "":
            return None, None, None, None, "failed: RCS is empty"
        # Whitespace anywhere in an RCS is rejected outright.
        rcsp = re.search(" ", rcs)
        if rcsp is not None:
            return None, None, None, None, "failed: space character found in RCS"
        SDL, rcs_next, msg = self._parse_SDL(rcs)
        if SDL is not None:
            if len(SDL) <= 0:
                return None, None, None, None, "failed: SDL returned but is empty (msg={0})".format(msg)
        if rcs_next is None:
            return SDL, None, None, None, "failed: RCS invalid parse after SDL (none) (msg={0})".format(msg)
        elif rcs_next == "":
            return SDL, None, None, None, "failed: RCS invalid parse after SDL (empty) (msg={0})".format(msg)
        RET, rcs_next, msg = self._parse_RET(rcs_next)
        if RET is None:
            return SDL, None, None, None, "failed: RET is none (msg={0})".format(msg)
        elif len(RET) <= 0:
            return SDL, None, None, None, "failed: RET is empty (msg={0})".format(msg)
        elif rcs_next is None:
            return SDL, RET, None, None, "failed: RCS invalid parse after RET (none) (msg={0})".format(msg)
        elif rcs_next == "":
            return SDL, RET, None, None, "failed: RCS invalid parse after RET (none) (msg={0})".format(msg)
        RDL, rcs_next, msg = self._parse_RDL(rcs_next)
        if RDL is None:
            return SDL, RET, None, None, "failed: RDL is none (msg={0})".format(msg)
        elif len(RDL) <= 0:
            return SDL, RET, None, None, "failed: RDL is empty (msg={0})".format(msg)
        elif rcs_next is not None:
            if rcs_next != "":
                return SDL, RET, RDL, None, "failed: RCS invalid parse after RDL (not empty) (msg={0})".format(msg)
        return SDL, RET, RDL, rcs_next, "success"
    """
    Returns true if the RCS provided at object instantiation
    time is a valid RCS value, otherwise it returns false.
    """
    def _valid(self, rcs):
        if rcs is None:
            return False
        # rcs_save = rcs
        SDL, RET, RDL, rcs, rmsg = self._parse(rcs)
        if RDL is None:
            # print("ARIA: remediation configuraton string (RCS) is invalid -- this will prevent remediation
            # to ARIA PI devices from working (rcs={0}:: rmsg={1})".format(rcs_save, rmsg))
            return False
        # print("ARIA: remediation configuraton string (RCS) is valid (rcs={0})".format(rcs_save))
        return True
    """
    Returns true if the RCS provided at object instantiation
    time is a valid RCS value, otherwise it returns false.
    """
    def valid(self):
        if not self._valid(self.rcs):
            return False
        return True
    """
    Allows setting the RCS string to act on, this will only
    set it if the string is already empty. Otherwise it
    should use modify.
    """
    def set(self, rcs):
        if self.rcs is None:
            if not self._valid(rcs):
                return False
            self.rcs = rcs
        else:
            return False
        return True
    """
    Allows changing the RCS string after its been previsouly
    set or not.
    """
    def modify(self, rcs):
        if not self._valid(rcs):
            return False
        self.rcs = rcs
        return True
    """
    Assuming the securtiy domain component of the RCS is valid
    then it returns the parsed out security domain component if
    it exists. It will return it as a list of security domain
    object name strings.
    If there is an error in parsing the security domain component
    null is returned. If it was not provided then "all"
    list is returned.
    """
    def security_domain(self):
        SDL, RET, RDL, rcs, rmsg = self._parse(self.rcs)
        if RDL is None:
            return None, False
        return SDL, True
    """
    Assuming the RCS is valid
    then return the RDL
    If there is an error in parsing the RDL component is
    returned as a NULL.
    """
    def remediation_device_list(self):
        SDL, RET, RDL, rcs, rmsg = self._parse(self.rcs)
        if RDL is None:
            return None, False
        return RDL, True
    """
    Assuming the securtiy domain component of the RCS is valid
    Assuming the RCS is valid
    then return the remediation action instruction.
    If there is an error in parsing the RET component is
    returned as a NULL.
    """
    def remediation_instruction(self):
        SDL, RET, RDL, rcs, rmsg = self._parse(self.rcs)
        if RDL is None:
            return None, False
        return RET, True
class ARIA(object):
def __init__(self, sdso_url: str, verify_cert: bool = True):
self.sdso_url = sdso_url
self.time_out = 20
self.verify_cert = verify_cert
"""HELPER FUNCTION"""
@staticmethod
def _build_alert_instruction(transport_type: str, tti_index: int, aio_index: int,
trigger_type: str, trigger_value: int) -> str:
""" Create an alert instruction
Args:
transport_type: The type of notification to generate.
Valid values are 'email', 'SMS', 'syslog' or 'webhook'.
tti_index: The index of the entry in the transport type table.
aio_index: The index of the entry in the alert information object table.
trigger_type: The frequency of the alert. Valid values are 'one-shot', 're-trigger-count',
're-trigger-timed-ms' or 're-trigger-timed-sec'.
trigger_value: The threshold that must be met before the alert is triggered.
Returns: Alert instruction string.
Raises:
ValueError: If parameters are out of range or not in the type list.
"""
transport_type_list = ['email', 'SMS', 'syslog', 'webhook']
if transport_type not in transport_type_list:
raise ValueError(f'Wrong transport_type {transport_type}! Valid values are email, SMS, syslog or webhook')
if tti_index > 7 or tti_index < 0:
# This is an ARIA PI Reaper production requirement
raise ValueError('Transport type info index(tti_index) out of range! '
'Valid value must be in the range [0, 7].')
if aio_index > 15 or aio_index < 0:
# This is an ARIA PI Reaper production requirement
raise ValueError('Alert info object index(aio_index) out of range! '
'Valid value must be in range [0, 15]')
trigger_type_list = ['one-shot', 're-trigger-count', 're-trigger-timed-ms', 're-trigger-timed-sec']
if trigger_type not in trigger_type_list:
# This is an ARIA PI Reaper production requirement
raise ValueError(f'Wrong trigger_type {trigger_type}! Valid values are one-shot, re-trigger-count, '
're-trigger-timed-ms, re-trigger-timed-sec')
if trigger_value < 1 or trigger_value > 8191:
# This is an ARIA PI Reaper production requirement
raise ValueError('Trigger value(trigger_value) out of range! It must be in range [1, 8191]')
instruction = f'ALERT {transport_type} {tti_index} {aio_index} {trigger_type} {trigger_value}'
return instruction
@staticmethod
def _process_port_range(port_range: str = None) -> str:
""" Validation function for range of ports
Args:
port_range: The source or destination port(s). This accepts a
comma-separated list (e.g., “1, 3”), a range (e.g., “1-3”), or a combination (e.g., “1, 3-5”).
Returns: The string of port_range.
Raises:
ValueError: If port_range is out of range 0-65535 or in wrong format.
"""
if not port_range:
port_range = '0-65535' # default port_range value
split_port_range = port_range.replace(' ', '').split(',')
res = ''
for port in split_port_range:
if res:
res = res + ', '
if '-' in port:
beg, end = port.replace(' ', '').split('-')
for j in beg, end:
if int(j) < 0 or int(j) > 65535:
raise ValueError('Port must be in 0-65535!')
if int(beg) > int(end):
raise ValueError('Wrong port range format!')
res += beg + ' - ' + end
else:
if int(port) < 0 or int(port) > 65535:
raise ValueError('Port must be in 0-65535!')
res += port
return res
@staticmethod
def _process_ip_address(ip: str) -> str:
""" Validation function for IP address
Args:
ip: The IP address and mask of the IP address, in the format <IP_address>/<mask>. If the mask is omitted,
a value of 32 is used.
Returns: String of IP address.
Raises:
ValueError: If the netmask is out of range or IP address is not expressed in CIDR notation
"""
netmask = '32'
ip_str = ip.replace(' ', '')
if '/' in ip_str:
ip_addr, netmask = ip_str.split('/')
else:
ip_addr = ip_str
if int(netmask) > 32 or int(netmask) < 1:
raise ValueError('Subnet mask must be in range [1, 32].')
ip_addr_split = ip_addr.split('.')
for syllable in ip_addr_split:
if int(syllable) < 0 or int(syllable) > 255:
raise ValueError('Wrong IP format!')
if len(ip_addr_split) != 4:
raise ValueError('Wrong IP format!')
res = ip_addr + '/' + netmask
return res
    @staticmethod
    def _parse_rcs(rcs):
        """ Parse Remediation Configuration String
        Args:
            rcs: Remediation Configuration String.
        Returns:
            sd_list: List of securityDomain Object
            sia_list: List of securityDomain SIA Object
        Raises:
            ParameterError: Raised when Input RCS is not valid.
        """
        rcs = RCS(rcs)
        if not rcs.valid():
            raise ParameterError('Your Input RCS is not valid!')
        sd_list_tuple, sd_list_valid = rcs.security_domain()
        sd_list = []
        if sd_list_valid:
            for element in sd_list_tuple:
                sd_list.append({"SDN": element})
        sia_list_tuple, sia_list_valid = rcs.remediation_device_list()
        sia_list = []
        # NOTE(review): this unconditionally overwrites the sd_list built from
        # the parsed security-domain component above, so every request is sent
        # with securityDomain "all" — confirm whether this override is intentional
        # or a bug that discards the user's securityDomain@ selection.
        sd_list = [{"SDN": "all"}]
        if sia_list_valid:
            for element in sia_list_tuple:
                sia_object = {
                    'sia_specification_type': element[0],
                    'sia_specification': element[1]
                }
                sia_list.append(sia_object)
        return sd_list, sia_list
@staticmethod
def _generate_rule_forward_spec(rule_name: str, logic_block: str, rule: str, named_rule_action: str, sd_list: list,
sia_list: list, instance_id: str = None) -> dict:
""" Generate rule forward spec for ruleforward API
Args:
rule_name: The name of the rule to create.
logic_block: Parameter used to form named rule data. Examples: '5-tuple', 'src-port', etc.
rule: Parameter used to form named rule data.
named_rule_action: Must be 'add' or 'remove'
instance_id: The instance number of the ARIA PI instance.
sd_list: List of security domain object.
sia_list: List of security domain sia object.
Returns: Dictionary data of named rule.
"""
instance_id_type = 'instance-number'
if instance_id is None:
instance_id_type = 'all'
instance_id = ''
if named_rule_action == 'remove':
rule = ''
named_rule = f'\"name\": \"{rule_name}\", \"logic_block\": \"{logic_block}\", \"rule\": \"{rule}\"'
named_rule_distribution = {
'kind': 'NamedRuleDistribution',
'instance_id': instance_id,
'instance_id_type': instance_id_type,
'named_rule': named_rule,
'named_rule_action': named_rule_action,
'sd_list': sd_list,
'sia_list': sia_list
}
rule_forward_spec = {
'selector': named_rule_distribution
}
return rule_forward_spec
def _wait_for_trid(self, trid: str) -> bool:
    """ Poll whether the transaction identified by trid has completed.
    Args:
        trid: The request id returned when adding a rule to ARIA PI Reaper.
    Returns: True if the transaction reported SUCCESS, False if it reported
        FAILURE or the ~20-second polling window expired without an answer.
    """
    # URL used to query the transaction status.
    trid_url = self.sdso_url + f'/packetClassification/completion/transaction?PC_TRID={trid}'
    t0 = time.perf_counter()
    # Poll roughly once per second for up to 20 seconds.
    while time.perf_counter() - t0 < 20:
        res = requests.get(trid_url, timeout=self.time_out, verify=self.verify_cert)
        if res.ok:
            # res.json() raises on a malformed body; let that propagate (the
            # previous `except json.JSONDecodeError: raise` was a no-op).
            # BUGFIX: guard against a payload without 'tclList' — iterating
            # None raised TypeError before.
            tcl_list = res.json().get('tclList') or []
            for tcl_entry in tcl_list:
                if 'SUCCESS' in tcl_entry['status']:
                    return True
                if 'FAILURE' in tcl_entry['status']:
                    return False
        time.sleep(1)
    return False
def _remove_rule(self, rule_name: str, logic_block: str, instance_id: str = None, rcs: str = None) -> dict:
    """ Remove a named rule from the ARIA PI Reaper.
    Args:
        rule_name: The name of the rule to create.
        logic_block: Parameter used to form named rule data. Examples: '5-tuple', 'src-port', etc.
        instance_id: The instance number of the ARIA PI instance.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    endpoint_url = self.sdso_url + '/ruleForward'
    request_headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    sd_list, sia_list = self._parse_rcs(rcs)
    payload = self._generate_rule_forward_spec(rule_name=rule_name, logic_block=logic_block, rule='no-rule',
                                               named_rule_action='remove', instance_id=instance_id,
                                               sd_list=sd_list, sia_list=sia_list)
    # requests.exceptions.RequestException simply propagates to the caller.
    response = requests.put(endpoint_url, data=json.dumps(payload), headers=request_headers,
                            timeout=self.time_out, verify=self.verify_cert)
    command_state_str = 'Failure'
    response_timestamp = None
    ep_res = None
    if response and response.ok:
        body = response.json()
        endpoints = body.get('endpoints')
        if endpoints:
            # Success unless any endpoint's transaction fails to complete.
            command_state_str = 'Success'
            for ep in endpoints:
                completed = self._wait_for_trid(str(ep.get('trid')))
                ep['completion'] = completed
                if not completed:
                    command_state_str = 'Failure'
        else:
            command_state_str = 'Endpoint matching RCS not found!'
        response_timestamp = body.get('timestamp')
        ep_res = endpoints
    return {
        'Rule': {
            'Name': rule_name,
            'Definition': f'Remove {rule_name}',
            'RCS': rcs
        },
        'Status': {
            'command_state': command_state_str,
            'timestamp': response_timestamp
        },
        'Endpoints': ep_res
    }
def _do_request(self, data: dict, rule_name: str, rule: str, rcs: str = None) -> dict:
    """ Send a request to ARIA PI Reaper to create a rule

    First tries to install the rule on instance 0 of every endpoint matching
    the spec; for endpoints where that fails, retries instances 1-9 one
    endpoint at a time, addressed by AgentFQN.
    Args:
        data: Rule Forward Spec data.
        rule_name: Name of the rule.
        rule: String representation of rule.
        rcs: Remediation Configuration String (echoed back into the returned context).
    Returns: Dictionary context data contains useful response information.
    """
    url = self.sdso_url + '/ruleForward'
    headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    # First pass always targets instance 0 on all matching endpoints.
    data['selector']['instance_id_type'] = 'instance-number'
    data['selector']['instance_id'] = '0'
    instance_number = 10 # 10 total instances in ARIA PI Reaper
    command_state_str = 'Failure'
    response_timestamp = None
    endpoints = None
    try:
        response = requests.put(url=url, data=json.dumps(data), headers=headers, timeout=self.time_out,
                                verify=self.verify_cert)
    except requests.exceptions.RequestException:
        raise
    failed_endpoints_index = []
    success_endpoints_index = []
    if response and response.ok:
        response_json = response.json()
        endpoints = response_json.get('endpoints')
        response_timestamp = response_json.get('timestamp')
        if endpoints and len(endpoints) > 0:
            # Wait for each endpoint's transaction and bucket the endpoint
            # indexes by outcome.
            for ep_index, ep in enumerate(endpoints):
                trid = ep.get('trid')
                status = self._wait_for_trid(str(trid))
                # Add completion and instance_number in ep field
                ep['instance_number'] = '0'
                ep['completion'] = status
                if status:
                    success_endpoints_index.append(ep_index)
                else:
                    failed_endpoints_index.append(ep_index)
        # no endpoints matches
        if len(failed_endpoints_index) == 0 and len(success_endpoints_index) == 0:
            command_state_str = "Endpoint matching RCS not found!"
        # rules are created successfully on all endpoints
        elif len(success_endpoints_index) > 0 and len(failed_endpoints_index) == 0:
            command_state_str = "Success"
        # rules are not created successfully on part or all endpoints, should try to forward rules on
        # different instance for the failed endpoints
        else:
            # forward rule to each endpoints by AgentFQN
            command_state_str = "Success"
            for ep_index in failed_endpoints_index:
                ep = endpoints[ep_index]
                AgentFQN = ep.get('AgentFQN')
                # NOTE(review): data.copy() is a *shallow* copy, so
                # temp_forward_data['selector'] is the SAME dict object as
                # data['selector']; the sia_list write below and the
                # instance_id write inside the loop mutate both. The retry
                # works because of that aliasing, but the caller's `data`
                # is modified as a side effect — confirm before refactoring.
                temp_forward_data = data.copy()
                sia_object = {
                    'sia_specification_type': 'FQN',
                    'sia_specification': AgentFQN
                }
                temp_forward_data['selector']['sia_list'] = [sia_object]
                ep_state = False
                for i in range(1, instance_number):
                    # Writing through `data` also updates temp_forward_data
                    # via the shared selector dict (see NOTE above).
                    data['selector']['instance_id'] = str(i)
                    try:
                        ep_response = requests.put(url=url, data=json.dumps(temp_forward_data), headers=headers,
                                                   timeout=self.time_out, verify=self.verify_cert)
                        ep_response_json = ep_response.json()
                        if ep_response_json.get('endpoints'):
                            cur_ep = ep_response_json.get('endpoints')[0]
                            cur_trid = cur_ep.get('trid')
                            cur_state = self._wait_for_trid(str(cur_trid))
                            if cur_state:
                                ep_state = True
                                break
                    except requests.exceptions.RequestException:
                        # Best-effort: a failed attempt just moves on to the
                        # next instance number.
                        pass
                if not ep_state:
                    command_state_str = 'Failure'
                ep['completion'] = ep_state
                # `i` is the last instance number tried (the successful one
                # when ep_state is True, since the loop breaks on success).
                ep['instance_number'] = i if ep_state else None
    context = {
        'Rule': {
            'Name': rule_name,
            'Definition': rule,
            'RCS': rcs
        },
        'Status': {
            'command_state': command_state_str,
            'timestamp': response_timestamp
        },
        'Endpoints': endpoints
    }
    return context
"""SOAR API"""
def block_conversation(self, src_ip: str, target_ip: str, rule_name: str, src_port: str = None,
                       target_port: str = None, protocol: str = None, rcs: str = None) -> dict:
    """ Create a 5-tuple rule that drops all packets matching the given
    source/destination addresses, ports and protocol.
    Args:
        src_ip: The source IP address.
        target_ip: The destination IP address.
        rule_name: The name of the rule to create.
        src_port: The source port(s): a comma-separated list (e.g. "1, 3"),
            a range (e.g. "1-3"), or a combination (e.g. "1, 3-5").
        target_port: The destination port(s), same formats as src_port.
        protocol: The protocol (e.g., TCP) used for the packets.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    src_ip = self._process_ip_address(src_ip)
    src_port = self._process_port_range(src_port)
    target_ip = self._process_ip_address(target_ip)
    target_port = self._process_port_range(target_port)
    # Default protocol if no value was provided.
    protocol = (protocol or 'HOPOPT-255').upper()
    rule = f'{target_ip} @ {target_port} & {src_ip} @ {src_port} <> {protocol} : DROP, END'
    sd_list, sia_list = self._parse_rcs(rcs)
    spec = self._generate_rule_forward_spec(rule_name=rule_name, logic_block='5-tuple', rule=rule,
                                            named_rule_action='add', sd_list=sd_list, sia_list=sia_list)
    return self._do_request(spec, rule_name, rule, rcs)
def unblock_conversation(self, rule_name: str, rcs: str = None) -> dict:
    """ Delete a named rule from the 5-tuple logic block so the previously
    blocked conversation can resume.
    Args:
        rule_name: The name of the rule to delete.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    return self._remove_rule(rule_name, '5-tuple', None, rcs)
def record_conversation(self, src_ip: str, target_ip: str, vlan_id: str, rule_name: str, src_port: str = None,
                        target_port: str = None, protocol: str = None, sia_interface: str = None,
                        transport_type: str = None, tti_index: str = None, aio_index: str = None,
                        trigger_type: str = None, trigger_value: str = None, rcs: str = None) -> dict:
    """ Create a 5-tuple rule that redirects a matching conversation to the
    Packet Recorder (packets tagged with the given VLAN ID) and optionally
    generates an alert.
    Args:
        src_ip: The source IP address.
        target_ip: The destination IP address.
        vlan_id: The VLAN ID your network switch uses to forward packets to the Packet Recorder.
        rule_name: The name of the rule to create.
        src_port: The source port(s): a comma-separated list (e.g. "1, 3"),
            a range (e.g. "1-3"), or a combination (e.g. "1, 3-5").
        target_port: The destination port(s), same formats as src_port.
        protocol: The protocol (e.g., TCP) used for the packets.
        sia_interface: SIA forwarding interface letter; interface A is used when omitted.
        transport_type: The type of notification to generate ('email' or 'syslog').
        tti_index: The index of the entry in the transport type table.
        aio_index: The index of the entry in the alert information object table.
        trigger_type: Alert frequency: 'one-shot', 're-trigger-count',
            're-trigger-timed-ms' or 're-trigger-timed-sec'.
        trigger_value: The threshold that must be met before the alert is triggered.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    Raises:
        ParameterError: If transport_type is given without all of tti_index,
            aio_index, trigger_type and trigger_value.
    """
    # SIA exposes two data ports labelled A and B; default to A.
    sia_interface = 'B' if sia_interface == 'B' else 'A'
    src_ip = self._process_ip_address(src_ip)
    src_port = self._process_port_range(src_port)
    target_ip = self._process_ip_address(target_ip)
    target_port = self._process_port_range(target_port)
    protocol = (protocol or 'HOPOPT-255').upper()
    rule = (f'{target_ip} @ {target_port} & {src_ip} @ {src_port} <> {protocol} : '
            f'REDIRECT-VLAN {sia_interface} {vlan_id}')
    if transport_type is not None:
        if any(p is None for p in (tti_index, aio_index, trigger_type, trigger_value)):
            raise ParameterError(f'Please provide tti_index, aio_index, trigger_type and trigger_value to '
                                 f'use {transport_type} to send an alert.')
        rule += ', ' + self._build_alert_instruction(transport_type, int(tti_index), int(aio_index),
                                                     trigger_type, int(trigger_value))
    rule += ', END'
    sd_list, sia_list = self._parse_rcs(rcs)
    spec = self._generate_rule_forward_spec(rule_name=rule_name, logic_block='5-tuple', rule=rule,
                                            named_rule_action='add', sd_list=sd_list, sia_list=sia_list)
    return self._do_request(spec, rule_name, rule, rcs)
def stop_recording_conversation(self, rule_name: str, rcs: str = None) -> dict:
    """ Delete a named rule from the 5-tuple logic block, stopping the
    redirection of traffic to the Packet Recorder.
    Args:
        rule_name: The name of the rule to delete.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    return self._remove_rule(rule_name, '5-tuple', None, rcs)
def alert_conversation(self, src_ip: str, target_ip: str, rule_name: str, transport_type: str, tti_index: str,
                       aio_index: str, trigger_type: str, trigger_value: str, src_port: str = None,
                       target_port: str = None, protocol: str = None, rcs: str = None) -> dict:
    """ Create a 5-tuple rule that raises an alert whenever a matching
    conversation is detected.
    Args:
        src_ip: The source IP address.
        target_ip: The destination IP address.
        rule_name: The name of the rule to create.
        transport_type: The type of notification to generate ('email' or 'syslog').
        tti_index: The index of the entry in the transport type table.
        aio_index: The index of the entry in the alert information object table.
        trigger_type: Alert frequency: 'one-shot', 're-trigger-count',
            're-trigger-timed-ms' or 're-trigger-timed-sec'.
        trigger_value: The threshold that must be met before the alert is triggered.
        src_port: The source port(s): a comma-separated list (e.g. "1, 3"),
            a range (e.g. "1-3"), or a combination (e.g. "1, 3-5").
        target_port: The destination port(s), same formats as src_port.
        protocol: The protocol (e.g., TCP) used for the packets.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    src_ip = self._process_ip_address(src_ip)
    src_port = self._process_port_range(src_port)
    target_ip = self._process_ip_address(target_ip)
    target_port = self._process_port_range(target_port)
    protocol = (protocol or 'HOPOPT-255').upper()
    prefix = f'{target_ip} @ {target_port} & {src_ip} @ {src_port} <> {protocol} : '
    alert = self._build_alert_instruction(transport_type, int(tti_index), int(aio_index),
                                          trigger_type, int(trigger_value))
    rule = prefix + alert + ', END'
    sd_list, sia_list = self._parse_rcs(rcs)
    spec = self._generate_rule_forward_spec(rule_name=rule_name, logic_block='5-tuple', rule=rule,
                                            named_rule_action='add', sd_list=sd_list, sia_list=sia_list)
    return self._do_request(spec, rule_name, rule, rcs)
def mute_alert_conversation(self, rule_name: str, rcs: str = None) -> dict:
    """ Delete a named rule from the 5-tuple logic block, silencing the
    alerts it produced.
    Args:
        rule_name: The name of the rule to delete.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    return self._remove_rule(rule_name, '5-tuple', None, rcs)
def block_dest_port(self, port_range: str, rule_name: str, rcs: str = None) -> dict:
    """ Creates a rule that blocks packets destined for one or more specific ports.
    Args:
        port_range: The destination port(s). This accepts a comma-separated list (e.g., "1, 3"),
            a range (e.g., "1-3"), or a combination (e.g., "1, 3-5").
        rule_name: The name of the rule to create.
        rcs: Remediation Configuration String. Defaults to None, consistent
            with every other rule-management method in this class (previously
            this was the only one requiring it).
    Returns: Dictionary context data contains useful response information.
    """
    rule = f'{self._process_port_range(port_range)}: DROP, END'
    sd_list, sia_list = self._parse_rcs(rcs)
    data = self._generate_rule_forward_spec(rule_name=rule_name, logic_block='dst-port', rule=rule,
                                            named_rule_action='add', sd_list=sd_list, sia_list=sia_list)
    return self._do_request(data, rule_name, rule, rcs)
def unblock_dest_port(self, rule_name: str, rcs: str = None) -> dict:
    """ Delete a named rule from the destination port logic block, letting
    the previously blocked traffic flow again.
    Args:
        rule_name: The name of the rule to delete.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    return self._remove_rule(rule_name, 'dst-port', None, rcs)
def record_dest_port(self, port_range: str, vlan_id: str, rule_name: str, sia_interface: str = None,
                     transport_type: str = None, tti_index: str = None, aio_index: str = None,
                     trigger_type: str = None, trigger_value: str = None, rcs: str = None) -> dict:
    """ Add a rule that redirects traffic destined for one or more ports to
    the Packet Recorder (packets tagged with the given VLAN ID) and
    optionally generates an alert.
    Args:
        port_range: The destination port(s): a comma-separated list (e.g. "1, 3"),
            a range (e.g. "1-3"), or a combination (e.g. "1, 3-5").
        vlan_id: The VLAN ID your network switch uses to forward packets to the Packet Recorder.
        rule_name: The name of the rule to create.
        sia_interface: SIA forwarding interface letter; interface A is used when omitted.
        transport_type: The type of notification to generate ('email' or 'syslog').
        tti_index: The index of the entry in the transport type table.
        aio_index: The index of the entry in the alert information object table.
        trigger_type: Alert frequency: 'one-shot', 're-trigger-count',
            're-trigger-timed-ms' or 're-trigger-timed-sec'.
        trigger_value: The threshold that must be met before the alert is triggered.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    Raises:
        ParameterError: If transport_type is given without all of tti_index,
            aio_index, trigger_type and trigger_value.
    """
    # SIA exposes two data ports labelled A and B; default to A.
    sia_interface = 'B' if sia_interface == 'B' else 'A'
    rule = f'{self._process_port_range(port_range)}: REDIRECT-VLAN {sia_interface} {vlan_id}'
    if transport_type is not None:
        if any(p is None for p in (tti_index, aio_index, trigger_type, trigger_value)):
            raise ParameterError(f'Please provide tti_index, aio_index, trigger_type and trigger_value '
                                 f'to use {transport_type} to send an alert.')
        rule += ', ' + self._build_alert_instruction(transport_type, int(tti_index), int(aio_index),
                                                     trigger_type, int(trigger_value))
    rule += ', END'
    sd_list, sia_list = self._parse_rcs(rcs)
    spec = self._generate_rule_forward_spec(rule_name=rule_name, logic_block='dst-port', rule=rule,
                                            named_rule_action='add', sd_list=sd_list, sia_list=sia_list)
    return self._do_request(spec, rule_name, rule, rcs)
def stop_recording_dest_port(self, rule_name: str, rcs: str = None) -> dict:
    """ Delete a named rule from the destination port logic block, stopping
    the redirection of traffic to the Packet Recorder.
    Args:
        rule_name: The name of the rule to delete.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    return self._remove_rule(rule_name, 'dst-port', None, rcs)
def alert_dest_port(self, port_range: str, rule_name: str, transport_type: str, tti_index: str, aio_index: str,
                    trigger_type: str, trigger_value: str, rcs: str = None) -> dict:
    """ Create a rule that raises an alert whenever traffic destined for one
    or more ports is detected.
    Args:
        port_range: The destination port(s): a comma-separated list (e.g. "1, 3"),
            a range (e.g. "1-3"), or a combination (e.g. "1, 3-5").
        rule_name: The name of the rule to create.
        transport_type: The type of notification to generate ('email' or 'syslog').
        tti_index: The index of the entry in the transport type table.
        aio_index: The index of the entry in the alert information object table.
        trigger_type: Alert frequency: 'one-shot', 're-trigger-count',
            're-trigger-timed-ms' or 're-trigger-timed-sec'.
        trigger_value: The threshold that must be met before the alert is triggered.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    prefix = f'{self._process_port_range(port_range)}: '
    alert = self._build_alert_instruction(transport_type, int(tti_index), int(aio_index), trigger_type,
                                          int(trigger_value))
    rule = prefix + alert + ', END'
    sd_list, sia_list = self._parse_rcs(rcs)
    spec = self._generate_rule_forward_spec(rule_name=rule_name, logic_block='dst-port', rule=rule,
                                            named_rule_action='add', sd_list=sd_list, sia_list=sia_list)
    return self._do_request(spec, rule_name, rule, rcs)
def mute_alert_dest_port(self, rule_name: str, rcs: str = None) -> dict:
    """ Delete a named rule from the destination port logic block, silencing
    the alerts it produced.
    Args:
        rule_name: The name of the rule to delete.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    return self._remove_rule(rule_name, 'dst-port', None, rcs)
def block_src_port(self, port_range: str, rule_name: str, rcs: str = None) -> dict:
    """ Add a rule that drops packets originating from one or more specific ports.
    Args:
        port_range: The source port(s): a comma-separated list (e.g. "1, 3"),
            a range (e.g. "1-3"), or a combination (e.g. "1, 3-5").
        rule_name: The name of the rule to create.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    rule = f'{self._process_port_range(port_range)}: DROP, END'
    sd_list, sia_list = self._parse_rcs(rcs)
    spec = self._generate_rule_forward_spec(rule_name=rule_name, logic_block='src-port', rule=rule,
                                            named_rule_action='add', sd_list=sd_list, sia_list=sia_list)
    return self._do_request(spec, rule_name, rule, rcs)
def unblock_src_port(self, rule_name: str, rcs: str = None) -> dict:
    """ Delete a named rule from the source port logic block, letting the
    previously blocked traffic flow again.
    Args:
        rule_name: The name of the rule to delete.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    return self._remove_rule(rule_name, 'src-port', None, rcs)
def record_src_port(self, port_range: str, vlan_id: str, rule_name: str, sia_interface: str = None,
                    transport_type: str = None, tti_index: str = None, aio_index: str = None,
                    trigger_type: str = None, trigger_value: str = None, rcs: str = None) -> dict:
    """ Add a rule that redirects traffic originating from one or more ports
    to the Packet Recorder (packets tagged with the given VLAN ID) and
    optionally generates an alert.
    Args:
        port_range: The source port(s): a comma-separated list (e.g. "1, 3"),
            a range (e.g. "1-3"), or a combination (e.g. "1, 3-5").
        vlan_id: The VLAN ID your network switch uses to forward packets to the Packet Recorder.
        rule_name: The name of the rule to create.
        sia_interface: SIA forwarding interface letter; interface A is used when omitted.
        transport_type: The type of notification to generate ('email' or 'syslog').
        tti_index: The index of the entry in the transport type table.
        aio_index: The index of the entry in the alert information object table.
        trigger_type: Alert frequency: 'one-shot', 're-trigger-count',
            're-trigger-timed-ms' or 're-trigger-timed-sec'.
        trigger_value: The threshold that must be met before the alert is triggered.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    Raises:
        ParameterError: If transport_type is given without all of tti_index,
            aio_index, trigger_type and trigger_value.
    """
    # SIA exposes two data ports labelled A and B; default to A.
    sia_interface = 'B' if sia_interface == 'B' else 'A'
    rule = f'{self._process_port_range(port_range)}: REDIRECT-VLAN {sia_interface} {vlan_id}'
    if transport_type is not None:
        if any(p is None for p in (tti_index, aio_index, trigger_type, trigger_value)):
            raise ParameterError(f'Please provide tti_index, aio_index, trigger_type and trigger_value '
                                 f'to use {transport_type} to send an alert.')
        rule += ', ' + self._build_alert_instruction(transport_type, int(tti_index), int(aio_index),
                                                     trigger_type, int(trigger_value))
    rule += ', END'
    sd_list, sia_list = self._parse_rcs(rcs)
    spec = self._generate_rule_forward_spec(rule_name=rule_name, logic_block='src-port', rule=rule,
                                            named_rule_action='add', sd_list=sd_list, sia_list=sia_list)
    return self._do_request(spec, rule_name, rule, rcs)
def stop_recording_src_port(self, rule_name: str, rcs: str = None) -> dict:
    """ Removes a named rule from the source port logic block.
    This stops redirecting traffic to the Packet Recorder.
    Args:
        rule_name: The name of the rule to delete.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    return self._remove_rule(rule_name=rule_name, logic_block='src-port', instance_id=None, rcs=rcs)
def alert_src_port(self, port_range: str, rule_name: str, transport_type: str, tti_index: str, aio_index: str,
                   trigger_type: str, trigger_value: str, rcs: str = None) -> dict:
    """ Create a rule that raises an alert whenever traffic originating from
    one or more ports is detected.
    Args:
        port_range: The source port(s): a comma-separated list (e.g. "1, 3"),
            a range (e.g. "1-3"), or a combination (e.g. "1, 3-5").
        rule_name: The name of the rule to create.
        transport_type: The type of notification to generate ('email' or 'syslog').
        tti_index: The index of the entry in the transport type table.
        aio_index: The index of the entry in the alert information object table.
        trigger_type: Alert frequency: 'one-shot', 're-trigger-count',
            're-trigger-timed-ms' or 're-trigger-timed-sec'.
        trigger_value: The threshold that must be met before the alert is triggered.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    prefix = f'{self._process_port_range(port_range)}: '
    alert = self._build_alert_instruction(transport_type, int(tti_index), int(aio_index),
                                          trigger_type, int(trigger_value))
    rule = prefix + alert + ', END'
    sd_list, sia_list = self._parse_rcs(rcs)
    spec = self._generate_rule_forward_spec(rule_name=rule_name, logic_block='src-port', rule=rule,
                                            named_rule_action='add', sd_list=sd_list, sia_list=sia_list)
    return self._do_request(spec, rule_name, rule, rcs)
def mute_alert_src_port(self, rule_name: str, rcs: str = None) -> dict:
    """ Delete a named rule from the source port logic block, silencing the
    alerts it produced.
    Args:
        rule_name: The name of the rule to delete.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    return self._remove_rule(rule_name, 'src-port', None, rcs)
def block_dest_subnet(self, target_ip: str, rule_name: str, rcs: str = None) -> dict:
    """ Add a rule that drops packets destined for a specific IP address or
    range of IP addresses.
    Args:
        target_ip: Destination address(es) in the format <IP_address>/<mask>;
            a missing mask defaults to 32.
        rule_name: The name of the rule to create.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    rule = f'{self._process_ip_address(target_ip)}: DROP, END'
    sd_list, sia_list = self._parse_rcs(rcs)
    spec = self._generate_rule_forward_spec(rule_name=rule_name, logic_block='dst-subnet', rule=rule,
                                            named_rule_action='add', sd_list=sd_list, sia_list=sia_list)
    return self._do_request(spec, rule_name, rule, rcs)
def unblock_dest_subnet(self, rule_name: str, rcs: str = None) -> dict:
    """ Delete a named rule from the destination subnet logic block, letting
    the previously blocked traffic flow again.
    Args:
        rule_name: The name of the rule to delete.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    return self._remove_rule(rule_name, 'dst-subnet', None, rcs)
def record_dest_subnet(self, target_ip: str, vlan_id: str, rule_name: str, sia_interface: str = None,
                       transport_type: str = None, tti_index: str = None, aio_index: str = None,
                       trigger_type: str = None, trigger_value: str = None, rcs: str = None) -> dict:
    """ Add a rule that redirects traffic destined for a specific IP address
    or address range to the Packet Recorder (packets tagged with the given
    VLAN ID) and optionally generates an alert.
    Args:
        target_ip: Destination address(es) in the format <IP_address>/<mask>;
            a missing mask defaults to 32.
        vlan_id: The VLAN ID your network switch uses to forward packets to the Packet Recorder.
        rule_name: The name of the rule to create.
        sia_interface: SIA forwarding interface letter; interface A is used when omitted.
        transport_type: The type of notification to generate ('email' or 'syslog').
        tti_index: The index of the entry in the transport type table.
        aio_index: The index of the entry in the alert information object table.
        trigger_type: Alert frequency: 'one-shot', 're-trigger-count',
            're-trigger-timed-ms' or 're-trigger-timed-sec'.
        trigger_value: The threshold that must be met before the alert is triggered.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    Raises:
        ParameterError: If transport_type is given without all of tti_index,
            aio_index, trigger_type and trigger_value.
    """
    # SIA exposes two data ports labelled A and B; default to A.
    sia_interface = 'B' if sia_interface == 'B' else 'A'
    rule = f'{self._process_ip_address(target_ip)}: REDIRECT-VLAN {sia_interface} {vlan_id}'
    if transport_type is not None:
        if any(p is None for p in (tti_index, aio_index, trigger_type, trigger_value)):
            raise ParameterError(f'Please provide tti_index, aio_index, trigger_type and trigger_value '
                                 f'to use {transport_type} to send an alert.')
        rule += ', ' + self._build_alert_instruction(transport_type, int(tti_index), int(aio_index),
                                                     trigger_type, int(trigger_value))
    rule += ', END'
    sd_list, sia_list = self._parse_rcs(rcs)
    spec = self._generate_rule_forward_spec(rule_name=rule_name, logic_block='dst-subnet', rule=rule,
                                            named_rule_action='add', sd_list=sd_list, sia_list=sia_list)
    return self._do_request(spec, rule_name, rule, rcs)
def stop_recording_dest_subnet(self, rule_name: str, rcs: str = None) -> dict:
    """ Delete a named rule from the destination subnet logic block,
    stopping the redirection of traffic to the Packet Recorder.
    Args:
        rule_name: The name of the rule to delete.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    return self._remove_rule(rule_name, 'dst-subnet', None, rcs)
def alert_dest_subnet(self, target_ip: str, rule_name: str, transport_type: str, tti_index: str, aio_index: str,
                      trigger_type: str, trigger_value: str, rcs: str = None) -> dict:
    """ Create a rule that raises an alert whenever traffic destined for a
    specific IP address or range of IP addresses is detected.
    Args:
        target_ip: Destination address(es) in the format <IP_address>/<mask>;
            a missing mask defaults to 32.
        rule_name: The name of the rule to create.
        transport_type: The type of notification to generate ('email' or 'syslog').
        tti_index: The index of the entry in the transport type table.
        aio_index: The index of the entry in the alert information object table.
        trigger_type: Alert frequency: 'one-shot', 're-trigger-count',
            're-trigger-timed-ms' or 're-trigger-timed-sec'.
        trigger_value: The threshold that must be met before the alert is triggered.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    prefix = f'{self._process_ip_address(target_ip)}: '
    alert = self._build_alert_instruction(transport_type, int(tti_index), int(aio_index), trigger_type,
                                          int(trigger_value))
    rule = prefix + alert + ', END'
    sd_list, sia_list = self._parse_rcs(rcs)
    spec = self._generate_rule_forward_spec(rule_name=rule_name, logic_block='dst-subnet', rule=rule,
                                            named_rule_action='add', sd_list=sd_list, sia_list=sia_list)
    return self._do_request(spec, rule_name, rule, rcs)
def mute_alert_dest_subnet(self, rule_name: str, rcs: str = None) -> dict:
    """ Delete a named rule from the destination subnet logic block,
    silencing the alerts it produced.
    Args:
        rule_name: The name of the rule to delete.
        rcs: Remediation Configuration String.
    Returns: Dictionary context data contains useful response information.
    """
    return self._remove_rule(rule_name, 'dst-subnet', None, rcs)
def block_src_subnet(self, src_ip: str, rule_name: str, rcs: str = None) -> dict:
""" Adds a rule that blocks packets originating from a specific IP address or range of IP addresses.
Args:
src_ip: The IP address and mask of the source IP address(es), in the format <IP_address>/<mask>.
If the mask is omitted, a value of 32 is used.
rule_name: The name of the rule to create.
rcs: Remediation Configuration String.
Returns: Dictionary context data contains useful response information.
"""
rule = f'{self._process_ip_address(src_ip)}: DROP, END'
sd_list, sia_list = self._parse_rcs(rcs)
data = self._generate_rule_forward_spec(rule_name=rule_name, logic_block='src-subnet', rule=rule,
named_rule_action='add', sd_list=sd_list, sia_list=sia_list)
return self._do_request(data, rule_name, rule, rcs)
def unblock_src_subnet(self, rule_name: str, rcs: str = None) -> dict:
""" Removes a named rule from the source subnet logic block.
This allows the previously blocked traffic to resume.
Args:
rule_name: The name of the rule to delete.
rcs: Remediation Configuration String.
Returns: Dictionary context data contains useful response information.
"""
return self._remove_rule(rule_name=rule_name, logic_block='src-subnet', instance_id=None, rcs=rcs)
def record_src_subnet(self, src_ip: str, vlan_id: str, rule_name: str, sia_interface: str = None,
transport_type: str = None, tti_index: str = None, aio_index: str = None,
trigger_type: str = None, trigger_value: str = None, rcs: str = None) -> dict:
""" Creates a rule that redirects traffic originating from one or more specific IP addresses
to the Packet Recorder and generates an alert.
Packets are tagged with the VID specified in the command.
Args:
src_ip: The IP address and mask of the source IP address(es), in the format <IP_address>/<mask>.
If the mask is omitted, a value of 32 is used.
vlan_id: The VLAN ID your network switch uses to forward packets to the Packet Recorder.
rule_name: The name of the rule to create.
sia_interface: The letter of the interface on the SIA used for forwarding packets.
If omitted, interface A is used.
transport_type: The type of notification to generate. Valid values are: email, syslog.
tti_index: The index of the entry in the transport type table.
aio_index: The index of the entry in the alert information object table.
trigger_type: The frequency of the alert. Valid values are 'one-shot', 're-trigger-count',
're-trigger-timed-ms' or 're-trigger-timed-sec'.
trigger_value: The threshold that must be met before the alert is triggered.
rcs: Remediation Configuration String.
Returns: Dictionary context data contains useful response information.
Raises:
ParameterError: Raised when transport_type is used but one or more parameters in tti_index,
aio_index, trigger_type and trigger_value are missing.
"""
if sia_interface is None or sia_interface != 'B':
sia_interface = 'A' # SIA use labels A and B to select its interface (data port), default to A.
rule = f'{self._process_ip_address(src_ip)}: REDIRECT-VLAN {sia_interface} {vlan_id}'
if transport_type is not None:
if tti_index is None or aio_index is None or trigger_type is None or trigger_value is None:
raise ParameterError(f'Please provide tti_index, aio_index, trigger_type and trigger_value '
f'to use {transport_type} to send an alert.')
rule += ', '
rule += self._build_alert_instruction(transport_type, int(tti_index), int(aio_index),
trigger_type, int(trigger_value))
rule += ', END'
sd_list, sia_list = self._parse_rcs(rcs)
data = self._generate_rule_forward_spec(rule_name=rule_name, logic_block='src-subnet', rule=rule,
named_rule_action='add', sd_list=sd_list, sia_list=sia_list)
return self._do_request(data, rule_name, rule, rcs)
def stop_recording_src_subnet(self, rule_name: str, rcs: str = None) -> dict:
""" Removes a named rule from the source subnet logic block.
This stops redirecting traffic to the Packet Recorder.
Args:
rule_name: The name of the rule to delete.
rcs: Remediation Configuration String.
Returns: Dictionary context data contains useful response information.
"""
return self._remove_rule(rule_name=rule_name, logic_block='src-subnet', instance_id=None, rcs=rcs)
def alert_src_subnet(self, src_ip: str, rule_name: str, transport_type: str, tti_index: str, aio_index: str,
trigger_type: str, trigger_value: str, rcs: str = None) -> dict:
""" Adds a rule that generates an alert when traffic originating from a specific IP address
or range of IP addresses is detected.
Args:
src_ip: The IP address and mask of the source IP address(es), in the format <IP_address>/<mask>.
If the mask is omitted, a value of 32 is used.
rule_name: The name of the rule to create.
transport_type: The type of notification to generate. Valid values are: email, syslog.
tti_index: The index of the entry in the transport type table.
aio_index: The index of the entry in the alert information object table.
trigger_type: The frequency of the alert. Valid values are 'one-shot', 're-trigger-count',
're-trigger-timed-ms' or 're-trigger-timed-sec'.
trigger_value: The threshold that must be met before the alert is triggered.
rcs: Remediation Configuration String.
Returns: Dictionary context data contains useful response information.
"""
rule = f'{self._process_ip_address(src_ip)}: '
rule += self._build_alert_instruction(transport_type, int(tti_index), int(aio_index),
trigger_type, int(trigger_value)) + ', END'
sd_list, sia_list = self._parse_rcs(rcs)
data = self._generate_rule_forward_spec(rule_name=rule_name, logic_block='src-subnet', rule=rule,
named_rule_action='add', sd_list=sd_list, sia_list=sia_list)
return self._do_request(data, rule_name, rule, rcs)
def mute_alert_src_subnet(self, rule_name: str, rcs: str = None) -> dict:
""" Removes a named rule from the source subnet logic block, disabling the alerts.
Args:
rule_name: The name of the rule to delete.
rcs: Remediation Configuration String.
Returns: Dictionary context data contains useful response information.
"""
return self._remove_rule(rule_name=rule_name, logic_block='src-subnet', instance_id=None, rcs=rcs)
''' HELPER FUNCTIONS '''


def func_call(instance: ARIA, func_name: str, command_name: str, demisto_arguments: list, args: dict):
    """ Dispatch a demisto command to the matching ARIA method.

    Args:
        instance: An ARIA instance.
        func_name: Name of the method on the ARIA class to invoke.
        command_name: Related demisto command name (used as the markdown table title).
        demisto_arguments: Argument names in the positional order the ARIA method expects.
        args: Input of demisto arguments dict.

    Returns:
        Tuple of (human-readable markdown output, entry context dict).
    """
    # Pull values from the demisto args in the order the ARIA method expects;
    # absent arguments pass through as None.
    arguments_value = [args.get(arg) for arg in demisto_arguments]
    context_entry = getattr(instance, func_name)(*arguments_value)  # context dict built by the ARIA method
    table_header = ['Rule', 'Status', 'Endpoints']
    # e.g. 'block_src_subnet' -> 'BlockSrcSubnet' for the context path.
    context_name = func_name.title().replace('_', '')
    ec = {
        f'Aria.{context_name}(val.name && val.name == obj.name)': context_entry
    }
    readable_output = tableToMarkdown(command_name, context_entry, table_header)
    return readable_output, ec
''' COMMAND FUNCTION '''


def block_conversation_command(instance, args):
    """aria-block-conversation -> ARIA.block_conversation."""
    return func_call(instance, 'block_conversation', 'aria-block-conversation',
                     ['src_ip', 'target_ip', 'rule_name', 'src_port', 'target_port', 'protocol', 'rcs'], args)


def unblock_conversation_command(instance, args):
    """aria-unblock-conversation -> ARIA.unblock_conversation."""
    return func_call(instance, 'unblock_conversation', 'aria-unblock-conversation', ['rule_name', 'rcs'], args)


def record_conversation_command(instance, args):
    """aria-record-conversation -> ARIA.record_conversation."""
    return func_call(instance, 'record_conversation', 'aria-record-conversation',
                     ['src_ip', 'target_ip', 'vlan_id', 'rule_name', 'src_port', 'target_port', 'protocol',
                      'sia_interface', 'transport_type', 'tti_index', 'aio_index', 'trigger_type', 'trigger_value',
                      'rcs'], args)


def stop_recording_conversation_command(instance, args):
    """aria-stop-recording-conversation -> ARIA.stop_recording_conversation."""
    return func_call(instance, 'stop_recording_conversation', 'aria-stop-recording-conversation',
                     ['rule_name', 'rcs'], args)


def alert_conversation_command(instance, args):
    """aria-alert-conversation -> ARIA.alert_conversation."""
    return func_call(instance, 'alert_conversation', 'aria-alert-conversation',
                     ['src_ip', 'target_ip', 'rule_name', 'transport_type', 'tti_index', 'aio_index', 'trigger_type',
                      'trigger_value', 'src_port', 'target_port', 'protocol', 'rcs'], args)


def mute_alert_conversation_command(instance, args):
    """aria-mute-alert-conversation -> ARIA.mute_alert_conversation."""
    return func_call(instance, 'mute_alert_conversation', 'aria-mute-alert-conversation', ['rule_name', 'rcs'], args)
def block_dest_port_command(instance, args):
    """aria-block-dest-port -> ARIA.block_dest_port."""
    return func_call(instance, 'block_dest_port', 'aria-block-dest-port', ['port_range', 'rule_name', 'rcs'], args)


def unblock_dest_port_command(instance, args):
    """aria-unblock-dest-port -> ARIA.unblock_dest_port."""
    return func_call(instance, 'unblock_dest_port', 'aria-unblock-dest-port', ['rule_name', 'rcs'], args)


def record_dest_port_command(instance, args):
    """aria-record-dest-port -> ARIA.record_dest_port."""
    return func_call(instance, 'record_dest_port', 'aria-record-dest-port',
                     ['port_range', 'vlan_id', 'rule_name', 'sia_interface', 'transport_type', 'tti_index',
                      'aio_index', 'trigger_type', 'trigger_value', 'rcs'], args)


def stop_recording_dest_port_command(instance, args):
    """aria-stop-recording-dest-port -> ARIA.stop_recording_dest_port."""
    return func_call(instance, 'stop_recording_dest_port', 'aria-stop-recording-dest-port', ['rule_name', 'rcs'], args)


def alert_dest_port_command(instance, args):
    """aria-alert-dest-port -> ARIA.alert_dest_port."""
    return func_call(instance, 'alert_dest_port', 'aria-alert-dest-port',
                     ['port_range', 'rule_name', 'transport_type', 'tti_index', 'aio_index', 'trigger_type',
                      'trigger_value', 'rcs'], args)


def mute_alert_dest_port_command(instance, args):
    """aria-mute-alert-dest-port -> ARIA.mute_alert_dest_port."""
    return func_call(instance, 'mute_alert_dest_port', 'aria-mute-alert-dest-port', ['rule_name', 'rcs'], args)


def block_src_port_command(instance, args):
    """aria-block-src-port -> ARIA.block_src_port."""
    return func_call(instance, 'block_src_port', 'aria-block-src-port', ['port_range', 'rule_name', 'rcs'], args)


def unblock_src_port_command(instance, args):
    """aria-unblock-src-port -> ARIA.unblock_src_port."""
    return func_call(instance, 'unblock_src_port', 'aria-unblock-src-port', ['rule_name', 'rcs'], args)


def record_src_port_command(instance, args):
    """aria-record-src-port -> ARIA.record_src_port."""
    return func_call(instance, 'record_src_port', 'aria-record-src-port',
                     ['port_range', 'vlan_id', 'rule_name', 'sia_interface', 'transport_type', 'tti_index',
                      'aio_index', 'trigger_type', 'trigger_value', 'rcs'], args)


def stop_recording_src_port_command(instance, args):
    """aria-stop-recording-src-port -> ARIA.stop_recording_src_port."""
    return func_call(instance, 'stop_recording_src_port', 'aria-stop-recording-src-port', ['rule_name', 'rcs'], args)


def alert_src_port_command(instance, args):
    """aria-alert-src-port -> ARIA.alert_src_port."""
    return func_call(instance, 'alert_src_port', 'aria-alert-src-port',
                     ['port_range', 'rule_name', 'transport_type', 'tti_index', 'aio_index', 'trigger_type',
                      'trigger_value', 'rcs'], args)


def mute_alert_src_port_command(instance, args):
    """aria-mute-alert-src-port -> ARIA.mute_alert_src_port."""
    return func_call(instance, 'mute_alert_src_port', 'aria-mute-alert-src-port', ['rule_name', 'rcs'], args)
def block_dest_subnet_command(instance, args):
    """aria-block-dest-subnet -> ARIA.block_dest_subnet."""
    return func_call(instance, 'block_dest_subnet', 'aria-block-dest-subnet', ['target_ip', 'rule_name', 'rcs'], args)


def unblock_dest_subnet_command(instance, args):
    """aria-unblock-dest-subnet -> ARIA.unblock_dest_subnet."""
    return func_call(instance, 'unblock_dest_subnet', 'aria-unblock-dest-subnet', ['rule_name', 'rcs'], args)


def record_dest_subnet_command(instance, args):
    """aria-record-dest-subnet -> ARIA.record_dest_subnet."""
    return func_call(instance, 'record_dest_subnet', 'aria-record-dest-subnet',
                     ['target_ip', 'vlan_id', 'rule_name', 'sia_interface', 'transport_type', 'tti_index',
                      'aio_index', 'trigger_type', 'trigger_value', 'rcs'], args)


def stop_recording_dest_subnet_command(instance, args):
    """aria-stop-recording-dest-subnet -> ARIA.stop_recording_dest_subnet."""
    return func_call(instance, 'stop_recording_dest_subnet', 'aria-stop-recording-dest-subnet',
                     ['rule_name', 'rcs'], args)


def alert_dest_subnet_command(instance, args):
    """aria-alert-dest-subnet -> ARIA.alert_dest_subnet."""
    return func_call(instance, 'alert_dest_subnet', 'aria-alert-dest-subnet',
                     ['target_ip', 'rule_name', 'transport_type', 'tti_index', 'aio_index', 'trigger_type',
                      'trigger_value', 'rcs'], args)


def mute_alert_dest_subnet_command(instance, args):
    """aria-mute-alert-dest-subnet -> ARIA.mute_alert_dest_subnet."""
    return func_call(instance, 'mute_alert_dest_subnet', 'aria-mute-alert-dest-subnet', ['rule_name', 'rcs'], args)


def block_src_subnet_command(instance, args):
    """aria-block-src-subnet -> ARIA.block_src_subnet."""
    return func_call(instance, 'block_src_subnet', 'aria-block-src-subnet', ['src_ip', 'rule_name', 'rcs'], args)


def unblock_src_subnet_command(instance, args):
    """aria-unblock-src-subnet -> ARIA.unblock_src_subnet."""
    return func_call(instance, 'unblock_src_subnet', 'aria-unblock-src-subnet', ['rule_name', 'rcs'], args)


def record_src_subnet_command(instance, args):
    """aria-record-src-subnet -> ARIA.record_src_subnet."""
    return func_call(instance, 'record_src_subnet', 'aria-record-src-subnet',
                     ['src_ip', 'vlan_id', 'rule_name', 'sia_interface', 'transport_type', 'tti_index', 'aio_index',
                      'trigger_type', 'trigger_value', 'rcs'], args)


def stop_recording_src_subnet_command(instance, args):
    """aria-stop-recording-src-subnet -> ARIA.stop_recording_src_subnet."""
    return func_call(instance, 'stop_recording_src_subnet', 'aria-stop-recording-src-subnet',
                     ['rule_name', 'rcs'], args)


def alert_src_subnet_command(instance, args):
    """aria-alert-src-subnet -> ARIA.alert_src_subnet."""
    return func_call(instance, 'alert_src_subnet', 'aria-alert-src-subnet',
                     ['src_ip', 'rule_name', 'transport_type', 'tti_index', 'aio_index', 'trigger_type',
                      'trigger_value', 'rcs'], args)


def mute_alert_src_subnet_command(instance, args):
    """aria-mute-alert-src-subnet -> ARIA.mute_alert_src_subnet."""
    return func_call(instance, 'mute_alert_src_subnet', 'aria-mute-alert-src-subnet', ['rule_name', 'rcs'], args)
def main():
    """Entry point: build the ARIA client and route the demisto command."""
    # disable insecure warnings
    requests.packages.urllib3.disable_warnings()

    # IP address or FQDN of your SDSo node
    SDSO = demisto.params().get('sdso')
    handle_proxy()
    INSECURE = demisto.params().get('insecure', False)
    verify_cert = not INSECURE
    sdso_url = f'{SDSO}/Aria/SS/1.0.0/PacketIntelligence/server'
    aria = ARIA(sdso_url, verify_cert)

    # Fix: renamed the misspelled local 'commnds_dict'.
    commands_dict = {
        'aria-block-conversation': block_conversation_command,
        'aria-unblock-conversation': unblock_conversation_command,
        'aria-record-conversation': record_conversation_command,
        'aria-stop-recording-conversation': stop_recording_conversation_command,
        'aria-alert-conversation': alert_conversation_command,
        'aria-mute-alert-conversation': mute_alert_conversation_command,
        'aria-block-dest-port': block_dest_port_command,
        'aria-unblock-dest-port': unblock_dest_port_command,
        'aria-record-dest-port': record_dest_port_command,
        'aria-stop-recording-dest-port': stop_recording_dest_port_command,
        'aria-alert-dest-port': alert_dest_port_command,
        'aria-mute-alert-dest-port': mute_alert_dest_port_command,
        'aria-block-src-port': block_src_port_command,
        'aria-unblock-src-port': unblock_src_port_command,
        'aria-record-src-port': record_src_port_command,
        'aria-stop-recording-src-port': stop_recording_src_port_command,
        'aria-alert-src-port': alert_src_port_command,
        'aria-mute-alert-src-port': mute_alert_src_port_command,
        'aria-block-dest-subnet': block_dest_subnet_command,
        'aria-unblock-dest-subnet': unblock_dest_subnet_command,
        'aria-record-dest-subnet': record_dest_subnet_command,
        'aria-stop-recording-dest-subnet': stop_recording_dest_subnet_command,
        'aria-alert-dest-subnet': alert_dest_subnet_command,
        'aria-mute-alert-dest-subnet': mute_alert_dest_subnet_command,
        'aria-block-src-subnet': block_src_subnet_command,
        'aria-unblock-src-subnet': unblock_src_subnet_command,
        'aria-record-src-subnet': record_src_subnet_command,
        'aria-stop-recording-src-subnet': stop_recording_src_subnet_command,
        'aria-alert-src-subnet': alert_src_subnet_command,
        'aria-mute-alert-src-subnet': mute_alert_src_subnet_command
    }

    command = demisto.command()
    LOG('ARIA: command is %s' % (command,))
    if command == 'test-module':  # reuse 'command' instead of re-calling demisto.command()
        # Test if the ARIA PI Reaper is ready
        url = sdso_url + '/endPoint'
        try:
            res = requests.get(url, timeout=20, verify=verify_cert)
            size = len(json.loads(res.text))
            if res.ok and size != 0:
                demisto.results('ok')
            else:
                return_error('Fail to Connect to SDSo or no PacketIntelligence Service!')
        except (json.JSONDecodeError, requests.exceptions.RequestException):
            return_error('Fail to Connect to SDSo or no PacketIntelligence Service!')
    else:
        cmd_func = commands_dict.get(command)
        if cmd_func is None:
            raise NotImplementedError(f'Command "{command}" is not implemented.')
        # 'raise' exits above, so no 'else' nesting is needed here.
        readable_output, ec = cmd_func(aria, demisto.args())
        context_entry = list(ec.values())[0]
        LOG(json.dumps(ec))
        if context_entry['Status']['command_state'] == 'Success':
            return_outputs(readable_output, ec)
        elif context_entry['Status']['command_state'] == 'Failure':
            LOG.print_log()
            return_error(f'One or more endpoint(s) fail to create/remove rules. Please see {context_entry}')
        else:
            return_error(f'Endpoint matching RCS not found! Please see {context_entry}')
# python2 uses __builtin__ python3 uses builtins
if __name__ in ('__builtin__', 'builtins'):
    main()
| mit | c2a79ba65c4a8027a295e9f74464b405 | 40.972763 | 125 | 0.566309 | 3.806132 | false | false | false | false |
demisto/content | Packs/CommonScripts/Scripts/ProvidesCommand/ProvidesCommand_test.py | 2 | 1741 | import demistomock as demisto
import json
def executeCommand(name, args=None):
    """Mock of demisto.executeCommand that serves canned JSON fixtures.

    Only the two API calls used by ProvidesCommand are supported; anything
    else raises ValueError so an unexpected call fails the test loudly.
    """
    if name == 'demisto-api-get' and args and 'uri' in args and args['uri'] == "/settings/integration-commands":
        file_name = 'TestData/integration_commands.json'
    elif name == 'demisto-api-post' and args and 'uri' in args and args['uri'] == "/settings/integration/search":
        file_name = 'TestData/integration_search.json'
    else:
        raise ValueError('Unimplemented command called: {}'.format(name))
    # json.load parses straight from the file handle - no need to slurp the
    # whole text first and call json.loads on it.
    with open(file_name, 'r') as f:
        return json.load(f)
def test_main(mocker):
    """Exercise ProvidesCommand.main across the enabled-filter variants."""
    from ProvidesCommand import main
    mocker.patch.object(demisto, 'executeCommand', side_effect=executeCommand)

    cases = [
        ({'command': 'send-mail'},
         'EWS Mail Sender,Gmail,Mail Sender (Deprecated),Mail Sender (New)'),
        ({'command': 'send-mail', 'enabled': 'true'},
         'Mail Sender (New)'),
        ({'command': 'send-mail', 'enabled': 'false'},
         'EWS Mail Sender,Gmail,Mail Sender (Deprecated)'),
    ]
    for script_args, expected in cases:
        mocker.patch.object(demisto, 'args', return_value=script_args)
        mocker.patch.object(demisto, 'results')
        main()
        assert demisto.results.call_count == 1
        assert demisto.results.call_args[0][0] == expected
| mit | f3ab3cdea9d19fcaf05aa0367bdd0e10 | 33.137255 | 113 | 0.649627 | 3.524291 | false | true | false | false |
demisto/content | Packs/Cryptocurrency/Scripts/CryptoCurrenciesFormat/CryptoCurrenciesFormat.py | 2 | 1206 | import demistomock as demisto
from hashlib import sha256
from CommonServerPython import * # noqa: E402 lgtm [py/polluting-import]
from typing import Union
DIGITS58 = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'


def decode_base58(address, length) -> bytes:
    """Decode a base58 string into a big-endian byte string of `length` bytes.

    Raises ValueError if a character is not in the base58 alphabet or the
    decoded value does not fit in `length` bytes.
    """
    value = 0
    for symbol in address:
        value = value * 58 + DIGITS58.index(symbol)
    return value.to_bytes(length, 'big')
def verify_is_bitcoin(address) -> bool:
    """Return True if `address` passes the Bitcoin Base58Check checksum test.

    A valid address decodes to 25 bytes whose last four bytes equal the first
    four bytes of a double SHA-256 digest of the previous 21 bytes.
    Source: https://rosettacode.org/wiki/Bitcoin/address_validation#Python

    Fix: the return annotation claimed Union[bytes, bool], but the function
    only ever returns a bool (the `==` comparison result, or False).
    """
    try:
        bitcoin_bytes = decode_base58(address, 25)
        return bitcoin_bytes[-4:] == sha256(sha256(bitcoin_bytes[:-4]).digest()).digest()[:4]
    except Exception:
        # Any decode failure (bad character, value too large) means not Bitcoin.
        return False
def main():
    """Format each input address as 'bitcoin:<address>' when it validates as Bitcoin."""
    addresses = argToList(demisto.args().get('input'))
    formatted = ['bitcoin:' + addr if verify_is_bitcoin(addr) else '' for addr in addresses]
    if formatted:
        demisto.results(formatted)
    else:
        demisto.results('')
# Demisto runs scripts with __name__ set to 'builtin' (py2) / 'builtins' (py3);
# '__main__' covers direct invocation.
if __name__ in ('__main__', 'builtin', 'builtins'):
    main()
| mit | 158d3eb39603925c4de1c392808478ee | 29.15 | 107 | 0.668325 | 3.610778 | false | false | false | false |
demisto/content | Packs/Base/Scripts/WordTokenizerV2/word_tokenizer_test.py | 2 | 3412 | # coding=utf-8
from collections import defaultdict
import demistomock
from CommonServerPython import *
def get_args():
    """Build the mocked demisto argument dict; any unknown key reads as 'yes'."""
    known = {
        'encoding': 'utf8',
        'removeNonEnglishWords': 'no',
        'hashWordWithSeed': "5381",
        'language': 'English',
    }
    return defaultdict(lambda: "yes", known)
# Install the mocked args BEFORE importing the script under test - presumably
# WordTokenizerV2 reads demisto.args() at import time (the patch precedes the
# import deliberately; confirm against WordTokenizerV2 module top level).
demistomock.args = get_args
from WordTokenizerV2 import remove_line_breaks, clean_html, tokenize_text, word_tokenize,\
    remove_multiple_whitespaces, map_indices_to_words  # noqa
def test_remove_line_breaks():
    """Line breaks are removed and the surplus whitespace collapsed to single spaces."""
    text = """this text
with line break
"""
    assert remove_multiple_whitespaces(remove_line_breaks(text)) == "this text with line break"
def test_clean_html():
    """HTML tags are stripped, leaving only the text content."""
    markup = """<html>hello</html>"""
    assert clean_html(markup) == "hello"
def test_tokenize_text():
    """Emails, numbers and URLs are replaced by their pattern placeholders."""
    raw = "test@demisto.com is 100 going to http://google.com bla bla"
    assert tokenize_text(raw)[0] == "EMAIL_PATTERN NUMBER_PATTERN go URL_PATTERN bla bla"
def test_word_tokenize():
    """word_tokenize returns both plain and hashed token text in its entry Contents."""
    raw = "test@demisto.com is 100 going to http://google.com bla bla"
    contents = word_tokenize(raw)['Contents']
    assert contents['tokenizedText'] == "EMAIL_PATTERN NUMBER_PATTERN go URL_PATTERN bla bla"
    assert contents['hashedTokenizedText'] == "2074773130 1320446219 5863419 1810208405 193487380 193487380"
def test_word_tokenize_words_to_tokens():
    """Each original word maps to exactly the tokens produced by tokenizing it alone."""
    # Contractions and a backslash-bearing token exercise the word->tokens bookkeeping.
    words = ["let\'s", "gonna", "ain't", "we'll", "shouldn't", "will\\won't"]
    # Expected mapping: tokens obtained by tokenizing each word in isolation.
    words_to_tokens = {w: tokenize_text(w)[0].split() for w in words}
    tokenized_text, _, original_words_to_tokens, _ = tokenize_text(' '.join(words_to_tokens))
    for w, tokens_list in words_to_tokens.items():
        if w not in original_words_to_tokens:
            continue
        tokens_list_output = original_words_to_tokens[w]
        # Order-insensitive set equality: each side contained in the other.
        assert all(t in tokens_list_output for t in tokens_list) and all(t in tokens_list for t in tokens_list_output)
def test_inclusion():
    """Every character index inside a word maps back to that word."""
    mapping = map_indices_to_words('a aa aaa')
    expected = {0: 'a', 2: 'aa', 3: 'aa', 6: 'aaa', 7: 'aaa', 8: 'aaa'}
    assert mapping == expected
def test_multi_lang_tokenization_spacy(mocker):
    """Smoke-test word_tokenize across several languages.

    An IOError from word_tokenize (presumably a missing language model in the
    test environment - confirm against WordTokenizerV2) skips that language.
    """
    input_sentences = {
        'English': 'Lemon pie is one of the best desserts exist',
        'German': "Zitronenkuchen ist eines der besten Desserts, die es gibt",
        'French': "La tarte au citron est l'un des meilleurs desserts qui existent",
        'Spanish': 'La tarta de limón es uno de los mejores postres que existen',
        'Portuguese': "Torta de limão é uma das melhores sobremesas existentes",
        'Italian': "La torta al limone è uno dei migliori dessert esistenti",
        'Dutch': "Citroentaart is een van de beste desserts die er zijn"
    }
    for language, input in input_sentences.items():
        mocker.patch.object(demisto, 'args', return_value={'language': language})
        try:
            res = word_tokenize(input)
        except IOError:
            # Environment cannot tokenize this language - nothing to assert.
            continue
        tokenized_res = res['Contents']['tokenizedText']
        assert len(tokenized_res) > 0
# NOTE(review): the function name has a typo ('tokeniztion'); renaming would
# change pytest collection output, so it is left as-is here.
def test_multi_lang_tokenization_basic_tokeniztion(mocker):
    """'Other' language with the byLetters method still yields non-empty output."""
    input_chinese_sentence = "你好,世界"
    mocker.patch.object(demisto, 'args', return_value={'language': 'Other', 'tokenizationMethod': 'byLetters'})
    res = word_tokenize(input_chinese_sentence)
    tokenized_res = res['Contents']['tokenizedText']
    assert len(tokenized_res) > 0
| mit | 8baf0378ab46646f6c309f2941e4e5fc | 36.340659 | 118 | 0.665097 | 3.230038 | false | true | false | false |
demisto/content | Packs/ProofpointServerProtection/Integrations/ProofpointProtectionServerV2/ProofpointProtectionServerV2.py | 2 | 19087 | from typing import Any, Dict, Union
import demistomock as demisto # noqa: F401
import urllib3
from CommonServerPython import * # noqa: F401
from dateparser import parse
from requests import Response
urllib3.disable_warnings()  # suppress urllib3 warnings (e.g. InsecureRequestWarning when cert verification is off)
class Client(BaseClient):
    """HTTP client for the Proofpoint Protection Server REST API.

    Each method is a thin wrapper over BaseClient._http_request for one
    endpoint; the *_request methods return the parsed JSON body unless a
    different resp_type is requested.
    """

    def health_check(self) -> Dict[str, str]:
        """GET /pss/health - liveness probe for the PSS managed module."""
        return self._http_request(method='GET', url_suffix='/pss/health')

    @logger
    def smart_search_request(self,
                             action: Optional[str] = None,
                             from_: Optional[str] = None,
                             to: Optional[str] = None,
                             virus: Optional[str] = None,
                             env_from: Optional[str] = None,
                             env_rcpt: Optional[str] = None,
                             attach: Optional[str] = None,
                             qid: Optional[str] = None,
                             host: Optional[str] = None,
                             sid: Optional[str] = None,
                             subject: Optional[str] = None,
                             guid: Optional[str] = None,
                             hdr_mid: Optional[str] = None,
                             count: Optional[int] = 100,
                             ) -> Dict[str, Union[str, List]]:
        """GET /pss/filter - run a Smart Search with the given message filters.

        `from_`/`to` bound the time window ('from' is a Python keyword, hence
        the trailing underscore); `count` caps the number of returned records.
        """
        return self._http_request(
            method='GET',
            url_suffix='/pss/filter',
            params={
                'action': action,
                'from': from_,
                'to': to,
                'virus': virus,
                'env_from': env_from,
                'env_rcpt': env_rcpt,
                'attach': attach,
                'qid': qid,
                'host': host,
                'sid': sid,
                'subject': subject,
                'guid': guid,
                'hdr_mid': hdr_mid,
                'count': count,
            }
        )

    @logger
    def list_quarantined_messages_request(self,
                                          from_: Optional[str] = None,
                                          rcpt: Optional[str] = None,
                                          startdate: Optional[str] = None,
                                          enddate: Optional[str] = None,
                                          subject: Optional[str] = None,
                                          folder: Optional[str] = None,
                                          ) -> Dict[str, Union[str, List]]:
        """GET /quarantine - list quarantined messages matching the filters.

        Always asks for DLP violation details and message status
        (dlpviolation='details', messagestatus='t').
        """
        return self._http_request(
            method='GET',
            url_suffix='/quarantine',
            params={
                'from': from_,
                'rcpt': rcpt,
                'startdate': startdate,
                'enddate': enddate,
                'subject': subject,
                'folder': folder,
                'dlpviolation': 'details',
                'messagestatus': 't',
            }
        )

    @logger
    def quarantine_action_request(self,
                                  action: str,
                                  folder: str,
                                  localguid: str,
                                  scan: Optional[str] = None,
                                  brandtemplate: Optional[str] = None,
                                  securitypolicy: Optional[str] = None,
                                  deletedfolder: Optional[str] = None,
                                  targetfolder: Optional[str] = None,
                                  subject: Optional[str] = None,
                                  appendoldsubject: Optional[str] = None,
                                  from_: Optional[str] = None,
                                  headerfrom: Optional[str] = None,
                                  to: Optional[str] = None,
                                  comment: Optional[str] = None,
                                  resp_type: str = 'json',
                                  ) -> Dict[str, str]:
        """POST /quarantine - apply `action` (callers in this file use release,
        resubmit, forward, move and delete) to the message identified by
        `localguid` in `folder`.

        `resp_type` controls response parsing ('json' or 'text'); the return
        annotation reflects only the default JSON case.
        """
        return self._http_request(
            method='POST',
            url_suffix='/quarantine',
            json_data={
                'action': action,
                'folder': folder,
                'localguid': localguid,
                'scan': scan,
                'brandtemplate': brandtemplate,
                'securitypolicy': securitypolicy,
                'deletedfolder': deletedfolder,
                'targetfolder': targetfolder,
                'subject': subject,
                'appendoldsubject': appendoldsubject,
                'from': from_,
                'headerfrom': headerfrom,
                'to': to,
                'comment': comment,
            },
            resp_type=resp_type,
        )

    @logger
    def download_message_request(self,
                                 guid: str,
                                 ) -> Response:
        """GET /quarantine?guid=... - fetch the raw message.

        Returns the raw Response object; 404 is accepted as an OK code so the
        caller can report 'not found' instead of raising.
        """
        return self._http_request(
            method='GET',
            url_suffix='/quarantine',
            params={
                'guid': guid,
            },
            resp_type='response',
            ok_codes=(200, 404),
        )

    @logger
    def get_user(self, email_or_uid: str) -> Response:
        """GET /enduser/<email-or-uid> - fetch a single end-user record."""
        return self._http_request(
            method='GET',
            url_suffix=f'/enduser/{email_or_uid}'
        )

    @logger
    def create_user(self, email: str, fields: dict, attributes: dict) -> Response:
        """POST /enduser/<email> - create an end user.

        `fields` is merged into the request body alongside the nested
        `attributes` dict. 400 is accepted so the caller can inspect
        'User already exists' style errors without an exception.
        """
        json_data = {'attributes': attributes}
        json_data.update(fields)
        return self._http_request(
            method='POST',
            url_suffix=f'/enduser/{email}',
            json_data=json_data,
            ok_codes=(200, 400)
        )

    @logger
    def modify_user(self, email_or_uid: str, fields: dict, attributes: dict) -> Response:
        """PUT /enduser/<email-or-uid> - update an end user with `fields` plus
        the nested `attributes` dict."""
        json_data = {'attributes': attributes}
        json_data.update(fields)
        return self._http_request(
            method='PUT',
            url_suffix=f'/enduser/{email_or_uid}',
            json_data=json_data
        )

    @logger
    def delete_user(self, email_or_uid: str) -> Response:
        """DELETE /enduser/<email-or-uid> - delete an end user; 404 is accepted
        as an OK code."""
        return self._http_request(
            method='DELETE',
            url_suffix=f'/enduser/{email_or_uid}',
            ok_codes=(200, 404)
        )
def test_module(client: Client) -> str:
    """Connectivity check: probe both managed modules; any HTTP/connection
    error raises, which demisto reports as a failed test."""
    client.health_check()  # test pss managed module
    client.list_quarantined_messages_request(subject='Test')  # test Quarantine managed module
    return 'ok'
def smart_search(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Run a Smart Search and return the matching records.

    Args:
        client: Proofpoint Protection Server client.
        args: Demisto command arguments (start_time, end_time, action, virus,
            sender, recipient, attachment, queue_id, host, sid, subject, guid,
            message_id, limit).

    Returns:
        CommandResults with records under Proofpoint.SmartSearch, or a
        'No results found.' readable output.
    """
    assert (start_time := parse(args.get('start_time', '24 hours'), settings={'RETURN_AS_TIMEZONE_AWARE': True})), \
        f"Failed parsing start time: {args.get('start_time')}"
    # Fix: on an end-time parse failure the message previously said
    # "start time" and printed the already-overwritten parse result (None)
    # instead of the raw user input.
    end_time = args.get('end_time')
    if end_time:
        parsed_end = parse(end_time, settings={'RETURN_AS_TIMEZONE_AWARE': True})
        assert parsed_end, f"Failed parsing end time: {end_time}"
        end_time = parsed_end.strftime("%Y-%m-%dT%H:%M:%S%z")
    result = client.smart_search_request(
        action=args.get('action'),
        from_=start_time.strftime("%Y-%m-%dT%H:%M:%S%z"),
        to=end_time,
        virus=args.get('virus'),
        env_from=args.get('sender'),
        env_rcpt=args.get('recipient'),
        attach=args.get('attachment'),
        qid=args.get('queue_id'),
        host=args.get('host'),
        sid=args.get('sid'),
        subject=args.get('subject'),
        guid=args.get('guid'),
        hdr_mid=args.get('message_id'),
        count=int(args.get('limit', 100)),
    )
    if isinstance(result, dict) and result.get('result'):
        search_result = result.get('result')
        command_results_args = {
            'readable_output': tableToMarkdown(
                'Proofpoint Protection Server Smart Search Results',
                search_result,
                ['GUID', 'Date', 'Sender', 'Recipients', 'Subject', 'Final_Action'],
            ),
            'outputs_prefix': 'Proofpoint.SmartSearch',
            'outputs_key_field': 'GUID',
            'outputs': search_result,
            'raw_response': result,
        }
    else:
        command_results_args = {'readable_output': 'No results found.'}
    return CommandResults(**command_results_args)
def list_quarantined_messages(client: Client, args: Dict[str, Any]) -> CommandResults:
    """List quarantined messages matching sender/recipient/subject in a time window."""
    sender = args.get('sender')
    recipient = args.get('recipient')
    subject = args.get('subject')
    if not (sender or recipient or subject):
        raise ValueError('At least one of the following arguments must be specified: sender, recipient, subject.')
    assert (start_time := parse(args.get('start_time', '24 hours'))), \
        f"Failed parsing start time: {args.get('start_time')}"
    assert (end_time := parse(args.get('end_time', 'now'))), f"Failed parsing end time: {args.get('end_time')}"
    result = client.list_quarantined_messages_request(
        from_=sender,
        rcpt=recipient,
        startdate=start_time.strftime('%Y-%m-%d %H:%M:%S'),
        enddate=end_time.strftime('%Y-%m-%d %H:%M:%S'),
        subject=subject,
        folder=args.get('folder_name'),
    )
    # Guard clause instead of if/else: bail out early when nothing matched.
    if not (isinstance(result, dict) and result.get('records')):
        return CommandResults(readable_output='No results found.')
    records = result.get('records')
    table = tableToMarkdown(
        'Proofpoint Protection Server Quarantined Messages',
        records,
        ['localguid', 'folder', 'spamscore', 'from', 'rcpts', 'date', 'subject', 'size', 'host_ip'],
    )
    return CommandResults(
        readable_output=table,
        outputs_prefix='Proofpoint.QuarantinedMessage',
        outputs_key_field='guid',
        outputs=records,
        raw_response=result,
    )
def release_message(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Release a quarantined message, optionally rescanning it first."""
    rescan_flag = 't' if args.get('scan') == 'true' else 'f'
    response_text = str(client.quarantine_action_request(
        action='release',
        folder=args.get('folder_name'),
        localguid=args.get('local_guid'),
        deletedfolder=args.get('deleted_folder'),
        scan=rescan_flag,
        brandtemplate=args.get('brand_template'),
        securitypolicy=args.get('security_policy'),
        resp_type='text',
    ))
    return CommandResults(readable_output=response_text)
def resubmit_message(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Resubmit a quarantined message for processing."""
    response_text = str(client.quarantine_action_request(
        action='resubmit',
        folder=args.get('folder_name'),
        localguid=args.get('local_guid'),
        resp_type='text',
    ))
    return CommandResults(readable_output=response_text)
def forward_message(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Forward a quarantined message to a new recipient."""
    append_flag = 't' if args.get('append_old_subject') == 'true' else 'f'
    response_text = str(client.quarantine_action_request(
        action='forward',
        folder=args.get('folder_name'),
        localguid=args.get('local_guid'),
        to=args.get('recipient'),
        deletedfolder=args.get('deleted_folder'),
        subject=args.get('subject'),
        appendoldsubject=append_flag,
        from_=args.get('sender'),
        headerfrom=args.get('header_from'),
        comment=args.get('comment'),
        resp_type='text',
    ))
    return CommandResults(readable_output=response_text)
def move_message(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Move a quarantined message into another folder."""
    guid = args.get('local_guid')
    response = client.quarantine_action_request(
        action='move',
        folder=args.get('folder_name'),
        localguid=guid,
        targetfolder=args.get('target_folder'),
    )
    if not isinstance(response, dict):
        raise RuntimeError(f'Message move action failed.\n{response}')
    return CommandResults(readable_output=response.get('status', f'Successfully moved message {guid}'))
def delete_message(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Delete a quarantined message.

    Raises RuntimeError when the API does not answer with a JSON object.
    """
    local_guid = args.get('local_guid')
    response = client.quarantine_action_request(
        action='delete',
        folder=args.get('folder_name'),
        localguid=local_guid,
        deletedfolder=args.get('deleted_folder'),
    )
    if not isinstance(response, dict):
        raise RuntimeError(f'Message delete action failed.\n{response}')
    fallback = f'Successfully deleted message {local_guid}'
    return CommandResults(readable_output=response.get('status', fallback))
def download_message(client: Client, args: Dict[str, Any]) -> Union[CommandResults, Dict]:
    """Download a quarantined message as an .eml file entry.

    Returns a file result entry, or a CommandResults with a not-found
    message when the API answers 404.
    """
    guid = args.get('guid', '')
    response = client.download_message_request(guid)
    if response.status_code == 404:
        return CommandResults(readable_output='No message found.')
    return fileResult(f'{guid}.eml', response.content)
def get_user(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Look up a single PPS user by email address or uid.

    Returns an informative message when neither identifier is supplied;
    raises RuntimeError when the API does not answer with a JSON object.
    """
    email = args.get('email')
    uid = args.get('uid')
    if not (email or uid):
        return CommandResults(readable_output='Please specify an email or uid')
    result = client.get_user(email or uid)
    if not isinstance(result, dict):
        raise RuntimeError(f'Failed to get user.\n{result}')
    readable = tableToMarkdown(
        'Proofpoint Protection Server Users',
        result,
        ['uid', 'email', 'firstname', 'lastname', 'created', 'lastmodified'],
    )
    return CommandResults(
        readable_output=readable,
        outputs_prefix='Proofpoint.User',
        outputs_key_field='email',
        outputs=result,
        raw_response=result,
    )
def create_user(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Create a new PPS user.

    Args:
        client: the integration client.
        args: command arguments ('email', optional JSON 'fields'/'attributes').

    Returns:
        CommandResults with the created user, or an 'already exists' notice.

    Raises:
        RuntimeError: on a non-JSON response or an unrecognized API error.
    """
    email = args.get('email')
    fields = json.loads(args.get('fields', '{}'))
    attributes = json.loads(args.get('attributes', '{}'))
    result = client.create_user(email, fields, attributes)
    demisto.debug(f'result: {result}')
    if not isinstance(result, dict):
        raise RuntimeError(f'Failed to create user.\n{result}')
    if result.get('status') == 400:
        invalid_arguments = result.get('errors', {}).get('invalidarguments', [])
        # Guard against an empty/missing error list before indexing: the
        # previous code did `[...][0]` unconditionally and raised IndexError
        # instead of reporting the real API error.
        if invalid_arguments and invalid_arguments[0].get('error') == 'User already exists':
            return CommandResults(readable_output='User already exists')
        raise RuntimeError(f'Failed to create user.\n{result}')
    return CommandResults(
        readable_output=tableToMarkdown(
            'User created',
            result,
            ['uid', 'email', 'firstname', 'lastname', 'created', 'lastmodified'],
        ),
        outputs_prefix='Proofpoint.User',
        outputs_key_field='email',
        outputs=result,
        raw_response=result,
    )
def modify_user(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Update an existing PPS user identified by email or uid."""
    email = args.get('email')
    uid = args.get('uid')
    # Parsed up front so malformed JSON is reported even when no identifier
    # was supplied (matches the original evaluation order).
    fields = json.loads(args.get('fields', '{}'))
    attributes = json.loads(args.get('attributes', '{}'))
    if not (email or uid):
        return CommandResults(readable_output='Please specify an email or uid')
    result = client.modify_user(email or uid, fields, attributes)
    if not isinstance(result, dict):
        raise RuntimeError(f'Failed to modify user.\n{result}')
    return CommandResults(
        readable_output=tableToMarkdown(
            'Modified User',
            result,
            ['uid', 'email', 'firstname', 'lastname', 'created', 'lastmodified'],
        ),
        outputs_prefix='Proofpoint.User',
        outputs_key_field='email',
        outputs=result,
        raw_response=result,
    )
def delete_user(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Delete a PPS user identified by email or uid.

    Returns:
        CommandResults describing the outcome ('Deleted User', 'User not
        found', or a prompt when no identifier was supplied).

    Raises:
        RuntimeError: on a non-JSON response or an unrecognized API error.
    """
    email = args.get('email')
    uid = args.get('uid')
    if not (email or uid):
        return CommandResults(readable_output='Please specify an email or uid')
    result = client.delete_user(email or uid)
    if not isinstance(result, dict):
        raise RuntimeError(f'Failed to delete user.\n{result}')
    if result.get('status') == 404:
        invalid_arguments = result.get('errors', {}).get('invalidarguments', [])
        # Guard against an empty/missing error list before indexing: the
        # previous code did `[...][0]` unconditionally and raised IndexError
        # instead of reporting the real API error.
        if invalid_arguments and invalid_arguments[0].get('error') == 'User not found':
            return CommandResults(readable_output='User not found')
        raise RuntimeError(f'Failed to delete user.\n{result}')
    return CommandResults(readable_output='Deleted User', raw_response=result)
def main() -> None:
    """Entry point: build the PPS client and dispatch the invoked command.

    Any exception is converted into a user-facing error entry via
    return_error.
    """
    try:
        command = demisto.command()
        params = demisto.params()
        handle_proxy()
        client = Client(
            base_url=urljoin(params['url'], '/rest/v1'),
            auth=(params['credentials']['identifier'], params['credentials']['password']),
            verify=not params.get('unsecure', False),
            proxy=params.get('proxy', False),
        )
        # The user-management commands share one (client, args) signature and
        # are dispatched via this table; the quarantine commands are routed
        # explicitly in the elif chain below.
        commands = {
            'proofpoint-pps-get-user': get_user,
            'proofpoint-pps-create-user': create_user,
            'proofpoint-pps-modify-user': modify_user,
            'proofpoint-pps-delete-user': delete_user,
        }
        if command == 'test-module':
            return_results(test_module(client))
        elif command == 'proofpoint-pps-smart-search':
            return_results(smart_search(client, demisto.args()))
        elif command == 'proofpoint-pps-quarantine-messages-list':
            return_results(list_quarantined_messages(client, demisto.args()))
        elif command == 'proofpoint-pps-quarantine-message-release':
            return_results(release_message(client, demisto.args()))
        elif command == 'proofpoint-pps-quarantine-message-resubmit':
            return_results(resubmit_message(client, demisto.args()))
        elif command == 'proofpoint-pps-quarantine-message-forward':
            return_results(forward_message(client, demisto.args()))
        elif command == 'proofpoint-pps-quarantine-message-move':
            return_results(move_message(client, demisto.args()))
        elif command == 'proofpoint-pps-quarantine-message-delete':
            return_results(delete_message(client, demisto.args()))
        elif command == 'proofpoint-pps-quarantine-message-download':
            return_results(download_message(client, demisto.args()))
        elif command in commands:
            return_results(commands[command](client, demisto.args()))
    except Exception as e:
        return_error(str(e), error=e)
# Run main() when executed as a script; the server may load the module under
# '__builtin__'/'builtins' as well as '__main__', hence the tuple check.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | 9fa1dd1847d8f1c1ef4a9607de5ae19f | 38.681913 | 116 | 0.52837 | 4.193102 | false | false | false | false |
demisto/content | Packs/Attlasian/Integrations/Attlasian_IAM/Attlasian_IAM.py | 2 | 8380 | import demistomock as demisto
from CommonServerPython import *
import traceback
# Disable insecure warnings
requests.packages.urllib3.disable_warnings()
'''CLIENT CLASS'''
class Client(BaseClient):
    """
    Atlassian IAM Client class that implements logic to authenticate with Atlassian
    and wraps the SCIM user-provisioning endpoints.
    """

    def __init__(self, base_url, directory_id, headers, ok_codes=None, verify=True, proxy=False):
        super().__init__(base_url, verify=verify, proxy=proxy, ok_codes=ok_codes, headers=headers)
        self.directory_id = directory_id

    def test(self):
        """Sanity API call used by test-module: fetch a single user."""
        uri = f'/scim/directory/{self.directory_id}/Users?count=1'
        res = self._http_request(method='GET', url_suffix=uri)
        return res

    def get_user(self, filter_name, filter_value):
        """Fetch a user by a SCIM filter.

        A filter on 'id' is translated into a direct GET on the user
        resource; any other filter name uses the SCIM 'filter' query
        parameter. Returns an IAMUserAppData, or None when the response
        is empty.
        """
        uri = f'/scim/directory/{self.directory_id}/Users'
        query_params = {
            'filter': f'{filter_name} eq "{filter_value}"'
        }
        if filter_name == 'id':
            uri += f'/{filter_value}'
            query_params = {}
        res = self._http_request(
            method='GET',
            url_suffix=uri,
            params=query_params
        )
        if res:
            # A filtered search wraps the match in 'Resources'; a direct GET
            # returns the user object itself.
            user_app_data = res.get('Resources')[0] if res.get('totalResults') == 1 else res
            user_id = user_app_data.get('id')
            is_active = user_app_data.get('active')
            username = user_app_data.get('userName')
            email = get_first_primary_email_by_scim_schema(user_app_data)
            return IAMUserAppData(user_id, username, is_active, user_app_data, email)
        return None

    def create_user(self, user_data):
        """Create a user from a SCIM payload and return it as IAMUserAppData."""
        # The SCIM schema expects 'emails' to be a list of objects.
        if isinstance(user_data.get('emails'), dict):
            user_data['emails'] = [user_data['emails']]
        uri = f'/scim/directory/{self.directory_id}/Users'
        res = self._http_request(
            method='POST',
            url_suffix=uri,
            json_data=user_data
        )
        user_app_data = res
        user_id = user_app_data.get('id')
        is_active = user_app_data.get('active')
        username = user_app_data.get('userName')
        email = get_first_primary_email_by_scim_schema(user_app_data)
        return IAMUserAppData(user_id, username, is_active, user_app_data, email)

    def update_user(self, user_id, user_data):
        """Replace the user resource (SCIM PUT) and return the updated user."""
        if isinstance(user_data.get('emails'), dict):
            user_data['emails'] = [user_data['emails']]
        uri = f'/scim/directory/{self.directory_id}/Users/{user_id}'
        res = self._http_request(
            method='PUT',
            url_suffix=uri,
            json_data=user_data
        )
        user_app_data = res
        user_id = user_app_data.get('id')
        is_active = user_app_data.get('active')
        username = user_app_data.get('userName')
        return IAMUserAppData(user_id, username, is_active, user_app_data)

    def disable_user(self, user_id):
        """Disable (DELETE) the user resource and return the parsed response."""
        uri = f'/scim/directory/{self.directory_id}/Users/{user_id}'
        res = self._http_request(
            method='DELETE',
            url_suffix=uri
        )
        user_app_data = res
        user_id = user_app_data.get('id')
        is_active = user_app_data.get('active')
        username = user_app_data.get('userName')
        return IAMUserAppData(user_id, username, is_active, user_app_data)

    def get_app_fields(self):
        """Return a {field_name: description} map from the SCIM user schema."""
        app_fields = {}
        uri = f'/scim/directory/{self.directory_id}/Schemas/urn:ietf:params:scim:schemas:core:2.0:User'
        res = self._http_request(
            method='GET',
            url_suffix=uri
        )
        elements = res.get('attributes', [])
        for elem in elements:
            if elem.get('name'):
                field_name = elem.get('name')
                description = elem.get('description')
                app_fields[field_name] = description
        return app_fields

    @staticmethod
    def handle_exception(user_profile, e, action):
        """ Handles failed responses from the application API by setting the User Profile object with the results.

        Args:
            user_profile (IAMUserProfile): The User Profile object.
            e (Exception): The exception error. If DemistoException, holds the response json.
            action (IAMActions): An enum represents the current action (get, update, create, etc).
        """
        if e.__class__ is DemistoException and hasattr(e, 'res') and e.res is not None:
            error_code = e.res.status_code
            try:
                resp = e.res.json()
                error_message = resp.get('detail')
            except ValueError:
                error_message = str(e)
        else:
            error_code = ''
            error_message = str(e)

        if error_code == 204:
            # A 204 (No Content) answer to the disable call means the
            # operation actually succeeded. Record the success and return;
            # previously the code fell through and immediately overwrote
            # this result with a failure.
            user_profile.set_result(action=action,
                                    success=True,
                                    details='The user was successfully disabled.')
            return

        user_profile.set_result(action=action,
                                success=False,
                                error_code=error_code,
                                error_message=f'{error_message}\n{traceback.format_exc()}')

        demisto.error(traceback.format_exc())
'''COMMAND FUNCTIONS'''
def test_module(client):
    """Verify the configured credentials can reach the Atlassian SCIM API."""
    # Any connectivity/authentication failure raises and surfaces to the user.
    client.test()
    return_results('ok')
def get_mapping_fields(client: Client) -> GetMappingFieldsResponse:
    """ Creates and returns a GetMappingFieldsResponse object of the user schema in the application

    :param client: (Client) The integration Client object that implements a get_app_fields() method
    :return: (GetMappingFieldsResponse) An object that represents the user schema
    """
    scheme = SchemeTypeMapping(type_name=IAMUserProfile.DEFAULT_INCIDENT_TYPE)
    for field_name, field_description in client.get_app_fields().items():
        scheme.add_field(field_name, field_description)
    return GetMappingFieldsResponse([scheme])
def main():
    """Entry point for the Atlassian IAM integration.

    Builds the SCIM client from the instance parameters and dispatches the
    invoked command. The iam-* commands are intentionally executed OUTSIDE
    the try block: IAMCommand reports failures through the user-profile
    result object itself (see Client.handle_exception); only test-module and
    get-mapping-fields are wrapped with return_error.
    """
    user_profile = None
    params = demisto.params()
    command = demisto.command()
    args = demisto.args()
    verify_certificate = not params.get('insecure', False)
    proxy = params.get('proxy', False)
    # Normalize the server URL so url suffixes concatenate cleanly.
    base_url = params.get('url')
    if base_url[-1] != '/':
        base_url += '/'
    access_token = params.get('access_token')
    directory_id = params.get('directory_id')
    mapper_in = params.get('mapper_in')
    mapper_out = params.get('mapper_out')
    is_create_enabled = params.get("create_user_enabled")
    is_disable_enabled = params.get("disable_user_enabled")
    is_enable_enabled = params.get("enable_user_enabled")
    is_update_enabled = demisto.params().get("update_user_enabled")
    create_if_not_exists = demisto.params().get("create_if_not_exists")
    iam_command = IAMCommand(is_create_enabled, is_enable_enabled, is_disable_enabled, is_update_enabled,
                             create_if_not_exists, mapper_in, mapper_out,
                             get_user_iam_attrs=['id', 'userName', 'emails'])
    headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json',
        'Authorization': f'Bearer {access_token}'
    }
    client = Client(
        base_url=base_url,
        directory_id=directory_id,
        verify=verify_certificate,
        proxy=proxy,
        headers=headers,
        ok_codes=(200, 201)
    )
    demisto.debug(f'Command being called is {command}')
    if command == 'iam-get-user':
        user_profile = iam_command.get_user(client, args)
    elif command == 'iam-create-user':
        user_profile = iam_command.create_user(client, args)
    elif command == 'iam-update-user':
        user_profile = iam_command.update_user(client, args)
    elif command == 'iam-disable-user':
        user_profile = iam_command.disable_user(client, args)
    if user_profile:
        # user_profile.return_outputs()
        return_results(user_profile)
    try:
        if command == 'test-module':
            test_module(client)
        elif command == 'get-mapping-fields':
            return_results(get_mapping_fields(client))
    except Exception:
        # For any other integration command exception, return an error
        return_error(f'Failed to execute {command} command.')
from IAMApiModule import * # noqa: E402
# Run main() when executed as a script; the server may load the module under
# '__builtin__'/'builtins' as well as '__main__', hence the tuple check.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | 99b62aecfc75d037b9144a7507af345b | 33.628099 | 114 | 0.594391 | 3.833486 | false | false | false | false |
demisto/content | Packs/UnisysStealth/Integrations/UnisysStealth/UnisysStealth.py | 2 | 6075 | import json
import os
import demistomock as demisto # noqa: F401
import requests
from CommonServerPython import * # noqa: F401
from requests.auth import HTTPBasicAuth
# disable insecure warnings
requests.packages.urllib3.disable_warnings()
# Integration parameters, resolved once at import time.
USERNAME = demisto.params().get('credentials')['identifier']
PASSWORD = demisto.params().get('credentials')['password']
SERVER_IP = demisto.params().get('server_ip')
PORT = demisto.params().get('port')
ISOLATION_ID = demisto.params()['isolation_id']
BASE_URL = f'https://{SERVER_IP}:{PORT}/uisStealth/EcoApi/v1'
HEADERS = {
    'Accept': "application/json",
    'Content-Type': "application/json",
}
# Fix: the 'insecure' checkbox means "skip certificate verification", so the
# requests `verify` flag must be its negation. The previous code passed the
# raw parameter value, inverting the checkbox's meaning.
VERIFY = not demisto.params().get('insecure', False)
# Strip proxy environment variables unless the proxy option is enabled, so
# requests does not silently route through a system proxy.
if not demisto.params().get('proxy', False):
    os.environ.pop('HTTP_PROXY', '')
    os.environ.pop('HTTPS_PROXY', '')
    os.environ.pop('http_proxy', '')
    os.environ.pop('https_proxy', '')
proxy = demisto.params().get('proxy', False)
def http_request(method, uri, data=None, **kwargs):
    """Issue an authenticated request against the Stealth EcoAPI.

    Args:
        method: HTTP verb, e.g. 'GET', 'PUT', 'DELETE'.
        uri: path suffix appended to BASE_URL.
        data: optional request body (already serialized).
        **kwargs: forwarded verbatim to requests.request().

    Returns:
        The requests.Response object when the status is 200 or 204.

    Raises:
        DemistoException: on timeout, redirect loops, or any other status.
    """
    try:
        # Removed a stray `requests.Request()` call that built a throwaway
        # object and had no effect.
        res = requests.request(
            method=method,
            url=f"{BASE_URL}{uri}",
            verify=VERIFY,
            data=data,
            headers=HEADERS,
            auth=HTTPBasicAuth(USERNAME, PASSWORD),
            **kwargs
        )
    except requests.exceptions.Timeout:
        raise DemistoException(
            'HTTP Request to Stealth has timed out. Please try again')
    except requests.exceptions.TooManyRedirects:
        raise DemistoException('Invalid API Endpoint')
    if res.status_code not in {200, 204}:
        raise DemistoException(f'Error received {res.status_code} in API response')
    return res
def test_module():
    """Exercise the /role endpoint to confirm connectivity and credentials."""
    return http_request(method='GET', uri='/role')
def get_roles():
    """Return the parsed JSON body of the Stealth roles listing."""
    response = http_request(method='GET', uri='/role')
    return response.json()
def isolate_machine(endpoint):
    """Add an endpoint (by hostname) to the configured isolation role."""
    role_entry = {
        "id": ISOLATION_ID,
        "endpoint": [{"name": endpoint}],
    }
    return http_request(
        'PUT',
        uri='/role/isolate',
        data=json.dumps({"role": [role_entry]}),
    )
def unisolate_machine(endpoint):
    """Remove an endpoint (by hostname) from the isolation role."""
    return http_request(
        method='DELETE',
        uri=f'/role/isolate?hostname={endpoint}'
    )
def isolate_user(user):
    """Add a user account to the configured isolation role."""
    role_entry = {
        "id": ISOLATION_ID,
        "accounts": {"user": [{"name": user}]},
    }
    return http_request(
        'PUT',
        '/role/isolate',
        data=json.dumps({"role": [role_entry]}),
    )
def unisolate_user(user):
    """Remove a user account (by username) from the isolation role."""
    return http_request(
        method='DELETE',
        uri=f'/role/isolate?username={user}',
    )
def isolate_machine_and_user(endpoint, user):
    """Isolate both an endpoint and a user account in a single role update."""
    role_entry = {
        "id": ISOLATION_ID,
        "accounts": {"user": [{"name": user}]},
        "endpoint": [{"name": endpoint}],
    }
    return http_request(
        method='PUT',
        uri='/role/isolate',
        data=json.dumps({"role": [role_entry]})
    )
def unisolate_machine_and_user(endpoint, user):
    """Remove both the endpoint and the user account from the isolation role."""
    return http_request(
        method='DELETE',
        uri=f'/role/isolate?username={user}&hostname={endpoint}'
    )
# Top-level command dispatch: runs at import time, routing the invoked
# command to the matching helper and shaping its outputs for the war room.
if demisto.command() == 'test-module':
    result = test_module()
    demisto.results("ok")
elif demisto.command() == 'stealth-get-stealth-roles':
    result = get_roles()
    rows = [{"Name": role["name"], "ID": role["id"]} for role in result["role"]]
    table = tableToMarkdown("Stealth Roles", rows)
    return_outputs(readable_output=table, outputs={"Stealth": result}, raw_response=result)
elif demisto.command() == 'stealth-isolate-machine':
    endpoint = demisto.args()['endpoint']
    result = isolate_machine(endpoint)
    return_outputs(readable_output=f"{endpoint} successfully isolated", outputs={"Stealth": {"isolate": endpoint}})
    # NOTE(review): this branch also echoes the raw response as a second
    # entry, unlike the other branches — confirm the duplication is intended.
    demisto.results(result)
elif demisto.command() == 'stealth-unisolate-machine':
    endpoint = demisto.args()['endpoint']
    result = unisolate_machine(endpoint)
    return_outputs(readable_output=f"{endpoint} successfully unisolated", outputs={"Stealth": {"unisolate": endpoint}})
elif demisto.command() == 'stealth-isolate-user':
    user = demisto.args()['user']
    result = isolate_user(user)
    return_outputs(readable_output=f"{user} successfully isolated", outputs={"Stealth": {"isolate": user}})
elif demisto.command() == 'stealth-unisolate-user':
    user = demisto.args()['user']
    result = unisolate_user(user)
    return_outputs(readable_output=f"{user} successfully unisolated", outputs={"Stealth": {"unisolate": user}})
elif demisto.command() == 'stealth-isolate-machine-and-user':
    endpoint = demisto.args()['endpoint']
    user = demisto.args()['user']
    result = isolate_machine_and_user(endpoint, user)
    return_outputs(readable_output=f"{endpoint} and {user} successfully isolated",
                   outputs={"Stealth": {"isolate": [endpoint, user]}})
elif demisto.command() == 'stealth-unisolate-machine-and-user':
    endpoint = demisto.args()['endpoint']
    user = demisto.args()['user']
    result = unisolate_machine_and_user(endpoint, user)
    return_outputs(readable_output=f"{endpoint} and {user} successfully unisolated",
                   outputs={"Stealth": {"unisolate": [endpoint, user]}})
else:
    demisto.results("Enter valid command")
| mit | 4b9d220442a4c1e51dcd8504ec86099e | 28.347826 | 119 | 0.564774 | 3.975785 | false | false | false | false |
demisto/content | Packs/CortexAttackSurfaceManagement/Scripts/GenerateASMReport/GenerateASMReport_test.py | 2 | 3260 | import json
import demistomock as demisto # noqa: F401
from CommonServerPython import EntryType
def util_load_json(path):
    """Load and parse a JSON test-data file.

    Args:
        path: path of the JSON file to read.

    Returns:
        The deserialized JSON content.
    """
    # Explicit UTF-8 keeps the tests independent of the platform locale, and
    # json.load streams the file instead of read()+loads().
    with open(path, mode="r", encoding="utf-8") as f:
        return json.load(f)
def test_get_asm_args(mocker):
    """Tests get_asm_args helper function.

    Given:
        - Mock JSON that mimics the Demisto.args() object.
    When:
        - Passing the JSON to the get_asm_args helper function.
    Then:
        - The helper returns a dict with the expected asmdatacollection entry.
    """
    from GenerateASMReport import get_asm_args

    raw_args = util_load_json("test_data/args.json")
    parsed = get_asm_args(raw_args)
    assert isinstance(parsed, dict)
    expected_collection = [
        {
            "Answerer": "fake_user@domain.com",
            "Options": "NoAutomatedRemediation",
            "Selected": "File a ServiceNow Ticket",
            "Timestamp": "1666033665586",
        }
    ]
    assert parsed["asmdatacollection"] == expected_collection
def test_color_for_severity():
    """Tests color_for_severity helper function.

    Given:
        - A severity string ("High").
    When:
        - Sending the severity to the color_for_severity helper function.
    Then:
        - The matching display color is returned.
    """
    from GenerateASMReport import color_for_severity

    # The previously-requested `mocker` fixture was unused (the helper is
    # pure), so it has been removed.
    result = color_for_severity("High")
    assert result == "red"
def test_build_template(mocker):
    """Tests build_template command function.

    Given:
        - A mocked current date and parsed command arguments.
    When:
        - Running the 'build_template' function.
    Then:
        - A list of widget dicts is returned, with the expected header entry.
    """
    from GenerateASMReport import build_template

    mocked_date = [{"Contents": "2022-10-26T16:06:49.164Z"}]
    mocker.patch.object(demisto, "executeCommand", return_value=mocked_date)
    args = util_load_json("test_data/args.json")
    template = build_template(args)
    assert isinstance(template, list)
    assert all(isinstance(widget, dict) for widget in template)
    expected_header = {
        "type": "header",
        "data": "ASM Investigation Summary Report",
        "layout": {
            "rowPos": 1,
            "columnPos": 2,
            "style": {
                "textAlign": "center",
                "fontSize": 28,
                "color": "black",
                "background-color": "white",
            },
        },
    }
    assert template[1] == expected_header
def test_build_report(mocker):
    """Tests build_report command function.

    Given:
        - A report template (as produced by build_template) and an alert ID.
    When:
        - Running the 'build_report' function with the PDF conversion mocked.
    Then:
        - A file-entry dict is returned.
    """
    from GenerateASMReport import build_report

    mocked_pdf_entries = util_load_json("test_data/sanepdf_raw.json")
    mocker.patch.object(demisto, "executeCommand", return_value=mocked_pdf_entries)
    report_template = util_load_json("test_data/template.json")
    entry = build_report(report_template, 1234)
    assert isinstance(entry, dict)
    assert entry["Type"] == EntryType.ENTRY_INFO_FILE
| mit | bbad3661e6383345ba513a422f80869f | 29.185185 | 81 | 0.598773 | 4.064838 | false | true | false | false |
demisto/content | Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.py | 2 | 63140 | import hashlib
import secrets
import string
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
from CoreIRApiModule import *
from itertools import zip_longest
# Disable insecure warnings
urllib3.disable_warnings()
# Time format used for all creation/modification-time filters sent to XDR.
TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
# Lengths used by the advanced-authentication helpers (generate_key/create_auth).
NONCE_LENGTH = 64
API_KEY_LENGTH = 128
INTEGRATION_CONTEXT_BRAND = 'PaloAltoNetworksXDR'
XDR_INCIDENT_TYPE_NAME = 'Cortex XDR Incident'
INTEGRATION_NAME = 'Cortex XDR - IR'
# XDR incident fields that are mirrored, mapped to their XSOAR field names.
XDR_INCIDENT_FIELDS = {
    "status": {"description": "Current status of the incident: \"new\",\"under_"
                              "investigation\",\"resolved_known_issue\","
                              "\"resolved_duplicate\",\"resolved_false_positive\","
                              "\"resolved_true_positive\",\"resolved_security_testing\",\"resolved_other\"",
               "xsoar_field_name": 'xdrstatusv2'},
    "assigned_user_mail": {"description": "Email address of the assigned user.",
                           'xsoar_field_name': "xdrassigneduseremail"},
    "assigned_user_pretty_name": {"description": "Full name of the user assigned to the incident.",
                                  "xsoar_field_name": "xdrassigneduserprettyname"},
    "resolve_comment": {"description": "Comments entered by the user when the incident was resolved.",
                        "xsoar_field_name": "xdrresolvecomment"},
    "manual_severity": {"description": "Incident severity assigned by the user. "
                                       "This does not affect the calculated severity low medium high",
                        "xsoar_field_name": "severity"},
}
# XSOAR close reasons translated to XDR resolved statuses for outgoing mirroring.
XSOAR_RESOLVED_STATUS_TO_XDR = {
    'Other': 'resolved_other',
    'Duplicate': 'resolved_duplicate',
    'False Positive': 'resolved_false_positive',
    'Resolved': 'resolved_true_positive',
}
# Instance-parameter value -> mirroring direction flag used by get-remote-data.
MIRROR_DIRECTION = {
    'None': None,
    'Incoming': 'In',
    'Outgoing': 'Out',
    'Both': 'Both'
}
def convert_epoch_to_milli(timestamp):
    """Normalize an epoch timestamp to milliseconds.

    A value whose decimal representation has 10-12 digits is treated as
    seconds and scaled by 1000; anything else is assumed to already be in
    milliseconds. None passes through unchanged.
    """
    if timestamp is None:
        return None
    digit_count = len(str(timestamp))
    if 9 < digit_count < 13:
        return int(timestamp) * 1000
    return int(timestamp)
def convert_datetime_to_epoch(the_time=0):
    """Convert a datetime object to an epoch timestamp in seconds.

    Args:
        the_time: a datetime instance (naive values are interpreted in local
            time, aware values according to their tzinfo). None is passed
            through; any non-datetime value falls through to 0.

    Returns:
        int epoch seconds, None for None input, or 0 on failure.
    """
    if the_time is None:
        return None
    try:
        if isinstance(the_time, datetime):
            # datetime.timestamp() is portable and honors tzinfo, unlike the
            # previous strftime('%s'), which is platform-dependent
            # (unsupported on Windows) and ignored aware datetimes.
            return int(the_time.timestamp())
    except Exception as err:
        demisto.debug(err)
    return 0
def convert_datetime_to_epoch_millis(the_time=0):
    """Convert a datetime object to an epoch timestamp in milliseconds."""
    epoch_seconds = convert_datetime_to_epoch(the_time=the_time)
    return convert_epoch_to_milli(epoch_seconds)
def generate_current_epoch_utc():
    """Return the current UTC time as epoch milliseconds."""
    now_utc = datetime.now(timezone.utc)
    return convert_datetime_to_epoch_millis(the_time=now_utc)
def generate_key():
    """Generate a random alphanumeric API key of API_KEY_LENGTH characters."""
    alphabet = string.ascii_letters + string.digits
    return "".join(secrets.choice(alphabet) for _ in range(API_KEY_LENGTH))
def create_auth(api_key):
    """Build the advanced-API auth triple: (nonce, timestamp, digest).

    The digest is sha256(api_key + nonce + timestamp) with the timestamp in
    UTC epoch milliseconds.
    """
    alphabet = string.ascii_letters + string.digits
    nonce = "".join(secrets.choice(alphabet) for _ in range(NONCE_LENGTH))
    timestamp = str(generate_current_epoch_utc())  # Get epoch time utc millis
    digest = hashlib.sha256()
    digest.update((api_key + nonce + timestamp).encode("utf-8"))
    return nonce, timestamp, digest.hexdigest()
def clear_trailing_whitespace(res):
    """Strip trailing whitespace from every string value in a list of dicts.

    Mutates the dictionaries in place and returns the same list object,
    preserving the original contract. (Rewritten from a manual while/index
    loop to the idiomatic for loop.)
    """
    for entry in res:
        for key, value in entry.items():
            if isinstance(value, str):
                entry[key] = value.rstrip()
    return res
def filter_and_save_unseen_incident(incidents: List, limit: int, number_of_already_filtered_incidents: int) -> List:
    """
    Filters incidents that were seen already and saves the unseen incidents to LastRun object.
    :param incidents: List of incident - must be list
    :param limit: the maximum number of incident per fetch
    :param number_of_already_filtered_incidents: number of incidents that were fetched already
    :return: the filtered incidents.
    """
    last_run = demisto.getLastRun()
    seen_ids = last_run.pop('fetched_starred_incidents', {})
    unseen_incidents = []
    total_filtered = number_of_already_filtered_incidents
    for incident in incidents:
        incident_id = incident.get('incident_id')
        if incident_id in seen_ids:
            demisto.debug(f'incident (ID {incident_id}) was already fetched in the past.')
            continue
        seen_ids[incident_id] = True
        unseen_incidents.append(incident)
        total_filtered += 1
        if total_filtered >= limit:
            break
    last_run['fetched_starred_incidents'] = seen_ids
    demisto.setLastRun(last_run)
    return unseen_incidents
class Client(CoreClient):
    """HTTP client for the Cortex XDR public API, extending CoreClient with
    incident-centric endpoints: listing/filtering incidents, fetching an
    incident's extra data, correlation-alert data, and featured fields.
    """

    def test_module(self, first_fetch_time):
        """Performs a basic get request (one incident from the last fetch
        window) to validate connectivity and credentials.
        """
        last_one_day, _ = parse_date_range(first_fetch_time, TIME_FORMAT)
        try:
            self.get_incidents(lte_creation_time=last_one_day, limit=1)
        except Exception as err:
            if 'API request Unauthorized' in str(err):
                # this error is received from the XDR server when the client clock is not in sync to the server
                raise DemistoException(f'{str(err)} please validate that your both '
                                       f'XSOAR and XDR server clocks are in sync')
            else:
                raise

    def handle_fetch_starred_incidents(self, limit: int, page_number: int, request_data: dict) -> List:
        """
        handles pagination and filter of starred incidents that were fetched.
        :param limit: the maximum number of incident per fetch
        :param page_number: page number
        :param request_data: the api call request data
        :return: the filtered starred incidents.
        """
        res = self._http_request(
            method='POST',
            url_suffix='/incidents/get_incidents/',
            json_data={'request_data': request_data},
            timeout=self.timeout
        )
        raw_incidents = res.get('reply', {}).get('incidents', [])
        # we want to avoid duplications of starred incidents in the fetch-incident command (we fetch all incidents
        # in the fetch window).
        filtered_incidents = filter_and_save_unseen_incident(raw_incidents, limit, 0)
        # we want to support pagination on starred incidents.
        while len(filtered_incidents) < limit:
            page_number += 1
            search_from = page_number * limit
            search_to = search_from + limit
            request_data['search_from'] = search_from
            request_data['search_to'] = search_to
            res = self._http_request(
                method='POST',
                url_suffix='/incidents/get_incidents/',
                json_data={'request_data': request_data},
                timeout=self.timeout
            )
            raw_incidents = res.get('reply', {}).get('incidents', [])
            if not raw_incidents:
                break
            filtered_incidents += filter_and_save_unseen_incident(raw_incidents, limit, len(filtered_incidents))
        return filtered_incidents

    def get_incidents(self, incident_id_list=None, lte_modification_time=None, gte_modification_time=None,
                      lte_creation_time=None, gte_creation_time=None, status=None, starred=None,
                      starred_incidents_fetch_window=None, sort_by_modification_time=None, sort_by_creation_time=None,
                      page_number=0, limit=100, gte_creation_time_milliseconds=0):
        """
        Filters and returns incidents
        :param incident_id_list: List of incident ids - must be list
        :param lte_modification_time: string of time format "2019-12-31T23:59:00"
        :param gte_modification_time: string of time format "2019-12-31T23:59:00"
        :param lte_creation_time: string of time format "2019-12-31T23:59:00"
        :param gte_creation_time: string of time format "2019-12-31T23:59:00"
        :param starred_incidents_fetch_window: string of time format "2019-12-31T23:59:00"
        :param starred: True if the incident is starred, else False
        :param status: string of status
        :param sort_by_modification_time: optional - enum (asc,desc)
        :param sort_by_creation_time: optional - enum (asc,desc)
        :param page_number: page number
        :param limit: maximum number of incidents to return per page
        :param gte_creation_time_milliseconds: greater than time in milliseconds
        :return:
        """
        search_from = page_number * limit
        search_to = search_from + limit
        request_data = {
            'search_from': search_from,
            'search_to': search_to,
        }
        if sort_by_creation_time and sort_by_modification_time:
            raise ValueError('Should be provide either sort_by_creation_time or '
                             'sort_by_modification_time. Can\'t provide both')
        if sort_by_creation_time:
            request_data['sort'] = {
                'field': 'creation_time',
                'keyword': sort_by_creation_time
            }
        elif sort_by_modification_time:
            request_data['sort'] = {
                'field': 'modification_time',
                'keyword': sort_by_modification_time
            }
        filters = []
        if incident_id_list is not None and len(incident_id_list) > 0:
            filters.append({
                'field': 'incident_id_list',
                'operator': 'in',
                'value': incident_id_list
            })
        if status:
            filters.append({
                'field': 'status',
                'operator': 'eq',
                'value': status
            })
        # The starred path and the time-filter path are mutually exclusive:
        # a starred query constrains only by the fetch window, and during
        # fetch-incidents it also deduplicates + paginates via
        # handle_fetch_starred_incidents.
        if starred and starred_incidents_fetch_window:
            filters.append({
                'field': 'starred',
                'operator': 'eq',
                'value': True
            })
            filters.append({
                'field': 'creation_time',
                'operator': 'gte',
                'value': starred_incidents_fetch_window
            })
            if demisto.command() == 'fetch-incidents':
                if len(filters) > 0:
                    request_data['filters'] = filters
                incidents = self.handle_fetch_starred_incidents(limit, page_number, request_data)
                return incidents
        else:
            if lte_creation_time:
                filters.append({
                    'field': 'creation_time',
                    'operator': 'lte',
                    'value': date_to_timestamp(lte_creation_time, TIME_FORMAT)
                })
            if gte_creation_time:
                filters.append({
                    'field': 'creation_time',
                    'operator': 'gte',
                    'value': date_to_timestamp(gte_creation_time, TIME_FORMAT)
                })
            if lte_modification_time:
                filters.append({
                    'field': 'modification_time',
                    'operator': 'lte',
                    'value': date_to_timestamp(lte_modification_time, TIME_FORMAT)
                })
            if gte_modification_time:
                filters.append({
                    'field': 'modification_time',
                    'operator': 'gte',
                    'value': date_to_timestamp(gte_modification_time, TIME_FORMAT)
                })
            if gte_creation_time_milliseconds > 0:
                filters.append({
                    'field': 'creation_time',
                    'operator': 'gte',
                    'value': gte_creation_time_milliseconds
                })
        if len(filters) > 0:
            request_data['filters'] = filters
        res = self._http_request(
            method='POST',
            url_suffix='/incidents/get_incidents/',
            json_data={'request_data': request_data},
            timeout=self.timeout
        )
        incidents = res.get('reply', {}).get('incidents', [])
        return incidents

    def get_incident_extra_data(self, incident_id, alerts_limit=1000):
        """
        Returns incident by id
        :param incident_id: The id of incident
        :param alerts_limit: Maximum number alerts to get
        :return:
        """
        request_data = {
            'incident_id': incident_id,
            'alerts_limit': alerts_limit,
        }
        reply = self._http_request(
            method='POST',
            url_suffix='/incidents/get_incident_extra_data/',
            json_data={'request_data': request_data},
            timeout=self.timeout
        )
        incident = reply.get('reply')
        return incident

    def save_modified_incidents_to_integration_context(self):
        """Cache {incident_id: modification_time} for the 100 most recently
        modified incidents into the integration context.
        """
        last_modified_incidents = self.get_incidents(limit=100, sort_by_modification_time='desc')
        modified_incidents_context = {}
        for incident in last_modified_incidents:
            incident_id = incident.get('incident_id')
            modified_incidents_context[incident_id] = incident.get('modification_time')
        set_integration_context({'modified_incidents': modified_incidents_context})

    def get_contributing_event_by_alert_id(self, alert_id: int) -> dict:
        """Return the correlation-alert data ('reply' object) for an alert id."""
        request_data = {
            "request_data": {
                "alert_id": alert_id,
            }
        }
        reply = self._http_request(
            method='POST',
            url_suffix='/alerts/get_correlation_alert_data/',
            json_data=request_data,
            timeout=self.timeout,
        )
        return reply.get('reply', {})

    def replace_featured_field(self, field_type: str, fields: list[dict]) -> dict:
        """Replace the featured fields of the given type with `fields`.

        :param field_type: featured-field endpoint suffix (used to build
            /featured_fields/replace_<field_type>).
        :param fields: list of field objects to set.
        """
        request_data = {
            'request_data': {
                'fields': fields
            }
        }
        reply = self._http_request(
            method='POST',
            url_suffix=f'/featured_fields/replace_{field_type}',
            json_data=request_data,
            timeout=self.timeout,
            raise_on_status=True
        )
        return reply.get('reply')
def get_incidents_command(client, args):
    """
    Retrieve a list of incidents from XDR, filtered by the command arguments.

    :param client: XDR client used to query the incidents endpoint.
    :param args: Command arguments (id list, time bounds, statuses, paging, ...).
    :return: (human-readable markdown, context entry, raw incidents) tuple.
    :raises ValueError: on conflicting time arguments or when no filter is given.
    """
    # sometimes incident id can be passed as integer from the playbook
    incident_id_list = args.get('incident_id_list')
    if isinstance(incident_id_list, int):
        incident_id_list = str(incident_id_list)

    incident_id_list = argToList(incident_id_list)
    # make sure all the ids passed are strings and not integers
    for index, id_ in enumerate(incident_id_list):
        if isinstance(id_, (int, float)):
            incident_id_list[index] = str(id_)

    lte_modification_time = args.get('lte_modification_time')
    gte_modification_time = args.get('gte_modification_time')
    since_modification_time = args.get('since_modification_time')

    if since_modification_time and gte_modification_time:
        # both express a lower bound on modification time; they would conflict
        # (original message mistakenly named lte_modification_time)
        raise ValueError("Can't set both since_modification_time and gte_modification_time")
    if since_modification_time:
        gte_modification_time, _ = parse_date_range(since_modification_time, TIME_FORMAT)

    lte_creation_time = args.get('lte_creation_time')
    gte_creation_time = args.get('gte_creation_time')
    since_creation_time = args.get('since_creation_time')

    if since_creation_time and gte_creation_time:
        # both express a lower bound on creation time; they would conflict
        raise ValueError("Can't set both since_creation_time and gte_creation_time")
    if since_creation_time:
        gte_creation_time, _ = parse_date_range(since_creation_time, TIME_FORMAT)

    statuses = argToList(args.get('status', ''))

    starred = args.get('starred')
    starred_incidents_fetch_window = args.get('starred_incidents_fetch_window', '3 days')
    starred_incidents_fetch_window, _ = parse_date_range(starred_incidents_fetch_window, to_timestamp=True)

    sort_by_modification_time = args.get('sort_by_modification_time')
    sort_by_creation_time = args.get('sort_by_creation_time')

    page = int(args.get('page', 0))
    limit = int(args.get('limit', 100))

    # If no filters were given, return a meaningful error message
    if not incident_id_list and (not lte_modification_time and not gte_modification_time
                                 and not since_modification_time and not lte_creation_time
                                 and not gte_creation_time and not since_creation_time
                                 and not statuses and not starred):
        raise ValueError("Specify a query for the incidents.\nFor example:"
                         " !xdr-get-incidents since_creation_time=\"1 year\" sort_by_creation_time=\"desc\" limit=10")

    # the filters shared by every get_incidents call below
    common_filters = dict(
        incident_id_list=incident_id_list,
        lte_modification_time=lte_modification_time,
        gte_modification_time=gte_modification_time,
        lte_creation_time=lte_creation_time,
        gte_creation_time=gte_creation_time,
        sort_by_creation_time=sort_by_creation_time,
        sort_by_modification_time=sort_by_modification_time,
        page_number=page,
        limit=limit,
        starred=starred,
        starred_incidents_fetch_window=starred_incidents_fetch_window,
    )

    if statuses:
        # one request per status; cap the combined result at `limit`
        raw_incidents = []
        for status in statuses:
            raw_incidents += client.get_incidents(status=status, **common_filters)
        if len(raw_incidents) > limit:
            raw_incidents = raw_incidents[:limit]
    else:
        raw_incidents = client.get_incidents(**common_filters)

    return (
        tableToMarkdown('Incidents', raw_incidents),
        {
            f'{INTEGRATION_CONTEXT_BRAND}.Incident(val.incident_id==obj.incident_id)': raw_incidents
        },
        raw_incidents,
    )
def check_if_incident_was_modified_in_xdr(incident_id, last_mirrored_in_time_timestamp, last_modified_incidents_dict):
    """Return True when the XDR incident was modified after it was last mirrored in."""
    if incident_id not in last_modified_incidents_dict:
        # not among the recently modified incidents -> no update needed
        return False

    modified_in_xdr = int(str(last_modified_incidents_dict[incident_id]))
    demisto.debug(f"XDR incident {incident_id}\n"
                  f"modified time: {modified_in_xdr}\n"
                  f"last mirrored in time: {last_mirrored_in_time_timestamp}")

    if modified_in_xdr > last_mirrored_in_time_timestamp:  # need to update this incident
        demisto.info(f"Incident '{incident_id}' was modified. performing extra-data request.")
        return True
    # the incident was not modified
    return False
def get_last_mirrored_in_time(args):
    """Return the timestamp (ms) at which this incident was last mirrored in.

    On server 5.5 the value is read from the incident's custom fields;
    on 6.0+ it is derived from the 'last_update' argument minus a
    two-minute safety window.
    """
    demisto_incidents = demisto.get_incidents()  # type: ignore

    if demisto_incidents:  # handling 5.5 version
        last_mirrored_in_time = demisto_incidents[0].get('CustomFields', {}).get('lastmirroredintime')
        if not last_mirrored_in_time:  # this is an old incident, update anyway
            return 0
        return arg_to_timestamp(last_mirrored_in_time, 'last_mirrored_in_time')

    # handling 6.0 version: subtract 120 seconds (in ms) from last_update
    return arg_to_timestamp(args.get('last_update'), 'last_update') - (120 * 1000)
def get_incident_extra_data_command(client, args):
    """Return an incident together with its alerts and artifacts.

    When 'return_only_updated_incident' is set (used by the mirroring flow), the
    extra-data request is skipped for incidents that were not modified in XDR
    since the last mirror-in, and a short message is returned instead.

    :param client: XDR client.
    :param args: incident_id, alerts_limit, return_only_updated_incident, last_update.
    :return: (human-readable markdown, context entry, raw incident) tuple.
    """
    incident_id = args.get('incident_id')
    alerts_limit = int(args.get('alerts_limit', 1000))
    return_only_updated_incident = argToBoolean(args.get('return_only_updated_incident', 'False'))

    if return_only_updated_incident:
        last_mirrored_in_time = get_last_mirrored_in_time(args)
        last_modified_incidents_dict = get_integration_context().get('modified_incidents', {})

        if check_if_incident_was_modified_in_xdr(incident_id, last_mirrored_in_time, last_modified_incidents_dict):
            pass  # the incident was modified. continue to perform extra-data request

        else:  # the incident was not modified
            return "The incident was not modified in XDR since the last mirror in.", {}, {}

    demisto.debug(f"Performing extra-data request on incident: {incident_id}")
    raw_incident = client.get_incident_extra_data(incident_id, alerts_limit)

    incident = raw_incident.get('incident')
    incident_id = incident.get('incident_id')
    raw_alerts = raw_incident.get('alerts').get('data')
    context_alerts = clear_trailing_whitespace(raw_alerts)
    # 'host_ip' is a comma-separated string; expose it additionally as a list
    for alert in context_alerts:
        alert['host_ip_list'] = alert.get('host_ip').split(',') if alert.get('host_ip') else []
    file_artifacts = raw_incident.get('file_artifacts').get('data')
    network_artifacts = raw_incident.get('network_artifacts').get('data')

    readable_output = [tableToMarkdown('Incident {}'.format(incident_id), incident)]

    if len(context_alerts) > 0:
        # hide the raw comma-separated 'host_ip' column (host_ip_list is shown instead)
        readable_output.append(tableToMarkdown('Alerts', context_alerts,
                                               headers=[key for key in context_alerts[0] if key != 'host_ip']))
    else:
        readable_output.append(tableToMarkdown('Alerts', []))

    if len(network_artifacts) > 0:
        readable_output.append(tableToMarkdown('Network Artifacts', network_artifacts))
    else:
        readable_output.append(tableToMarkdown('Network Artifacts', []))

    if len(file_artifacts) > 0:
        readable_output.append(tableToMarkdown('File Artifacts', file_artifacts))
    else:
        readable_output.append(tableToMarkdown('File Artifacts', []))

    # fold the alerts/artifacts into the incident object placed in context
    incident.update({
        'alerts': context_alerts,
        'file_artifacts': file_artifacts,
        'network_artifacts': network_artifacts
    })

    account_context_output = assign_params(**{
        'Username': incident.get('users', '')
    })
    endpoint_context_output = assign_params(**{
        'Hostname': incident.get('hosts', '')
    })

    context_output = {f'{INTEGRATION_CONTEXT_BRAND}.Incident(val.incident_id==obj.incident_id)': incident}
    if account_context_output:
        context_output['Account(val.Username==obj.Username)'] = account_context_output
    if endpoint_context_output:
        context_output['Endpoint(val.Hostname==obj.Hostname)'] = endpoint_context_output

    file_context, process_context, domain_context, ip_context = get_indicators_context(incident)

    if file_context:
        context_output[Common.File.CONTEXT_PATH] = file_context
    if domain_context:
        context_output[Common.Domain.CONTEXT_PATH] = domain_context
    if ip_context:
        context_output[Common.IP.CONTEXT_PATH] = ip_context
    if process_context:
        context_output['Process(val.Name && val.Name == obj.Name)'] = process_context

    return (
        '\n'.join(readable_output),
        context_output,
        raw_incident
    )
def create_parsed_alert(product, vendor, local_ip, local_port, remote_ip, remote_port, event_timestamp, severity,
                        alert_name, alert_description):
    """Build the dict payload describing a single parsed alert for the insert-alerts API."""
    return dict(
        product=product,
        vendor=vendor,
        local_ip=local_ip,
        local_port=local_port,
        remote_ip=remote_ip,
        remote_port=remote_port,
        event_timestamp=event_timestamp,
        severity=severity,
        alert_name=alert_name,
        alert_description=alert_description,
    )
def insert_parsed_alert_command(client, args):
    """Insert one parsed alert into XDR, defaulting the event timestamp to 'now' (ms)."""
    event_timestamp = args.get('event_timestamp')
    if event_timestamp is None:
        # get timestamp now if not provided
        event_timestamp = int(round(time.time() * 1000))
    else:
        event_timestamp = int(event_timestamp)

    alert = create_parsed_alert(
        product=args.get('product'),
        vendor=args.get('vendor'),
        local_ip=args.get('local_ip'),
        local_port=arg_to_int(arg=args.get('local_port'), arg_name='local_port'),
        remote_ip=args.get('remote_ip'),
        remote_port=arg_to_int(arg=args.get('remote_port'), arg_name='remote_port'),
        event_timestamp=event_timestamp,
        severity=args.get('severity'),
        alert_name=args.get('alert_name'),
        alert_description=args.get('alert_description', ''),
    )
    client.insert_alerts([alert])

    return (
        'Alert inserted successfully',
        None,
        None
    )
def insert_cef_alerts_command(client, args):
    """Insert one or more CEF alerts into XDR.

    :param client: XDR client.
    :param args: expects 'cef_alerts' as a list of strings, a JSON-array string,
                 or a single alert string.
    :return: (human-readable message, None, None) tuple.
    :raises ValueError: when 'cef_alerts' is neither a list nor a string.
    """
    # parsing alerts list. the reason we don't use argToList is because cef_alerts could contain comma (,) so
    # we shouldn't split them by comma
    alerts = args.get('cef_alerts')
    if isinstance(alerts, list):
        pass
    elif isinstance(alerts, str):
        # startswith/endswith instead of alerts[0]/alerts[-1]: the latter raised
        # IndexError on an empty string
        if alerts.startswith('[') and alerts.endswith(']'):
            # if the string contains [] it means it is a list and must be parsed
            alerts = json.loads(alerts)
        else:
            # otherwise it is a single alert
            alerts = [alerts]
    else:
        raise ValueError('Invalid argument "cef_alerts". It should be either list of strings (cef alerts), '
                         'or single string')

    client.insert_cef_alerts(alerts)

    return (
        'Alerts inserted successfully',
        None,
        None
    )
def sort_all_list_incident_fields(incident_data):
    """Sorting all lists fields in an incident - without this, elements may shift which results in false
    identification of changed fields"""
    hosts = incident_data.get('hosts')
    if hosts:
        # sort first, then upper-case, preserving the original ordering semantics
        incident_data['hosts'] = [host.upper() for host in sorted(hosts)]

    users = incident_data.get('users')
    if users:
        incident_data['users'] = [user.upper() for user in sorted(users)]

    sources = incident_data.get('incident_sources')
    if sources:
        incident_data['incident_sources'] = sorted(sources)

    if incident_data.get('alerts'):
        incident_data['alerts'] = sort_by_key(incident_data['alerts'], main_key='alert_id', fallback_key='name')
        reformat_sublist_fields(incident_data['alerts'])

    if incident_data.get('file_artifacts'):
        incident_data['file_artifacts'] = sort_by_key(incident_data['file_artifacts'], main_key='file_name',
                                                      fallback_key='file_sha256')
        reformat_sublist_fields(incident_data['file_artifacts'])

    if incident_data.get('network_artifacts'):
        incident_data['network_artifacts'] = sort_by_key(incident_data['network_artifacts'],
                                                         main_key='network_domain',
                                                         fallback_key='network_remote_ip')
        reformat_sublist_fields(incident_data['network_artifacts'])
def sync_incoming_incident_owners(incident_data):
    """When owner syncing is enabled, set the XSOAR owner from the XDR assignee's email."""
    assignee_email = incident_data.get('assigned_user_mail')
    if assignee_email and demisto.params().get('sync_owners'):
        user_info = demisto.findUser(email=assignee_email)
        if user_info:
            demisto.debug(f"Syncing incident owners: XDR incident {incident_data.get('incident_id')}, "
                          f"owner {user_info.get('username')}")
            incident_data['owner'] = user_info.get('username')
        else:
            demisto.debug(f"The user assigned to XDR incident {incident_data.get('incident_id')} "
                          f"is not registered on XSOAR")
def handle_incoming_user_unassignment(incident_data):
    """Clear the assignee fields of a mirrored-in incident (and its owner when owner sync is on)."""
    for field in ('assigned_user_mail', 'assigned_user_pretty_name'):
        incident_data[field] = ''
    if demisto.params().get('sync_owners'):
        demisto.debug(f'Unassigning owner from XDR incident {incident_data.get("incident_id")}')
        incident_data['owner'] = ''
def handle_incoming_closing_incident(incident_data):
    """Build the closing entry for a mirrored-in incident whose XDR status maps to a
    resolved XSOAR status; also copies close reason/notes onto the incident dict.

    :param incident_data: the mirrored incident dict (mutated in place).
    :return: the closing entry dict, or {} when the incident is not resolved.
    """
    closing_entry = {}  # type: Dict
    if incident_data.get('status') in XDR_RESOLVED_STATUS_TO_XSOAR:
        demisto.debug(f"Closing XDR issue {incident_data.get('incident_id')}")
        closing_entry = {
            'Type': EntryType.NOTE,
            'Contents': {
                'dbotIncidentClose': True,
                'closeReason': XDR_RESOLVED_STATUS_TO_XSOAR.get(incident_data.get("status")),
                'closeNotes': incident_data.get('resolve_comment', '')
            },
            'ContentsFormat': EntryFormat.JSON
        }
        incident_data['closeReason'] = closing_entry['Contents']['closeReason']
        incident_data['closeNotes'] = closing_entry['Contents']['closeNotes']

        # known-issue resolutions get a "Known Issue." prefix on the close notes
        if incident_data.get('status') == 'resolved_known_issue':
            close_notes = f'Known Issue.\n{incident_data.get("closeNotes", "")}'
            closing_entry['Contents']['closeNotes'] = close_notes
            incident_data['closeNotes'] = close_notes

    return closing_entry
def get_mapping_fields_command():
    """Return the XDR incident-type scheme consumed by XSOAR's mapping wizard."""
    scheme = SchemeTypeMapping(type_name=XDR_INCIDENT_TYPE_NAME)
    for name, props in XDR_INCIDENT_FIELDS.items():
        scheme.add_field(name=name, description=props.get('description'))

    response = GetMappingFieldsResponse()
    response.add_scheme_type(scheme)
    return response
def get_modified_remote_data_command(client, args):
    """Return the IDs of XDR incidents modified since the given last-update time.

    :param client: XDR client.
    :param args: expects 'lastUpdate' (wrapped by GetModifiedRemoteDataArgs).
    :return: GetModifiedRemoteDataResponse with the modified incident IDs.
    :raises ValueError: when the last-update value cannot be parsed as a date.
    """
    remote_args = GetModifiedRemoteDataArgs(args)
    last_update = remote_args.last_update  # In the first run, this value will be set to 1 minute earlier
    demisto.debug(f'Performing get-modified-remote-data command. Last update is: {last_update}')

    last_update_utc = dateparser.parse(last_update, settings={'TIMEZONE': 'UTC'})  # convert to utc format
    if not last_update_utc:
        # previously an unparsable date left last_update_without_ms unbound and
        # crashed with a NameError below; fail with a clear message instead
        raise ValueError(f'Failed to parse last update time: {last_update}')
    last_update_without_ms = last_update_utc.isoformat().split('.')[0]

    raw_incidents = client.get_incidents(gte_modification_time=last_update_without_ms, limit=100)

    modified_incident_ids = [raw_incident.get('incident_id') for raw_incident in raw_incidents]

    return GetModifiedRemoteDataResponse(modified_incident_ids)
def get_remote_data_command(client, args):
    """Mirror-in entry point: return the updated XDR incident (plus a closing entry
    when it was resolved), or an unchanged stub when no update is needed.

    :param client: XDR client.
    :param args: remote incident id and last-update time (wrapped by GetRemoteDataArgs).
    :return: GetRemoteDataResponse with the mirrored object and any entries.
    """
    remote_args = GetRemoteDataArgs(args)
    demisto.debug(f'Performing get-remote-data command with incident id: {remote_args.remote_incident_id}')

    incident_data = {}
    try:
        # when Demisto version is 6.1.0 and above, this command will only be automatically executed on incidents
        # returned from get_modified_remote_data_command so we want to perform extra-data request on those incidents.
        return_only_updated_incident = not is_demisto_version_ge('6.1.0')  # True if version is below 6.1 else False
        incident_data = get_incident_extra_data_command(client, {"incident_id": remote_args.remote_incident_id,
                                                                 "alerts_limit": 1000,
                                                                 "return_only_updated_incident": return_only_updated_incident,
                                                                 "last_update": remote_args.last_update})
        # index 0 of the tuple is the human-readable text; it carries the
        # "not modified" message when the extra-data request was skipped
        if 'The incident was not modified' not in incident_data[0]:
            demisto.debug(f"Updating XDR incident {remote_args.remote_incident_id}")

            incident_data = incident_data[2].get('incident')
            incident_data['id'] = incident_data.get('incident_id')

            sort_all_list_incident_fields(incident_data)

            # deleting creation time as it keeps updating in the system
            del incident_data['creation_time']

            # handle unasignment
            if incident_data.get('assigned_user_mail') is None:
                handle_incoming_user_unassignment(incident_data)
            else:
                # handle owner sync
                sync_incoming_incident_owners(incident_data)

            # handle closed issue in XDR and handle outgoing error entry
            entries = [handle_incoming_closing_incident(incident_data)]

            reformatted_entries = []
            for entry in entries:
                if entry:
                    reformatted_entries.append(entry)

            incident_data['in_mirror_error'] = ''

            return GetRemoteDataResponse(
                mirrored_object=incident_data,
                entries=reformatted_entries
            )

        else:  # no need to update this incident
            incident_data = {
                'id': remote_args.remote_incident_id,
                'in_mirror_error': ""
            }

            return GetRemoteDataResponse(
                mirrored_object=incident_data,
                entries=[]
            )

    except Exception as e:
        demisto.debug(f"Error in XDR incoming mirror for incident {remote_args.remote_incident_id} \n"
                      f"Error message: {str(e)}")

        if "Rate limit exceeded" in str(e):
            # surface rate limiting as an explicit command error
            return_error("API rate limit")

        if incident_data:
            incident_data['in_mirror_error'] = str(e)
            sort_all_list_incident_fields(incident_data)

            # deleting creation time as it keeps updating in the system
            del incident_data['creation_time']
        else:
            incident_data = {
                'id': remote_args.remote_incident_id,
                'in_mirror_error': str(e)
            }

        return GetRemoteDataResponse(
            mirrored_object=incident_data,
            entries=[]
        )
def fetch_incidents(client, first_fetch_time, integration_instance, last_run: Optional[dict] = None,
                    max_fetch: int = 10, statuses: List = [], starred: Optional[bool] = None,
                    starred_incidents_fetch_window: Optional[str] = None):
    """Fetch new XDR incidents and convert them to XSOAR incidents.

    Incidents that could not be created due to a rate-limit error are carried
    over to the next run via 'incidents_from_previous_run'.

    NOTE(review): `statuses: List = []` is a mutable default argument; it is only
    read here so the shared-state pitfall does not bite, but None would be safer.

    :return: (next_run dict, list of XSOAR incident dicts) tuple.
    """
    # Get the last fetch time, if exists
    last_fetch = last_run.get('time') if isinstance(last_run, dict) else None
    incidents_from_previous_run = last_run.get('incidents_from_previous_run', []) if isinstance(last_run,
                                                                                                dict) else []

    # Handle first time fetch, fetch incidents retroactively
    if last_fetch is None:
        last_fetch, _ = parse_date_range(first_fetch_time, to_timestamp=True)

    if starred:
        starred_incidents_fetch_window, _ = parse_date_range(starred_incidents_fetch_window, to_timestamp=True)

    incidents = []
    if incidents_from_previous_run:
        # resume creating incidents left over from the previous (rate-limited) run
        raw_incidents = incidents_from_previous_run
    else:
        if statuses:
            raw_incidents = []
            for status in statuses:
                raw_incidents += client.get_incidents(gte_creation_time_milliseconds=last_fetch, status=status,
                                                      limit=max_fetch, sort_by_creation_time='asc', starred=starred,
                                                      starred_incidents_fetch_window=starred_incidents_fetch_window)
            # re-sort because results were fetched per status
            raw_incidents = sorted(raw_incidents, key=lambda inc: inc['creation_time'])
        else:
            raw_incidents = client.get_incidents(gte_creation_time_milliseconds=last_fetch, limit=max_fetch,
                                                 sort_by_creation_time='asc', starred=starred,
                                                 starred_incidents_fetch_window=starred_incidents_fetch_window)

    # save the last 100 modified incidents to the integration context - for mirroring purposes
    client.save_modified_incidents_to_integration_context()

    # maintain a list of non created incidents in a case of a rate limit exception
    non_created_incidents: list = raw_incidents.copy()
    next_run = dict()
    try:
        # The count of incidents, so as not to pass the limit
        count_incidents = 0

        for raw_incident in raw_incidents:
            incident_id = raw_incident.get('incident_id')

            incident_data = get_incident_extra_data_command(client, {"incident_id": incident_id,
                                                                     "alerts_limit": 1000})[2].get('incident')

            sort_all_list_incident_fields(incident_data)

            incident_data['mirror_direction'] = MIRROR_DIRECTION.get(demisto.params().get('mirror_direction', 'None'),
                                                                     None)
            incident_data['mirror_instance'] = integration_instance
            incident_data['last_mirrored_in'] = int(datetime.now().timestamp() * 1000)

            description = raw_incident.get('description')
            occurred = timestamp_to_datestring(raw_incident['creation_time'], TIME_FORMAT + 'Z')
            incident = {
                'name': f'XDR Incident {incident_id} - {description}',
                'occurred': occurred,
                'rawJSON': json.dumps(incident_data),
            }

            if demisto.params().get('sync_owners') and incident_data.get('assigned_user_mail'):
                incident['owner'] = demisto.findUser(email=incident_data.get('assigned_user_mail')).get('username')

            # Update last run and add incident if the incident is newer than last fetch
            if raw_incident['creation_time'] > last_fetch:
                last_fetch = raw_incident['creation_time']

            incidents.append(incident)
            non_created_incidents.remove(raw_incident)

            count_incidents += 1
            if count_incidents == max_fetch:
                break

    except Exception as e:
        if "Rate limit exceeded" in str(e):
            # remaining incidents stay in non_created_incidents for the next run
            demisto.info(f"Cortex XDR - rate limit exceeded, number of non created incidents is: "
                         f"'{len(non_created_incidents)}'.\n The incidents will be created in the next fetch")
        else:
            raise

    if non_created_incidents:
        next_run['incidents_from_previous_run'] = non_created_incidents
    else:
        next_run['incidents_from_previous_run'] = []

    # +1 so the newest created incident is not fetched again
    next_run['time'] = last_fetch + 1

    return next_run, incidents
def get_endpoints_by_status_command(client: Client, args: Dict) -> CommandResults:
    """Count the endpoints in the given status, optionally bounded by last-seen times."""
    status = args.get('status')
    last_seen_gte = arg_to_timestamp(arg=args.get('last_seen_gte'), arg_name='last_seen_gte')
    last_seen_lte = arg_to_timestamp(arg=args.get('last_seen_lte'), arg_name='last_seen_lte')

    endpoints_count, raw_res = client.get_endpoints_by_status(status, last_seen_gte=last_seen_gte,
                                                              last_seen_lte=last_seen_lte)

    outputs = {'status': status, 'count': endpoints_count}
    return CommandResults(
        readable_output=f'{status} endpoints count: {endpoints_count}',
        outputs_prefix=f'{INTEGRATION_CONTEXT_BRAND}.EndpointsStatus',
        outputs_key_field='status',
        outputs=outputs,
        raw_response=raw_res)
def file_details_results(client: Client, args: Dict, add_to_context: bool) -> None:
    """Run the retrieve-file-details flow and emit its entry (and the file result, when any)."""
    entry, files = retrieve_file_details_command(client, args, add_to_context)
    demisto.results(entry)
    if files:
        demisto.results(files)
def get_contributing_event_command(client: Client, args: Dict) -> CommandResults:
    """Return the contributing events of one or more correlation alerts, with
    per-alert pagination controlled by page_number/page_size (or an explicit limit).

    :param client: XDR client.
    :param args: alert_ids (required), page_number, page_size, limit.
    :return: CommandResults listing each alert's events.
    """
    if alert_ids := argToList(args.get('alert_ids')):
        alerts = []

        for alert_id in alert_ids:
            if alert := client.get_contributing_event_by_alert_id(int(alert_id)):
                page_number = max(int(args.get('page_number', 1)), 1) - 1  # Min & default zero (First page)
                page_size = max(int(args.get('page_size', 50)), 0)  # Min zero & default 50
                offset = page_number * page_size
                # 'limit' is used as the slice's absolute end index (not a count);
                # when unset/zero it defaults to the end of the requested page
                limit = max(int(args.get('limit', 0)), 0) or offset + page_size

                alert_with_events = {
                    'alertID': str(alert_id),
                    'events': alert.get('events', [])[offset:limit],
                }
                alerts.append(alert_with_events)

        readable_output = tableToMarkdown(
            'Contributing events', alerts, headerTransform=pascalToSpace, removeNull=True, is_auto_json_transform=True
        )
        return CommandResults(
            readable_output=readable_output,
            outputs_prefix=f'{INTEGRATION_CONTEXT_BRAND}.ContributingEvent',
            outputs_key_field='alertID',
            outputs=alerts,
            raw_response=alerts
        )

    else:
        return CommandResults(readable_output='The alert_ids argument cannot be empty.')
def replace_featured_field_command(client: Client, args: Dict) -> CommandResults:
    """Replace the featured fields of the given type with new values and comments
    (plus AD types when field_type is 'ad_groups')."""
    field_type = args.get('field_type', '')
    values = argToList(args.get('values'))
    value_count = len(values)
    # trim the parallel lists so they never exceed the number of values
    comments = argToList(args.get('comments'))[:value_count]
    ad_type = argToList(args.get('ad_type', 'group'))[:value_count]

    if field_type == 'ad_groups':
        fields = [
            {'value': value, 'comment': comment, 'type': group_type}
            for value, comment, group_type in zip_longest(values, comments, ad_type, fillvalue='')
        ]
    else:
        fields = [
            {'value': value, 'comment': comment}
            for value, comment in zip_longest(values, comments, fillvalue='')
        ]

    client.replace_featured_field(field_type, fields)

    result = {'fieldType': field_type, 'fields': fields}
    readable_output = tableToMarkdown(
        f'Replaced featured: {result.get("fieldType")}', result.get('fields'), headerTransform=pascalToSpace
    )
    return CommandResults(
        readable_output=readable_output,
        outputs_prefix=f'{INTEGRATION_CONTEXT_BRAND}.FeaturedField',
        outputs_key_field='fieldType',
        outputs=result,
        raw_response=result
    )
def main(): # pragma: no cover
"""
Executes an integration command
"""
command = demisto.command()
params = demisto.params()
LOG(f'Command being called is {command}')
# using two different credentials object as they both fields need to be encrypted
api_key = params.get('apikey') or params.get('apikey_creds').get('password', '')
api_key_id = params.get('apikey_id') or params.get('apikey_id_creds').get('password', '')
first_fetch_time = params.get('fetch_time', '3 days')
base_url = urljoin(params.get('url'), '/public_api/v1')
proxy = params.get('proxy')
verify_cert = not params.get('insecure', False)
statuses = params.get('status')
starred = True if params.get('starred') else None
starred_incidents_fetch_window = params.get('starred_incidents_fetch_window', '3 days')
try:
timeout = int(params.get('timeout', 120))
except ValueError as e:
demisto.debug(f'Failed casting timeout parameter to int, falling back to 120 - {e}')
timeout = 120
try:
max_fetch = int(params.get('max_fetch', 10))
except ValueError as e:
demisto.debug(f'Failed casting max fetch parameter to int, falling back to 10 - {e}')
max_fetch = 10
nonce = "".join([secrets.choice(string.ascii_letters + string.digits) for _ in range(64)])
timestamp = str(int(datetime.now(timezone.utc).timestamp()) * 1000)
auth_key = "%s%s%s" % (api_key, nonce, timestamp)
auth_key = auth_key.encode("utf-8")
api_key_hash = hashlib.sha256(auth_key).hexdigest()
if argToBoolean(params.get("prevent_only", False)):
api_key_hash = api_key
headers = {
"x-xdr-timestamp": timestamp,
"x-xdr-nonce": nonce,
"x-xdr-auth-id": str(api_key_id),
"Authorization": api_key_hash
}
client = Client(
base_url=base_url,
proxy=proxy,
verify=verify_cert,
headers=headers,
timeout=timeout
)
args = demisto.args()
args["integration_context_brand"] = INTEGRATION_CONTEXT_BRAND
args["integration_name"] = INTEGRATION_NAME
try:
if command == 'test-module':
client.test_module(first_fetch_time)
demisto.results('ok')
elif command == 'fetch-incidents':
integration_instance = demisto.integrationInstance()
next_run, incidents = fetch_incidents(client, first_fetch_time, integration_instance,
demisto.getLastRun().get('next_run'), max_fetch, statuses, starred,
starred_incidents_fetch_window)
last_run_obj = demisto.getLastRun()
last_run_obj['next_run'] = next_run
demisto.setLastRun(last_run_obj)
demisto.incidents(incidents)
elif command == 'xdr-get-incidents':
return_outputs(*get_incidents_command(client, args))
elif command == 'xdr-get-incident-extra-data':
return_outputs(*get_incident_extra_data_command(client, args))
elif command == 'xdr-update-incident':
return_outputs(*update_incident_command(client, args))
elif command == 'xdr-get-endpoints':
return_results(get_endpoints_command(client, args))
elif command == 'xdr-insert-parsed-alert':
return_outputs(*insert_parsed_alert_command(client, args))
elif command == 'xdr-insert-cef-alerts':
return_outputs(*insert_cef_alerts_command(client, args))
elif command == 'xdr-isolate-endpoint':
return_results(isolate_endpoint_command(client, args))
elif command == 'xdr-endpoint-isolate':
polling_args = {
**args,
"endpoint_id_list": args.get('endpoint_id')
}
return_results(run_polling_command(client=client,
args=polling_args,
cmd="xdr-endpoint-isolate",
command_function=isolate_endpoint_command,
command_decision_field="action_id",
results_function=get_endpoints_command,
polling_field="is_isolated",
polling_value=["AGENT_ISOLATED"],
stop_polling=True))
elif command == 'xdr-unisolate-endpoint':
return_results(unisolate_endpoint_command(client, args))
elif command == 'xdr-endpoint-unisolate':
polling_args = {
**args,
"endpoint_id_list": args.get('endpoint_id')
}
return_results(run_polling_command(client=client,
args=polling_args,
cmd="xdr-endpoint-unisolate",
command_function=unisolate_endpoint_command,
command_decision_field="action_id",
results_function=get_endpoints_command,
polling_field="is_isolated",
polling_value=["AGENT_UNISOLATED",
"CANCELLED",
"ֿPENDING_ABORT",
"ABORTED",
"EXPIRED",
"COMPLETED_PARTIAL",
"COMPLETED_SUCCESSFULLY",
"FAILED",
"TIMEOUT"],
stop_polling=True))
elif command == 'xdr-get-distribution-url':
return_outputs(*get_distribution_url_command(client, args))
elif command == 'xdr-get-create-distribution-status':
return_outputs(*get_distribution_status_command(client, args))
elif command == 'xdr-get-distribution-versions':
return_outputs(*get_distribution_versions_command(client, args))
elif command == 'xdr-create-distribution':
return_outputs(*create_distribution_command(client, args))
elif command == 'xdr-get-audit-management-logs':
return_outputs(*get_audit_management_logs_command(client, args))
elif command == 'xdr-get-audit-agent-reports':
return_outputs(*get_audit_agent_reports_command(client, args))
elif command == 'xdr-quarantine-files':
return_results(quarantine_files_command(client, args))
elif command == 'xdr-file-quarantine':
return_results(run_polling_command(client=client,
args=args,
cmd="xdr-file-quarantine",
command_function=quarantine_files_command,
command_decision_field="action_id",
results_function=action_status_get_command,
polling_field="status",
polling_value=["PENDING",
"IN_PROGRESS",
"PENDING_ABORT"]))
elif command == 'core-quarantine-files':
polling_args = {
**args,
"endpoint_id": argToList(args.get("endpoint_id_list"))[0]
}
return_results(run_polling_command(client=client,
args=polling_args,
cmd="core-quarantine-files",
command_function=quarantine_files_command,
command_decision_field="action_id",
results_function=action_status_get_command,
polling_field="status",
polling_value=["PENDING",
"IN_PROGRESS",
"PENDING_ABORT"]))
elif command == 'xdr-get-quarantine-status':
return_results(get_quarantine_status_command(client, args))
elif command == 'xdr-restore-file':
return_results(restore_file_command(client, args))
elif command == 'xdr-file-restore':
return_results(run_polling_command(client=client,
args=args,
cmd="xdr-file-restore",
command_function=restore_file_command,
command_decision_field="action_id",
results_function=action_status_get_command,
polling_field="status",
polling_value=["PENDING",
"IN_PROGRESS",
"PENDING_ABORT"]))
elif command == 'xdr-endpoint-scan':
return_results(endpoint_scan_command(client, args))
elif command == 'xdr-endpoint-scan-execute':
return_results(run_polling_command(client=client,
args=args,
cmd="xdr-endpoint-scan-execute",
command_function=endpoint_scan_command,
command_decision_field="action_id",
results_function=action_status_get_command,
polling_field="status",
polling_value=["PENDING",
"IN_PROGRESS",
"PENDING_ABORT"]))
elif command == 'xdr-endpoint-scan-abort':
return_results(endpoint_scan_abort_command(client, args))
elif command == 'get-mapping-fields':
return_results(get_mapping_fields_command())
elif command == 'get-remote-data':
return_results(get_remote_data_command(client, args))
elif command == 'update-remote-system':
return_results(update_remote_system_command(client, args))
elif command == 'xdr-delete-endpoints':
return_outputs(*delete_endpoints_command(client, args))
elif command == 'xdr-get-policy':
return_outputs(*get_policy_command(client, args))
elif command == 'xdr-get-endpoint-device-control-violations':
return_outputs(*get_endpoint_device_control_violations_command(client, args))
elif command == 'xdr-retrieve-files':
return_results(retrieve_files_command(client, args))
elif command == 'xdr-file-retrieve':
polling = run_polling_command(client=client,
args=args,
cmd="xdr-file-retrieve",
command_function=retrieve_files_command,
command_decision_field="action_id",
results_function=action_status_get_command,
polling_field="status",
polling_value=["PENDING",
"IN_PROGRESS",
"PENDING_ABORT"])
raw = polling.raw_response
# raw is the response returned by the get-action-status
if polling.scheduled_command:
return_results(polling)
return
status = raw[0].get('status') # type: ignore
if status == 'COMPLETED_SUCCESSFULLY':
file_details_results(client, args, True)
else: # status is not in polling value and operation was not COMPLETED_SUCCESSFULLY
polling.outputs_prefix = f'{args.get("integration_context_brand", "CoreApiModule")}' \
f'.RetrievedFiles(val.action_id == obj.action_id)'
return_results(polling)
elif command == 'xdr-retrieve-file-details':
file_details_results(client, args, False)
elif command == 'xdr-get-scripts':
return_outputs(*get_scripts_command(client, args))
elif command == 'xdr-get-script-metadata':
return_outputs(*get_script_metadata_command(client, args))
elif command == 'xdr-get-script-code':
return_outputs(*get_script_code_command(client, args))
elif command == 'xdr-action-status-get':
return_results(action_status_get_command(client, args))
elif command == 'get-modified-remote-data':
return_results(get_modified_remote_data_command(client, demisto.args()))
elif command == 'xdr-script-run': # used with polling = true always
return_results(script_run_polling_command(args, client))
elif command == 'xdr-run-script':
return_results(run_script_command(client, args))
elif command == 'xdr-run-snippet-code-script':
return_results(run_snippet_code_script_command(client, args))
elif command == 'xdr-snippet-code-script-execute':
return_results(run_polling_command(client=client,
args=args,
cmd="xdr-snippet-code-script-execute",
command_function=run_snippet_code_script_command,
command_decision_field="action_id",
results_function=action_status_get_command,
polling_field="status",
polling_value=["PENDING",
"IN_PROGRESS",
"PENDING_ABORT"]))
elif command == 'xdr-get-script-execution-status':
return_results(get_script_execution_status_command(client, args))
elif command == 'xdr-get-script-execution-results':
return_results(get_script_execution_results_command(client, args))
elif command == 'xdr-get-script-execution-result-files':
return_results(get_script_execution_result_files_command(client, args))
elif command == 'xdr-get-cloud-original-alerts':
return_results(get_original_alerts_command(client, args))
elif command == 'xdr-get-alerts':
return_results(get_alerts_by_filter_command(client, args))
elif command == 'xdr-run-script-execute-commands':
return_results(run_script_execute_commands_command(client, args))
elif command == 'xdr-script-commands-execute':
return_results(run_polling_command(client=client,
args=args,
cmd="xdr-script-commands-execute",
command_function=run_script_execute_commands_command,
command_decision_field="action_id",
results_function=action_status_get_command,
polling_field="status",
polling_value=["PENDING",
"IN_PROGRESS",
"PENDING_ABORT"]))
elif command == 'xdr-run-script-delete-file':
return_results(run_script_delete_file_command(client, args))
elif command == 'xdr-file-delete-script-execute':
return_results(run_polling_command(client=client,
args=args,
cmd="xdr-file-delete-script-execute",
command_function=run_script_delete_file_command,
command_decision_field="action_id",
results_function=action_status_get_command,
polling_field="status",
polling_value=["PENDING",
"IN_PROGRESS",
"PENDING_ABORT"]))
elif command == 'xdr-run-script-file-exists':
return_results(run_script_file_exists_command(client, args))
elif command == 'xdr-file-exist-script-execute':
return_results(run_polling_command(client=client,
args=args,
cmd="xdr-file-exist-script-execute",
command_function=run_script_file_exists_command,
command_decision_field="action_id",
results_function=action_status_get_command,
polling_field="status",
polling_value=["PENDING",
"IN_PROGRESS",
"PENDING_ABORT"]))
elif command == 'xdr-run-script-kill-process':
return_results((client, args))
elif command == 'xdr-kill-process-script-execute':
return_results(run_polling_command(client=client,
args=args,
cmd="xdr-kill-process-script-execute",
command_function=run_script_kill_process_command,
command_decision_field="action_id",
results_function=action_status_get_command,
polling_field="status",
polling_value=["PENDING",
"IN_PROGRESS",
"PENDING_ABORT"]))
elif command == 'endpoint':
return_results(endpoint_command(client, args))
elif command == 'xdr-get-endpoints-by-status':
return_results(get_endpoints_by_status_command(client, args))
elif command == 'xdr-blocklist-files':
return_results(blocklist_files_command(client, args))
elif command == 'xdr-allowlist-files':
return_results(allowlist_files_command(client, args))
elif command == 'xdr-remove-blocklist-files':
return_results(remove_blocklist_files_command(client, args))
elif command == 'xdr-remove-allowlist-files':
return_results(remove_allowlist_files_command(client, args))
elif command == 'xdr-get-contributing-event':
return_results(get_contributing_event_command(client, args))
elif command == 'xdr-replace-featured-field':
return_results(replace_featured_field_command(client, args))
except Exception as err:
return_error(str(err))
# Run the entry point both when executed as a script and when the XSOAR server
# imports the integration code under `__builtin__` (py2) / `builtins` (py3).
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | b1e3193b464313de80ea8a66766ed6ad | 41.951701 | 126 | 0.55872 | 4.238939 | false | false | false | false |
demisto/content | Packs/Netskope/Integrations/NetskopeAPIv1/NetskopeAPIv1.py | 2 | 38749 | # type: ignore
from copy import deepcopy
from typing import Any, Dict, List, Optional, Tuple
from urllib.parse import urljoin
import urllib3
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
# disable insecure warnings (urllib3's InsecureRequestWarning, emitted when
# certificate verification is turned off via the 'insecure' parameter)
urllib3.disable_warnings()
DEFAULT_PAGE = 1  # First page when the 'page' argument is omitted.
DEFAULT_LIMIT = 50  # Default page size for the list commands.
DEFAULT_MAX_FETCH = DEFAULT_LIMIT  # Default number of incidents per fetch cycle.
DEFAULT_EVENTS_FETCH = DEFAULT_LIMIT  # Default number of events per fetch cycle.
DEFAULT_EVENT_TYPE = 'application'  # Event type fetched when none is configured.
DEFAULT_FIRST_FETCH = '7 days'  # Default look-back window for the first fetch.
MAX_LIMIT = 100  # Upper bound for the 'limit' pagination argument.
MAX_FETCH = 200  # Upper bound for incidents per fetch cycle.
MAX_EVENTS_FETCH = 200  # Upper bound for events per fetch cycle.
# Maps the human-readable time periods shown in the UI to their length in
# seconds, which is what the API's 'timeperiod' field expects.
TIME_PERIOD_MAPPING = {
    'Last 60 Minutes': 3600,
    'Last 24 Hours': 86400,
    'Last 7 Days': 604800,
    'Last 30 Days': 2592000,
    'Last 60 Days': 5184000,
    'Last 90 Days': 7776000
}
class Client(BaseClient):
    """
    Client for Netskope RESTful API.

    All endpoints are reached with POST requests under /api/v1/; the
    authentication token is attached to every request as a query parameter.

    Args:
        base_url (str): The base URL of Netskope.
        token (str): The token to authenticate against Netskope API.
        use_ssl (bool): Specifies whether to verify the SSL certificate or not.
        use_proxy (bool): Specifies if to use XSOAR proxy settings.
    """
    def __init__(self, base_url: str, token: str, use_ssl: bool, use_proxy: bool):
        super().__init__(urljoin(base_url, '/api/v1/'), verify=use_ssl, proxy=use_proxy)
        # Storing the token on the session sends it automatically with every call.
        self._session.params['token'] = token
    def list_events_request(self,
                            query: Optional[str] = None,
                            event_type: Optional[str] = None,
                            timeperiod: Optional[int] = None,
                            start_time: Optional[int] = None,
                            end_time: Optional[int] = None,
                            insertion_start_time: Optional[int] = None,
                            insertion_end_time: Optional[int] = None,
                            limit: Optional[int] = None,
                            skip: Optional[int] = None,
                            unsorted: Optional[bool] = None) -> Dict[str, Any]:
        """
        Get events extracted from SaaS traffic and or logs.

        Args:
            query (Optional[str]): Free query to filter the events.
            event_type (Optional[str]): Select events by their type.
            timeperiod (Optional[int]): Get all events from a certain time period.
            start_time (Optional[int]): Restrict events to those that have timestamps greater than the provided timestamp.
            end_time (Optional[int]): Restrict events to those that have timestamps less than or equal to the provided timestamp.
            insertion_start_time (Optional[int]): Restrict events to those that were inserted to the system
                after the provided timestamp.
            insertion_end_time (Optional[int]): Restrict events to those that were inserted to the system
                before the provided timestamp.
            limit (Optional[int]): The maximum amount of events to retrieve (up to 10000 events).
            skip (Optional[int]): The skip number of the events to retrieve (minimum is 1).
            unsorted (Optional[bool]): If true, the returned data will not be sorted (useful for improved performance).

        Returns:
            Dict[str, Any]: Netskope events.
        """
        # remove_empty_elements drops the None values, so only filters the
        # caller actually supplied are sent to the API.
        body = remove_empty_elements({
            'query': query,
            'type': event_type,
            'timeperiod': timeperiod,
            'starttime': start_time,
            'endtime': end_time,
            'insertionstarttime': insertion_start_time,
            'insertionendtime': insertion_end_time,
            'limit': limit,
            'skip': skip,
            'unsorted': unsorted
        })
        return self._http_request(method='POST', url_suffix='events', json_data=body)
    def list_alerts_request(self,
                            query: Optional[str] = None,
                            alert_type: Optional[str] = None,
                            acked: Optional[bool] = None,
                            timeperiod: Optional[int] = None,
                            start_time: Optional[int] = None,
                            end_time: Optional[int] = None,
                            insertion_start_time: Optional[int] = None,
                            insertion_end_time: Optional[int] = None,
                            limit: Optional[int] = None,
                            skip: Optional[int] = None,
                            unsorted: Optional[bool] = None) -> Dict[str, Any]:
        """
        Get alerts generated by Netskope, including policy, DLP, and watch list alerts.

        Args:
            query (Optional[str]): Free query to filter the alerts.
            alert_type (Optional[str]): Select alerts by their type.
            acked (Optional[bool]): Whether to retrieve acknowledged alerts or not.
            timeperiod (Optional[int]): Get alerts from certain time period.
            start_time (Optional[int]): Restrict alerts to those that have timestamps greater than the provided timestamp.
            end_time (Optional[int]): Restrict alerts to those that have timestamps less than or equal to the provided timestamp.
            insertion_start_time (Optional[int]): Restrict alerts which have been inserted into the system
                after the provided timestamp.
            insertion_end_time (Optional[int]): Restrict alerts which have been inserted into the system
                before the provided timestamp.
            limit (Optional[int]): The maximum number of alerts to return (up to 10000).
            skip (Optional[int]): The skip number of the alerts to retrieve (minimum is 1).
            unsorted (Optional[bool]): If true, the returned data will not be sorted (useful for improved performance).

        Returns:
            Dict[str, Any]: Netskope alerts.
        """
        # Only user-supplied filters are serialized into the request body.
        body = remove_empty_elements({
            'query': query,
            'alert_type': alert_type,
            'acked': acked,
            'timeperiod': timeperiod,
            'starttime': start_time,
            'endtime': end_time,
            'insertionstarttime': insertion_start_time,
            'insertionendtime': insertion_end_time,
            'limit': limit,
            'skip': skip,
            'unsorted': unsorted
        })
        return self._http_request(method='POST', url_suffix='alerts', json_data=body)
    def list_quarantined_files_request(self,
                                       start_time: Optional[int] = None,
                                       end_time: Optional[int] = None,
                                       limit: Optional[int] = None,
                                       skip: Optional[int] = None) -> Dict[str, Any]:
        """
        List all quarantined files.

        Args:
            start_time (Optional[int]): Get files last modified within a certain time period.
            end_time (Optional[int]): Get files last modified within a certain time period.
            limit (Optional[int]): The maximum amount of clients to retrieve (up to 10000).
            skip (Optional[int]): The skip number of the clients to retrieve (minimum is 1).

        Returns:
            Dict[str, Any]: Netskope quarantine files.
        """
        # The quarantine endpoint multiplexes operations via the 'op' field.
        body = remove_empty_elements({
            'starttime': start_time,
            'endtime': end_time,
            'limit': limit,
            'skip': skip,
            'op': 'get-files'
        })
        return self._http_request(method='POST', url_suffix='quarantine', json_data=body)
    def get_quarantined_file_request(self, quarantine_profile_id: str, file_id: str) -> bytes:
        """
        Download a quarantined file.

        Args:
            quarantine_profile_id (str): The ID of quarantine profile.
            file_id (str): The ID of the quarantined file.

        Returns:
            bytes: The quarantined file content.
        """
        body = {
            'quarantine_profile_id': quarantine_profile_id,
            'file_id': file_id,
            'op': 'download-url'
        }
        # resp_type='content' returns the raw bytes instead of parsed JSON.
        return self._http_request(method='POST',
                                  url_suffix='quarantine',
                                  json_data=body,
                                  resp_type='content')
    def update_quarantined_file_request(self, quarantine_profile_id: str, file_id: str,
                                        action: str) -> None:
        """
        Take an action on a quarantined file.

        Args:
            quarantine_profile_id (str): The profile id of the quarantined file.
            file_id (str): The id of the quarantined file.
            action (str): Action to be performed on a quarantined.
        """
        body = {
            'quarantine_profile_id': quarantine_profile_id,
            'file_id': file_id,
            'action': action,
            'op': 'take-action'
        }
        # resp_type='text' because the endpoint returns no JSON body for this op.
        self._http_request(method='POST', url_suffix='quarantine', json_data=body, resp_type='text')
    def update_url_list_request(self, name: str, urls: List[str]) -> None:
        """
        Update the URL List with the values provided.

        Args:
            name (str): Name of an existing URL List shown in the Netskope UI on the URL List skip.
            urls (List[str]): The content of the URL list.
        """
        # The API expects the list as a single comma-separated string.
        body = {'name': name, 'list': ','.join(urls)}
        self._http_request(method='POST', url_suffix='updateUrlList', json_data=body)
    def update_file_hash_list_request(self, name: str, hashes: List[str]) -> None:
        """
        Update file hash list with the values provided.

        Args:
            name (str): Name of an existing file hash list shown in the Netskope UI on the file hash list skip.
            hashes (str): List of file hashes (md5 or sha256).
        """
        body = {'name': name, 'list': ','.join(hashes)}
        return self._http_request(method='POST', url_suffix='updateFileHashList', json_data=body)
    def list_clients_request(self,
                             query: Optional[str] = None,
                             limit: Optional[int] = None,
                             skip: Optional[int] = None) -> Dict[str, Any]:
        """
        Get information about the Netskope clients.

        Args:
            query (Optional[str]): Free query on the clients, based on the client fields.
            limit (Optional[int]): The maximum amount of clients to retrieve (up to 10000).
            skip (Optional[int]): The skip number of the clients to retrieve (minimum is 1).

        Returns:
            Dict[str, Any]: The clients information.
        """
        # NOTE(review): unlike the other endpoints this one sends the filters as
        # query parameters (params=) rather than a JSON body — confirm against
        # the Netskope API reference before changing.
        body = remove_empty_elements({'query': query, 'limit': limit, 'skip': skip})
        return self._http_request(method='POST', url_suffix='clients', params=body)
    def _http_request(self, *args, **kwargs):
        # Netskope may answer HTTP 200 with an 'errors' list in the payload;
        # surface those as exceptions so callers never process a failed response.
        response = super()._http_request(*args, **kwargs)
        if isinstance(response, dict) and 'errors' in response:
            errors = '\n'.join(response['errors'])
            raise DemistoException(f'Invalid API call: {errors}', res=response)
        return response
def arg_to_boolean(arg: Optional[str]) -> Optional[bool]:
    """
    Convert an XSOAR string argument into a boolean, preserving None.

    Args:
        arg (Optional[str]): The raw argument value.

    Returns:
        Optional[bool]: The parsed boolean, or None when no value was given.
    """
    return None if arg is None else argToBoolean(arg)
def arg_to_seconds_timestamp(arg: Optional[str]) -> Optional[int]:
    """
    Convert an XSOAR date-string argument into an epoch timestamp in seconds.

    Args:
        arg (Optional[str]): The raw date-string argument.

    Returns:
        Optional[int]: The timestamp in seconds, or None when no value was given.
    """
    return None if arg is None else date_to_seconds_timestamp(arg_to_datetime(arg))
def date_to_seconds_timestamp(date_str_or_dt: Union[str, datetime]) -> int:
    """
    Convert a date string or datetime object into an epoch timestamp in seconds.

    Args:
        date_str_or_dt (Union[str, datetime]): The date string or datetime.

    Returns:
        int: The epoch timestamp in seconds.
    """
    # date_to_timestamp yields milliseconds; floor-divide down to seconds.
    milliseconds = date_to_timestamp(date_str_or_dt)
    return milliseconds // 1000
def validate_time_arguments(start_time: Optional[int] = None,
                            end_time: Optional[int] = None,
                            insertion_start_time: Optional[int] = None,
                            insertion_end_time: Optional[int] = None,
                            timeperiod: Optional[int] = None) -> None:
    """
    Ensure the user provided exactly one time-filtering option.

    Exactly one of the following must be supplied:
        - start_time together with end_time.
        - insertion_start_time together with insertion_end_time.
        - timeperiod.

    Args:
        start_time (Optional[int], optional): The start time to fetch from the API.
        end_time (Optional[int], optional): The end time to fetch from the API.
        insertion_start_time (Optional[int], optional): The insertion start time to fetch from the API.
        insertion_end_time (Optional[int], optional): The insertion end time to fetch from the API.
        timeperiod (Optional[int], optional): The timeperiod to fetch from the API.

    Raises:
        DemistoException: When no option, or more than one option, was provided.
    """
    has_time_range = bool(start_time) and bool(end_time)
    has_insertion_range = bool(insertion_start_time) and bool(insertion_end_time)
    provided = [has_time_range, has_insertion_range, bool(timeperiod)]
    if not any(provided):
        raise DemistoException('Missing time arguments. Please provide start_time and end_time, '
                               'or insertion_start_time and or insertion_end_time or timeperiod.')
    if provided.count(True) > 1:
        raise DemistoException(
            'Invalid time arguments. Please provide only start_time and end_time, '
            'or insertion_start_time and or insertion_end_time or timeperiod. '
            'You must not combine between the mentioned options.')
def validate_fetch_params(max_fetch: int, max_events_fetch: int, fetch_events: bool,
                          first_fetch: str, event_types: List[str]) -> None:
    """
    Validate the fetch-incidents configuration parameters.

    Args:
        max_fetch (int): The maximum number of incidents for one fetch.
        max_events_fetch (int): The maximum number of events per incident for one fetch.
        fetch_events (bool): Whether or not fetch events when fetching incident.
        first_fetch (str): First fetch time in words.
        event_types (List[str]): The event types to fetch as incidents.
    """
    if first_fetch:
        # Raises when the first-fetch value cannot be parsed as a date.
        arg_to_datetime(first_fetch)
    if max_fetch > MAX_FETCH:
        return_error(f'The Maximum number of incidents per fetch should not exceed {MAX_FETCH}.')
    events_limit_exceeded = fetch_events and max_events_fetch > MAX_EVENTS_FETCH
    if events_limit_exceeded:
        return_error(
            f'The Maximum number of events for each incident per fetch should not exceed {MAX_EVENTS_FETCH}.'
        )
    if not isinstance(event_types, list):
        return_error('The fetched event types must be a list.')
def get_pagination_readable_message(header: str, page: int, limit: int) -> str:
    """Build the human-readable pagination header shown above table outputs."""
    parts = (header,
             f' Current page size: {limit}',
             f' Showing page {page} out of others that may exist.')
    return '\n'.join(parts)
def get_pagination_arguments(args: Dict[str, Any]) -> Tuple[int, int, int]:
    """
    Gets and validates pagination arguments for client (skip and limit).

    Args:
        args (Dict[str, Any]): The command arguments (page and limit).

    Returns:
        Tuple[int, int, int]: The page, calculated skip and limit after validation.

    Raises:
        DemistoException: When page or limit are out of their valid range.
    """
    page = arg_to_number(args.get('page', DEFAULT_PAGE))
    limit = arg_to_number(args.get('limit', DEFAULT_LIMIT))
    if page < 1:
        # page == 1 is accepted by the check above, so the requirement is
        # "at least 1" (the previous message wrongly demanded "greater than 1").
        raise DemistoException('Page argument must be greater than or equal to 1')
    if not 1 <= limit <= MAX_LIMIT:
        raise DemistoException(f'Limit argument must be between 1 to {MAX_LIMIT}')
    # skip is the number of records to jump over to reach the requested page.
    return page, (page - 1) * limit, limit
def list_events_command(client: Client, args: Dict[str, str]) -> CommandResults:
    """
    Get events extracted from SaaS traffic and or logs.

    Args:
        client (client): The Netskope client.
        args (Dict[str, Any]): Command arguments from XSOAR.

    Returns:
        CommandResults: Command results with raw response, outputs and readable outputs.
    """
    query = args.get('query')
    event_type = args['event_type']
    # Translate the human-readable period (e.g. 'Last 24 Hours') into seconds.
    timeperiod = TIME_PERIOD_MAPPING.get(args.get('timeperiod'))
    start_time = arg_to_seconds_timestamp(args.get('start_time'))
    end_time = arg_to_seconds_timestamp(args.get('end_time'))
    insertion_start_time = arg_to_seconds_timestamp(args.get('insertion_start_time'))
    insertion_end_time = arg_to_seconds_timestamp(args.get('insertion_end_time'))
    page, skip, limit = get_pagination_arguments(args)
    unsorted = arg_to_boolean(args.get('unsorted'))
    # Exactly one time filter (range, insertion range or period) must be given.
    validate_time_arguments(start_time=start_time,
                            end_time=end_time,
                            timeperiod=timeperiod,
                            insertion_start_time=insertion_start_time,
                            insertion_end_time=insertion_end_time)
    response = client.list_events_request(query=query,
                                          event_type=event_type,
                                          timeperiod=timeperiod,
                                          start_time=start_time,
                                          end_time=end_time,
                                          insertion_start_time=insertion_start_time,
                                          insertion_end_time=insertion_end_time,
                                          limit=limit,
                                          skip=skip,
                                          unsorted=unsorted)
    # Copy the data so the raw response stays untouched in raw_response.
    outputs = deepcopy(response['data'])
    for event in outputs:
        # Expose the internal '_id' under a friendlier context key.
        event['event_id'] = event['_id']
        # API timestamps are epoch seconds; timestamp_to_datestring expects ms.
        event['timestamp'] = timestamp_to_datestring(event['timestamp'] * 1000)
    readable_output = tableToMarkdown(
        get_pagination_readable_message('Events List:', page=page, limit=limit),
        outputs,
        removeNull=True,
        headers=['event_id', 'timestamp', 'type', 'access_method', 'app', 'traffic_type'],
        headerTransform=string_to_table_header)
    return CommandResults(outputs_prefix='Netskope.Event',
                          outputs_key_field='event_id',
                          outputs=outputs,
                          readable_output=readable_output,
                          raw_response=response)
def list_alerts_command(client: Client, args: Dict[str, str]) -> CommandResults:
    """
    Get alerts generated by Netskope, including policy, DLP, and watch list alerts.

    Args:
        client (client): The Netskope client.
        args (Dict[str, Any]): Command arguments from XSOAR.

    Returns:
        CommandResults: Command results with raw response, outputs and readable outputs.
    """
    query = args.get('query')
    alert_type = args.get('alert_type')
    acked = arg_to_boolean(args.get('acked'))
    # Translate the human-readable period (e.g. 'Last 24 Hours') into seconds.
    timeperiod = TIME_PERIOD_MAPPING.get(args.get('timeperiod'))
    start_time = arg_to_seconds_timestamp(args.get('start_time'))
    end_time = arg_to_seconds_timestamp(args.get('end_time'))
    insertion_start_time = arg_to_seconds_timestamp(args.get('insertion_start_time'))
    insertion_end_time = arg_to_seconds_timestamp(args.get('insertion_end_time'))
    page, skip, limit = get_pagination_arguments(args)
    unsorted = arg_to_boolean(args.get('unsorted'))
    # Exactly one time filter (range, insertion range or period) must be given.
    validate_time_arguments(start_time=start_time,
                            end_time=end_time,
                            timeperiod=timeperiod,
                            insertion_start_time=insertion_start_time,
                            insertion_end_time=insertion_end_time)
    response = client.list_alerts_request(query=query,
                                          alert_type=alert_type,
                                          acked=acked,
                                          timeperiod=timeperiod,
                                          start_time=start_time,
                                          end_time=end_time,
                                          insertion_start_time=insertion_start_time,
                                          insertion_end_time=insertion_end_time,
                                          limit=limit,
                                          skip=skip,
                                          unsorted=unsorted)
    # Copy the data so the raw response stays untouched in raw_response.
    outputs = deepcopy(response['data'])
    for alert in outputs:
        # Expose the internal '_id' under a friendlier context key.
        alert['alert_id'] = alert['_id']
        # API timestamps are epoch seconds; timestamp_to_datestring expects ms.
        alert['timestamp'] = timestamp_to_datestring(alert['timestamp'] * 1000)
    readable_output = tableToMarkdown(
        get_pagination_readable_message('Alerts List:', page=page, limit=limit),
        outputs,
        removeNull=True,
        headers=['alert_id', 'alert_name', 'alert_type', 'timestamp', 'action'],
        headerTransform=string_to_table_header)
    return CommandResults(outputs_prefix='Netskope.Alert',
                          outputs_key_field='alert_id',
                          outputs=outputs,
                          readable_output=readable_output,
                          raw_response=response)
def list_quarantined_files_command(client: Client, args: Dict[str, str]) -> CommandResults:
    """
    List all quarantined files.

    Args:
        client (client): The Netskope client.
        args (Dict[str, Any]): Command arguments from XSOAR.

    Returns:
        CommandResults: Command results with raw response, outputs and readable outputs.
    """
    start_time = arg_to_seconds_timestamp(args.get('start_time'))
    end_time = arg_to_seconds_timestamp(args.get('end_time'))
    page, skip, limit = get_pagination_arguments(args)
    response = client.list_quarantined_files_request(start_time=start_time,
                                                     end_time=end_time,
                                                     limit=limit,
                                                     skip=skip)
    # Default to an empty list so a response without 'data.quarantined'
    # does not crash the iteration below with a TypeError.
    outputs = dict_safe_get(response, ['data', 'quarantined'], [])
    # Attach the quarantine-profile identifiers to every file record.
    for output in outputs:
        for file_output in output['files']:
            file_output['quarantine_profile_id'] = output['quarantine_profile_id']
            file_output['quarantine_profile_name'] = output['quarantine_profile_name']
    # Flatten the per-profile file lists into one list of file records.
    outputs = sum((output['files'] for output in outputs), [])
    readable_header = get_pagination_readable_message('Quarantined Files List:',
                                                      page=page,
                                                      limit=limit)
    readable_output = tableToMarkdown(readable_header,
                                      outputs,
                                      removeNull=True,
                                      headers=[
                                          'quarantine_profile_id', 'quarantine_profile_name',
                                          'file_id', 'original_file_name', 'policy'
                                      ],
                                      headerTransform=string_to_table_header)
    return CommandResults(outputs_prefix='Netskope.Quarantine',
                          outputs_key_field='file_id',
                          outputs=outputs,
                          readable_output=readable_output,
                          raw_response=response)
def get_quarantined_file_command(client: Client, args: Dict[str, str]) -> Dict[str, Any]:
    """
    Download a quarantined file.

    Args:
        client (client): The Netskope client.
        args (Dict[str, Any]): Command arguments from XSOAR.

    Returns:
        Dict[str, Any]: A war-room file entry holding the downloaded file content
            (fileResult returns an entry dict, not a CommandResults object —
            the previous annotation was inaccurate).
    """
    quarantine_profile_id = args['quarantine_profile_id']
    file_id = args['file_id']
    response = client.get_quarantined_file_request(quarantine_profile_id=quarantine_profile_id,
                                                   file_id=file_id)
    # The API returns the quarantined file zipped; keep the .zip suffix.
    return fileResult(filename=f'{file_id}.zip', data=response, file_type=EntryType.FILE)
def update_quarantined_file_command(client: Client, args: Dict[str, str]) -> CommandResults:
    """
    Take an action on a quarantined file.

    Args:
        client (client): The Netskope client.
        args (Dict[str, Any]): Command arguments from XSOAR.

    Returns:
        CommandResults: Command results with readable outputs only.
    """
    profile_id = args['quarantine_profile_id']
    file_id = args['file_id']
    action = args['action']
    client.update_quarantined_file_request(quarantine_profile_id=profile_id,
                                           file_id=file_id,
                                           action=action)
    return CommandResults(readable_output=f'## The file {file_id} was successfully {action}ed!')
def update_url_list_command(client: Client, args: Dict[str, str]) -> CommandResults:
    """
    Update the URL List with the values provided.

    Args:
        client (client): The Netskope client.
        args (Dict[str, Any]): Command arguments from XSOAR.

    Returns:
        CommandResults: Command results with outputs and readable outputs.
    """
    name = args['name']
    urls = argToList(args['urls'])
    client.update_url_list_request(name=name, urls=urls)
    return CommandResults(outputs_prefix='Netskope.URLList',
                          outputs_key_field='name',
                          outputs={'name': name, 'URL': urls},
                          readable_output=f'URL List {name}:\n{", ".join(urls)}')
def update_file_hash_list_command(client: Client, args: Dict[str, str]) -> CommandResults:
    """
    Update file hash list with the values provided.

    Args:
        client (client): The Netskope client.
        args (Dict[str, Any]): Command arguments from XSOAR.

    Returns:
        CommandResults: Command results with outputs and readable outputs.
    """
    name = args.get('name')
    hashes = argToList(args.get('hash'))
    client.update_file_hash_list_request(name=name, hashes=hashes)
    return CommandResults(outputs_prefix='Netskope.FileHashList',
                          outputs_key_field='name',
                          outputs={'name': name, 'hash': hashes},
                          readable_output=f'Hash List {name}:\n{", ".join(hashes)}')
def list_clients_command(client: Client, args: Dict[str, str]) -> CommandResults:
    """
    Get information about the Netskope clients.

    Args:
        client (client): The Netskope client.
        args (Dict[str, Any]): Command arguments from XSOAR.

    Returns:
        CommandResults: Command results with raw response, outputs and readable outputs.
    """
    query = args.get('query')
    page, skip, limit = get_pagination_arguments(args)
    response = client.list_clients_request(query=query, limit=limit, skip=skip)
    # Each record nests the useful fields under 'attributes'.
    outputs = [record['attributes'] for record in response['data']]
    for output in outputs:
        # Expose the internal '_id' under a friendlier context key.
        output['client_id'] = output['_id']
    readable_output = tableToMarkdown(
        get_pagination_readable_message('Clients List:', page=page, limit=limit),
        outputs,
        removeNull=True,
        headers=['client_id', 'client_version', 'device_id', 'user_added_time'],
        headerTransform=string_to_table_header)
    return CommandResults(outputs_prefix='Netskope.Client',
                          outputs_key_field='client_id',
                          outputs=outputs,
                          readable_output=readable_output,
                          raw_response=response)
def list_host_associated_user_command(client: Client, args: Dict[str, str]) -> CommandResults:
    """
    List all users of certain host by its hostname.

    Args:
        client (client): The Netskope client.
        args (Dict[str, Any]): Command arguments from XSOAR.

    Returns:
        CommandResults: Command results with raw response, outputs and readable outputs.
    """
    hostname = args['hostname']
    page, skip, limit = get_pagination_arguments(args)
    response = client.list_clients_request(query=f'host_info.hostname eq {hostname}',
                                           limit=limit,
                                           skip=skip)
    # Flatten the user lists of all matching clients. Default to [] so a client
    # record without a 'users' key no longer breaks the concatenation, and use a
    # distinct generator variable instead of shadowing the 'client' parameter.
    outputs = sum((record['attributes'].get('users', []) for record in response['data']), [])
    for output in outputs:
        # Expose the internal '_id' under a friendlier context key.
        output['user_id'] = output['_id']
    readable_header = get_pagination_readable_message(f'Users Associated With {hostname}:',
                                                      page=page,
                                                      limit=limit)
    readable_output = tableToMarkdown(readable_header,
                                      outputs,
                                      removeNull=True,
                                      headers=['user_id', 'username', 'user_source'],
                                      headerTransform=string_to_table_header)
    return CommandResults(outputs_prefix='Netskope.User',
                          outputs_key_field='user_id',
                          outputs=outputs,
                          readable_output=readable_output,
                          raw_response=response)
def list_user_associated_host_command(client: Client, args: Dict[str, str]) -> CommandResults:
    """
    List all hosts related to a certain username.

    Args:
        client (client): The Netskope client.
        args (Dict[str, Any]): Command arguments from XSOAR.

    Returns:
        CommandResults: Command results with raw response, outputs and readable outputs.
    """
    username = args['username']
    page, skip, limit = get_pagination_arguments(args)
    response = client.list_clients_request(query=f'username eq {username}', limit=limit, skip=skip)
    outputs = []
    # Use a distinct loop variable — the previous code iterated with
    # 'for client in ...', rebinding and shadowing the Netskope client parameter.
    for record in response['data']:
        attributes = record['attributes']
        agent_status = dict_safe_get(attributes, ['last_event', 'status'])
        outputs.append({'agent_status': agent_status, **attributes['host_info']})
    readable_header = get_pagination_readable_message(f'Hosts Associated With {username}:',
                                                      page=page,
                                                      limit=limit)
    readable_output = tableToMarkdown(readable_header,
                                      outputs,
                                      removeNull=True,
                                      headers=['hostname', 'os_version', 'agent_status'],
                                      headerTransform=string_to_table_header)
    return CommandResults(outputs_prefix='Netskope.Host',
                          outputs_key_field='nsdeviceuid',
                          outputs=outputs,
                          readable_output=readable_output,
                          raw_response=response)
def test_module(client: Client, max_fetch: int, first_fetch: str, fetch_events: bool,
                max_events_fetch: int, event_types: List[str]) -> str:
    """
    Validate all integration parameters and test connectivity to Netskope.

    Returns 'ok' when both the configuration and the API call succeed.
    """
    validate_fetch_params(max_fetch, max_events_fetch, fetch_events, first_fetch, event_types)
    # A minimal one-alert query over an empty "now to now" window verifies
    # authentication and reachability without pulling real data.
    now = date_to_seconds_timestamp(datetime.now())
    client.list_alerts_request(limit=1,
                               skip=0,
                               start_time=now,
                               end_time=now)
    return 'ok'
def fetch_multiple_type_events(client: Client, max_fetch: int, start_time: int,
                               event_types: List[str],
                               query: Optional[str]) -> List[Dict[str, Any]]:
    """
    Fetch events of several types, making one API call per type
    (the API requires an explicit event type on every request).

    Args:
        client (Client): The Netskope client.
        max_fetch (int): The maximum total amount of events to fetch.
        start_time (int): The time to fetch the events from.
        event_types (List[str]): The event types to fetch as incidents.
        query (Optional[str]): Query for filtering the events.

    Returns:
        List[Dict[str, Any]]: The fetched events.
    """
    if not event_types:
        return []
    # Split the fetch budget evenly between the requested types.
    per_type_limit = max_fetch // len(event_types)
    events: List[Dict[str, Any]] = []
    for event_type in event_types:
        response = client.list_events_request(start_time=start_time,
                                              end_time=date_to_seconds_timestamp(datetime.now()),
                                              limit=per_type_limit,
                                              unsorted=False,
                                              event_type=event_type,
                                              query=query)
        for event in response['data']:
            # Tag each event with its id and originating type for the incident.
            event['event_id'] = event['_id']
            event['incident_type'] = event_type
            events.append(event)
    return events
def fetch_incidents(client: Client, max_fetch: int, first_fetch: str, fetch_events: bool,
                    max_events_fetch: int, event_types: List[str], alerts_query: Optional[str],
                    events_query: Optional[str]) -> None:
    """
    Fetches alerts and events as incidents.

    Args:
        client (Client): The Netskope client.
        max_fetch (int): Maximum number of incidents to fetch.
        first_fetch (str): The timestamp to fetch the incidents from.
        fetch_events (bool): Whether or not to fetch events in addition to alerts.
        max_events_fetch (int): Maximum number of events to fetch.
        event_types (List[str]): The type of events to fetch.
        alerts_query (Optional[str]): Query for filtering the fetched alerts.
        events_query (Optional[str]): Query for filtering the fetched events.
    """
    validate_fetch_params(max_fetch, max_events_fetch, fetch_events, first_fetch, event_types)
    last_run = demisto.getLastRun() or {}
    first_fetch = arg_to_seconds_timestamp(first_fetch)
    last_alert_time = last_run.get('last_alert_time') or first_fetch
    alerts = client.list_alerts_request(start_time=last_alert_time,
                                        end_time=date_to_seconds_timestamp(datetime.now()),
                                        limit=max_fetch,
                                        query=alerts_query,
                                        unsorted=False)['data']
    last_event_time = last_run.get('last_event_time') or first_fetch
    if fetch_events:
        events = fetch_multiple_type_events(client,
                                            max_fetch=max_events_fetch,
                                            start_time=last_event_time,
                                            event_types=event_types,
                                            query=events_query)
    else:
        events = []
    incidents = []
    for alert in alerts:
        alert['incident_type'] = alert['alert_type']
        incidents.append({
            'name': alert['alert_name'],
            # The API returns timestamps in epoch seconds while
            # timestamp_to_datestring expects milliseconds — convert, matching
            # the list commands (the previous code passed raw seconds, which
            # rendered 'occurred' as a 1970 date).
            'occurred': timestamp_to_datestring(alert['timestamp'] * 1000),
            'rawJSON': json.dumps(alert)
        })
    for event in events:
        incidents.append({
            'name': event['event_id'],
            'occurred': timestamp_to_datestring(event['timestamp'] * 1000),
            'rawJSON': json.dumps(event)
        })
    # The alerts and events are sorted in descending order, so index 0 holds the
    # newest timestamp. Increment by one second to avoid fetching duplicates.
    demisto.setLastRun({
        'last_alert_time': alerts[0]['timestamp'] + 1 if alerts else last_alert_time,
        'last_event_time': events[0]['timestamp'] + 1 if events else last_event_time
    })
    demisto.incidents(incidents)
def main():
    """Parse integration parameters, build the client, and dispatch the invoked command."""
    params = demisto.params()
    url = params['url']
    token = params['credentials']['password']
    use_ssl = not params.get('insecure', False)
    use_proxy = params.get('proxy', False)
    max_fetch = arg_to_number(params.get('max_fetch', DEFAULT_MAX_FETCH))
    first_fetch = params.get('first_fetch', DEFAULT_FIRST_FETCH)
    fetch_events = argToBoolean(params.get('fetch_events', False))
    event_types = argToList(params.get('fetch_event_types', DEFAULT_EVENT_TYPE))
    max_events_fetch = arg_to_number(params.get('max_events_fetch', DEFAULT_EVENTS_FETCH))
    client = Client(url, token, use_ssl, use_proxy)
    commands = {
        'netskope-event-list': list_events_command,
        'netskope-alert-list': list_alerts_command,
        'netskope-quarantined-file-list': list_quarantined_files_command,
        'netskope-quarantined-file-get': get_quarantined_file_command,
        'netskope-quarantined-file-update': update_quarantined_file_command,
        'netskope-url-list-update': update_url_list_command,
        'netskope-file-hash-list-update': update_file_hash_list_command,
        'netskope-client-list': list_clients_command,
        'netskope-host-associated-user-list': list_host_associated_user_command,
        'netskope-user-associated-host-list': list_user_associated_host_command,
    }
    try:
        command = demisto.command()
        if command == 'test-module':
            return_results(
                test_module(client,
                            max_fetch=max_fetch,
                            first_fetch=first_fetch,
                            fetch_events=fetch_events,
                            max_events_fetch=max_events_fetch,
                            event_types=event_types))
        elif command == 'fetch-incidents':
            # Reuse the already-fetched params instead of calling demisto.params() again.
            fetch_incidents(client,
                            max_fetch=max_fetch,
                            first_fetch=first_fetch,
                            fetch_events=fetch_events,
                            max_events_fetch=max_events_fetch,
                            event_types=event_types,
                            alerts_query=params.get('alert_query'),
                            events_query=params.get('events_query'))
        elif command in commands:
            return_results(commands[command](client, demisto.args()))
        else:
            raise NotImplementedError(f'The command {command} does not exist!')
    except Exception as e:
        return_error(f'Failed to execute {demisto.command()} command.\nError:\n{e}')
# Run main() when executed by the XSOAR server; the builtins names cover Python 2 and 3 runtimes.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | b46a704b1e57d0896fc8590fa386027f | 39.574869 | 129 | 0.569434 | 4.331917 | false | false | false | false |
demisto/content | Packs/FortiSandbox/Integrations/FortiSandbox/FortiSandbox.py | 2 | 17497 | import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
"""IMPORTS"""
import base64
import hashlib
import json
import os
import requests
# Disable insecure-request warnings: the integration may run with SSL verification off.
requests.packages.urllib3.disable_warnings()
"""HELPER FUNCTIONS"""
def _handle_post(post_url, data):
    """POST `data` as a JSON body to `post_url` and return the raw requests response.

    On any request failure the error is surfaced to the war room via return_error.
    """
    try:
        USE_SSL = demisto.params().get('secure')
        res = requests.post(post_url, data=json.dumps(data), verify=USE_SSL)
        return res
    except requests.exceptions.RequestException as e:
        # Note: the original code caught the builtin ConnectionError (which requests does
        # not raise) and referenced `res`, which is unbound when requests.post() raises.
        return_error("Something went wrong with the POST Request. Please check task inputs\n " + str(e))
def _file_entry(data, name):
    """Build a war-room file entry named `name` from base64-encoded content `data`."""
    return fileResult(name, base64.b64decode(data))
def _parse_results(response_data):
    """Convert a raw FortiSandbox JSON-RPC response into a war-room entry object.

    Responses carrying result.data are rendered as a markdown table; anything
    else is passed through as plain text.
    """
    returned_data = json.loads(response_data.text)
    if 'result' not in returned_data or 'data' not in returned_data['result']:
        return {
            'Type': entryTypes['note'],
            'Contents': response_data.text,
            'ContentsFormat': formats['text'],
        }
    data = returned_data['result']['data']
    return {
        'Type': entryTypes['note'],
        'Contents': json.dumps(data),
        'ContentsFormat': formats['json'],
        'HumanReadable': tableToMarkdown('', data, removeNull=False),
        'ReadableContentsFormat': formats['markdown'],
    }
""" COMMAND FUNCTIONS """
def login(username, password, base_url):
    """Exchange user credentials for a FortiSandbox session; returns the raw response."""
    request_body = {
        "method": "exec",
        "params": [
            {
                'url': '/sys/login/user',
                'data': [{'user': username, 'passwd': password}],
            }
        ],
        "ver": "2.0",
        "id": 1,
    }
    return _handle_post(base_url + "/jsonrpc", request_body)
def logout(session, base_url):
    """Invalidate the given FortiSandbox session token; returns the raw response."""
    request_body = {
        "method": "exec",
        "params": [{'url': "/sys/logout"}],
        "session": session,
        "ver": "2.0",
        "id": 2,
    }
    return _handle_post(base_url + "/jsonrpc", request_body)
def get_sha_file_rating(session, base_url, ctype):
    """Look up a file rating by checksum; `ctype` selects the hash kind (sha1/sha256)."""
    request_body = {
        "method": "get",
        "params": [
            {
                'url': "/scan/result/filerating",
                'checksum': demisto.args().get('checksum'),
                'ctype': ctype,
            }
        ],
        "session": session,
        "ver": "2.1",
        "id": 13,
    }
    return _handle_post(base_url + "/jsonrpc", request_body)
def get_url_rating(session, base_url):
    """Look up ratings for one or more comma-separated URLs; returns the raw response."""
    url_addresses = demisto.args().get('url').split(",")
    request_body = {
        "method": "get",
        "params": [
            {
                'url': "/scan/result/urlrating",
                'address': url_addresses,
            }
        ],
        "session": session,
        "ver": "2.4",
        "id": 14,
    }
    return _handle_post(base_url + "/jsonrpc", request_body)
def get_file_verdict(session, base_url):
    """Fetch the detailed verdict of a file identified by its checksum."""
    call_args = demisto.args()
    request_body = {
        "method": "get",
        "params": [
            {
                'url': "/scan/result/file",
                'checksum': call_args.get('checksum'),
                'ctype': call_args.get('checksum_type'),
            }
        ],
        "session": session,
        "ver": "2.1",
        "id": 10,
    }
    return _handle_post(base_url + "/jsonrpc", request_body)
def upload_file_on_demand(session, base_url):
    """Upload a war-room file to FortiSandbox for on-demand scanning.

    Returns a tuple of (raw response, file name, optional caller-supplied sha256).
    """
    args = demisto.args()
    file_info = demisto.getFilePath(args.get('file_entry_id'))
    file_path = file_info['path']
    file_name = file_info['name']
    # The caller may pre-supply the file hash; it is passed through unchanged.
    file_sha256 = args.get('sha256', "")
    try:
        # Use a context manager so the handle is closed even when reading fails.
        with open(file_path, 'rb') as file_handler:
            fname = os.path.basename(file_handler.name)
            encoded_file_name = base64.b64encode(fname.encode())
            file_data = file_handler.read()
            encoded_file_data = base64.encodebytes(file_data)
            file_size = len(file_data)
    except Exception as e:
        raise Exception('Failed to prepare file for upload.') from e
    if file_size >= 20 * 1024 * 1024:
        # Max file size is 20MB; the original check used 200000000 (200MB) by mistake.
        return_error("File too big to upload to the Sandbox, limit is 20MB")
    """File Upload"""
    url_suffix = "/alert/ondemand/submit-file"
    payload = {
        "method": "set",
        "params": [
            {
                "file": encoded_file_data.decode('utf-8'),
                "filename": encoded_file_name.decode('utf-8'),
                "archive_password": args.get("archive_password"),
                "overwrite_vm_list": args.get("vm_csv_list"),  # WIN7X86VM,WINXPVM
                "skip_steps": args.get("skip_steps"),
                # Do not use this parameter if no step to skip. 1 = Skip AV, 2= Skip Cloud, 4= Skip sandboxing,
                # 8= Skip Static Scan.
                "url": url_suffix,
                "type": "file",
                "timeout": "3600",
                # "malpkg":"0"
                # (Optional) set the value as "1" to require to add the sample to malware package if it satisfy the
                # malware critia. By default, the value is "0".
            }
        ],
        "session": session,
        "ver": "2.5",
        "id": 11
    }
    return _handle_post(base_url + "/jsonrpc", payload), file_name, file_sha256
def upload_urls(session, base_url):
    """Write comma-separated URLs to a temp file and submit it for URL scanning.

    Returns a tuple of (raw response, generated file name, sha256 of the file content).
    """
    csv_urls = demisto.args().get("urls")
    file_name = "urls_for_upload_" + str(time.time())
    with open(file_name, 'w+') as urlfile:
        for url in csv_urls.split(","):
            urlfile.write(url + "\n")
    with open(file_name, 'rb') as file_handler:
        file_data = file_handler.read()
        fname = os.path.basename(file_handler.name)
    encoded_file_data = base64.encodebytes(file_data)
    encoded_file_name = base64.b64encode(fname.encode())
    # Hash the bytes already read. The original code hashed the exhausted file
    # handle after read(), so it always produced the sha256 of empty content.
    file_sha256 = hashlib.sha256(file_data).hexdigest()
    url_suffix = "/alert/ondemand/submit-file"
    payload = {
        "method": "set",
        "params": [
            {
                "file": encoded_file_data.decode('utf-8'),
                "filename": encoded_file_name.decode('utf-8'),
                "url": url_suffix,
                "type": "url",
                "timeout": "1200"
            }
        ],
        "session": session,
        "ver": "2.2",
        "id": 12
    }
    return _handle_post(base_url + "/jsonrpc", payload), fname, file_sha256
def query_job_verdict(session, base_url):
    """Fetch a scan job's verdict details; returns (raw response, job id)."""
    job_id = demisto.args().get("job_id")
    request_body = {
        "method": "get",
        "params": [
            {
                "url": "/scan/result/job",
                "jid": job_id,
            }
        ],
        "session": session,
        "ver": "2.1",
        "id": 15,
    }
    return _handle_post(base_url + "/jsonrpc", request_body), job_id
def get_jobid_from_submissionid(session, base_url):
    """List job IDs created by a submission; returns (raw response, submission id)."""
    sid = demisto.args().get("submission_id")
    request_body = {
        "method": "get",
        "params": [
            {
                "url": "/scan/result/get-jobs-of-submission",
                "sid": sid,
            }
        ],
        "session": session,
        "ver": "2.0",
        "id": 17,
    }
    return _handle_post(base_url + "/jsonrpc", request_body), sid
def get_pdf_report(session, base_url):
    """Request the PDF scan report; returns (raw response, sha256 when queried by hash)."""
    call_args = demisto.args()
    query_type = call_args.get("query_type")
    query_value = call_args.get("query_value")
    # When the lookup key is a sha256, surface it so the caller can link the report.
    file_sha256 = query_value if query_type == 'sha256' else ""
    request_body = {
        "method": "get",
        "params": [
            {
                "url": "/scan/result/get-pdf-report",
                "qtype": query_type,
                "qval": query_value,
            }
        ],
        "session": session,
        "ver": "2.5",
        "id": 50,
    }
    return _handle_post(base_url + "/jsonrpc", request_body), file_sha256
def main():
    """Entry point: read integration params, open a FortiSandbox session, dispatch the
    invoked command to its handler, and log out when done."""
    username = demisto.params().get('credentials').get('identifier')
    password = demisto.params().get('credentials').get('password')
    # Strip a single trailing slash from the configured server URL, if present.
    base_url = demisto.params()['server'][:-1] if (demisto.params()['server'] and demisto.params()
                                                   ['server'].endswith('/')) else demisto.params()['server']
    # A fresh session token is obtained for every command invocation and released at the end.
    log_in = json.loads(login(username, password, base_url).text)
    session = log_in['session']
    """Commands Switch Panel"""
    if demisto.command() == 'test-module':
        """ This is the call made when pressing the integration test button """
        login_resp = json.loads(login(username, password, base_url).text)
        if login_resp['result']['status']['message'] == "OK" and int(login_resp['result']['status']['code']) == 0:
            logout(login_resp['session'], base_url)
            demisto.results('ok')
        else:
            demisto.results(login_resp)
    elif demisto.command() == 'fortisandbox-simple-file-rating-sha256':
        """Query file's rating through its SHA-256 checksum if data exists [Simplified]"""
        result = get_sha_file_rating(session, base_url, "sha256")
        demisto.results(_parse_results(result))
    elif demisto.command() == 'fortisandbox-simple-file-rating-sha1':
        """Query file's rating through its SHA-1 checksum if data exists [Simplified]"""
        result = get_sha_file_rating(session, base_url, "sha1")
        demisto.results(_parse_results(result))
    elif demisto.command() == 'fortisandbox-url-rating':
        """Query URL Rating if data exists"""
        result = get_url_rating(session, base_url)
        demisto.results(result.text)
        # demisto.results(_parse_results(result))
    elif demisto.command() == 'fortisandbox-get-file-verdict-detailed':
        """Query file's verdict through its checksum."""
        result = get_file_verdict(session, base_url)
        returned_data = json.loads(result.text)
        if 'result' in returned_data and 'data' in returned_data['result']:
            data = returned_data['result']['data']
            result_object = {
                'Type': entryTypes['note'],
                'Contents': json.dumps(data),
                'ContentsFormat': formats['json']
            }
        else:
            result_object = {
                'Type': entryTypes['note'],
                'Contents': json.dumps(returned_data),
                'ContentsFormat': formats['text']
            }
        demisto.results(result_object)
    elif demisto.command() == 'fortisandbox-upload-file':
        """Upload file (on-demand submit)"""
        result, file_name, file_sha256 = upload_file_on_demand(session, base_url)
        submission = json.loads(result.text)
        if 'result' in submission and 'data' in submission['result']:
            # Record the submission id in context so polling commands can track it.
            demisto.results({
                'Type': entryTypes['note'],
                'Contents': submission,
                'ContentsFormat': formats['text'],
                'EntryContext': {'FortiSandbox.Upload': {
                    'SubmissionId': submission["result"]["data"].get("sid"),
                    'FileName': file_name,
                    'SHA256': file_sha256,
                    'Status': 'Starting'
                }
                }
            })
        demisto.results(_parse_results(result))
    elif demisto.command() == 'fortisandbox-upload-urls':
        """Upload CSV seperated URLs for scanning"""
        result, file_name, file_sha256 = upload_urls(session, base_url)
        submission = json.loads(result.text)
        if 'result' in submission and 'data' in submission['result']:
            demisto.results({
                'Type': entryTypes['note'],
                'Contents': submission,
                'ContentsFormat': formats['text'],
                'EntryContext': {'FortiSandbox.Upload': {
                    'SubmissionId': submission["result"]["data"].get("sid"),
                    'FileName': file_name,
                    'SHA256': file_sha256,
                    'Status': 'Starting'
                }
                }
            })
        demisto.results(_parse_results(result))
    elif demisto.command() == 'fortisandbox-jobid-from-submission':
        """Get Job IDs from an uploaded Submission"""
        # demisto.results("Starting JobID from Submission ID")
        submission_result, submission_id = get_jobid_from_submissionid(session, base_url)
        json_results = submission_result.json()
        if 'result' in json_results and 'data' in json_results['result']:
            # demisto.results("Results and data exists")
            jids = json_results["result"]["data"].get("jids")
            str_jids = [str(one_job_id) for one_job_id in jids]
            if "," in submission_id:
                # Multiple comma-separated submission ids: defer to the polling flow.
                demisto.results("Multiple submissions detected, waiting for polling to complete")
                sys.exit(0)
            demisto.results({
                'Type': entryTypes['note'],
                'ContentsFormat': formats['text'],
                'Contents': "Job IDs:" + ",".join(str_jids),
                'EntryContext': {'FortiSandbox.Upload(val.SubmissionId && val.SubmissionId == obj.SubmissionId)': {
                    'SubmissionId': submission_id,
                    'Status': 'In-Progress' if len(jids) > 0 else 'Starting',
                    'JobIds': str_jids
                }
                }
            })
    elif demisto.command() == 'fortisandbox-query-job-verdict':
        """Query job's verdict detail through its job id."""
        api_result, jid = query_job_verdict(session, base_url)
        verdict = json.loads(api_result.text)
        if 'data' in verdict['result']:
            file_sha256 = verdict['result']['data'].get("sha256")
            fpn = verdict['result']['data'].get("false_positive_negative")
            demisto.results({
                'Type': entryTypes['note'],
                'Contents': 'Scan Finished, Rating is ' + verdict['result']['data'].get("rating"),
                'ContentsFormat': formats['text'],
                'IgnoreAutoExtract': True,
                'EntryContext': {'FortiSandbox.Upload(val.SHA256 && val.SHA256 == obj.SHA256)': {
                    'SHA256': file_sha256,
                    'Virus_ID': verdict['result']['data'].get("vid"),
                    'Rating': verdict['result']['data'].get("rating"),
                    'Infected_OS': verdict['result']['data'].get("infected_os"),
                    'Detection_OS': verdict['result']['data'].get("detection_os"),
                    'Score': verdict['result']['data'].get("score"),
                    'Untrusted': verdict['result']['data'].get("untrusted"),
                    'Malware_Name': verdict['result']['data'].get("malware_name"),
                    'Category': verdict['result']['data'].get("category"),
                    'Rating_Source': verdict['result']['data'].get("rating_source"),
                    'Detail_URL': verdict['result']['data'].get("detail_url"),
                    'Start_TS': verdict['result']['data'].get("start_ts"),
                    'Finish_TS': verdict['result']['data'].get("finish_ts"),
                    'FP_Or_FN': "False Positive" if fpn == 1 else "False Negative" if fpn == 2 else "N/A",
                    'Status': 'Done',
                    'JobIds': str(jid)
                }
                }
            })
        else:
            demisto.results({
                'Type': entryTypes['note'],
                'Contents': 'Scan in Progress',
                'ContentsFormat': formats['text'],
                'EntryContext': {'FortiSandbox.Upload': {
                    'JobIds': str(jid),
                    'Status': 'In-Progress'
                }
                }
            })
        demisto.results(_parse_results(api_result))
    elif demisto.command() == 'fortisandbox-get-pdf-report':
        """Get PDF Report of the Scan"""
        result, file_sha256 = get_pdf_report(session, base_url)
        report = json.loads(result.text)
        if 'data' in report['result']:
            demisto.results({
                'Type': entryTypes['note'],
                'Contents': 'Scan Finished, Report Available',
                'ContentsFormat': formats['text'],
                'EntryContext': {'FortiSandbox.Upload(val.SHA256 && val.SHA256 == obj.SHA256)': {
                    'SHA256': file_sha256,
                    'Status': 'Done'}
                }
            })
            demisto.results(_file_entry(report['result']['data']['report'], report['result']['data']['report_name']))
    else:
        demisto.results("No Command Specified")
    """ Clean-up Actions """
    logout(session, base_url)  # Log Out
    session = ""  # purge session variable
# Run main() when executed by the XSOAR server; the builtins names cover Python 2 and 3 runtimes.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | 1a40515ecfd0b8967b9f86257de8a909 | 33.307843 | 117 | 0.525176 | 3.955018 | false | false | false | false |
demisto/content | Packs/IronPort/Integrations/CiscoEmailSecurityApplianceIronPortV2/CiscoEmailSecurityApplianceIronPortV2.py | 1 | 64731 | from typing import Callable, Tuple
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
import uuid
# Lifetime (in minutes) of a cached JWT token before a fresh one is requested.
JWT_TOKEN_EXPIRATION_PERIOD = 30
DEFAULT_FETCH = 50
# Cisco ESA message timestamp format, e.g. "21 Jun 2022 14:02:30 (GMT +00:00)".
TIMESTAMP_FORMAT = "%d %b %Y %H:%M:%S (%Z +00:00)"
# Quarantine timestamps omit the seconds component.
QUARANTINE_TIMESTAMP_FORMAT = "%d %b %Y %H:%M (%Z +00:00)"
# Normalized datetime format used for outputs.
DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
# Request datetime formats expected by the Cisco ESA API
# (the reporting endpoint accepts timestamps rounded to the hour).
CISCO_TIME_FORMAT = "%Y-%m-%dT%H:%M:00.000Z"
CISCO_REPORTING_TIME_FORMAT = "%Y-%m-%dT%H:00:00.000Z"
# Pagination bounds.
MIN_PAGE_NUMBER = 1
MIN_LIMIT = 1
MIN_PAGE_SIZE = 1
MAX_PAGE_SIZE = 100
REQUEST_MAX_PULL = 100
# Quarantine API constants.
QUARANTINE_TYPE = "spam"
VIEW_ACTION = "view"
RELEASE_ACTION = "release"
ADD_ACTION = "add"
APPEND_ACTION = "append"
EDIT_ACTION = "edit"
class Client(BaseClient):
    """Client class to interact with Cisco ESA API."""

    def __init__(
        self, server_url: str, username: str, password: str, verify: bool, proxy: bool
    ):
        super().__init__(base_url=server_url, headers={}, verify=verify, proxy=proxy)
        self.username = username
        self.password = password
        self.handle_request_headers()

    def handle_request_headers(self):
        """Retrieve and save to integration context JWT token for authorized client class API requests."""
        integration_context = get_integration_context()
        jwt_token = integration_context.get("jwt_token")
        jwt_token_issued_time = integration_context.get("jwt_token_issued_time")
        # Reuse the cached token while it is younger than the expiration period (minutes).
        if jwt_token and jwt_token_issued_time >= datetime.timestamp(
            datetime.now() - timedelta(minutes=JWT_TOKEN_EXPIRATION_PERIOD)
        ):
            self._headers["jwtToken"] = jwt_token
        else:
            jwt_token = self.retrieve_jwt_token()
            set_integration_context(
                {"jwt_token": jwt_token, "jwt_token_issued_time": time.time()}
            )
            self._headers["jwtToken"] = jwt_token

    def retrieve_jwt_token(self) -> str:
        """
        Retrieve JWT token from Cisco ESA.
        Returns:
            str: JWT token from Cisco ESA.
        """
        data = {
            "data": {
                "userName": b64_encode(self.username),
                "passphrase": b64_encode(self.password),
            }
        }
        try:
            response = self._http_request("POST", "login", json_data=data)
            return dict_safe_get(response, ["data", "jwtToken"])
        except DemistoException as e:
            # e.res can be None (e.g. connection errors) - guard before reading status_code,
            # otherwise the original error is masked by an AttributeError.
            if e.res is not None and e.res.status_code == 401:
                raise Exception(
                    "Authorization Error: make sure username and password are set correctly."
                )
            raise

    def spam_quarantine_message_search_request(
        self,
        quarantine_type: str,
        start_date: str,
        end_date: str,
        offset: int,
        limit: int,
        filter_by: str = None,
        filter_operator: str = None,
        filter_value: str = None,
        recipient_filter_operator: str = None,
        recipient_filter_value: str = None,
        order_by: str = None,
        order_dir: str = None,
    ) -> Dict[str, Any]:
        """
        Search spam quarantine messages.
        Args:
            quarantine_type (str): Quarantine type.
            start_date (str): Start date in ISO format.
            end_date (str): End date in ISO format.
            offset (int): Offset of results to skip.
            limit (int): Limit of results to retrieve.
            filter_by (str, Optional): Filter by field.
                Required if filter_operator/filter_value are specified. Defaults to None.
            filter_operator (str, Optional): Filter operator.
                Required if filter_by/filter_value are specified. Defaults to None.
            filter_value (str, Optional): Filter value.
                Required if filter_by/filter_operator are specified. Defaults to None.
            recipient_filter_operator (str, Optional): Recipient filter operator.
                Required if recipient_filter_value is specified. Defaults to None.
            recipient_filter_value (str, Optional): Recipient address filter.
                Required if recipient_filter_operator is specified. Defaults to None.
            order_by (str, Optional): Results order by field.
                Required if order_dir is specified. Defaults to None.
            order_dir (str, Optional): Results order direction.
                Required if order_by is specified. Defaults to None.
        Returns:
            Dict[str, Any]: API response from Cisco ESA.
        """
        params = assign_params(
            quarantineType=quarantine_type,
            startDate=start_date,
            endDate=end_date,
            filterBy=filter_by,
            filterOperator=filter_operator,
            filterValue=filter_value,
            envelopeRecipientFilterOperator=recipient_filter_operator,
            envelopeRecipientFilterValue=recipient_filter_value,
            offset=offset,
            limit=limit,
            orderBy=order_by,
            orderDir=order_dir,
        )
        return self._http_request("GET", "quarantine/messages", params=params)

    def spam_quarantine_message_get_request(
        self, quarantine_type: str, message_id: str
    ) -> Dict[str, Any]:
        """
        Get spam quarantine message.
        Args:
            quarantine_type (str): Quarantine Type.
            message_id (str): Message ID.
        Returns:
            Dict[str, Any]: API response from Cisco ESA.
        """
        params = assign_params(quarantineType=quarantine_type, mid=message_id)
        return self._http_request("GET", "quarantine/messages/details", params=params)

    def spam_quarantine_message_release_request(
        self, action: str, quarantine_type: str, message_ids: List[int]
    ) -> Dict[str, Any]:
        """
        Release spam quarantine message.
        Args:
            action (str): Release action.
            quarantine_type (str): Quarantine type.
            message_ids (List[int]): Message IDs list.
        Returns:
            Dict[str, Any]: API response from Cisco ESA.
        """
        data = assign_params(
            action=action, mids=message_ids, quarantineType=quarantine_type
        )
        return self._http_request("POST", "quarantine/messages", json_data=data)

    def spam_quarantine_message_delete_request(
        self, quarantine_type: str, message_ids: List[int]
    ) -> Dict[str, Any]:
        """
        Delete spam quarantine message.
        Args:
            quarantine_type (str): Quarantine type.
            message_ids (List[int]): Message IDs list.
        Returns:
            Dict[str, Any]: API response from Cisco ESA.
        """
        data = assign_params(mids=message_ids, quarantineType=quarantine_type)
        return self._http_request("DELETE", "quarantine/messages", json_data=data)

    def list_entry_get_request(
        self,
        entry_type: str,
        action: str,
        limit: int,
        offset: int,
        quarantine_type: str,
        view_by: str,
        order_by: str = None,
        order_dir: str = None,
        search: str = None,
    ) -> Dict[str, Any]:
        """
        List spam quarantine blocklist/safelist.
        Args:
            entry_type (str): Blocklist/Safelist list type.
            action (str): List action.
            limit (int): Limit of results to retrieve.
            offset (int): Offset of results to skip.
            quarantine_type (str): Quarantine type.
            view_by (str): View list entry results by recipient/sender.
            order_by (str, Optional): Results order by field.
                Required if order_dir is specified. Defaults to None.
            order_dir (str, Optional): Results order direction.
                Required if order_by is specified. Defaults to None.
            search (str, Optional): Search for results in blocklist/safelist. Defaults to None.
        Returns:
            Dict[str, Any]: API response from Cisco ESA.
        """
        params = assign_params(
            action=action,
            limit=limit,
            offset=offset,
            orderBy=order_by,
            orderDir=order_dir,
            quarantineType=quarantine_type,
            viewBy=view_by,
            search=search,
        )
        return self._http_request("GET", f"quarantine/{entry_type}", params=params)

    def list_entry_add_request(
        self,
        entry_type: str,
        quarantine_type: str,
        action: str,
        view_by: str,
        recipient_addresses: List[str] = None,
        sender_list: List[str] = None,
        sender_addresses: List[str] = None,
        recipient_list: List[str] = None,
    ) -> Dict[str, Any]:
        """
        Add spam quarantine blocklist/safelist entries.
        Args:
            entry_type (str): Blocklist/Safelist list type.
            quarantine_type (str): Quarantine type.
            action (str): Add action.
            view_by (str): Add list entry results by recipient/sender.
            recipient_addresses (List[str], Optional): Recipient addresses list to add. Defaults to None.
            sender_list (List[str], Optional): Sender addresses list to add. Defaults to None.
            sender_addresses (List[str], Optional): Sender addresses list to add. Defaults to None.
            recipient_list (List[str], Optional): Recipient addresses list to add. Defaults to None.
        Returns:
            Dict[str, Any]: API response from Cisco ESA.
        """
        data = assign_params(
            action=action,
            quarantineType=quarantine_type,
            viewBy=view_by,
            recipientAddresses=recipient_addresses,
            senderAddresses=sender_addresses,
            recipientList=recipient_list,
            senderList=sender_list,
        )
        return self._http_request("POST", f"quarantine/{entry_type}", json_data=data)

    def list_entry_append_request(
        self,
        entry_type: str,
        quarantine_type: str,
        action: str,
        view_by: str,
        recipient_addresses: List[str] = None,
        sender_list: List[str] = None,
        sender_addresses: List[str] = None,
        recipient_list: List[str] = None,
    ) -> Dict[str, Any]:
        """
        Append spam quarantine blocklist/safelist entries.
        Args:
            entry_type (str): Blocklist/Safelist list type.
            quarantine_type (str): Quarantine type.
            action (str): Append action.
            view_by (str): Append list entry results by recipient/sender.
            recipient_addresses (List[str], Optional): Recipient addresses list to append. Defaults to None.
            sender_list (List[str], Optional): Sender addresses list to append. Defaults to None.
            sender_addresses (List[str], Optional): Sender addresses list to append. Defaults to None.
            recipient_list (List[str], Optional): Recipient addresses list to append. Defaults to None.
        Returns:
            Dict[str, Any]: API response from Cisco ESA.
        """
        data = assign_params(
            action=action,
            quarantineType=quarantine_type,
            viewBy=view_by,
            recipientAddresses=recipient_addresses,
            senderAddresses=sender_addresses,
            recipientList=recipient_list,
            senderList=sender_list,
        )
        return self._http_request("POST", f"quarantine/{entry_type}", json_data=data)

    def list_entry_edit_request(
        self,
        entry_type: str,
        quarantine_type: str,
        action: str,
        view_by: str,
        recipient_addresses: List[str] = None,
        sender_list: List[str] = None,
        sender_addresses: List[str] = None,
        recipient_list: List[str] = None,
    ) -> Dict[str, Any]:
        """
        Edit spam quarantine blocklist/safelist entries.
        Args:
            entry_type (str): Blocklist/Safelist list type.
            quarantine_type (str): Quarantine type.
            action (str): Edit action.
            view_by (str): Edit list entry results by recipient/sender.
            recipient_addresses (List[str], Optional): Recipient addresses list to edit Defaults to None.
            sender_list (List[str], Optional): Sender addresses list to edit Defaults to None.
            sender_addresses (List[str], Optional): Sender addresses list to edit Defaults to None.
            recipient_list (List[str], Optional): Recipient addresses list to edit Defaults to None.
        Returns:
            Dict[str, Any]: API response from Cisco ESA.
        """
        data = assign_params(
            action=action,
            quarantineType=quarantine_type,
            viewBy=view_by,
            recipientAddresses=recipient_addresses,
            senderAddresses=sender_addresses,
            recipientList=recipient_list,
            senderList=sender_list,
        )
        return self._http_request("POST", f"quarantine/{entry_type}", json_data=data)

    def list_entry_delete_request(
        self,
        entry_type: str,
        quarantine_type: str,
        view_by: str,
        recipient_list: List[str] = None,
        sender_list: List[str] = None,
    ) -> Dict[str, Any]:
        """
        Delete spam quarantine blocklist/safelist entries.
        Args:
            entry_type (str): Blocklist/Safelist list type.
            quarantine_type (str): Quarantine type.
            view_by (str): Delete list entry results by recipient/sender.
            recipient_list (List[str], Optional): Recipient list to delete. Defaults to None.
            sender_list (List[str], Optional): Sender list to delete. Defaults to None.
        Returns:
            Dict[str, Any]: API response from Cisco ESA.
        """
        data = assign_params(
            quarantineType=quarantine_type,
            recipientList=recipient_list,
            senderList=sender_list,
            viewBy=view_by,
        )
        return self._http_request("DELETE", f"quarantine/{entry_type}", json_data=data)

    def message_search_request(
        self,
        start_date: str,
        end_date: str,
        offset: int,
        limit: int,
        search_option: str,
        cisco_host: str,
        sender_filter_operator: str = None,
        sender_filter_value: str = None,
        recipient_filter_operator: str = None,
        recipient_filter_value: str = None,
        subject_filter_operator: str = None,
        subject_filter_value: str = None,
        attachment_name_operator: str = None,
        attachment_name_value: str = None,
        file_sha_256: str = None,
        custom_query: str = None,
    ) -> Dict[str, Any]:
        """
        Search tracking messages.
        Args:
            start_date (str): Start date in ISO format.
            end_date (str): End date in ISO format.
            offset (int): Offset of results to skip.
            limit (int): Limit of results to retrieve.
            search_option (str): Messages option.
            cisco_host (str): Cisco host.
            sender_filter_operator (str, Optional): Sender filter operator.
                Required if sender_filter_value is specified. Defaults to None.
            sender_filter_value (str, Optional): Sender address filter.
                Required if sender_filter_operator is specified. Defaults to None.
            recipient_filter_operator (str, Optional): Recipient filter operator.
                Required if recipient_filter_value is specified. Defaults to None.
            recipient_filter_value (str, Optional): Recipient address filter.
                Required if recipient_filter_operator is specified. Defaults to None.
            subject_filter_operator (str, Optional): Subject filter operator.
                Required if subject_filter_value is specified. Defaults to None.
            subject_filter_value (str, Optional): Subject address filter.
                Required if subject_filter_operator is specified. Defaults to None.
            attachment_name_operator (str, Optional): Attachment name operator.
                Required if attachment_name_value is specified. Defaults to None.
            attachment_name_value (str, Optional): Attachment name filter.
                Required if attachment_name_operator is specified. Defaults to None.
            file_sha_256 (str, Optional): SHA256 must be 64 characters long
                and can contain only "0-9" and "a-f" symbols.
                e.g. e0d123e5f316bef78bfdf5a008837577e0d123e5f316bef78bfdf5a008837577. Defaults to None.
            custom_query (str, Optional): Custom query for cisco ESA advanced filters. Defaults to None.
        Returns:
            Dict[str, Any]: API response from Cisco ESA.
        """
        params = assign_params(
            envelopeSenderfilterOperator=sender_filter_operator,
            envelopeSenderfilterValue=sender_filter_value,
            envelopeRecipientfilterOperator=recipient_filter_operator,
            envelopeRecipientfilterValue=recipient_filter_value,
            subjectfilterOperator=subject_filter_operator,
            subjectfilterValue=subject_filter_value,
            ciscoHost=cisco_host,
            searchOption=search_option,
            offset=offset,
            limit=limit,
            fileSha256=file_sha_256,
            attachmentNameOperator=attachment_name_operator,
            attachmentNameValue=attachment_name_value,
            **format_custom_query_args(custom_query),
        )
        # The dates are embedded in the URL directly; the remaining filters go as params.
        return self._http_request(
            "GET",
            f"message-tracking/messages?startDate={start_date}&endDate={end_date}",
            params=params,
        )

    def message_details_get_request(
        self,
        serial_number: str,
        message_ids: List[int],
        injection_connection_id: int = None,
    ) -> Dict[str, Any]:
        """
        Get message details.
        Args:
            serial_number (str): mail Gateway serial number.
            message_ids (List[int]): Message ID List.
            injection_connection_id (int, Optional): ICID, injection connection ID. Defaults to None.
        Returns:
            Dict[str, Any]: API response from Cisco ESA.
        """
        params = assign_params(
            serialNumber=serial_number,
            mid=message_ids,
            icid=injection_connection_id,
        )
        return self._http_request("GET", "message-tracking/details", params=params)

    def message_amp_details_get_request(
        self, serial_number: str, message_ids: List[int]
    ) -> Dict[str, Any]:
        """
        Get message AMP report details.
        Args:
            serial_number (str): mail Gateway serial number.
            message_ids (List[int]): Message ID List.
        Returns:
            Dict[str, Any]: API response from Cisco ESA.
        """
        params = assign_params(
            serialNumber=serial_number,
            mid=message_ids,
        )
        return self._http_request("GET", "message-tracking/amp-details", params=params)

    def message_dlp_details_get_request(
        self, serial_number: str, message_ids: List[int]
    ) -> Dict[str, Any]:
        """
        Get message DLP report details.
        Args:
            serial_number (str): mail Gateway serial number.
            message_ids (List[int]): Message ID List.
        Returns:
            Dict[str, Any]: API response from Cisco ESA.
        """
        params = assign_params(
            serialNumber=serial_number,
            mid=message_ids,
        )
        return self._http_request("GET", "message-tracking/dlp-details", params=params)

    def message_url_details_get_request(
        self, serial_number: str, message_ids: List[int]
    ) -> Dict[str, Any]:
        """
        Get message URL report details.
        Args:
            serial_number (str): mail Gateway serial number.
            message_ids (List[int]): Message ID List.
        Returns:
            Dict[str, Any]: API response from Cisco ESA.
        """
        params = assign_params(
            serialNumber=serial_number,
            mid=message_ids,
        )
        return self._http_request("GET", "message-tracking/url-details", params=params)

    def report_get_request(
        self,
        report_type: str,
        start_date: str,
        end_date: str,
        device_type: str,
        order_by: str = None,
        order_dir: str = None,
        top: str = None,
        filter_value: str = None,
        filter_by: str = None,
        filter_operator: str = None,
    ) -> Dict[str, Any]:
        """
        Get statistics reports.
        Args:
            report_type (str): Report type.
            start_date (str): Start date.
            end_date (str): End date.
            device_type (str): Device type.
            order_by (str, optional): Order results by field. Defaults to None.
            order_dir (str, optional): Order direction. Defaults to None.
            top (str, optional): Number of records with the highest values to return. Defaults to None.
            filter_value (str, optional): Filter value. Defaults to None.
            filter_by (str, optional): Filter by field. Defaults to None.
            filter_operator (str, optional): Filter operator. Defaults to None.
        Returns:
            Dict[str, Any]: API response from Cisco ESA.
        """
        params = assign_params(
            startDate=start_date,
            endDate=end_date,
            device_type=device_type,
            orderBy=order_by,
            orderDir=order_dir,
            top=top,
            filterValue=filter_value,
            filterBy=filter_by,
            filterOperator=filter_operator,
        )
        return self._http_request("GET", f"reporting/{report_type}", params=params)
def format_custom_query_args(custom_query: str = None) -> Dict[str, Any]:
    """
    Format custom query arguments for tracking message advanced filters.

    Pairs are separated by ";" and split on the FIRST "=" only, so values may
    themselves contain "=" characters (e.g. base64 payloads). Empty segments
    (such as those produced by a trailing ";") are ignored.
    Args:
        custom_query (str, optional): Custom query of advanced filters. Defaults to None.
    Returns:
        Dict[str, Any]: Formatted dictionary of custom query arguments.
    Raises:
        ValueError: If a non-empty segment does not contain "=".
    """
    if not custom_query:
        return {}
    formatted_args: Dict[str, Any] = {}
    for field in custom_query.split(";"):
        if not field:
            # Tolerate trailing or duplicated ";" separators.
            continue
        key, separator, value = field.partition("=")
        if not separator:
            raise ValueError(
                'Please validate the format of argument "custom_query". '
                'For example: "key1=value1;key2=value2".'
            )
        formatted_args[key] = value
    return formatted_args
def format_datetime(time_expression: str) -> str:
    """
    Convert a time expression to the Cisco ESA datetime format.
    Args:
        time_expression (str): Time expression or ISO format datetime.
    Returns:
        str: Datetime string in Cisco ESA format.
    """
    parsed_time = arg_to_datetime(time_expression)
    return parsed_time.strftime(CISCO_TIME_FORMAT)  # type: ignore
def format_reporting_datetime(time_expression: str) -> str:
    """
    Convert a time expression to the Cisco ESA reporting datetime format.
    Args:
        time_expression (str): Time expression or ISO format datetime.
    Returns:
        str: Datetime string in Cisco ESA reporting format.
    """
    parsed_time = arg_to_datetime(time_expression)
    return parsed_time.strftime(CISCO_REPORTING_TIME_FORMAT)  # type: ignore
def format_timestamp(timestamp: str) -> str:
    """
    Convert a Cisco ESA timestamp to a datetime string.
    Args:
        timestamp (str): Cisco ESA timestamp.
    Returns:
        str: Datetime formatted string.
    """
    parsed = datetime.strptime(timestamp, TIMESTAMP_FORMAT)
    return parsed.strftime(DATETIME_FORMAT)
def format_quarantine_timestamp(timestamp: str) -> str:
    """
    Convert a Cisco ESA quarantine timestamp to a datetime string.
    Args:
        timestamp (str): Cisco ESA quarantine timestamp.
    Returns:
        str: Datetime formatted string.
    """
    parsed = datetime.strptime(timestamp, QUARANTINE_TIMESTAMP_FORMAT)
    return parsed.strftime(DATETIME_FORMAT)
def format_last_run(last_run: str) -> str:
    """
    Convert a fetch-incidents last-run value to the Cisco ESA datetime format.
    Args:
        last_run (str): Fetch incidents last run.
    Returns:
        str: Datetime formatted string.
    """
    parsed = datetime.strptime(last_run, DATETIME_FORMAT)
    return parsed.strftime(CISCO_TIME_FORMAT)
def format_number_list_argument(number_list_string: str) -> List[int]:
    """
    Convert a comma-separated number argument to a list of integers.
    Args:
        number_list_string (str): Number list argument.
    Returns:
        List[int]: List of integers.
    """
    raw_values = argToList(number_list_string)
    return [arg_to_number(value) for value in raw_values]  # type: ignore
def validate_pagination_arguments(
    page: Optional[int] = None,
    page_size: Optional[int] = None,
    limit: Optional[int] = None
):
    """
    Validate pagination arguments, raising an error for invalid values.

    Manual pagination (both page and page_size supplied) is checked against
    the page bounds; otherwise only the limit lower bound is checked.
    Args:
        page (int, optional): Page number.
        page_size (int, optional): Page size.
        limit (int, optional): Maximum number of records.
    Raises:
        ValueError: If any supplied pagination value is out of range.
    """
    if page and page_size:
        if not MIN_PAGE_SIZE <= page_size <= MAX_PAGE_SIZE:
            raise ValueError(
                f"page size argument must be greater than or equal to {MIN_PAGE_SIZE} "
                f"and smaller or equal to {MAX_PAGE_SIZE}."
            )
        if page < MIN_PAGE_NUMBER:
            raise ValueError(f"page argument must be equal or greater than {MIN_PAGE_NUMBER}.")
    elif limit and limit < MIN_LIMIT:
        raise ValueError(f"limit argument must be equal or greater than {MIN_LIMIT}.")
def validate_related_arguments(
    args: Dict[str, Any], related_arguments_list: List[List[str]]
):
    """
    Validate correct usage of arguments that are related to each other.

    Each inner list names arguments that must be supplied together; an error
    is raised when only a subset of a group is present. Empty-string values
    are treated as not supplied.
    Args:
        args (Dict[str, Any]): Command arguments.
        related_arguments_list (List[List[str]]): Groups of related argument names.
    Raises:
        ValueError: If some but not all arguments of a group were supplied.
    """
    provided = {key for key, value in args.items() if value != ""}
    for group in related_arguments_list:
        supplied_count = sum(1 for name in group if name in provided)
        if 0 < supplied_count < len(group):
            raise ValueError(
                f"{', '.join(group)} arguments should be used together but one or more are empty."
            )
def format_list_entry_arguments(view_by: str, args: Dict[str, Any]) -> Dict[str, Any]:
    """
    Format list entry arguments.

    Keeps the argument pair matching the requested view and nullifies the
    opposite pair; raises when the required pair is missing.
    Args:
        view_by (str): View by recipient/sender.
        args (Dict[str, Any]): Command arguments to format.
    Returns:
        Dict[str, Any]: Formatted list entry arguments.
    Raises:
        DemistoException: If the required argument pair is missing or view_by is invalid.
    """
    if view_by == "recipient":
        required = ("recipient_addresses", "sender_list")
        nullified = ("sender_addresses", "recipient_list")
        error_message = (
            "Please specify recipient_addresses and sender_list arguments when using view_by recipient."
        )
    elif view_by == "sender":
        required = ("sender_addresses", "recipient_list")
        nullified = ("recipient_addresses", "sender_list")
        error_message = (
            "Please specify sender_addresses and recipient_list arguments when using view_by sender."
        )
    else:
        raise DemistoException(
            f'Please check the value of argument "view_by". Valid values are recipient/sender, got {view_by}.'
        )
    if not all(args.get(name) for name in required):
        raise DemistoException(error_message)
    for name in nullified:
        args[name] = None
    return args
def pagination(request_command: Callable, args: Dict[str, Any], **kwargs) -> Tuple:
    """
    Executing Manual Pagination (using the page and page size arguments)
    or Automatic Pagination (display a number of total results).
    Args:
        request_command (Callable): The command to execute.
        args (Dict[str, Any]): The command arguments.
    Returns:
        Tuple: output, pagination message for Command Results.
    """
    page = arg_to_number(args.get("page"))
    page_size = arg_to_number(args.get("page_size"))
    limit = arg_to_number(args.get("limit"))
    validate_pagination_arguments(page, page_size, limit)
    # Fix: pre-assign defaults so the return below cannot raise
    # UnboundLocalError when neither manual (page/page_size) nor automatic
    # (limit) pagination arguments were supplied.
    output: List[Any] = []
    pagination_message = None
    if page and page_size:
        # Manual pagination: fetch exactly the requested page.
        offset = page_size * (page - 1)
        output = request_command(offset=offset, limit=page_size, **kwargs).get("data")
        pagination_message = f"Showing page {page}.\n Current page size: {page_size}."
    elif limit:
        # Automatic pagination: pull chunks of at most REQUEST_MAX_PULL
        # records until the requested total has been collected.
        offset = 0
        while limit > 0:
            page_size = limit if limit <= REQUEST_MAX_PULL else REQUEST_MAX_PULL
            output.extend(
                request_command(offset=offset, limit=page_size, **kwargs).get("data")
            )
            limit -= REQUEST_MAX_PULL
            offset += REQUEST_MAX_PULL
        pagination_message = f"Showing {len(output)} rows." if len(output) > 0 else None  # type: ignore
    return output, pagination_message
def spam_quarantine_message_search_command(
    client: Client, args: Dict[str, Any]
) -> CommandResults:
    """
    Search spam quarantine messages.
    Args:
        client (Client): Cisco ESA API client.
        args (Dict[str, Any]): Command arguments from XSOAR.
    Returns:
        CommandResults: readable outputs for XSOAR.
    """
    start_date = format_datetime(args["start_date"])
    end_date = format_datetime(args["end_date"])
    # Filter/ordering arguments must come in complete groups.
    validate_related_arguments(
        args=args,
        related_arguments_list=[
            ["filter_by", "filter_operator", "filter_value"],
            ["recipient_filter_operator", "recipient_filter_value"],
            ["order_by", "order_dir"],
        ],
    )
    records, pagination_message = pagination(
        client.spam_quarantine_message_search_request,
        args=args,
        quarantine_type=QUARANTINE_TYPE,
        start_date=start_date,
        end_date=end_date,
        filter_by=args.get("filter_by"),
        filter_operator=args.get("filter_operator"),
        filter_value=args.get("filter_value"),
        recipient_filter_operator=args.get("recipient_filter_operator"),
        recipient_filter_value=args.get("recipient_filter_value"),
        order_by=args.get("order_by"),
        order_dir=args.get("order_dir"),
    )
    # Flatten each record's attributes and carry the message ID alongside.
    messages = [
        dict(record.get("attributes", {}), mid=record.get("mid"))
        for record in records
    ]
    markdown = tableToMarkdown(
        name="Spam Quarantine Messages List",
        metadata=pagination_message,
        t=messages,
        headers=["mid", "date", "fromAddress", "toAddress", "subject", "size"],
        headerTransform=pascalToSpace,
    )
    return CommandResults(
        readable_output=markdown,
        outputs_prefix="CiscoESA.SpamQuarantineMessage",
        outputs_key_field="mid",
        outputs=messages,
        raw_response=messages,
    )
def spam_quarantine_message_get_command(
    client: Client, args: Dict[str, Any]
) -> CommandResults:
    """
    Get spam quarantine message details.
    Args:
        client (Client): Cisco ESA API client.
        args (Dict[str, Any]): Command arguments from XSOAR.
    Returns:
        CommandResults: readable outputs for XSOAR.
    """
    response: Dict[str, Any] = client.spam_quarantine_message_get_request(
        QUARANTINE_TYPE, args["message_id"]
    ).get("data", {})
    # Flatten the message attributes and keep the message ID alongside.
    message = dict(response.get("attributes", {}), mid=response.get("mid"))
    markdown = tableToMarkdown(
        name="Spam Quarantine Message",
        metadata=f'Found spam quarantine message with ID: {message.get("mid")}',
        t=message,
        headers=["mid", "fromAddress", "toAddress", "date", "subject", "attachments"],
        headerTransform=pascalToSpace,
        removeNull=True,
    )
    return CommandResults(
        readable_output=markdown,
        outputs_prefix="CiscoESA.SpamQuarantineMessage",
        outputs_key_field="mid",
        outputs=message,
        raw_response=response,
    )
def spam_quarantine_message_release_command(
    client: Client, args: Dict[str, Any]
) -> List[CommandResults]:
    """
    Release spam quarantine message.
    Args:
        client (Client): Cisco ESA API client.
        args (Dict[str, Any]): Command arguments from XSOAR.
    Returns:
        List[CommandResults]: readable outputs for XSOAR.
    """
    results: List[CommandResults] = []
    # Release one message per request so each ID gets its own result entry.
    for message_id in format_number_list_argument(args["message_ids"]):
        response = client.spam_quarantine_message_release_request(
            RELEASE_ACTION, QUARANTINE_TYPE, [message_id]
        )
        # A totalCount of 1 indicates the message was found and released.
        if dict_safe_get(response, ["data", "totalCount"]) == 1:
            message = f"Quarantined message {message_id} successfully released."
        else:
            message = f"Quarantined message {message_id} not found."
        results.append(CommandResults(readable_output=message))
    return results
def spam_quarantine_message_delete_command(
    client: Client, args: Dict[str, Any]
) -> List[CommandResults]:
    """
    Delete spam quarantine message details.
    Args:
        client (Client): Cisco ESA API client.
        args (Dict[str, Any]): Command arguments from XSOAR.
    Returns:
        List[CommandResults]: readable outputs for XSOAR.
    """
    results: List[CommandResults] = []
    # Delete one message per request so each ID gets its own result entry.
    for message_id in format_number_list_argument(args["message_ids"]):
        response = client.spam_quarantine_message_delete_request(
            QUARANTINE_TYPE, [message_id]
        )
        # A totalCount of 1 indicates the message was found and deleted.
        if dict_safe_get(response, ["data", "totalCount"]) == 1:
            message = f"Quarantined message {message_id} successfully deleted."
        else:
            message = f"Quarantined message {message_id} not found."
        results.append(CommandResults(readable_output=message))
    return results
def list_entry_get_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """
    List spam quarantine blocklist/safelist.
    Args:
        client (Client): Cisco ESA API client.
        args (Dict[str, Any]): Command arguments from XSOAR.
    Returns:
        CommandResults: readable outputs for XSOAR.
    """
    entry_type: str = args["entry_type"]
    view_by = args.get("view_by")
    # Ordering arguments must come as a complete pair.
    validate_related_arguments(
        args=args, related_arguments_list=[["order_by", "order_dir"]]
    )
    records, pagination_message = pagination(
        client.list_entry_get_request,
        args=args,
        entry_type=entry_type,
        action=VIEW_ACTION,
        quarantine_type=QUARANTINE_TYPE,
        view_by=view_by,
        order_by=args.get("order_by"),
        order_dir=args.get("order_dir"),
        search=args.get("search"),
    )
    # Table layout and key field depend on which side the list is viewed from.
    if view_by == "recipient":
        headers = ["recipientAddress", "senderList"]
        key_field = "recipientAddress"
    else:
        headers = ["senderAddress", "recipientList"]
        key_field = "senderAddress"
    markdown = tableToMarkdown(
        name=f"{entry_type.title()} Entries",
        metadata=pagination_message,
        t=records,
        headers=headers,
        headerTransform=pascalToSpace,
    )
    return CommandResults(
        readable_output=markdown,
        outputs_prefix=f"CiscoESA.ListEntry.{entry_type.title()}",
        outputs_key_field=key_field,
        outputs=records,
        raw_response=records,
    )
def list_entry_add_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """
    Add spam quarantine blocklist/safelist entries.
    Args:
        client (Client): Cisco ESA API client.
        args (Dict[str, Any]): Command arguments from XSOAR.
    Returns:
        CommandResults: readable outputs for XSOAR.
    """
    entry_type = args["entry_type"]
    view_by = args["view_by"]
    # Keep only the argument pair relevant for the requested view.
    formatted = format_list_entry_arguments(view_by=view_by, args=args)
    recipient_addresses = argToList(formatted.get("recipient_addresses"))
    sender_list = argToList(formatted.get("sender_list"))
    sender_addresses = argToList(formatted.get("sender_addresses"))
    recipient_list = argToList(formatted.get("recipient_list"))
    response = client.list_entry_add_request(
        entry_type=entry_type,
        quarantine_type=QUARANTINE_TYPE,
        action=ADD_ACTION,
        view_by=view_by,
        recipient_addresses=recipient_addresses,
        sender_list=sender_list,
        sender_addresses=sender_addresses,
        recipient_list=recipient_list,
    )
    senders = ", ".join(sender_list)
    recipients = ", ".join(recipient_list)
    recipient_targets = ", ".join(recipient_addresses)
    sender_targets = ", ".join(sender_addresses)
    if view_by == "recipient":
        readable_output = f"Successfully added {senders} senders to {recipient_targets} recipients in {entry_type}."
    else:
        readable_output = f"Successfully added {recipients} recipients to {sender_targets} senders in {entry_type}."
    return CommandResults(readable_output=readable_output, raw_response=response)
def list_entry_append_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """
    Append spam quarantine blocklist/safelist entries.
    Args:
        client (Client): Cisco ESA API client.
        args (Dict[str, Any]): Command arguments from XSOAR.
    Returns:
        CommandResults: readable outputs for XSOAR.
    """
    entry_type = args["entry_type"]
    view_by = args["view_by"]
    # Keep only the argument pair relevant for the requested view.
    formatted = format_list_entry_arguments(view_by=view_by, args=args)
    recipient_addresses = argToList(formatted.get("recipient_addresses"))
    sender_list = argToList(formatted.get("sender_list"))
    sender_addresses = argToList(formatted.get("sender_addresses"))
    recipient_list = argToList(formatted.get("recipient_list"))
    response = client.list_entry_append_request(
        entry_type=entry_type,
        quarantine_type=QUARANTINE_TYPE,
        action=APPEND_ACTION,
        view_by=view_by,
        recipient_addresses=recipient_addresses,
        sender_list=sender_list,
        sender_addresses=sender_addresses,
        recipient_list=recipient_list,
    )
    senders = ", ".join(sender_list)
    recipients = ", ".join(recipient_list)
    recipient_targets = ", ".join(recipient_addresses)
    sender_targets = ", ".join(sender_addresses)
    if view_by == "recipient":
        readable_output = f"Successfully appended {senders} senders to {recipient_targets} recipients in {entry_type}."
    else:
        readable_output = f"Successfully appended {recipients} recipients to {sender_targets} senders in {entry_type}."
    return CommandResults(readable_output=readable_output, raw_response=response)
def list_entry_edit_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """
    Edit spam quarantine blocklist/safelist entries.
    Args:
        client (Client): Cisco ESA API client.
        args (Dict[str, Any]): Command arguments from XSOAR.
    Returns:
        CommandResults: readable outputs for XSOAR.
    """
    entry_type = args["entry_type"]
    view_by = args["view_by"]
    # Keep only the argument pair relevant for the requested view.
    formatted = format_list_entry_arguments(view_by=view_by, args=args)
    recipient_addresses = argToList(formatted.get("recipient_addresses"))
    sender_list = argToList(formatted.get("sender_list"))
    sender_addresses = argToList(formatted.get("sender_addresses"))
    recipient_list = argToList(formatted.get("recipient_list"))
    response = client.list_entry_edit_request(
        entry_type=entry_type,
        quarantine_type=QUARANTINE_TYPE,
        action=EDIT_ACTION,
        view_by=view_by,
        recipient_addresses=recipient_addresses,
        sender_list=sender_list,
        sender_addresses=sender_addresses,
        recipient_list=recipient_list,
    )
    senders = ", ".join(sender_list)
    recipients = ", ".join(recipient_list)
    recipient_targets = ", ".join(recipient_addresses)
    sender_targets = ", ".join(sender_addresses)
    if view_by == "recipient":
        readable_output = f"Successfully edited {recipient_targets} recipients' senders to {senders} in {entry_type}."
    else:
        readable_output = f"Successfully edited {sender_targets} senders' recipients to {recipients} in {entry_type}."
    return CommandResults(readable_output=readable_output, raw_response=response)
def list_entry_delete_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """
    Delete spam quarantine blocklist/safelist entries.
    Args:
        client (Client): Cisco ESA API client.
        args (Dict[str, Any]): Command arguments from XSOAR.
    Returns:
        CommandResults: readable outputs for XSOAR.
    Raises:
        DemistoException: If the list required for the requested view is missing.
    """
    entry_type = args["entry_type"]
    view_by = args["view_by"]
    recipient_list = argToList(args.get("recipient_list"))
    sender_list = argToList(args.get("sender_list"))
    # Only the list matching the view is sent; the other is nullified.
    if view_by == "recipient":
        if not recipient_list:
            raise DemistoException(
                "Please specify recipient_list argument when using view_by recipient."
            )
        sender_list = None
    else:
        if not sender_list:
            raise DemistoException(
                "Please specify sender_list argument when using view_by sender."
            )
        recipient_list = None
    response = client.list_entry_delete_request(
        entry_type=entry_type,
        quarantine_type=QUARANTINE_TYPE,
        view_by=view_by,
        recipient_list=recipient_list,
        sender_list=sender_list,
    )
    deleted_entries = ", ".join(
        recipient_list if view_by == "recipient" else sender_list
    )
    return CommandResults(
        readable_output=f"Successfully deleted {deleted_entries} {view_by}s from {entry_type}.",
        raw_response=response,
    )
def message_search_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """
    Search tracking messages.
    Args:
        client (Client): Cisco ESA API client.
        args (Dict[str, Any]): Command arguments from XSOAR.
    Returns:
        CommandResults: readable outputs for XSOAR.
    """
    start_date = format_datetime(args["start_date"])
    end_date = format_datetime(args["end_date"])
    # Filter/ordering arguments must come in complete groups.
    validate_related_arguments(
        args=args,
        related_arguments_list=[
            ["sender_filter_operator", "sender_filter_value"],
            ["recipient_filter_operator", "recipient_filter_value"],
            ["subject_filter_operator", "subject_filter_value"],
            ["attachment_name_operator", "attachment_name_value"],
            ["order_by", "order_dir"],
        ],
    )
    records, pagination_message = pagination(
        client.message_search_request,
        args=args,
        start_date=start_date,
        end_date=end_date,
        sender_filter_operator=args.get("sender_filter_operator"),
        sender_filter_value=args.get("sender_filter_value"),
        recipient_filter_operator=args.get("recipient_filter_operator"),
        recipient_filter_value=args.get("recipient_filter_value"),
        subject_filter_operator=args.get("subject_filter_operator"),
        subject_filter_value=args.get("subject_filter_value"),
        attachment_name_operator=args.get("attachment_name_operator"),
        attachment_name_value=args.get("attachment_name_value"),
        cisco_host="All_Hosts",
        search_option="messages",
        file_sha_256=args.get("file_sha_256"),
        custom_query=args.get("custom_query"),
    )
    messages = []
    for record in records:
        attributes = dict(record.get("attributes", {}))
        # Normalize the timestamp for display and build a stable unique ID
        # by concatenating the record's message IDs.
        attributes["timestamp"] = format_timestamp(
            dict_safe_get(record, ["attributes", "timestamp"])
        )
        attributes["unique_message_id"] = "".join(
            map(str, dict_safe_get(record, ["attributes", "mid"]))
        )
        messages.append(attributes)
    markdown = tableToMarkdown(
        name="Messages List",
        metadata=pagination_message,
        t=messages,
        headers=[
            "mid",
            "allIcid",
            "serialNumber",
            "sender",
            "recipient",
            "subject",
            "messageStatus",
            "timestamp",
            "senderIp",
            "sbrs",
        ],
        headerTransform=pascalToSpace,
        removeNull=True,
    )
    return CommandResults(
        readable_output=markdown,
        outputs_prefix="CiscoESA.Message",
        outputs_key_field="unique_message_id",
        outputs=messages,
        raw_response=messages,
    )
def message_details_get_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """
    Get message details.
    Args:
        client (Client): Cisco ESA API client.
        args (Dict[str, Any]): Command arguments from XSOAR.
    Returns:
        CommandResults: readable outputs for XSOAR.
    Raises:
        DemistoException: If no message matches the given IDs/serial number.
    """
    serial_number = args["serial_number"]
    message_ids = format_number_list_argument(args["message_ids"])
    injection_connection_id = arg_to_number(args.get("injection_connection_id"))
    # Unwrap the nested "data" -> "messages" payload from the API response.
    response = (
        client.message_details_get_request(
            serial_number=serial_number,
            message_ids=message_ids,
            injection_connection_id=injection_connection_id,
        )
        .get("data", {})
        .get("messages", {})
    )
    mid = response.get("mid")
    # NOTE(review): "mid" appears to be an iterable of ID values — it is both
    # membership-tested for "N/A" and joined below. Confirm against the API.
    if not mid or "N/A" in mid:
        raise DemistoException(
            f'Message ID {", ".join(map(str, message_ids))} was not found.\n'
            f"Please check message IDs or Serial Number."
        )
    # Normalize the top-level timestamp for display and build a single
    # unique ID by concatenating the message IDs.
    response["timestamp"] = format_timestamp(response.get("timestamp"))
    response["unique_message_id"] = "".join(map(str, response.get("mid")))
    # Normalize each summary event's timestamp as well.
    for event in response.get("summary", ()):
        event["timestamp"] = format_timestamp(event.get("timestamp"))
    readable_output = tableToMarkdown(
        name="Message Details",
        metadata=f'Found message with ID {", ".join(map(str, response.get("mid")))}.',
        t=response,
        headers=[
            "mid",
            "allIcid",
            "subject",
            "sender",
            "recipient",
            "timestamp",
            "messageSize",
            "sendingHostSummary",
            "messageStatus",
            "direction",
            "mailPolicy",
            "senderGroup",
            "showAMP",
            "showDLP",
            "showURL",
        ],
        headerTransform=pascalToSpace,
    )
    summary_readable_output = tableToMarkdown(
        name="Message Summary",
        t=response.get("summary"),
        headers=["description", "timestamp", "lastEvent"],
        headerTransform=pascalToSpace,
        removeNull=True,
    )
    return CommandResults(
        readable_output=readable_output + summary_readable_output,
        outputs_prefix="CiscoESA.Message",
        outputs_key_field="unique_message_id",
        outputs=response,
        raw_response=response,
    )
def message_amp_details_get_command(
    client: Client, args: Dict[str, Any]
) -> CommandResults:
    """
    Get message AMP report details.
    Args:
        client (Client): Cisco ESA API client.
        args (Dict[str, Any]): Command arguments from XSOAR.
    Returns:
        CommandResults: readable outputs for XSOAR.
    Raises:
        DemistoException: If no message matches the given IDs/serial number.
    """
    serial_number = args["serial_number"]
    message_ids = format_number_list_argument(args["message_ids"])
    # Unwrap the nested "data" -> "messages" payload from the API response.
    response = (
        client.message_amp_details_get_request(
            serial_number=serial_number, message_ids=message_ids
        )
        .get("data", {})
        .get("messages", {})
    )
    mid = response.get("mid")
    if not mid or "N/A" in mid:
        raise DemistoException(
            f'Message ID {", ".join(map(str, message_ids))} was not found.\n'
            f"Please check message IDs or Serial Number."
        )
    response["timestamp"] = format_timestamp(response.get("timestamp"))
    readable_output = tableToMarkdown(
        name="Message AMP Report Details",
        metadata=f'Found AMP details for message ID {", ".join(map(str, response.get("mid")))}.',
        t=response,
        headers=[
            "mid",
            "allIcid",
            "subject",
            "sender",
            "recipient",
            "attachments",
            "timestamp",
            "messageSize",
            "messageStatus",
            "direction",
            "senderGroup",
        ],
        headerTransform=pascalToSpace,
    )
    amp_summary: List[Dict[str, Any]] = response.get("ampDetails")
    # Fix: default to an empty summary so the concatenation in the return
    # statement cannot raise UnboundLocalError when "ampDetails" is missing
    # or empty (the DLP/URL sibling commands always assign this variable).
    summary_readable_output = ""
    if amp_summary:
        # Normalize each summary event's timestamp for display.
        for event in amp_summary:
            timestamp = event.get("timestamp")
            if timestamp:
                event["timestamp"] = format_timestamp(timestamp)
        summary_readable_output = tableToMarkdown(
            name="Message AMP Report Details Summary",
            t=amp_summary,
            headers=["description", "timestamp"],
            headerTransform=pascalToSpace,
            removeNull=True,
        )
    return CommandResults(
        readable_output=readable_output + summary_readable_output,
        outputs_prefix="CiscoESA.AMPDetail",
        outputs_key_field="mid",
        outputs=response,
        raw_response=response,
    )
def message_dlp_details_get_command(
    client: Client, args: Dict[str, Any]
) -> CommandResults:
    """
    Get message DLP report details.
    Args:
        client (Client): Cisco ESA API client.
        args (Dict[str, Any]): Command arguments from XSOAR.
    Returns:
        CommandResults: readable outputs for XSOAR.
    Raises:
        DemistoException: If no message matches the given IDs/serial number.
    """
    serial_number = args["serial_number"]
    message_ids = format_number_list_argument(args["message_ids"])
    # Unwrap the nested "data" -> "messages" payload from the API response.
    message_data = (
        client.message_dlp_details_get_request(
            serial_number=serial_number, message_ids=message_ids
        )
        .get("data", {})
        .get("messages", {})
    )
    message_id_field = message_data.get("mid")
    if not message_id_field or "N/A" in message_id_field:
        raise DemistoException(
            f'Message ID {", ".join(map(str, message_ids))} was not found.\n'
            f"Please check message IDs or Serial Number."
        )
    message_data["timestamp"] = format_timestamp(message_data.get("timestamp"))
    details_markdown = tableToMarkdown(
        name="Message DLP Report Details",
        metadata=f'Found DLP details for message ID {", ".join(map(str, message_data.get("mid")))}.',
        t=message_data,
        headers=[
            "mid",
            "allIcid",
            "subject",
            "sender",
            "recipient",
            "attachments",
            "timestamp",
            "messageSize",
            "messageStatus",
            "direction",
            "senderGroup",
        ],
        headerTransform=pascalToSpace,
        removeNull=True,
    )
    dlp_summary: List[Dict[str, Any]] = message_data.get("dlpDetails")
    summary_markdown = tableToMarkdown(
        name="Message DLP Report Details Summary",
        t=dlp_summary,
        headers=["mid", "violationSeverity", "riskFactor", "dlpPolicy"],
        headerTransform=pascalToSpace,
    )
    return CommandResults(
        readable_output=details_markdown + summary_markdown,
        outputs_prefix="CiscoESA.DLPDetail",
        outputs_key_field="mid",
        outputs=message_data,
        raw_response=message_data,
    )
def message_url_details_get_command(
    client: Client, args: Dict[str, Any]
) -> CommandResults:
    """
    Get message URL report details.
    Args:
        client (Client): Cisco ESA API client.
        args (Dict[str, Any]): Command arguments from XSOAR.
    Returns:
        CommandResults: readable outputs for XSOAR.
    Raises:
        DemistoException: If no message matches the given IDs/serial number.
    """
    serial_number = args["serial_number"]
    message_ids = format_number_list_argument(args["message_ids"])
    # Unwrap the nested "data" -> "messages" payload from the API response.
    message_data = (
        client.message_url_details_get_request(
            serial_number=serial_number, message_ids=message_ids
        )
        .get("data", {})
        .get("messages", {})
    )
    message_id_field = message_data.get("mid")
    if not message_id_field or "N/A" in message_id_field:
        raise DemistoException(
            f'Message ID {", ".join(map(str, message_ids))} was not found.\n'
            f"Please check message IDs or Serial Number."
        )
    message_data["timestamp"] = format_timestamp(message_data.get("timestamp"))
    url_summary: List[Dict[str, Any]] = message_data.get("urlDetails")
    if url_summary:
        # Normalize each summary event's timestamp for display.
        for event in url_summary:
            event_time = event.get("timestamp")
            if event_time:
                event["timestamp"] = format_timestamp(event_time)
    details_markdown = tableToMarkdown(
        name="Message URL Report Details",
        metadata=f'Found URL details for message ID {", ".join(map(str, message_data.get("mid")))}.',
        t=message_data,
        headers=[
            "mid",
            "allIcid",
            "subject",
            "sender",
            "recipient",
            "attachments",
            "timestamp",
            "messageSize",
            "messageStatus",
            "direction",
            "senderGroup",
        ],
        headerTransform=pascalToSpace,
    )
    summary_markdown = tableToMarkdown(
        name="Message URL Report Details Summary",
        t=url_summary,
        headers=["description", "timestamp"],
        headerTransform=pascalToSpace,
        removeNull=True,
    )
    return CommandResults(
        readable_output=details_markdown + summary_markdown,
        outputs_prefix="CiscoESA.URLDetail",
        outputs_key_field="mid",
        outputs=message_data,
        raw_response=message_data,
    )
def report_get_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """
    Get statistics reports.
    Args:
        client (Client): Cisco ESA API client.
        args (Dict[str, Any]): Command arguments from XSOAR.
    Returns:
        CommandResults: readable outputs for XSOAR.
    """
    # A custom report type, when given, takes precedence over the preset one.
    report_type = args.get("custom_report_type", args.get("report_type"))
    response: Dict[str, Any] = client.report_get_request(
        report_type=report_type,
        start_date=format_reporting_datetime(args["start_date"]),
        end_date=format_reporting_datetime(args["end_date"]),
        device_type="esa",
        order_by=args.get("order_by"),
        order_dir=args.get("order_dir"),
        top=args.get("top"),
        filter_value=args.get("filter_value"),
        filter_by=args.get("filter_by"),
        filter_operator=args.get("filter_operator"),
    ).get("data", {})
    # Attach a generated UUID so each report has a unique context key.
    response["uuid"] = str(uuid.uuid4())
    try:
        # Flatten a result set shaped as a list of dicts into one table row.
        table = {
            k: v
            for results in response.get("resultSet", [{}])
            for k, v in results.items()
        }
    except Exception:
        # Some report types return a differently shaped resultSet; fall back
        # to rendering it (or the whole payload) as-is.
        table = response.get("resultSet", response)
    markdown = tableToMarkdown(
        name=f'Report type: {response.get("type")}',
        metadata=f'Report UUID: {response["uuid"]}',
        t=table,
        headerTransform=string_to_table_header,
        removeNull=True,
    )
    return CommandResults(
        readable_output=markdown,
        outputs_prefix="CiscoESA.Report",
        outputs_key_field="uuid",
        outputs=response,
        raw_response=response,
    )
def fetch_incidents(
    client: Client,
    max_fetch: int,
    first_fetch: str,
    last_run: Dict[str, Any],
    filter_by: str = None,
    filter_operator: str = None,
    filter_value: str = None,
    recipient_filter_operator: str = None,
    recipient_filter_value: str = None,
) -> tuple:
    """
    Fetch Cisco ESA quarantined messages as incidents.
    Args:
        client (Client): Cisco ESA API client.
        max_fetch (int): Max number of messages to fetch.
        first_fetch (str): From which time to fetch.
        last_run (Dict[str, Any]): Last run info.
        filter_by (str, optional): Filter results by message field. Defaults to None.
        filter_operator (str, optional): Filter operator. Defaults to None.
        filter_value (str, optional): Filter value. Defaults to None.
        recipient_filter_operator (str, optional): Recipient filter operator. Defaults to None.
        recipient_filter_value (str, optional): Recipient filter value. Defaults to None.
    Returns:
        tuple: Incidents and last run info.
    """
    # Resume from the saved cursor, or fall back to the first-fetch window.
    start_time = last_run.get("start_time")
    start_date = (
        format_last_run(start_time) if start_time else format_datetime(first_fetch)
    )
    end_date = format_datetime("now")
    quarantine_type = QUARANTINE_TYPE
    offset = 0
    # Ascending date order so the last processed incident carries the newest
    # timestamp, which becomes the next run's cursor.
    order_by = "date"
    order_dir = "asc"
    quarantine_messages: List[Dict[str, Any]] = client.spam_quarantine_message_search_request(
        quarantine_type=quarantine_type,
        start_date=start_date,
        end_date=end_date,
        offset=offset,
        limit=max_fetch,
        filter_by=filter_by,
        filter_operator=filter_operator,
        filter_value=filter_value,
        recipient_filter_operator=recipient_filter_operator,
        recipient_filter_value=recipient_filter_value,
        order_by=order_by,
        order_dir=order_dir,
    ).get("data", [])
    incidents: List[Dict[str, Any]] = []
    # IDs already ingested at the cursor timestamp, kept to avoid duplicates
    # when the same boundary minute is fetched again.
    last_minute_incident_ids = last_run.get("last_minute_incident_ids", [])
    for incident in quarantine_messages:
        incident_datetime = format_quarantine_timestamp(
            dict_safe_get(incident, ["attributes", "date"])
        )
        message_id = incident.get("mid")
        # NOTE(review): start_date and incident_datetime are produced with
        # different format constants (CISCO_TIME_FORMAT vs DATETIME_FORMAT)
        # and compared lexicographically — confirm the formats sort
        # consistently.
        if (
            message_id and message_id not in last_minute_incident_ids
            and start_date < incident_datetime
        ):
            # Fetch the full message details for the incident payload.
            quarantine_message: Dict[str, Any] = client.spam_quarantine_message_get_request(
                quarantine_type=quarantine_type,
                message_id=message_id
            ).get("data", {})
            incident_details = dict(
                quarantine_message.get("attributes", {}),
                **{"mid": quarantine_message.get("mid")},
            )
            incidents.append(
                {
                    "name": incident_details.get("subject"),
                    "occurred": incident_datetime,
                    "rawJSON": json.dumps(incident_details, ensure_ascii=False),
                }
            )
    if incidents:
        # Advance the cursor to the newest ingested incident and remember all
        # IDs sharing that exact timestamp for next run's deduplication.
        start_time = incidents[-1].get("occurred")
        last_run["start_time"] = start_time
        last_run["last_minute_incident_ids"] = [
            json.loads(incident.get("rawJSON", {})).get("mid")
            for incident in incidents
            if incident.get("occurred") == start_time
        ]
    return incidents, last_run
def test_module(client: Client, **kwargs) -> str:
    """
    Validates the correctness of the instance parameters and connectivity to Cisco ESA API service.
    Args:
        client (Client): Cisco ESA API client.
    Returns:
        str: "ok" when validation and the connectivity check succeed.
    """
    # Validate the fetch-related instance parameters before touching the API.
    arg_to_datetime(kwargs.get("first_fetch"))
    validate_related_arguments(
        kwargs,
        [
            ["filter_by", "filter_operator", "filter_value"],
            ["recipient_filter_operator", "recipient_filter_value"],
        ],
    )
    # Issue a minimal one-record message search to verify connectivity/auth.
    client.message_search_request(
        start_date=format_datetime("1 month"),
        end_date=format_datetime("now"),
        offset=0,
        limit=1,
        search_option="messages",
        cisco_host="All_Hosts",
    )
    return "ok"
def main() -> None:
    """Entry point: read integration settings, build the API client and route the command."""
    params: Dict[str, Any] = demisto.params()
    args: Dict[str, Any] = demisto.args()
    base_url = params.get("base_url")
    username = params.get("credentials", {}).get("identifier")
    password = params.get("credentials", {}).get("password")
    max_fetch = arg_to_number(params.get("max_fetch", DEFAULT_FETCH))
    first_fetch = params.get("first_fetch")
    filter_by = params.get("filter_by")
    filter_operator = params.get("filter_operator")
    filter_value = params.get("filter_value")
    recipient_filter_operator = params.get("recipient_filter_operator")
    recipient_filter_value = params.get("recipient_filter_value")
    # The "insecure" checkbox means "do not verify certificates", hence the negation.
    verify_certificate: bool = not params.get("insecure", False)
    proxy = params.get("proxy", False)
    command = demisto.command()
    # Dispatch table: every handler shares the (client, args) call signature.
    commands = {
        "cisco-esa-spam-quarantine-message-search": spam_quarantine_message_search_command,
        "cisco-esa-spam-quarantine-message-get": spam_quarantine_message_get_command,
        "cisco-esa-spam-quarantine-message-release": spam_quarantine_message_release_command,
        "cisco-esa-spam-quarantine-message-delete": spam_quarantine_message_delete_command,
        "cisco-esa-list-entry-get": list_entry_get_command,
        "cisco-esa-list-entry-add": list_entry_add_command,
        "cisco-esa-list-entry-append": list_entry_append_command,
        "cisco-esa-list-entry-edit": list_entry_edit_command,
        "cisco-esa-list-entry-delete": list_entry_delete_command,
        "cisco-esa-message-search": message_search_command,
        "cisco-esa-message-details-get": message_details_get_command,
        "cisco-esa-message-amp-details-get": message_amp_details_get_command,
        "cisco-esa-message-dlp-details-get": message_dlp_details_get_command,
        "cisco-esa-message-url-details-get": message_url_details_get_command,
        "cisco-esa-report-get": report_get_command,
    }
    try:
        client: Client = Client(
            urljoin(base_url, "/esa/api/v2.0"),
            username,
            password,
            verify_certificate,
            proxy,
        )
        if command == "test-module":
            return_results(
                test_module(
                    client,
                    max_fetch=max_fetch,
                    first_fetch=first_fetch,
                    filter_by=filter_by,
                    filter_operator=filter_operator,
                    filter_value=filter_value,
                    recipient_filter_operator=recipient_filter_operator,
                    recipient_filter_value=recipient_filter_value,
                )
            )
        elif command == "fetch-incidents":
            # fetch_incidents returns both the new incidents and the updated
            # checkpoint state, which must be persisted for the next run.
            incidents, last_run = fetch_incidents(
                client,
                max_fetch,  # type: ignore
                first_fetch,  # type: ignore
                demisto.getLastRun(),
                filter_by,
                filter_operator,
                filter_value,
                recipient_filter_operator,
                recipient_filter_value,
            )
            demisto.setLastRun(last_run)
            demisto.incidents(incidents)
        elif command in commands:
            return_results(commands[command](client, args))
        else:
            raise NotImplementedError(f"{command} command is not implemented.")
    except Exception as e:
        # Surface any failure to the platform as a command error entry.
        return_error(str(e))
# Entry-point guard covering direct execution and the platform's builtin module names.
if __name__ in ["__main__", "builtin", "builtins"]:
    main()
| mit | 47e86d13c92163d5cfefb9b5bca45f89 | 32.57417 | 110 | 0.600763 | 3.996728 | false | false | false | false |
demisto/content | Packs/ShiftManagement/Scripts/GetOnCallHoursPerUser/GetOnCallHoursPerUser_test.py | 2 | 2316 | import json
import demistomock as demisto
from GetOnCallHoursPerUser import main
# Role fixtures: each role lists weekly shift windows as from/to
# day-hour-minute triplets (fromDay/toDay look like weekday indexes —
# confirm against the script's week model). 'Administrator' deliberately
# has no shifts.
ROLES = [
    {
        'name': 'Shift1',
        'shifts': [
            {'fromDay': 0, 'fromHour': 8, 'fromMinute': 0, 'toDay': 3, 'toHour': 12, 'toMinute': 0},
            {'fromDay': 4, 'fromHour': 16, 'fromMinute': 0, 'toDay': 6, 'toHour': 20, 'toMinute': 0}
        ]
    },
    {
        'name': 'Administrator',
        'shifts': None
    },
    {
        'name': 'Shift2',
        'shifts': [
            {'fromDay': 0, 'fromHour': 8, 'fromMinute': 0, 'toDay': 3, 'toHour': 12, 'toMinute': 0},
            {'fromDay': 4, 'fromHour': 16, 'fromMinute': 0, 'toDay': 6, 'toHour': 20, 'toMinute': 0},
            {'fromDay': 1, 'fromHour': 3, 'fromMinute': 0, 'toDay': 4, 'toHour': 6, 'toMinute': 0}
        ]
    }
]
# Canned entry list mimicking demisto.executeCommand('getRoles').
GET_ROLES_RESPONSE = [{
    'Type': 1,
    'Contents': ROLES
}]
# Canned entry list mimicking demisto.executeCommand('getUsers'):
# three shift users plus an admin whose role has no on-call shifts.
USERS = [
    {
        'Type': 1,
        'Contents': [
            {
                'id': 'user1',
                'name': 'User1',
                'roles': {
                    'demisto': ['Shift1']
                }
            },
            {
                'id': 'user2',
                'name': 'User2',
                'roles': {
                    'demisto': ['Shift1']
                }
            },
            {
                'id': 'user3',
                'name': 'User3',
                'roles': {
                    'demisto': ['Shift2']
                }
            },
            {
                'id': 'admin',
                'name': 'Admin',
                'roles': {
                    'demisto': ['Administrator']
                }
            }
        ]
    }
]
def execute_command(name, args=None):
    """Stand-in for demisto.executeCommand used as the mocker side_effect.

    Returns the canned fixture for the two commands the script issues and
    None for anything else; ``args`` is accepted but ignored.
    """
    canned_responses = {
        'getRoles': GET_ROLES_RESPONSE,
        'getUsers': USERS,
    }
    return canned_responses.get(name)
def test_get_oncall_hours_per_user(mocker):
    """main() should emit one chart entry with weekly on-call hours per user.

    demisto.executeCommand is mocked with the fixtures above.
    Expected totals from the fixtures: Shift1 windows are 76h + 52h = 128h
    (User1, User2); Shift2 adds a 75h window for 203h (User3); the
    Administrator role has no shifts, hence 0 hours (Admin).
    """
    mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command)
    mocker.patch.object(demisto, 'results')
    main()
    results = demisto.results.call_args[0]
    assert len(results) == 1
    assert json.loads(results[0]) == [
        {'data': [128], 'name': 'User1'},
        {'data': [128], 'name': 'User2'},
        {'data': [0], 'name': 'Admin'},
        {'data': [203], 'name': 'User3'}
    ]
| mit | 8042995753f7122d12d87a7c72332d63 | 24.733333 | 101 | 0.412349 | 3.472264 | false | false | false | false |
demisto/content | Packs/CommonScripts/Scripts/MarkAsNoteByTag/MarkAsNoteByTag.py | 2 | 1144 | import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
def mark_as_note(entries):
    """Mark every war-room entry carrying the ``tag`` argument as a note.

    Args:
        entries (list): Result of the ``getEntries`` command; the first item
            is an error entry when retrieval failed.

    Side effects:
        Emits either the ``markAsNote`` command result or an error entry via
        ``demisto.results``.
    """
    if isError(entries[0]):
        demisto.results(
            {'Type': entryTypes['error'], 'ContentsFormat': formats['text'], 'Contents': 'Unable to retrieve entries'})
    else:
        tag = demisto.getArg('tag')
        # Collect the IDs of all entries carrying the requested tag.
        # (list + join replaces the original quadratic string concatenation)
        ids = [
            e['Metadata']['id']
            for e in entries
            if tag in (e.get('Metadata', {}).get('tags') or [])
        ]
        if ids:
            # markAsNote expects a single comma-separated list of entry IDs.
            demisto.results(demisto.executeCommand('markAsNote', {'entryIDs': ','.join(ids)}))
        else:
            demisto.results({'Type': entryTypes['error'], 'ContentsFormat': formats['text'],
                             'Contents': "No entries with '" + tag + "' found"})
def main():
    """Fetch all war-room entries and mark the tagged ones as notes."""
    mark_as_note(demisto.executeCommand('getEntries', {}))
# Entry-point guard: direct execution plus the platform's builtin module names.
if __name__ in ('__main__', '__builtin__', 'builtins'):  # pragma: no cover
    main()
| mit | f33ac595ba7b2c8ee5cedd4f45ec0400 | 33.666667 | 119 | 0.51049 | 4.071174 | false | false | false | false |
demisto/content | Packs/CommonScripts/Scripts/RemoveKeyFromList/RemoveKeyFromList.py | 2 | 2173 | """RemoveKeyFromList
Removes a Key from a JSON-backed List
"""
import demistomock as demisto
from CommonServerPython import * # noqa # pylint: disable=unused-wildcard-import
from CommonServerUserPython import * # noqa
from typing import Dict, Any
import traceback
''' STANDALONE FUNCTION '''
def remove_key_from_list(list_name: str, key_name: str) -> str:
    """Remove ``key_name`` from the JSON object stored in the XSOAR list ``list_name``.

    Args:
        list_name (str): Name of the XSOAR list holding a JSON object.
        key_name (str): Key to delete from that object.

    Returns:
        str: Human-readable result message.

    Raises:
        ValueError: If the list cannot be retrieved or does not contain valid JSON.
    """
    res = demisto.executeCommand('getList', {'listName': list_name})
    if (
        not isinstance(res, list)
        or 'Contents' not in res[0]
        or not isinstance(res[0]['Contents'], str)
        or res[0]['Contents'] == 'Item not found (8)'
    ):
        raise ValueError(f'Cannot retrieve list {list_name}')

    list_data: Dict = {}
    data: str = res[0]['Contents']
    if data and len(data) > 0:
        try:
            list_data = json.loads(data)
        except json.decoder.JSONDecodeError as e:
            raise ValueError(f'List does not contain valid JSON data: {e}')

    # Membership test rather than a truthiness check on the popped value, so
    # keys mapped to falsy values ("", 0, False, {}) are still removed. The
    # previous pop()-then-check version reported such keys as "not found"
    # and silently skipped persisting the change.
    if key_name not in list_data:
        return f'Key {key_name} not found in list {list_name}, cannot remove.'
    del list_data[key_name]

    demisto.executeCommand('setList', {'listName': list_name, 'listData': json.dumps(list_data)})

    return f'Successfully removed key {key_name} from list {list_name}.'
''' COMMAND FUNCTION '''
def remove_key_from_list_command(args: Dict[str, Any]) -> CommandResults:
    """Command wrapper: validate arguments and delegate to remove_key_from_list."""
    list_name = args.get('listName')
    if not list_name:
        raise ValueError('listName must be specified')

    key_name = args.get('keyName')
    if not key_name:
        raise ValueError('keyName must be specified')

    # Run the standalone function and wrap its message for the war room.
    readable = remove_key_from_list(list_name, key_name)
    return CommandResults(readable_output=readable)
''' MAIN FUNCTION '''
def main():
    """Script entry point: run the command and surface any failure to the platform."""
    try:
        result = remove_key_from_list_command(demisto.args())
        return_results(result)
    except Exception as ex:
        demisto.error(traceback.format_exc())  # log the full traceback for debugging
        return_error(f'Failed to execute RemoveKeyFromList. Error: {str(ex)}')
''' ENTRY POINT '''
# Entry-point guard: direct execution plus the platform's builtin module names.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | 7351a7d1257b8d1acb64adce3aa9799b | 26.858974 | 97 | 0.64381 | 3.621667 | false | false | false | false |
demisto/content | Tests/scripts/create_artifacts_graph/create_artifacts.py | 2 | 2802 | from argparse import ArgumentParser
from pathlib import Path
from demisto_sdk.commands.content_graph.interface.neo4j.neo4j_graph import Neo4jContentGraphInterface
from demisto_sdk.commands.common.constants import MarketplaceVersions
from demisto_sdk.commands.content_graph.objects.repository import ContentDTO
from Tests.scripts.utils.log_util import install_logging
import logging as logger
from demisto_sdk.commands.common.logger import logging_setup
from demisto_sdk.commands.common.tools import get_content_path
import json
logging_setup(3)
install_logging("create_artifacts.log", logger=logger)
def create_zips(content_dto: ContentDTO, output: Path, marketplace: str, zip: bool):
    """Dump the content repository's packs to ``output`` for the given marketplace, optionally zipped."""
    content_dto.dump(output, marketplace, zip)
def create_dependencies(content_dto: ContentDTO, output: Path):
    """Write a JSON mapping of every pack to its direct and all-level dependencies.

    Args:
        content_dto (ContentDTO): Marshalled content graph.
        output (Path): Destination file for the dependencies JSON.
    """
    pack_dependencies = {}
    for pack in content_dto.packs:
        direct_dependencies = {}
        all_level_dependencies = []
        for dependency in pack.depends_on:
            dependency_id = dependency.content_item.object_id
            all_level_dependencies.append(dependency_id)
            # Only direct dependencies carry display metadata.
            if dependency.is_direct:
                direct_dependencies[dependency_id] = {
                    "display_name": dependency.content_item.name,
                    "mandatory": dependency.mandatorily,
                }
        pack_dependencies[pack.object_id] = {
            "path": str(pack.path.relative_to(get_content_path())),
            "fullPath": str(pack.path),
            "dependencies": direct_dependencies,
            "displayedImages": list(direct_dependencies),
            "allLevelDependencies": all_level_dependencies,
        }
    with open(output, "w") as f:
        json.dump(pack_dependencies, f, indent=4)
def main():
    """CLI entry point: marshal the content graph, then emit artifact zips and a dependencies map."""
    parser = ArgumentParser()
    parser.add_argument("-mp", "--marketplace", type=MarketplaceVersions, help="marketplace version", default="xsoar")
    parser.add_argument("-ao", "--artifacts-output", help="Artifacts output directory", required=True)
    parser.add_argument("-do", "--dependencies-output", help="Dependencies output directory", required=True)
    # --zip / --no-zip toggle the same destination flag (default: zip).
    parser.add_argument("--zip", default=True, action="store_true")
    parser.add_argument("--no-zip", dest="zip", action="store_false")
    args = parser.parse_args()
    with Neo4jContentGraphInterface() as interface:
        content_dto: ContentDTO = interface.marshal_graph(args.marketplace, all_level_dependencies=True)
    logger.info("Creating content artifacts zips")
    create_zips(content_dto, Path(args.artifacts_output), args.marketplace, args.zip)
    logger.info("Creating pack dependencies mapping")
    create_dependencies(content_dto, Path(args.dependencies_output))
# Script entry point.
if __name__ == "__main__":
    main()
| mit | 0ffcc1afe4957a6eabc17f24ca0ffe7d | 42.78125 | 118 | 0.697359 | 3.902507 | false | false | false | false |
demisto/content | Packs/QRadar/Scripts/QRadarPrintAssets/QRadarPrintAssets.py | 2 | 1126 | import json
import demistomock as demisto # noqa: F401
import yaml
from CommonServerPython import * # noqa: F401
def main():
    """Render the QRadar incident's `assettable` custom field as a markdown note entry.

    Returns an empty string when the field is absent, otherwise a war-room
    entry dict; errors are reported via return_error.
    """
    try:
        incident = demisto.incident()
        assets = incident.get('CustomFields', {}).get('assettable', {})
        if not assets:
            return ''
        # The field may arrive JSON-encoded; only decode when it is a string.
        # (The previous `not isinstance(assets, dict)` check passed an
        # already-parsed list to json.loads, which raises TypeError.)
        if isinstance(assets, str):
            assets = json.loads(assets)
        if not isinstance(assets, list):
            assets = [assets]
        for asset in assets:
            if "interfaces" in asset:
                if isinstance(asset["interfaces"], str):
                    asset["interfaces"] = json.loads(asset["interfaces"])
                # using yaml to prettify the output of the field
                asset["interfaces"] = yaml.dump(asset["interfaces"])
        markdown = tableToMarkdown("Asset Table", assets)
        return {'ContentsFormat': formats['markdown'], 'Type': entryTypes['note'], 'Contents': markdown}
    except Exception as exp:
        return_error('could not parse QRadar assets', error=exp)
# Entry-point guard: direct execution plus the platform's builtin module names.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    return_results(main())
| mit | 02a98023483d47a32e6d0409060e7f76 | 29.432432 | 104 | 0.587034 | 4.398438 | false | false | false | false |
demisto/content | Packs/ThreatGrid/Integrations/FeedCiscoSecureMalwareAnalytics/FeedCiscoSecureMalwareAnalytics_test.py | 2 | 3414 | from FeedCiscoSecureMalwareAnalytics import Client, fetch_indicators, fetch_indicators_command, \
create_entity_relationships
from CommonServerPython import *
from test_data.feed_data import banking_dns_response, sinkholed_ip_dns_response
def test_fetch_indicators(requests_mock):
    """fetch_indicators should build indicators from a single mocked feed.

    Given: a client configured with the 'sinkholed-ip-dns' feed and a mocked
        feed endpoint returning the fixture response.
    When: fetch_indicators runs with no limit.
    Then: same-value entries are unified and 15 indicators are produced.
    """
    first_fetch = arg_to_datetime(arg='today', arg_name='First fetch')
    client = Client(api_key='1234', verify=False, feed_name=['sinkholed-ip-dns'], first_fetch=first_fetch,
                    tlp_color="", feed_tags="", create_relationships=True)
    requests_mock.get(
        f'https://panacea.threatgrid.com/api/v3/feeds/sinkholed-ip-dns.json?api_key={client._api_key}',
        json=sinkholed_ip_dns_response,
    )
    indicators, status = fetch_indicators(client, None)
    assert len(indicators) == 15
def test_fetch_indicators_command_list(requests_mock, mocker):
    """fetch_indicators_command should unify indicators across two feeds.

    Given: a client configured with both 'sinkholed-ip-dns' and 'banking-dns'
        feeds, each mocked with fixture responses.
    When: the command runs twice (exercising repeat fetches).
    Then: duplicate values are unified and their 'FeedRelatedIndicators',
        'relationships' and 'Tags' fields reflect the merged data.
    """
    first_fetch = arg_to_datetime(arg='today', arg_name='First fetch')
    client = Client(
        api_key='1234', verify=False, feed_name=['sinkholed-ip-dns', 'banking-dns'], first_fetch=first_fetch,
        tlp_color="", feed_tags="", create_relationships=True)
    requests_mock.get(
        f'https://panacea.threatgrid.com/api/v3/feeds/sinkholed-ip-dns.json?api_key={client._api_key}',
        json=sinkholed_ip_dns_response,
    )
    requests_mock.get(
        f'https://panacea.threatgrid.com/api/v3/feeds/banking-dns.json?api_key={client._api_key}',
        json=banking_dns_response,
    )
    # Capture the indicators handed to the platform on the last call.
    a = mocker.patch.object(demisto, "createIndicators")
    fetch_indicators_command(client)
    fetch_indicators_command(client)
    for indicator in a.call_args.args[0]:
        if indicator['value'] == 'Example1.com':
            assert len(indicator['fields']['FeedRelatedIndicators']) == 14
            assert len(indicator['relationships']) == 14
            assert len(indicator['fields']['Tags']) == 2
        if indicator['value'] == 'Example3.com':
            assert len(indicator['fields']['FeedRelatedIndicators']) == 4
            assert len(indicator['relationships']) == 4
            assert len(indicator['fields']['Tags']) == 1
    assert len(a.call_args.args[0]) == 28
def test_create_entity_relationships():
    """create_entity_relationships should link each related indicator to the domain.

    Given: a domain name and one related IP indicator.
    When: create_entity_relationships is called.
    Then: a Domain -> IP relationship with the expected entities is returned.
    """
    domain_name = "test.com"
    relevant_indicators = [
        {
            'type': 'IP',
            'value': '1.1.1.1'
        }
    ]
    relationships = create_entity_relationships(relevant_indicators, domain_name)
    assert relationships[0].get('entityB') == '1.1.1.1'
    assert relationships[0].get('entityBType') == 'IP'
    assert relationships[0].get('entityA') == 'test.com'
    assert relationships[0].get('entityAType') == 'Domain'
demisto/content | Packs/Cybersixgill-DVE/Integrations/CybersixgillDVEEnrichment/CybersixgillDVEEnrichment_test.py | 2 | 21133 | import requests
import pytest
import json
import demistomock as demisto
cve_enrich = {
"created": "2020-06-26T00:00:00.001000Z",
"description": "A vulnerability exists that could allow the execution of unauthorized code or operating "
"system commands on systems running exacqVision Web Service versions 20.06.3.0 and prior "
"and exacqVision Enterprise Manager versions 20.06.4.0 and prior. An attacker with "
"administrative privileges could potentially download and run a malicious executable "
"that could allow OS command injection on the system.",
"external_references": [{"external_id": "CVE-2020-9047", "source_name": "cve"}],
"id": "vulnerability--143fb02c-accf-947e-4619-e0befa4e7068",
"last_activity_date": "2021-03-28T02:05:19Z",
"name": "CVE-2020-9047",
"type": "vulnerability",
"x_sixgill_info": {
"attributes": [
{
"description": "This CVE was mentioned at least once by the actor Metasploit",
"name": "Metasploit_attribute",
"value": False,
},
{
"description": "This CVE is currently recently trending in the cyber Underground",
"name": "Is_Trend_Underground_attribute",
"value": False,
},
{
"description": "This CVE scanned at least once by hacktivism collective “Anonymous”",
"name": "Is_Scanned_by_Anonymous_attribute",
"value": False,
},
{
"description": "This CVE is currently trending in the Chinese Underground",
"name": "Is_Trend_Chinese_attribute",
"value": False,
},
{
"description": "This CVE has at least one published Proof of Concept (POC) exploit.",
"name": "Has_POC_exploit_attribute",
"value": False,
},
{
"description": "The CVE is part of an at least one exploit kit",
"name": "Has_Exploit_kit_attribute",
"value": False,
},
{
"description": "This CVE is currently trending in the Russian Underground",
"name": "Is_Trend_Russian_attribute",
"value": False,
},
{
"description": "This CVE is currently trending in the Arab Underground",
"name": "Is_Trend_Arabic_attribute",
"value": False,
},
{
"description": "This CVE is currently trending in the Farsi Underground",
"name": "Is_Trend_Farsi_attribute",
"value": False,
},
{
"description": "This CVE is currently trending on GitHub",
"name": "Is_Trend_GitHub_General_attribute",
"value": False,
},
{
"description": "This CVE is currently trending on Twitter",
"name": "Is_Trend_Twitter_attribute",
"value": False,
},
],
"github": {
"activity": {"first_date": "2020-06-26T12:46:26Z", "last_date": "2021-03-28T02:05:19Z"},
"github_forks": 5,
"github_projects": 2,
"github_watchers": 38,
"projects": [
{"link": "https://github.com/xqx12/daily-info", "name": "xqx12/daily-info"},
{"link": "https://github.com/norrismw/CVE-2020-9047", "name": "norrismw/CVE-2020-9047"},
],
},
"mentions": {
"first_mention": "2019-12-08T13:03:54Z",
"last_mention": "2021-01-02T22:19:18Z",
"mentions_total": 17,
},
"nvd": {
"configurations": {
"nodes": [
{
"cpe_match": [
{
"cpe23Uri": "cpe:2.3:a:exacq:exacq:*:*:*:*:*:*:*:*",
"versionEndIncluding": "20.06.3.0",
"vulnerable": True,
},
{
"cpe23Uri": "cpe:2.3:a:exacq:exacqvision_enterprise_manager:*:*:*:*:*:*:*:*",
"versionEndIncluding": "20.06.4.0",
"vulnerable": True,
},
],
"operator": "OR",
}
],
"version": "4.0",
},
"link": "https://nvd.nist.gov/vuln/detail/CVE-2020-9047",
"modified": "2020-08-17T17:43:00Z",
"published": "2020-06-26T19:15:00Z",
"v2": {
"accessVector": "NETWORK",
"attackComplexity": None,
"attackVector": None,
"authentication": "SINGLE",
"availabilityImpact": "COMPLETE",
"confidentialityImpact": "COMPLETE",
"current": 9.0,
"exploitabilityScore": 8.0,
"impactScore": 10.0,
"integrityImpact": "COMPLETE",
"obtainAllPrivilege": False,
"obtainOtherPrivilege": False,
"obtainUserPrivilege": False,
"privilegesRequired": None,
"severity": "HIGH",
"userInteraction": None,
"userInteractionRequired": False,
"vector": "AV:N/AC:L/Au:S/C:C/I:C/A:C",
},
"v3": {
"accessVector": None,
"attackComplexity": "LOW",
"attackVector": "NETWORK",
"authentication": None,
"availabilityImpact": "HIGH",
"confidentialityImpact": "HIGH",
"current": 7.2,
"exploitabilityScore": 1.2,
"impactScore": 5.9,
"integrityImpact": "HIGH",
"privilegesRequired": "HIGH",
"severity": "HIGH",
"userInteraction": "NONE",
"vector": "CVSS:3.1/AV:N/AC:L/PR:H/UI:N/S:U/C:H/I:H/A:H",
},
},
"score": {
"current": 4.83,
"highest": {"date": "2020-07-14T00:00:00Z", "value": 8.21},
"history": [
{"current": 4.83, "date": "2021-04-01T00:00:00Z", "previouslyExploited": 0.0},
{"current": 4.83, "date": "2021-03-01T00:00:00Z", "previouslyExploited": 0.0},
{"current": None, "date": "2021-02-01T00:00:00Z", "previouslyExploited": 0.0},
{"current": None, "date": "2021-01-01T00:00:00Z", "previouslyExploited": 0.0},
{"current": None, "date": "2020-12-01T00:00:00Z", "previouslyExploited": 0.0},
{"current": None, "date": "2020-11-01T00:00:00Z", "previouslyExploited": 0.0},
{"current": 4.59, "date": "2020-10-01T00:00:00Z", "previouslyExploited": 0.0},
{"current": 6.72, "date": "2020-09-01T00:00:00Z", "previouslyExploited": 0.0},
{"current": 6.74, "date": "2020-08-01T00:00:00Z", "previouslyExploited": 0.0},
{"current": 7.1, "date": "2020-07-01T00:00:00Z", "previouslyExploited": 0.55},
{"current": 5.74, "date": "2020-06-01T00:00:00Z", "previouslyExploited": 1.3},
],
"previouslyExploited": 0.0,
},
},
}
expected_enrich_output = [
{
'value': 'CVE-2020-9047',
'Description': 'A vulnerability exists that could allow the execution of unauthorized code or operating '
'system commands on systems running exacqVision Web Service versions 20.06.3.0 and prior '
'and exacqVision Enterprise Manager versions 20.06.4.0 and prior. '
'An attacker with administrative privileges could potentially download and '
'run a malicious executable that could allow OS command injection on the system.',
'Created': '2020-06-26T00:00:00.001000Z',
'Modified': '',
'Cybersixgill_DVE_score_current': 4.83,
'Cybersixgill_DVE_score_highest_ever_date': '2020-07-14T00:00:00Z',
'Cybersixgill_DVE_score_highest_ever': 8.21,
'Cybersixgill_Previously_exploited_probability': 0.0,
'Previous_Level': '',
'CVSS_3_1_score': 1.2,
'CVSS_3_1_severity': 'HIGH',
'NVD_Link': 'https://nvd.nist.gov/vuln/detail/CVE-2020-9047',
'NVD_last_modified_date': '2020-08-17T17:43:00Z',
'NVD_publication_date': '2020-06-26T19:15:00Z',
'CVSS_2_0_score': 8.0,
'CVSS_2_0_severity': 'HIGH',
'NVD_Vector_V2_0': 'AV:N/AC:L/Au:S/C:C/I:C/A:C',
'NVD_Vector_V3_1': 'CVSS:3.1/AV:N/AC:L/PR:H/UI:N/S:U/C:H/I:H/A:H',
'rawJSON': {
'created': '2020-06-26T00:00:00.001000Z',
'description': 'A vulnerability exists that could allow the execution of unauthorized '
'code or operating system commands on systems running exacqVision Web '
'Service versions 20.06.3.0 and prior and exacqVision Enterprise Manager '
'versions 20.06.4.0 and prior. An attacker with administrative '
'privileges could potentially download and run a malicious executable '
'that could allow OS command injection on the system.',
'external_references': [{
'external_id': 'CVE-2020-9047',
'source_name': 'cve'
}],
'id': 'vulnerability--143fb02c-accf-947e-4619-e0befa4e7068',
'last_activity_date': '2021-03-28T02:05:19Z',
'name': 'CVE-2020-9047',
'type': 'vulnerability',
'x_sixgill_info': {
'attributes': [{
'description': 'This CVE was mentioned at least once by the actor Metasploit',
'name': 'Metasploit_attribute',
'value': False
}, {
'description': 'This CVE is currently recently trending in the cyber Underground',
'name': 'Is_Trend_Underground_attribute',
'value': False
}, {
'description': 'This CVE scanned at least once by hacktivism collective “Anonymous”',
'name': 'Is_Scanned_by_Anonymous_attribute',
'value': False
}, {
'description': 'This CVE is currently trending in the Chinese Underground',
'name': 'Is_Trend_Chinese_attribute',
'value': False
}, {
'description': 'This CVE has at least one published Proof of Concept (POC) exploit.',
'name': 'Has_POC_exploit_attribute',
'value': False
}, {
'description': 'The CVE is part of an at least one exploit kit',
'name': 'Has_Exploit_kit_attribute',
'value': False
}, {
'description': 'This CVE is currently trending in the Russian Underground',
'name': 'Is_Trend_Russian_attribute',
'value': False
}, {
'description': 'This CVE is currently trending in the Arab Underground',
'name': 'Is_Trend_Arabic_attribute',
'value': False
}, {
'description': 'This CVE is currently trending in the Farsi Underground',
'name': 'Is_Trend_Farsi_attribute',
'value': False
}, {
'description': 'This CVE is currently trending on GitHub',
'name': 'Is_Trend_GitHub_General_attribute',
'value': False
}, {
'description': 'This CVE is currently trending on Twitter',
'name': 'Is_Trend_Twitter_attribute',
'value': False
}],
'github': {
'activity': {
'first_date': '2020-06-26T12:46:26Z',
'last_date': '2021-03-28T02:05:19Z'
},
'github_forks': 5,
'github_projects': 2,
'github_watchers': 38,
'projects': [{
'link': 'https://github.com/xqx12/daily-info',
'name': 'xqx12/daily-info'
}, {
'link': 'https://github.com/norrismw/CVE-2020-9047',
'name': 'norrismw/CVE-2020-9047'
}]
},
'mentions': {
'first_mention': '2019-12-08T13:03:54Z',
'last_mention': '2021-01-02T22:19:18Z',
'mentions_total': 17
},
'nvd': {
'configurations': {
'nodes': [{
'cpe_match': [{
'cpe23Uri': 'cpe:2.3:a:exacq:exacq:*:*:*:*:*:*:*:*',
'versionEndIncluding': '20.06.3.0',
'vulnerable': True
}, {
'cpe23Uri': 'cpe:2.3:a:exacq:exacqvision_enterprise_manager:*:*:*:*:*:*:*:*',
'versionEndIncluding': '20.06.4.0',
'vulnerable': True
}],
'operator': 'OR'
}],
'version': '4.0'
},
'link': 'https://nvd.nist.gov/vuln/detail/CVE-2020-9047',
'modified': '2020-08-17T17:43:00Z',
'published': '2020-06-26T19:15:00Z',
'v2': {
'accessVector': 'NETWORK',
'attackComplexity': None,
'attackVector': None,
'authentication': 'SINGLE',
'availabilityImpact': 'COMPLETE',
'confidentialityImpact': 'COMPLETE',
'current': 9.0,
'exploitabilityScore': 8.0,
'impactScore': 10.0,
'integrityImpact': 'COMPLETE',
'obtainAllPrivilege': False,
'obtainOtherPrivilege': False,
'obtainUserPrivilege': False,
'privilegesRequired': None,
'severity': 'HIGH',
'userInteraction': None,
'userInteractionRequired': False,
'vector': 'AV:N/AC:L/Au:S/C:C/I:C/A:C'
},
'v3': {
'accessVector': None,
'attackComplexity': 'LOW',
'attackVector': 'NETWORK',
'authentication': None,
'availabilityImpact': 'HIGH',
'confidentialityImpact': 'HIGH',
'current': 7.2,
'exploitabilityScore': 1.2,
'impactScore': 5.9,
'integrityImpact': 'HIGH',
'privilegesRequired': 'HIGH',
'severity': 'HIGH',
'userInteraction': 'NONE',
'vector': 'CVSS:3.1/AV:N/AC:L/PR:H/UI:N/S:U/C:H/I:H/A:H'
}
},
'score': {
'current': 4.83,
'highest': {
'date': '2020-07-14T00:00:00Z',
'value': 8.21
},
'history': [{
'current': 4.83,
'date': '2021-04-01T00:00:00Z',
'previouslyExploited': 0.0
}, {
'current': 4.83,
'date': '2021-03-01T00:00:00Z',
'previouslyExploited': 0.0
}, {
'current': None,
'date': '2021-02-01T00:00:00Z',
'previouslyExploited': 0.0
}, {
'current': None,
'date': '2021-01-01T00:00:00Z',
'previouslyExploited': 0.0
}, {
'current': None,
'date': '2020-12-01T00:00:00Z',
'previouslyExploited': 0.0
}, {
'current': None,
'date': '2020-11-01T00:00:00Z',
'previouslyExploited': 0.0
}, {
'current': 4.59,
'date': '2020-10-01T00:00:00Z',
'previouslyExploited': 0.0
}, {
'current': 6.72,
'date': '2020-09-01T00:00:00Z',
'previouslyExploited': 0.0
}, {
'current': 6.74,
'date': '2020-08-01T00:00:00Z',
'previouslyExploited': 0.0
}, {
'current': 7.1,
'date': '2020-07-01T00:00:00Z',
'previouslyExploited': 0.55
}, {
'current': 5.74,
'date': '2020-06-01T00:00:00Z',
'previouslyExploited': 1.3
}],
'previouslyExploited': 0.0
}
}
}
}
]
# Raw HTTP body served by the mocked transport; rewritten per request in mocked_request.
mock_response = ""
# Canned /auth/token response body.
mocked_get_token_response = """{"access_token": "fababfafbh"}"""
# Default command arguments used by the enrichment tests.
args = {"cve_id": "CVE-2020-9047"}
# Channel code passed to the Sixgill clients under test.
channel_code = "d5cd46c205c20c87006b55a18b106428"
class MockedResponse(object):
    """Minimal stand-in for a requests.Response used by the mocked transport."""

    def __init__(self, status_code, text, reason=None, url=None, method=None):
        self.status_code = status_code
        self.text = text
        self.reason = reason
        self.url = url
        self.request = requests.Request("GET")
        # Direct boolean instead of the redundant `True if ... else False`.
        # Note: truthy only for exactly HTTP 200 (narrower than requests' ok).
        self.ok = self.status_code == 200

    def json(self):
        """Return the response body parsed as JSON."""
        return json.loads(self.text)
def init_params():
    """Return the integration parameters used to configure the mocked demisto instance."""
    params = {
        "client_id": "WRONG_CLIENT_ID_TEST",
        "client_secret": "CLIENT_SECRET_TEST",
    }
    return params
def mocked_request(*args, **kwargs):
    """Replacement for requests.sessions.Session.send returning canned responses.

    Looks up the mocked response by HTTP method and request path. Unknown
    paths yield None; unknown methods raise AttributeError (same as the
    original lookup chain).
    """
    global mock_response
    prepared = kwargs.get("request", {})
    end_point = prepared.path_url
    http_method = prepared.method
    # Serialize the enrichment fixture once per call and expose it module-wide.
    mock_response = json.dumps(cve_enrich)
    routes = {
        "POST": {"/auth/token": MockedResponse(200, mocked_get_token_response)},
        "GET": {"/dve_enrich/CVE-2020-9047": MockedResponse(200, mock_response)},
    }
    return routes.get(http_method).get(end_point)
def test_test_module_command_raise_exception(mocker):
    """test_module should raise when the API responds with an error status."""
    mocker.patch.object(demisto, "params", return_value=init_params())
    mocker.patch("requests.sessions.Session.send", return_value=MockedResponse(400, "error"))
    from CybersixgillDVEEnrichment import test_module
    with pytest.raises(Exception):
        test_module()
def test_test_module_command(mocker):
    """test_module should complete without raising when the API returns HTTP 200."""
    mocker.patch.object(demisto, "params", return_value=init_params())
    mocker.patch("requests.sessions.Session.send", return_value=MockedResponse(200, "ok"))
    from CybersixgillDVEEnrichment import test_module
    test_module(
        demisto.params()["client_id"], demisto.params()["client_secret"], channel_code, requests.Session(), True
    )
def test_stix_to_indicator(mocker):
    """stix_to_indicator should map the raw STIX CVE fixture to the expected XSOAR shape."""
    mocker.patch.object(demisto, "params", return_value=init_params())
    mocker.patch("requests.sessions.Session.send", new=mocked_request)
    from CybersixgillDVEEnrichment import stix_to_indicator
    output = []
    cve_data = stix_to_indicator(cve_enrich)
    output.append(cve_data)
    assert output == expected_enrich_output
def test_cve_enrich_command(mocker):
    """cve_enrich_command should return the enriched CVE via the mocked transport."""
    mocker.patch.object(demisto, "params", return_value=init_params())
    mocker.patch.object(demisto, "args", return_value=args)
    mocker.patch("requests.sessions.Session.send", new=mocked_request)
    from CybersixgillDVEEnrichment import cve_enrich_command
    from sixgill.sixgill_enrich_client import SixgillEnrichClient
    client = SixgillEnrichClient(
        demisto.params()["client_id"], demisto.params()["client_secret"], channel_code, demisto
    )
    output = cve_enrich_command(client, demisto.args())
    assert output[0].outputs == expected_enrich_output
| mit | 2e55478549d57515bb9f8673fd54d491 | 43.102296 | 113 | 0.466982 | 3.904084 | false | false | false | false |
demisto/content | Packs/Nmap/Integrations/Nmap/Nmap.py | 2 | 2589 | import demistomock as demisto
from CommonServerPython import *
from libnmap.process import NmapProcess
from libnmap.parser import NmapParser
from libnmap.reportjson import ReportEncoder
# Flat script: XSOAR imports this module once per command, so all of the
# logic below runs at import time based on demisto.command().
if demisto.command() == 'test-module':
    demisto.results('ok')
    sys.exit(0)
if demisto.command() == 'nmap-scan':
    # Run nmap against the (comma-separated) targets with the raw option string.
    nm = NmapProcess(argToList(demisto.args()['targets']), options=demisto.args()['options'])
    rc = nm.run()
    if rc != 0:
        # Scan failed to execute: report an error entry and stop.
        demisto.results({
            'Type': entryTypes['error'],
            'ContentsFormat': formats['text'],
            'Contents': 'Unable to execute - ' + nm.stderr
        })
        sys.exit(0)
    r = NmapParser.parse(nm.stdout)
    md = '## ' + r.summary + '\n'
    hosts = []
    try:
        scan_type = r.scan_type
    except KeyError:
        # Some reports carry no scan type; treat it as unknown.
        scan_type = None
    for host in r.hosts:
        h = {}
        # Prefer a resolved hostname; fall back to the raw address.
        # NOTE(review): .pop() mutates the report's hostname list — confirm intended.
        if len(host.hostnames):
            tmp_host = host.hostnames.pop()
            h['Hostname'] = tmp_host
        else:
            tmp_host = host.address
        h['Address'] = host.address
        h['Status'] = host.status
        svc = []
        md += "### Nmap scan report for {0}".format(tmp_host) + \
              (" ({0})\n".format(host.address) if tmp_host != host.address else "\n")
        md += "#### Host is {0}.\n".format(host.status)
        for serv in host.services:
            svc.append({
                'Port': serv.port,
                'Protocol': serv.protocol,
                'State': serv.state,
                'Service': serv.service,
                'Banner': serv.banner
            })
        extras = []
        # NSE script results live in the parser's private _extras mapping.
        for hostscript in host._extras.get('hostscript', []):
            extras.append({
                'ID': hostscript.get('id'),
                'Output': hostscript.get('output'),
                'Elements': hostscript.get('elements'),
            })
        md += tableToMarkdown('Services', svc, ['Port', 'Protocol', 'State', 'Service', 'Banner'])
        md += tableToMarkdown('Script Results', extras, ['ID', 'Output', 'Elements'])
        h['Services'] = svc
        h['ScriptResults'] = extras
        hosts.append(h)
    # Final note entry: raw JSON report, markdown summary and NMAP.Scan context.
    scan = {
        'Summary': r.summary,
        'Version': r.version,
        'Started': r.started,
        'Ended': r.endtime,
        'CommandLine': r.commandline,
        'ScanType': scan_type,
        'Hosts': hosts}
    demisto.results({
        'ContentsFormat': formats['json'],
        'Type': entryTypes['note'],
        'Contents': json.dumps(r, cls=ReportEncoder),
        'HumanReadable': md,
        'EntryContext': {'NMAP.Scan': scan}
    })
demisto/content | Packs/Binalyze/Integrations/BinalyzeAIR/BinalyzeAIR.py | 2 | 6625 | import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
from typing import Dict, Any
import requests
requests.packages.urllib3.disable_warnings()
class Client(BaseClient):
    """Binalyze AIR API client built on top of BaseClient."""

    def test_api(self):
        """Issue a lightweight endpoints query to verify connectivity and auth."""
        endpoint = '/api/public/endpoints?filter[organizationIds]=0'
        return self._http_request(method='GET', url_suffix=endpoint)

    def air_acquire(self, hostname: str, profile: str, case_id: str, organization_id: int) -> Dict[str, str]:
        """Start an evidence-acquisition task on an endpoint.

        Args:
            hostname (str): endpoint hostname to start acquisition on.
            profile (str): predefined acquisition profile name.
            case_id (str): the Case ID to associate with in the AIR server.
            organization_id (int): organization ID of the endpoint.

        Returns:
            Dict[str, Any]: JSON response of /api/public/acquisitions/acquire.
        """
        body: Dict[str, Any] = {
            "caseId": case_id,
            "droneConfig": {"autoPilot": False, "enabled": False},
            "taskConfig": {"choice": "use-policy"},
            "acquisitionProfileId": profile,
            "filter": {"name": hostname, "organizationIds": [organization_id]},
        }
        return self._http_request(
            method='POST',
            url_suffix='/api/public/acquisitions/acquire',
            json_data=body,
        )

    def air_isolate(self, hostname: str, organization_id: int, isolation: str) -> Dict[str, str]:
        """Enable or disable isolation for an endpoint.

        Args:
            hostname (str): endpoint hostname.
            organization_id (int): organization ID of the endpoint.
            isolation (str): 'disable' lifts isolation; any other value enables it.

        Returns:
            Dict[str, Any]: JSON response of /api/public/endpoints/tasks/isolation.
        """
        body: Dict[str, Any] = {
            # Isolation is enabled unless explicitly asked to disable it.
            "enabled": isolation != 'disable',
            "filter": {"name": hostname, "organizationIds": [organization_id]},
        }
        return self._http_request(
            method='POST',
            url_suffix='/api/public/endpoints/tasks/isolation',
            json_data=body,
        )
def test_connection(client: Client) -> str:
'''Command for test-connection'''
try:
client.test_api()
except DemistoException as ex:
if 'Unauthorized' in str(ex):
return demisto.results(f'Authorization Error: Make sure API Key is correctly set.{str(ex)}')
if 'ConnectionError' in str(ex):
return demisto.results(f'Connection Error: Test connection failed. {str(ex)}')
else:
raise ex
return demisto.results('ok')
def air_acquire_command(client: Client, args: Dict[str, Any]) -> CommandResults:
'''Command handler for acquire command'''
hostname = args.get('hostname', '')
profile = args.get('profile', '')
case_id = args.get('case_id', '')
organization_id = args.get('organization_id', '')
result: Dict[str, Any] = client.air_acquire(hostname, profile, case_id, organization_id)
readable_output = tableToMarkdown('Binalyze AIR Isolate Results', result,
headers=('success', 'result', 'statusCode', 'errors'),
headerTransform=string_to_table_header)
if result.get('statusCode') == 404:
return CommandResults(readable_output='No contex for queried hostname.')
return CommandResults(
outputs_prefix='BinalyzeAIR.Acquisition',
outputs_key_field='hostname',
outputs={
'Result': result['result'],
'Success': result['success']
},
readable_output=readable_output,
)
def air_isolate_command(client: Client, args: Dict[str, Any]) -> CommandResults:
''' Command handler isolate '''
hostname = args.get('hostname', '')
organization_id = args.get('organization_id', '')
isolation = args.get('isolation', '')
result: Dict[str, Any] = client.air_isolate(hostname, organization_id, isolation)
readable_output = tableToMarkdown('Binalyze AIR Isolate Results', result,
headers=('success', 'result', 'statusCode', 'errors'),
headerTransform=string_to_table_header)
if result.get('statusCode') == 404:
return CommandResults(readable_output='No contex for queried hostname.')
return CommandResults(
outputs_prefix='BinalyzeAIR.Isolate',
outputs_key_field='hostname',
outputs={
'Result': result['result'],
'Success': result['success']
},
readable_output=readable_output,
)
''' Entrypoint '''
def main() -> None: # pragma: no cover
api_key: str = demisto.params().get('api_key')
base_url: str = demisto.params()['server']
verify_certificate: bool = not demisto.params().get('insecure', False)
proxy: bool = demisto.params().get('proxy', False)
command: str = demisto.command()
args: Dict[str, Any] = demisto.args()
headers: Dict[str, Any] = {
'Authorization': f'Bearer {api_key}',
'User-Agent': 'Binalyze AIR',
'Content-type': 'application/json',
'Accept-Charset': 'UTF-8'
}
try:
demisto.debug(f'Command being called is {demisto.command()}')
client: Client = Client(
base_url=base_url,
verify=verify_certificate,
headers=headers,
proxy=proxy,
ok_codes=(404, 200)
)
if command == 'test-module':
return_results(test_connection(client))
elif command == 'binalyze-air-acquire':
return_results(air_acquire_command(client, args))
elif command == 'binalyze-air-isolate':
return_results(air_isolate_command(client, args))
except Exception as ex:
demisto.error(traceback.format_exc()) # print the traceback
return_error(f'Failed to execute "{command}". Error: {str(ex)}')
if __name__ in ('__main__', '__builtin__', 'builtins'): # pragma: no cover
main()
| mit | 267828e430566291fc8607e42c3e5076 | 34.810811 | 109 | 0.589736 | 4.161432 | false | false | false | false |
demisto/content | Packs/AbuseDB/Integrations/AbuseDB/AbuseDB.py | 2 | 11031 | import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
''' IMPORTS '''
import csv
import os
import requests
# disable insecure warnings
requests.packages.urllib3.disable_warnings()
''' GLOBALS '''
VERBOSE = True
SERVER = demisto.params().get('server')
if not SERVER.endswith('/'):
SERVER += '/'
API_KEY = demisto.params().get('apikey')
MAX_AGE = demisto.params().get('days')
THRESHOLD = demisto.params().get('threshold')
INSECURE = demisto.params().get('insecure')
TEST_IP = "127.0.0.2"
BLACKLIST_SCORE = 3
CHECK_CMD = "check"
CHECK_BLOCK_CMD = "check-block"
REPORT_CMD = "report"
BLACKLIST_CMD = 'blacklist'
ANALYSIS_TITLE = "AbuseIPDB Analysis"
BLACKLIST_TITLE = "AbuseIPDB Blacklist"
REPORT_SUCCESS = "IP address reported successfully."
API_QUOTA_REACHED_MESSAGE = 'Too many requests (possibly bad API key). Status code: 429'
HEADERS = {
'Key': API_KEY,
'Accept': 'application/json'
}
PROXY = demisto.params().get('proxy')
if not demisto.params().get('proxy', False):
del os.environ['HTTP_PROXY']
del os.environ['HTTPS_PROXY']
del os.environ['http_proxy']
del os.environ['https_proxy']
CATEGORIES_NAME = {
1: 'DNS_Compromise',
2: 'DNS_Poisoning',
3: 'Frad_Orders',
4: 'DDoS_Attack',
5: 'FTP_Brute-Force',
6: 'Ping of Death',
7: 'Phishing',
8: 'Fraud VoIP',
9: 'Open_Proxy',
10: 'Web_Spam',
11: 'Email_Spam',
12: 'Blog_Spam',
13: 'VPN IP',
14: 'Port_Scan',
15: 'Hacking',
16: 'SQL Injection',
17: 'Spoofing',
18: 'Brute_Force',
19: 'Bad_Web_Bot',
20: 'Exploited_Host',
21: 'Web_App_Attack',
22: 'SSH',
23: 'IoT_Targeted'
}
CATEGORIES_ID = {
"Frad_Orders": "3",
"DDoS_Attack": "4",
"FTP_Brute": "5",
"Ping of Death": "6",
"Phishing": "7",
"Fraud VoIP": "8",
"Open_Proxy": "9",
"Web_Spam": "10",
"Email_Spam": "11",
"Blog_Spam": "12",
"VPN IP": "13",
"Port_Scan": "14",
"Hacking": "15",
"SQL Injection": "16",
"Spoofing": "17",
"Brute_Force": "18",
"Bad_Web_Bot": "19",
"Exploited_Host": "20",
"Web_App_Attack": "21",
"SSH": "22",
"IoT_Targeted": "23"
}
session = requests.session()
''' HELPER FUNCTIONS '''
def http_request(method, url_suffix, params=None, headers=HEADERS, threshold=THRESHOLD):
LOG('running request with url=%s' % (SERVER + url_suffix))
try:
analysis = session.request(method, SERVER + url_suffix, headers=headers, params=params, verify=not INSECURE)
if analysis.status_code not in {200, 204, 429}:
return_error('Bad connection attempt. Status code: ' + str(analysis.status_code))
if analysis.status_code == 429:
if demisto.params().get('disregard_quota'):
return API_QUOTA_REACHED_MESSAGE
else:
return_error(API_QUOTA_REACHED_MESSAGE)
return REPORT_SUCCESS if url_suffix == REPORT_CMD else analysis.json()
except Exception as e:
LOG(e)
return_error(str(e))
def analysis_to_entry(info, reliability, threshold=THRESHOLD, verbose=VERBOSE):
if not isinstance(info, list):
info = [info]
context_ip_generic, context_ip, human_readable, dbot_scores, timeline = [], [], [], [], []
for analysis in info:
ip_ec = {
"Address": analysis.get("ipAddress"),
"Geo": {"Country": analysis.get("countryName") or analysis.get("countryCode")}
}
abuse_ec = {
"IP": {
"Address": analysis.get("ipAddress"),
"Geo": {"Country": analysis.get("countryName") or analysis.get("countryCode")},
"AbuseConfidenceScore": analysis.get('abuseConfidenceScore'),
"TotalReports": analysis.get("totalReports") or analysis.get("numReports") or "0",
"ISP": analysis.get("isp"),
"UsageType": analysis.get("usageType"),
"Domain": analysis.get("domain")
}
}
if verbose:
reports = sum([report_dict.get("categories") for report_dict in analysis.get("reports")], []) # type: list
categories = set(filter(lambda category_id: category_id in CATEGORIES_NAME.keys(), reports))
abuse_ec["IP"]["Reports"] = {CATEGORIES_NAME[c]: reports.count(c) for c in categories}
human_readable.append(abuse_ec['IP'])
dbot_score = getDBotScore(analysis, threshold)
if dbot_score == 3:
ip_ec["Malicious"] = abuse_ec["IP"]["Malicious"] = {
'Vendor': "AbuseIPDB",
'Detections': 'The address was reported as Malicious by AbuseIPDB.',
'Description': 'The address was reported as Malicious by AbuseIPDB.'
}
dbot_scores.append({
"Score": dbot_score,
"Vendor": "AbuseIPDB",
"Indicator": analysis.get("ipAddress"),
"Type": "ip",
"Reliability": reliability
})
context_ip.append(abuse_ec)
context_ip_generic.append(ip_ec)
ip_address = analysis.get('ipAddress')
ip_rep = scoreToReputation(dbot_score)
timeline.append({
'Value': ip_address,
'Message': 'AbuseIPDB marked the indicator "{}" as *{}*'.format(ip_address, ip_rep),
'Category': 'Integration Update'
})
return createEntry(context_ip, context_ip_generic, human_readable, dbot_scores, timeline, title=ANALYSIS_TITLE)
def blacklist_to_entry(data, saveToContext):
if not isinstance(data, list):
data = [data]
ips = [d.get("ipAddress") for d in data]
context = {"Blacklist": ips}
temp = demisto.uniqueFile()
with open(demisto.investigation()['id'] + '_' + temp, 'w') as f:
wr = csv.writer(f, quoting=csv.QUOTE_ALL)
for ip in ips:
wr.writerow([ip])
entry = {
'HumanReadable': '',
'Contents': ips,
'ContentsFormat': formats['json'],
'Type': entryTypes['file'],
'File': "Blacklist.csv",
'FileID': temp,
'EntryContext': {'AbuseIPDB': createContext(context if saveToContext else None, removeNull=True)}
}
return entry
def getDBotScore(analysis, threshold=THRESHOLD):
total_reports = analysis.get("totalReports") or analysis.get("numReports") or 0
abuse_score = int(analysis.get("abuseConfidenceScore"))
dbot_score = 0 if total_reports == 0 else 1 if abuse_score < 20 else 2 if abuse_score < int(threshold) else 3
return dbot_score
def createEntry(context_ip, context_ip_generic, human_readable, dbot_scores, timeline, title):
entry = {
'ContentsFormat': formats['json'],
'Type': entryTypes['note'],
'Contents': context_ip,
'ReadableContentsFormat': formats['markdown'],
'HumanReadable': tableToMarkdown(title, human_readable, removeNull=True),
'EntryContext': {
'IP(val.Address && val.Address == obj.Address)': createContext(context_ip_generic, removeNull=True),
'AbuseIPDB(val.IP.Address && val.IP.Address == obj.IP.Address)': createContext(context_ip, removeNull=True),
'DBotScore': createContext(dbot_scores, removeNull=True)
},
'IndicatorTimeline': timeline
}
return entry
''' FUNCTIONS '''
def check_ip_command(reliability, ip, days=MAX_AGE, verbose=VERBOSE, threshold=THRESHOLD):
params = {
"maxAgeInDays": days
}
if verbose:
params['verbose'] = "verbose"
ip_list = argToList(ip)
entry_list = []
for current_ip in ip_list:
params["ipAddress"] = current_ip
analysis = http_request("GET", url_suffix=CHECK_CMD, params=params)
if analysis == API_QUOTA_REACHED_MESSAGE:
continue
analysis_data = analysis.get("data")
entry_list.append(analysis_to_entry(analysis_data, reliability, verbose=verbose, threshold=threshold))
return entry_list
def check_block_command(reliability, network, limit, days=MAX_AGE, threshold=THRESHOLD):
params = {
"network": network,
"maxAgeInDays": days
}
analysis = http_request("GET", url_suffix=CHECK_BLOCK_CMD, params=params).get("data").get("reportedAddress")
return analysis_to_entry(analysis[:int(limit) if limit.isdigit() else 40], verbose=False, threshold=threshold,
reliability=reliability)
def report_ip_command(ip, categories):
params = {
"ip": ip,
"categories": ",".join([CATEGORIES_ID[c] if c in CATEGORIES_ID else c for c in categories.split()])
}
analysis = http_request("POST", url_suffix=REPORT_CMD, params=params)
return analysis
def get_blacklist_command(limit, days, confidence, saveToContext):
params = {
'maxAgeInDays': days,
'confidenceMinimum': confidence,
'limit': limit
}
analysis = http_request("GET", url_suffix=BLACKLIST_CMD, params=params)
return analysis if type(analysis) is str else blacklist_to_entry(analysis.get("data"), saveToContext)
def test_module(reliability):
try:
check_ip_command(ip=TEST_IP, verbose=False, reliability=reliability)
except Exception as e:
LOG(e)
return_error(str(e))
demisto.results('ok')
def get_categories_command():
categories = {str(key): value for key, value in CATEGORIES_NAME.items()}
entry = {
'ContentsFormat': formats['json'],
'Type': entryTypes['note'],
'Contents': categories,
'ReadableContentsFormat': formats['markdown'],
'HumanReadable': tableToMarkdown("AbuseIPDB report categories", categories, removeNull=True),
'EntryContext': {'AbuseIPDB.Categories(val && val == obj)': createContext(categories, removeNull=True),
}
}
return entry
try:
reliability = demisto.params().get('integrationReliability', 'C - Fairly reliable')
if DBotScoreReliability.is_valid_type(reliability):
reliability = DBotScoreReliability.get_dbot_score_reliability_from_str(reliability)
else:
raise Exception("Please provide a valid value for the Source Reliability parameter.")
if demisto.command() == 'test-module':
# Tests connectivity and credentails on login
test_module(reliability)
elif demisto.command() == 'ip':
demisto.results(check_ip_command(reliability, **demisto.args()))
elif demisto.command() == 'abuseipdb-check-cidr-block':
demisto.results(check_block_command(reliability, **demisto.args()))
elif demisto.command() == 'abuseipdb-report-ip':
demisto.results(report_ip_command(**demisto.args()))
elif demisto.command() == 'abuseipdb-get-blacklist':
demisto.results(get_blacklist_command(**demisto.args()))
elif demisto.command() == 'abuseipdb-get-categories':
demisto.results(get_categories_command(**demisto.args())) # type:ignore
except Exception as e:
LOG.print_log()
return_error(str(e))
| mit | 0c2a7bc7f304baff822e58549d89811d | 33.151703 | 120 | 0.616807 | 3.475425 | false | false | false | false |
demisto/content | Packs/FeedFireEye/Integrations/FeedFireEye/FeedFireEye_test.py | 2 | 10505 | import random
from typing import Optional
import pytest
import requests_mock
from FeedFireEye import Client, STIX21Processor, FE_CONFIDENCE_TO_REPUTATION, parse_timestamp, \
handle_first_fetch_timestamp
from freezegun import freeze_time
import demistomock as demisto
def create_client(public_key: str = 'public_key', private_key: str = 'secret_key', threshold: int = 70,
reputation_interval: int = 30, polling_timeout: int = 20, insecure: bool = False, proxy: bool = False,
tags: list = [], tlp_color: Optional[str] = 'AMBER'):
return Client(public_key, private_key, threshold, reputation_interval, polling_timeout, insecure, proxy)
def test_get_access_token_with_valid_token_in_context():
"""
Given:
- Entry context with a valid authentication token
When:
- Getting authentication token
Then:
- Returns the auth token from context
"""
demisto.setIntegrationContext(
{
'auth_token': 'Token',
'expiration_time': 11740347200
}
)
client = create_client()
assert client.get_access_token() == 'Token'
def test_get_access_token_with_invalid_token_in_context(mocker):
"""
Given:
- Entry context with an invalid authentication token
When:
- Getting authentication token
Then:
- Returns a new fetched auth token
"""
mocker.patch.object(Client, 'fetch_new_access_token', return_value='New Access Token')
demisto.setIntegrationContext(
{
'auth_token': 'Token',
'expiration_time': 740347200
}
)
client = create_client()
assert client.get_access_token() == 'New Access Token'
@freeze_time("1993-06-17 11:00:00 GMT")
def test_parse_access_token_expiration_time():
"""
Given:
- Authentication token validity period
When:
- Fetching new authentication token
Then:
- Returns the expiration time of the newly fetched token
"""
for i in range(5):
random_value = random.randint(0, 1000)
# 740314800 is the epoch converted time of 1993-06-17 11:00:00
assert Client.parse_access_token_expiration_time(random_value) - 740314800 == random_value
FETCH_INDICATORS_PACKAGE = [
(
'https://api.intelligence.fireeye.com/collections/indicators/objects?length=1000',
200,
{
'objects': [
{
'type': 'indicator'
},
{
'type': 'relationship',
'id': 'relationship1'
},
{
'type': 'malware',
'id': 'malware1'
},
{
'type': 'indicator'
},
]
},
(
[{'type': 'indicator'}, {'type': 'indicator'}],
{'relationship1': {'type': 'relationship', 'id': 'relationship1'}},
{'malware1': {'type': 'malware', 'id': 'malware1'}},
None
)
),
(
'https://api.intelligence.fireeye.com/collections/indicators/objects?length=1000',
204,
{},
([], {}, {}, None)
),
(
'https://api.intelligence.fireeye.com/collections/indicators/objects?length=1000',
202,
{},
([], {}, {}, None)
)
]
@pytest.mark.parametrize('url, status_code, json_data, expected_result', FETCH_INDICATORS_PACKAGE)
def test_fetch_indicators_from_api(mocker, url, status_code, json_data, expected_result):
"""
Given:
- Response status code
- Response data
When:
- Fetching indicators from API
Then:
- Returns the processed tuple of raw indicators, entities and relationships
"""
with requests_mock.Mocker() as m:
mocker.patch.object(Client, 'fetch_new_access_token', return_value='New Access Token')
mocker.patch.object(demisto, 'info')
mocker.patch.object(demisto, 'debug')
m.get(url, status_code=status_code, json=json_data)
client = create_client()
if status_code in [200, 204]:
fetch_result = client.fetch_all_indicators_from_api(-1)
for i in range(4):
assert fetch_result[i] == expected_result[i]
if status_code == 204:
assert demisto.info.call_args[0][0] == \
'FireEye Feed info - API Status Code: 204 No Content Available for this timeframe.'
else:
with pytest.raises(SystemExit) as e:
# return_error reached
client.fetch_all_indicators_from_api(-1)
if not e:
assert False
FETCH_REPORTS_PACKAGE = [
(
'https://api.intelligence.fireeye.com/collections/reports/objects?length=100',
200,
{
'objects': [
{
'type': 'report',
'id': 'report1'
},
{
'type': 'report',
'id': 'report2'
},
]
},
([{'type': 'report', 'id': 'report1'}, {'type': 'report', 'id': 'report2'}], None)
),
(
'https://api.intelligence.fireeye.com/collections/reports/objects?length=100',
204,
{},
([], None)
)
]
@pytest.mark.parametrize('url, status_code, json_data, expected_result', FETCH_REPORTS_PACKAGE)
def test_fetch_reports_from_api(mocker, url, status_code, json_data, expected_result):
"""
Given:
- Response status code
- Response data
When:
- Fetching reports from API
Then:
- Returns the processed list of raw reports
"""
with requests_mock.Mocker() as m:
mocker.patch.object(Client, 'fetch_new_access_token', return_value='New Access Token')
mocker.patch.object(demisto, 'debug')
m.get(url, status_code=status_code, json=json_data)
client = create_client()
if status_code == 200:
fetch_result = client.fetch_all_reports_from_api(-1)
assert fetch_result == expected_result
else:
with pytest.raises(SystemExit) as e:
# return_error reached
client.fetch_all_reports_from_api(-1)
if not e:
assert False
PROCESS_INDICATOR_VALUE_PACKAGE = [
(
"[file:hashes.MD5='1234' OR "
"file:hashes.'SHA-1'='12345' OR "
"file:hashes.'SHA-256'='123456']",
(
['file'],
['1234'],
{
'MD5': '1234',
'SHA-1': '12345',
'SHA-256': '123456'
}
)
),
(
"[file:hashes.'SHA-1'='12345' OR "
"file:hashes.'SHA-256'='123456']",
(
['file'],
['12345'],
{
'SHA-1': '12345',
'SHA-256': '123456'
}
)
),
(
"[file:hashes.'ssdeep'='12345' OR "
"file:hashes.'SHA-256'='123456']",
(
['file'],
['123456'],
{
'ssdeep': '12345',
'SHA-256': '123456'
}
)
),
(
"[file:'fake'='12345' OR "
"file:hashes.'SHA-1'='123456']",
(
['file'],
['123456'],
{
'SHA-1': '123456'
}
)
),
(
"[domain-name:value='1234.com']",
(['domain-name'], ['1234.com'], {})
),
(
"[domain-name:value='1234.com' AND url:value='www.abc.1245.com']",
(['domain-name', 'url'], ['1234.com', 'www.abc.1245.com'], {})
)
]
@pytest.mark.parametrize('pattern_value, expected_result', PROCESS_INDICATOR_VALUE_PACKAGE)
def test_process_indicator_value(pattern_value, expected_result):
"""
Given:
- Indicator raw value from response
When:
- Processing raw indicators to real indicators
Then:
- Returns extracted value and hashes
"""
process_result = STIX21Processor.process_indicator_value(pattern_value)
for i in range(3):
assert process_result[i] == expected_result[i]
REPUTATION_CALCULATION_PACKAGE = [
(100, '1993-05-27T17:43:41.000Z', 70, 30, 3),
(100, '1992-05-27T17:43:41.000Z', 0, 30, 2),
(100, '1993-04-27T17:43:41.000Z', 0, 100, 3),
(51, '1993-04-27T17:43:41.000Z', 50, 100, 3),
(1, '1993-04-27T17:43:41.000Z', 50, 100, 0),
(100, '1993-04-27T17:43:41.000Z', 50, 20, 2),
]
@pytest.mark.parametrize('confidence, date, threshold, reputation_interval, expected', REPUTATION_CALCULATION_PACKAGE)
@freeze_time("1993-06-17 11:00:00 GMT")
def test_reputation_calculation(confidence, date, threshold, reputation_interval, expected):
"""
Given:
- Confidence level according to FE
- Indicator publish date
When:
- Processing raw indicators to real indicators
Then:
- Returns DBot Score
"""
FE_CONFIDENCE_TO_REPUTATION[3] = threshold
assert STIX21Processor.calculate_indicator_reputation(confidence, date, reputation_interval) == expected
def test_parse_timestamp():
"""
Given:
- Next URL value from FE response
When:
- Saving the last timestamp fetched to context
Then:
- Returns decoded timestamp
"""
assert parse_timestamp(
'https://api.intelligence.fireeye.com/collections/indicators/objects?length=1000&'
'last_id_modified_timestamp=MTU4MDgwOTIxOTcyODY0NixpbmRpY2F0b3ItLTA5MWI3OWQxLTllOWQtNWExYS04ODMzLTZlNTkyZmNj'
'MmM1NQ%3D%3D&added_after=1580764458'
) == 1580809219
@pytest.mark.parametrize('param_input, expected_result', [
('1 month', '737636400'),
('2 months', '735044400'),
('1 day', '740228400'),
('3 weeks', '738500400'),
('', None),
(None, None),
])
@freeze_time("1993-06-17 11:00:00 GMT")
def test_handle_first_fetch_timestamp(mocker, param_input, expected_result):
"""
Given:
- first_fetch_timestamp parameter from user input
When:
- Calculating the first fetch timestamp value
Then:
- str value of the required time, or None if empty
"""
mocker.patch.object(demisto, 'params', return_value={'first_fetch_timestamp': param_input})
assert handle_first_fetch_timestamp() == expected_result
| mit | cc50a06bca5bc690efa6803a5e536684 | 25.867008 | 120 | 0.548786 | 3.74777 | false | false | false | false |
demisto/content | Packs/illuminate/Integrations/illuminate/illuminate.py | 2 | 14661 | import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
''' IMPORTS '''
import requests
import traceback
from typing import Dict, Optional, List, Any, Callable, Collection
# Disable insecure warnings
requests.packages.urllib3.disable_warnings()
''' CONSTANTS '''

# Integration information
INTEGRATION_NAME = 'illuminate'  # vendor name reported in DBotScore entries and human-readable titles
INTEGRATION_CONTEXT_BRAND = 'Illuminate'  # root brand used to build entry-context output paths

# Static 'Malicious' descriptor merged into reputation context when the raw
# response marks the indicator as not benign (see Client.is_indicator_malicious).
MALICIOUS_DATA: Dict[str, str] = {
    'Vendor': 'illuminate',
    'Description': 'illuminate has determined that this indicator is malicious via internal analysis.'
}

''' HELPER FUNCTIONS '''
class IdNamePair(object):
    """Lightweight id/name pair used to render actor and malware entries in tables."""

    def __init__(self, unique_id: int, name: str):
        self.id = unique_id
        self.name = name

    def __str__(self):
        """Render as 'id = <id>, name = <name>' for markdown table cells."""
        return 'id = {}, name = {}'.format(self.id, self.name)
class EnrichmentOutput(object):
    """Collects enrichment results for a single indicator and builds War Room output.

    Holds the illuminate-specific context, the raw API response, and any standard
    reputation (indicator object / DBotScore) context, then renders everything as
    one demisto entry.
    """

    def __init__(self, illuminate_context_data: dict, raw_data: dict, indicator_type: str) -> None:
        self.illuminate_context_data = illuminate_context_data
        self.raw_data = raw_data
        self.indicator_type = indicator_type
        self.reputation_context: dict = {}

    def get_human_readable_output(self) -> str:
        """Render the illuminate context as a markdown table for the War Room."""
        display_data = dict(self.illuminate_context_data)
        # Show actor/malware entries as readable "id = .., name = .." pairs instead of raw dicts.
        for key in ('Actors', 'Malwares'):
            display_data[key] = [IdNamePair(entry['id'], entry['name']) for entry in display_data[key]]

        return tableToMarkdown(
            t=display_data,
            name=f'{INTEGRATION_NAME} {self.indicator_type.capitalize()} Information',
            removeNull=True
        )

    def build_illuminate_context(self) -> dict:
        """Return the illuminate-branded entry context, keyed by indicator type."""
        context_path = (
            f'{INTEGRATION_CONTEXT_BRAND}.{self.indicator_type.capitalize()}'
            '(val.ID && val.ID === obj.ID)'
        )
        return {context_path: self.illuminate_context_data}

    def generate_reputation_context(
            self,
            primary_key: str,
            indicator_value: str,
            indicator_type: str,
            reputation_key: str,
            extra_context: Optional[dict] = None
    ):
        """Populate standard reputation context (indicator object + DBotScore).

        Does nothing when illuminate returned no context for this indicator.
        """
        if not self.has_context_data():
            return

        reputation_context: Dict[str, Any] = {primary_key: indicator_value}
        if extra_context is not None:
            reputation_context.update(extra_context)

        malicious = Client.is_indicator_malicious(self.raw_data)
        if malicious:
            reputation_context['Malicious'] = MALICIOUS_DATA

        reputation_path = f'{reputation_key}(val.{primary_key} && val.{primary_key} === obj.{primary_key})'
        self.add_reputation_context(reputation_path, reputation_context)

        self.add_reputation_context('DBotScore', {
            'Indicator': indicator_value,
            'Score': 3 if malicious else 1,
            'Type': indicator_type,
            'Vendor': INTEGRATION_NAME
        })

    def build_all_context(self) -> dict:
        """Merge the illuminate context with any accumulated reputation context."""
        combined = dict(self.build_illuminate_context())
        if self.reputation_context:
            combined.update(self.reputation_context)
        return combined

    def return_outputs(self):
        """Send the full enrichment entry to the War Room."""
        # We need to use the underlying demisto.results function call rather than using return_outputs because
        # we need to add the IgnoreAutoExtract key to ensure that our illuminate links are not marked as indicators
        demisto.results({
            "Type": entryTypes["note"],
            "HumanReadable": self.get_human_readable_output(),
            "ContentsFormat": formats["json"],
            "Contents": self.raw_data,
            "EntryContext": self.build_all_context(),
            "IgnoreAutoExtract": True
        })

    def add_illuminate_context(self, key: str, data: Any):
        """Add a single key/value to the illuminate-specific context."""
        self.illuminate_context_data[key] = data

    def add_reputation_context(self, key: str, context: dict):
        """Register a context dict under the given entry-context path."""
        self.reputation_context[key] = context

    def has_context_data(self):
        """True when illuminate returned any context data for the indicator."""
        return bool(self.illuminate_context_data)
class Client(BaseClient):
    """HTTP client for the illuminate 1.0 REST API.

    Wraps authentication and indicator lookup, plus the static helpers that map
    the raw API response into entry context.
    """

    def __init__(self, server: str, username: str, password: str, insecure: bool, proxy: bool):
        # NB: 404 is a valid response since that just means no entries, and we want the UI to respect that and show "No Entries"
        super().__init__(
            base_url=f'https://{server}/api/1_0/',
            verify=not insecure,
            proxy=proxy,
            auth=(username, password),
            ok_codes=(200, 404)
        )

    def indicator_search(self, indicator_type: str, indicator: str) -> dict:
        """Look up a single indicator and return the raw JSON match response."""
        params = {'type': indicator_type, 'value': indicator}
        return self._http_request(method='GET', url_suffix='indicator/match', params=params)

    def perform_test_request(self):
        """Hit the API root to validate the URL and credentials.

        Raises DemistoException when the response body lacks the expected
        'links' structure (wrong URL or bad credentials).
        """
        data: dict = self._http_request(method='GET', url_suffix='')
        if data['links'] is None:
            raise DemistoException('Invalid URL or Credentials. JSON structure not recognized')

    def enrich_indicator(self, indicator: str, indicator_type: str) -> EnrichmentOutput:
        """Search for the indicator and wrap the result in an EnrichmentOutput.

        Returns an empty EnrichmentOutput when the API has no data (404 body).
        """
        raw_data: dict = self.indicator_search(indicator_type, indicator)
        if raw_data is None:
            return EnrichmentOutput({}, {}, indicator_type)

        context_data = self.get_context_from_response(raw_data)
        return EnrichmentOutput(context_data, raw_data, indicator_type)

    @staticmethod
    def get_data_key(data: dict, key: str) -> Optional[Any]:
        """Return data[key], or None when the key is absent."""
        return None if key not in data else data[key]

    @staticmethod
    def get_nested_data_key(data: dict, key: str, nested_key: str) -> Optional[Any]:
        """Return data[key][nested_key], or None when either level is absent."""
        top_level = Client.get_data_key(data, key)
        return None if top_level is None or nested_key not in top_level else top_level[nested_key]

    @staticmethod
    def get_data_key_as_date(data: dict, key: str, fmt: str) -> Optional[str]:
        """Format the epoch-milliseconds value at data[key] with fmt; None when absent."""
        value = Client.get_data_key(data, key)
        return None if value is None else datetime.fromtimestamp(value / 1000.0).strftime(fmt)

    @staticmethod
    def get_data_key_as_list(data: dict, key: str) -> List[Any]:
        """Return the list at data[key]; [] when absent or not a list."""
        data_list = Client.get_data_key(data, key)
        return [] if data_list is None or not isinstance(data[key], (list,)) else data_list

    @staticmethod
    def get_data_key_as_list_of_values(data: dict, key: str, value_key: str) -> List[Any]:
        """Extract value_key from each dict in the list at data[key]."""
        data_list = Client.get_data_key_as_list(data, key)
        return [value_data[value_key] for value_data in data_list]

    @staticmethod
    def get_data_key_as_list_of_dicts(data: dict, key: str, dict_creator: Callable) -> Collection[Any]:
        """Map each element of the list at data[key] through dict_creator.

        Fix: previously returned an empty dict ({}) for the empty case, which
        contradicted the list-returning siblings and this method's own non-empty
        return type. Now returns an empty list; both values are falsy and
        iterate as empty, so callers are unaffected.
        """
        data_list = Client.get_data_key_as_list(data, key)
        return [dict_creator(value_data) for value_data in data_list]

    @staticmethod
    def is_indicator_malicious(data: dict) -> bool:
        """An indicator is malicious unless the API explicitly marks it benign."""
        benign = Client.get_nested_data_key(data, 'benign', 'value')
        return False if benign is None or benign is True else True

    @staticmethod
    def get_context_from_response(data: dict) -> dict:
        """Translate the raw indicator-match response into entry-context fields."""
        result_dict = {
            'ID': Client.get_data_key(data, 'id'),
            'Indicator': Client.get_nested_data_key(data, 'value', 'name'),
            'EvidenceCount': Client.get_data_key(data, 'reportCount'),
            'Active': Client.get_data_key(data, 'active'),
            'HitCount': Client.get_data_key(data, 'hitCount'),
            'ConfidenceLevel': Client.get_nested_data_key(data, 'confidenceLevel', 'value'),
            'FirstHit': Client.get_data_key_as_date(data, 'firstHit', '%Y-%m-%d'),
            'LastHit': Client.get_data_key_as_date(data, 'lastHit', '%Y-%m-%d'),
            'ReportedDates': Client.get_data_key_as_list_of_values(data, 'reportedDates', 'date'),
            'ActivityDates': Client.get_data_key_as_list_of_values(data, 'activityDates', 'date'),
            'Malwares': Client.get_data_key_as_list_of_dicts(data, 'malwares', lambda d: {'id': d['id'], 'name': d['name']}),
            'Actors': Client.get_data_key_as_list_of_dicts(data, 'actors', lambda d: {'id': d['id'], 'name': d['name']}),
            'Benign': Client.get_nested_data_key(data, 'benign', 'value'),
            'IlluminateLink': None
        }

        # Rewrite the API's self link into the equivalent UI URL so analysts can
        # click through to the indicator page in illuminate.
        links_list = Client.get_data_key_as_list(data, 'links')
        result_dict['IlluminateLink'] = next((
            link['href'].replace("api/1_0/indicator/", "indicators/")
            for link in links_list
            if 'rel' in link and link['rel'] == 'self' and 'href' in link
        ), None)

        return result_dict
def build_client(demisto_params: dict) -> Client:
    """Construct a Client from the integration's configuration parameters."""
    credentials: dict = demisto_params.get('credentials', {})
    return Client(
        server=str(demisto_params.get('server')),
        username=str(credentials.get('identifier')),
        password=str(credentials.get('password')),
        insecure=demisto_params.get('insecure', False),
        proxy=demisto_params.get('proxy', False),
    )
''' COMMAND EXECUTION '''
def perform_test_module(client: Client) -> None:
    """Run the test-module connectivity check; raises on invalid URL/credentials."""
    client.perform_test_request()
def domain_command(client: Client, args: dict) -> List[EnrichmentOutput]:
    """Enrich one or more domains, adding Domain/DBotScore reputation context."""
    results: List[EnrichmentOutput] = []
    for domain in argToList(args.get('domain')):
        enrichment = client.enrich_indicator(domain, 'domain')
        if enrichment.has_context_data():
            extra_context = {}
            # Surface the resolved IP (when present) both in illuminate context and as standard DNS context.
            resolution = Client.get_nested_data_key(enrichment.raw_data, 'ipResolution', 'name')
            if resolution is not None:
                enrichment.add_illuminate_context('IpResolution', resolution)
                extra_context['DNS'] = resolution
            enrichment.generate_reputation_context('Name', domain, 'domain', 'Domain', extra_context)
        results.append(enrichment)
    return results
def email_command(client: Client, args: dict) -> List[EnrichmentOutput]:
    """Enrich one or more email addresses, adding Email/DBotScore reputation context."""
    results: List[EnrichmentOutput] = []
    for email in argToList(args.get('email')):
        enrichment = client.enrich_indicator(email, 'email')
        if enrichment.has_context_data():
            enrichment.generate_reputation_context('From', email, 'email', 'Email')
        results.append(enrichment)
    return results
def ip_command(client: Client, args: dict) -> List[EnrichmentOutput]:
ips: List[str] = argToList(args.get('ip'))
enrichment_data_list: List[EnrichmentOutput] = []
for ip in ips:
enrichment_data: EnrichmentOutput = client.enrich_indicator(ip, 'ip')
if enrichment_data.has_context_data():
enrichment_data.generate_reputation_context('Address', ip, 'ip', 'IP')
enrichment_data_list.append(enrichment_data)
return enrichment_data_list
def file_command(client: Client, args: dict) -> List[EnrichmentOutput]:
files: List[str] = argToList(args.get('file'))
enrichment_data_list: List[EnrichmentOutput] = []
for file in files:
enrichment_data: EnrichmentOutput = client.enrich_indicator(file, 'file')
if enrichment_data.has_context_data():
hash_type = get_hash_type(file)
if hash_type != 'Unknown':
enrichment_data.generate_reputation_context(hash_type.upper(), file, 'file', 'File')
enrichment_data_list.append(enrichment_data)
return enrichment_data_list
def illuminate_enrich_string_command(client: Client, args: dict) -> List[EnrichmentOutput]:
strings: List[str] = argToList(args.get('string'))
enrichment_data_list: List[EnrichmentOutput] = []
for string in strings:
enrichment_data_list.append(client.enrich_indicator(string, 'string'))
return enrichment_data_list
def illuminate_enrich_ipv6_command(client: Client, args: dict) -> List[EnrichmentOutput]:
ips: List[str] = argToList(args.get('ip'))
enrichment_data_list: List[EnrichmentOutput] = []
for ip in ips:
enrichment_data_list.append(client.enrich_indicator(ip, 'ipv6'))
return enrichment_data_list
def illuminate_enrich_mutex_command(client: Client, args: dict) -> List[EnrichmentOutput]:
mutexes: List[str] = argToList(args.get('mutex'))
enrichment_data_list: List[EnrichmentOutput] = []
for mutex in mutexes:
enrichment_data_list.append(client.enrich_indicator(mutex, 'mutex'))
return enrichment_data_list
def illuminate_enrich_http_request_command(client: Client, args: dict) -> List[EnrichmentOutput]:
http_requests: List[str] = argToList(args.get('http-request'))
enrichment_data_list: List[EnrichmentOutput] = []
for http_request in http_requests:
enrichment_data_list.append(client.enrich_indicator(http_request, 'httpRequest'))
return enrichment_data_list
def url_command(client: Client, args: dict) -> List[EnrichmentOutput]:
urls: List[str] = argToList(args.get('url'))
enrichment_data_list: List[EnrichmentOutput] = []
for url in urls:
enrichment_data: EnrichmentOutput = client.enrich_indicator(url, 'url')
if enrichment_data.has_context_data():
enrichment_data.generate_reputation_context('Data', url, 'url', 'URL')
enrichment_data_list.append(enrichment_data)
return enrichment_data_list
''' EXECUTION '''
def main():
commands = {
'domain': domain_command,
'email': email_command,
'file': file_command,
'ip': ip_command,
'url': url_command,
'illuminate-enrich-string': illuminate_enrich_string_command,
'illuminate-enrich-ipv6': illuminate_enrich_ipv6_command,
'illuminate-enrich-mutex': illuminate_enrich_mutex_command,
'illuminate-enrich-http-request': illuminate_enrich_http_request_command
}
command: str = demisto.command()
LOG(f'command is {command}')
try:
client = build_client(demisto.params())
if command == 'test-module':
perform_test_module(client)
demisto.results('ok')
elif command in commands:
enrichment_outputs: List[EnrichmentOutput] = commands[command](client, demisto.args())
[e.return_outputs() for e in enrichment_outputs]
except Exception as e:
err_msg = f'Error in {INTEGRATION_NAME} Integration [{e}]\nTrace:\n{traceback.format_exc()}'
return_error(err_msg, error=e)
if __name__ in ['__main__', 'builtin', 'builtins']:
main()
| mit | 005b63ed8450d56e69433e0636405423 | 36.981865 | 128 | 0.63836 | 3.489883 | false | false | false | false |
demisto/content | Utils/test_upload_flow/TestUploadFlow/Integrations/TestUploadFlow/TestUploadFlow.py | 2 | 2641 | from typing import Dict
import demistomock as demisto # noqa: F401
from CommonServerPython import *
class Client(BaseClient): # type: ignore
pass
def test_module(client: Client) -> str:
"""Tests API connectivity and authentication'
Returning 'ok' indicates that the integration works like it is supposed to.
Connection to the service is successful.
Raises exceptions if something goes wrong.
:type client: ``Client``
:param Client: client to use
:return: 'ok' if test passed, anything else will fail the test.
:rtype: ``str``
"""
message: str = ''
try:
message = 'ok'
except DemistoException as e: # type: ignore
if 'Forbidden' in str(e) or 'Authorization' in str(e):
message = 'Authorization Error: make sure API Key is correctly set'
else:
raise e
return message
def main() -> None:
"""main function, parses params and runs command functions
:return:
:rtype:
"""
# api_key = demisto.params().get('credentials', {}).get('password')
# get the service API url
base_url = urljoin(demisto.params()['url'], '/api/v1') # type: ignore
# if your Client class inherits from BaseClient, SSL verification is
# handled out of the box by it, just pass ``verify_certificate`` to
# the Client constructor
verify_certificate = not demisto.params().get('insecure', False)
# if your Client class inherits from BaseClient, system proxy is handled
# out of the box by it, just pass ``proxy`` to the Client constructor
proxy = demisto.params().get('proxy', False)
demisto.debug(f'Command being called is {demisto.command()}')
try:
# (i.e. "Authorization": {api key})
headers: Dict = {}
client = Client(
base_url=base_url,
verify=verify_certificate,
headers=headers,
proxy=proxy)
if demisto.command() == 'test-module':
# This is the call made when pressing the integration Test button.
result = test_module(client)
return_results(result) # type: ignore
elif demisto.command() == 'hello':
return_results(CommandResults( # type: ignore
outputs_prefix='TestUploadFlow',
outputs_key_field='',
outputs="hello",
))
# Log exceptions and return errors
except Exception as e:
return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}') # type: ignore
''' ENTRY POINT '''
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
| mit | 0964e862d1cd2326d8b112e32f44fc37 | 29.011364 | 105 | 0.61454 | 4.159055 | false | true | false | false |
demisto/content | Packs/BreachRx/Integrations/BreachRx/BreachRx.py | 2 | 8796 | from CommonServerPython import * # noqa # pylint: disable=unused-wildcard-import
from CommonServerUserPython import * # noqa
from collections.abc import Callable
import requests
import traceback
from urllib.parse import urlparse
from gql import gql, Client
from gql.transport.requests import RequestsHTTPTransport
from requests.auth import HTTPBasicAuth
# Disable insecure warnings
requests.packages.urllib3.disable_warnings() # pylint: disable=no-member
create_incident_mutation = gql("""
mutation CreateIncident(
$severity: String!,
$name: String!,
$type: String!,
$description: String
) {
createIncident(
type: $type,
severity: $severity,
name: $name,
description: $description
) {
id
name
severity {
name
}
types {
type {
name
}
}
description
identifier
}
}""")
get_incident_severities = gql("""{
incidentSeverities {
name
ordering
}
}""")
get_incident_types = gql("""{
types {
name
}
}""")
get_actions_for_incident = gql("""
query GetActionsForIncident($incidentId: Int!) {
actions(where: {
incidentId: {
equals: $incidentId
}
}) {
id
name
description
phase {
name
id
}
user {
fullName
email
}
}
}""")
get_incident_by_name = gql("""
query GetIncidentByName($name: String, $identifier: String) {
incidents(first: 1, where: {
name: {
contains: $name,
mode: insensitive
}
identifier: {
contains: $identifier,
mode: insensitive
}
}) {
id
name
severity {
name
}
types {
type {
name
}
}
description
identifier
}
}""")
class BreachRxClient:
def __init__(self, base_url: str, api_key: str, secret_key: str, org_name: str, verify: bool):
self.api_key = api_key
self.secret_key = secret_key
self.org_name = org_name
auth = HTTPBasicAuth(api_key, secret_key)
transport = RequestsHTTPTransport(
url=base_url,
auth=auth,
headers={"orgname": org_name},
timeout=60,
verify=verify
)
self.client = Client(
transport=transport, fetch_schema_from_transport=False
)
def get_incident_severities(self):
return self.client.execute(get_incident_severities)["incidentSeverities"]
def get_incident_types(self):
return self.client.execute(get_incident_types)["types"]
def create_incident(self, name: Optional[str], description: Optional[str]):
severities = self.get_incident_severities()
types = self.get_incident_types()
params = {
"severity": severities[0]["name"],
"name": name,
"type": types[0]["name"],
"description": description
}
return self.client.execute(create_incident_mutation, params)["createIncident"]
def get_incident(self, name: Optional[str], identifier: Optional[str]):
params = {
"name": name,
"identifier": identifier
}
results = self.client.execute(get_incident_by_name, params)['incidents']
if results:
return results.pop()
else:
return None
def get_actions_for_incident(self, incident_id):
params = {
"incidentId": incident_id
}
return self.client.execute(get_actions_for_incident, params)["actions"]
def test_module(client: BreachRxClient):
try:
client.get_incident_severities()
return "ok"
except Exception:
raise Exception("Authorization Error: make sure your API Key and Secret Key are correctly set")
def create_incident_command(
client: BreachRxClient,
incident_name: str = None,
description: str = None
) -> CommandResults:
if not incident_name:
incident_name = demisto.incident().get("name")
if not description:
description = (
f"""An Incident copied from the Palo Alto Networks XSOAR platform.
<br>
<br>
XSOAR Incident Name: {demisto.incident().get('name')}"""
)
response = client.create_incident(incident_name, description)
incident_name = response["name"]
return CommandResults(
outputs_prefix="BreachRx.Incident",
outputs_key_field="id",
outputs=response,
raw_response=response,
readable_output=f"Incident created with name={incident_name}."
)
def get_incident_actions_command(
client: BreachRxClient,
incident_name: str = None,
incident_identifier: str = None
) -> Union[CommandResults, str]:
incidents = demisto.dt(demisto.context(), 'BreachRx.Incident')
if not incidents:
if not incident_name and not incident_identifier:
raise Exception(
"Error: No BreachRx privacy Incident associated with this Incident,"
" and no Incident search terms provided."
)
incidents = [client.get_incident(incident_name, incident_identifier)]
if not incidents:
raise Exception("Error: No BreachRx privacy Incident found using the search terms provided.")
if type(incidents) is not list:
incidents = [incidents]
for incident in incidents:
incident["actions"] = client.get_actions_for_incident(incident['id'])
for action in incident["actions"]:
action["phase_name"] = action["phase"]["name"]
readable_output = ""
for incident in incidents:
actions_markdown_table = tableToMarkdown("Actions", incident["actions"], headers=["name", "phase_name"])
readable_output += f"# {incident['name']} ({incident['id']})\n" + actions_markdown_table + "\n"
return CommandResults(
outputs_prefix="BreachRx.Incident",
outputs_key_field="id",
outputs=incidents,
raw_response=incidents,
readable_output=readable_output
)
def import_incident_command(
client: BreachRxClient,
incident_name: str = None,
incident_identifier: str = None
) -> Union[CommandResults, str]:
incident = client.get_incident(incident_name, incident_identifier)
if not incident:
raise Exception("Error: No BreachRx privacy Incident found using the search terms provided.")
return CommandResults(
outputs_prefix="BreachRx.Incident",
outputs_key_field="id",
outputs=incident,
raw_response=incident,
readable_output=f"Incident imported with name={incident.get('name')}."
)
def get_incident_command(
client: BreachRxClient,
incident_name: str = None,
incident_identifier: str = None
) -> Union[CommandResults, str]:
incident = client.get_incident(incident_name, incident_identifier)
if incident:
return CommandResults(
raw_response=incident,
readable_output=f'Incident found with name="{incident.get("name")}" and identifier="{incident.get("identifier")}".'
)
else:
return "No Incident found with those search terms."
COMMANDS = {
"test-module": test_module,
"breachrx-incident-create": create_incident_command,
"breachrx-incident-actions-get": get_incident_actions_command,
"breachrx-incident-import": import_incident_command,
"breachrx-incident-get": get_incident_command,
}
def is_valid_url(url):
try:
result = urlparse(url)
return all([result.scheme, result.netloc])
except ValueError:
return False
def main() -> None: # pragma: no cover
try:
base_url = demisto.params()["api_url"]
org_name = demisto.params()["url"].split(".")[0].replace("https://", "")
api_key = demisto.params().get("credentials", {}).get("identifier")
secret_key = demisto.params().get("credentials", {}).get("password")
verify = demisto.params().get("insecure", False)
if not is_valid_url(base_url):
raise Exception("The GraphQL API URL is not a valid URL.")
if not is_valid_url(demisto.params()["url"]):
raise Exception("The BreachRx instance URL is not a valid URL.")
client = BreachRxClient(base_url, api_key, secret_key, org_name, verify)
command_func: Any[Callable, None] = COMMANDS.get(demisto.command())
if command_func:
return_results(command_func(client, **demisto.args()))
else:
raise NotImplementedError(f'{demisto.command()} command is not implemented.')
except Exception as e:
demisto.error(traceback.format_exc())
return_error(f"Failed to execute {demisto.command()} command.\nError:\n{str(e)}")
if __name__ in ("__main__", "__builtin__", "builtins"):
main()
| mit | 6cd0c82d88e0959f5d9bb8af1a88408e | 26.232198 | 127 | 0.620282 | 3.895483 | false | false | false | false |
demisto/content | Packs/EmailCommunication/Scripts/SummarizeEmailThreads/SummarizeEmailThreads.py | 2 | 4870 | import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
no_entries_message = """<!DOCTYPE html>
<html>
<body>
<h3>This Incident does not contain any email threads yet.</h3>
</body>
</html>
"""
def fetch_email_threads(incident_id):
"""Fetch all Email Threads stored in the current incident context
Args:
incident_id (str): The incident ID
Returns:
list of dicts. List of email thread entries
"""
# Get current email threads from context if any are present
try:
incident_context = demisto.executeCommand("getContext", {'id': incident_id})
threads = dict_safe_get(incident_context[0], ['Contents', 'context', 'EmailThreads'])
if not threads:
return None
# Return a list of dicts, even if there is only one thread entry and the context item is a list
threads = [threads] if isinstance(threads, dict) else threads
return threads
except Exception as e:
return_error(f'An exception occurred while fetching email threads: {e}')
def format_threads(email_threads):
"""Process list of email thread context entries and return a table in MD format
Args:
email_threads (list of dicts): List of email thread entry dictionaries
Returns:
md (str): Markdown formatted table
"""
try:
thread_summary_rows: Dict = {}
for thread_entry in email_threads:
# Loop through thread entries.
# Trim off thread ID code for readability
thread_number = int(thread_entry['EmailCommsThreadNumber'])
email_original_subject = thread_entry['EmailSubject'].split('<')[-1].split('>')[1].strip()
cc_addresses = thread_entry['EmailCC']
bcc_addresses = thread_entry['EmailBCC']
recipients = thread_entry['EmailTo']
if thread_number in thread_summary_rows:
# Table row already exists for this thread - just append recipients, if needed
thread_recipients = thread_summary_rows[thread_number]['Recipients']
thread_cc = thread_summary_rows[thread_number]['CC']
thread_bcc = thread_summary_rows[thread_number]['BCC']
for recipient in recipients.split(","):
if recipient not in thread_recipients:
thread_summary_rows[thread_number]['Recipients'] += f', {recipient}'
if cc_addresses and len(cc_addresses) > 0:
for cc_address in cc_addresses.split(","):
if cc_address not in thread_cc and len(thread_cc) == 0:
thread_summary_rows[thread_number]['CC'] = cc_address
elif cc_address not in thread_cc:
thread_summary_rows[thread_number]['CC'] += f', {cc_address}'
if bcc_addresses and len(bcc_addresses) > 0:
for bcc_address in bcc_addresses.split(","):
if bcc_address not in thread_bcc and len(thread_bcc) == 0:
thread_summary_rows[thread_number]['BCC'] = bcc_address
elif bcc_address not in thread_bcc:
thread_summary_rows[thread_number]['BCC'] += f', {bcc_address}'
else:
table_row = {
'Thread Number': thread_number,
'Subject': email_original_subject,
'Recipients': recipients,
'CC': cc_addresses,
'BCC': bcc_addresses
}
thread_summary_rows[thread_number] = table_row
# Convert dict of dict into list of dicts for MD formatting
thread_summary_table = [entry for entry in thread_summary_rows.values()]
table_name = 'Email Thread List'
table_headers = ['Thread Number', 'Subject', 'Recipients', 'CC', 'BCC']
md = tableToMarkdown(name=table_name, t=thread_summary_table, headers=table_headers)
return md
except Exception as e:
return_error(f'An exception occurred while generating the thread summary table: {e}')
def main():
incident = demisto.incident()
incident_id = incident.get('id')
email_threads = fetch_email_threads(incident_id)
if email_threads:
thread_summary_rows = format_threads(email_threads)
else:
return_results({
'ContentsFormat': EntryFormat.HTML,
'Type': EntryType.NOTE,
'Contents': no_entries_message
})
return None
demisto.results({
'ContentsFormat': EntryFormat.TABLE,
'Type': EntryType.NOTE,
'Contents': thread_summary_rows,
'HumanReadable': thread_summary_rows
})
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
| mit | cca6fef6bb5b3a2f761848a2332d196e | 38.274194 | 103 | 0.585216 | 4.30212 | false | false | false | false |
demisto/content | Packs/HealthCheck/Scripts/HealthCheckWorkers/HealthCheckWorkers.py | 2 | 4112 | import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
from datetime import datetime
DESCRIPTION = '{} busy workers has reached {} of total workers'
RESOLUTION = 'Performance Tuning of Cortex XSOAR Server: https://docs.paloaltonetworks.com/cortex/cortex-xsoar/6-0/' \
'cortex-xsoar-admin/cortex-xsoar-overview/performance-tuning-of-cortex-xsoar-server'
def analyze_data(res):
workers_thresholds = {
0.9: 'High',
0.8: 'Medium',
0.5: 'Low',
}
for threshold, severity in workers_thresholds.items():
if res['Busy'] > res['Total'] * threshold:
return [{
'category': 'Workers analysis',
'severity': severity,
'description': DESCRIPTION.format(res['Busy'], threshold),
'resolution': RESOLUTION[0],
}]
return []
def nano_to_secs(table):
for entry in table:
secs = int(entry['Duration'] / 1000000000)
if secs < 60:
entry['Duration'] = str(secs) + " Seconds"
else:
minutes = int(secs / 60)
mod_sec = secs % 60
entry['Duration'] = "{} Minutes and {} seconds".format(str(minutes), mod_sec)
def format_time(table):
for entry in table:
startedAt = datetime.strptime(entry['StartedAt'][:-4], '%Y-%m-%dT%H:%M:%S.%f')
entry['StartTime'] = startedAt.strftime("%Y-%m-%d %H:%M:%S")
def format_details(table):
for entry in table:
details = entry['Details']
getdetails = re.compile(
r'task \[(?P<taskid>[\d]+)\]\s\[(?P<taskname>[\w\d\s!@#$%^&*()_+-={}]+)], playbook\s \
\[(?P<pbname>[\w\d\s!@#$%^&*()_+-={}]+)],\sinvestigation\s\[(?P<investigationid>[\d]+)\]')
all_images = [m.groups() for m in getdetails.finditer(details)]
for item in all_images:
newdetails = {"TaskID": item[0], "TaskName": item[1], 'PlaybookName': item[2],
'InvestigationID': item[3]}
entry['TaskID'] = newdetails['TaskID']
entry['TaskName'] = newdetails['TaskName']
entry['PlaybookName'] = newdetails['PlaybookName']
entry['InvestigationID'] = newdetails['InvestigationID']
def main(args):
incident = demisto.incident()
account_name = incident.get('account')
account_name = f"acc_{account_name}/" if account_name != "" else ""
is_widget = argToBoolean(args.get('isWidget', True))
if is_widget is True:
workers = demisto.executeCommand("demisto-api-get", {"uri": f"{account_name}workers/status"})[0]['Contents']
if not workers['response']['ProcessInfo']:
table = [{'Details': '-', 'Duration': '-', 'StartedAt': '-'}]
else:
table = workers['response']['ProcessInfo']
nano_to_secs(table)
format_time(table)
format_details(table)
md = tableToMarkdown('Workers Status', table, headers=[
'InvestigationID', 'PlaybookName', 'TaskID', 'TaskName', 'StartTime', 'Duration'])
dmst_entry = {'Type': entryTypes['note'],
'Contents': md,
'ContentsFormat': formats['markdown'],
'HumanReadable': md,
'ReadableContentsFormat': formats['markdown'],
'EntryContext': {'workers': table}}
return dmst_entry
else:
workers = demisto.executeCommand("demisto-api-get", {"uri": "/workers/status"})[0]['Contents']
demisto.executeCommand("setIncident", {
'healthcheckworkerstotal': workers['response']['Total'],
'healthcheckworkersbusy': workers['response']['Busy']
})
add_actions = analyze_data(workers['response'])
results = CommandResults(
readable_output="HealthCheckWorkers Done",
outputs_prefix="HealthCheck.ActionableItems",
outputs=add_actions)
return results
if __name__ in ('__main__', '__builtin__', 'builtins'): # pragma: no cover
return_results(main(demisto.args()))
| mit | b88a550b0a9f4031f4e91e2dc30ce78f | 37.792453 | 118 | 0.566391 | 3.786372 | false | false | false | false |
demisto/content | Packs/DigitalGuardian/Integrations/DigitalGuardian/DigitalGuardian.py | 2 | 20822 | from CommonServerPython import *
import json
import requests
import urllib3
from typing import List, Dict, Any, Union
# Disable insecure warnings
urllib3.disable_warnings()
'''Constants'''
CATEGORY = 'alarm'  # only focus on alarms
# OAuth2 token endpoint is built from the configured auth server URL
AUTH_SERVER = demisto.getParam('auth_url')
AUTH_URL = AUTH_SERVER + '/as/token.oauth2'
# All ARC REST calls target API version 1.0 under the configured ARC URL
ARC_URL = demisto.getParam('arc_url')
ARC_URL += '/rest/1.0'
CLIENT_ID = demisto.getParam('client_id')
CLIENT_SECRET = demisto.getParam('client_secret')
# 'insecure' integration param disables TLS certificate verification when True
VERIFY_CERT = not demisto.params().get('insecure', False)
AUTH_HEADERS = {'Content-Type': 'application/x-www-form-urlencoded'}
# Authorization value is populated at runtime by request_api_token()
CLIENT_HEADERS = {'Authorization': ''}
PROXY = demisto.getParam('proxy')
def request_api_token():
    """Request an OAuth2 client-credentials token and store it in CLIENT_HEADERS.

    On success the global CLIENT_HEADERS 'Authorization' value becomes
    'Bearer <token>'; on failure the command is aborted via return_error.
    """
    token_request = {
        'client_id': CLIENT_ID,
        'client_secret': CLIENT_SECRET,
        'grant_type': 'client_credentials',
        'scope': 'client'
    }
    response = requests.post(url=AUTH_URL, headers=AUTH_HEADERS, data=token_request, verify=VERIFY_CERT)
    body = response.json()
    if not 200 <= response.status_code <= 299:
        return_error(f'Error in request_api_token [{response.status_code}] - {response.reason}')
    else:
        CLIENT_HEADERS['Authorization'] = 'Bearer ' + body['access_token']
def test_module():
    """Verify connectivity to ARC by listing watchlists; report 'ok' on success."""
    response = requests.get(url=ARC_URL + '/watchlists', headers=CLIENT_HEADERS, verify=VERIFY_CERT)
    try:
        if response.text:
            _ = response.json()
        if not response.ok:
            return_error(f'Cannot connect to ARC, Response {response.status_code}: {response.text}')
        demisto.results('ok')
    except TypeError as err:
        return_error(str(err))
def get_watchlist_id(watchlist_name: str) -> str:
    """Resolve a watchlist display name (case-insensitive) to its internal id.

    :param watchlist_name: display name of the watchlist to look up
    :return: the watchlist id as a string; aborts via return_error on HTTP
             failure or when no watchlist matches
    """
    response = requests.get(url=ARC_URL + '/watchlists/', headers=CLIENT_HEADERS, verify=VERIFY_CERT)
    watchlists = json.loads(response.text)
    if not 200 <= response.status_code <= 299:
        return_error(f'Error retrieving watchlist_id for {watchlist_name}, {response.status_code}: {response.text}')
    wanted = watchlist_name.lower()
    list_id = None
    for watchlist in watchlists:
        # last match wins, mirroring the sequential scan
        if watchlist.get('display_name').lower() == wanted:
            list_id = watchlist.get('name')
    if not list_id:
        return_error(f'Unable to find watchlist_id for {watchlist_name}')
    return str(list_id)
def get_list_id(list_name: str, list_type: str) -> str:
    """Resolve a list name (case-insensitive) of the given type to its id.

    :param list_name: display name of the list
    :param list_type: ARC list type path segment (e.g. 'component_list')
    :return: the list id as a string; aborts via return_error on HTTP failure
             or when the name is not found
    """
    response = requests.get(url=ARC_URL + '/lists/' + list_type, headers=CLIENT_HEADERS, verify=VERIFY_CERT)
    entries = json.loads(response.text)
    if not 200 <= response.status_code <= 299:
        return_error(f'Error retrieving list_id for {list_name}, {response.status_code}: {response.text}')
    target = list_name.lower()
    list_id = None
    for entry in entries:
        if str(entry['name']).lower() == target:
            list_id = entry['id']
    if not list_id:
        return_error(f'List id not found for name {list_name} and type {list_type}')
    return str(list_id)
def get_watchlist_entry_id(watchlist_name: str, watchlist_entry: str) -> str:
    """
    Get watchlist entry id by list name and entry.

    :param watchlist_name: display name of the watchlist to search
    :param watchlist_entry: entry value to look up (case-insensitive)
    :return: the entry's value_id, or an empty string when the entry is absent
    """
    if watchlist_name is None or watchlist_entry is None:
        return_error('Please provide both watchlist_name and watchlist_entry')
    watchlist_entry_id = None
    watchlist_id = get_watchlist_id(watchlist_name)
    if watchlist_id:
        full_url = ARC_URL + '/watchlists/'
        r = requests.get(url=full_url + watchlist_id + '/values?limit=100000', headers=CLIENT_HEADERS,
                         verify=VERIFY_CERT)
        json_text = json.loads(r.text)
        if r.status_code != requests.codes.ok:
            return_error('Unable to retrieve watchlist entries')
        for jText in json_text:
            if str(jText['value_name']).lower() == watchlist_entry.lower():
                watchlist_entry_id = jText['value_id']
    # Bug fix: str(None) == 'None' is truthy, which made callers believe a
    # missing entry had been found. Return an empty string for "not found".
    return str(watchlist_entry_id) if watchlist_entry_id is not None else ''
def add_entry_to_componentlist():
    """
    Add componentlist_entry to the component list identified by componentlist_name.

    Both values come from the command arguments; failures abort via return_error.
    """
    componentlist_name = demisto.args().get('componentlist_name', None)
    componentlist_entry = demisto.args().get('componentlist_entry', None)
    if componentlist_name is None or componentlist_entry is None:
        return_error('Please provide both componentlist_name and componentlist_entry')
    else:
        list_id = get_list_id(componentlist_name, 'component_list')
        CLIENT_HEADERS['Content-Type'] = 'application/json'
        if list_id:
            full_url = ARC_URL + '/remediation/lists/'
            # build the JSON body by hand to match the ARC remediation API shape
            list_entry_json = '{"items":["' + componentlist_entry + '"]}'
            r = requests.put(url=full_url + list_id + '/append', headers=CLIENT_HEADERS, data=list_entry_json,
                             verify=VERIFY_CERT)
            if 200 <= r.status_code <= 299:
                demisto.results('added componentlist entry ({}) to componentlist name ({})'.format(componentlist_entry,
                                                                                                  componentlist_name))
            else:
                return_error(
                    'Failed to add componentlist entry({}) to componentlist name ({}). The response failed with status '
                    'code {}. The '
                    'response was: {}'.format(componentlist_entry, componentlist_name, r.status_code, r.text))
        else:
            # Bug fix: .format() was previously applied to the result of
            # return_error(), so the message placeholder was never filled.
            return_error('Failed to find componentlist name ({})'.format(componentlist_name))
def check_componentlist_entry():
    """
    Check whether componentlist_entry exists in the component list named
    componentlist_name and set the DigitalGuardian.Componentlist.Found flag.
    """
    componentlist_name = demisto.args().get('componentlist_name', None)
    componentlist_entry = demisto.args().get('componentlist_entry', None)
    if componentlist_name is None or componentlist_entry is None:
        return_error('Please provide both componentlist_name and componentlist_entry')
    componentlist = None
    list_id = get_list_id(componentlist_name, 'component_list')
    if list_id:
        full_url = ARC_URL + '/lists/'
        r = requests.get(url=full_url + list_id + '/values?limit=100000', headers=CLIENT_HEADERS, verify=VERIFY_CERT)
        json_text = json.loads(r.text)
        if 200 <= r.status_code <= 299:
            for jText in json_text:
                if str(jText['content_value']).lower() == componentlist_entry.lower():
                    componentlist = jText['content_value']
        else:
            return_error(f'Unable to find componentlist named {componentlist_name}, {r.status_code}')
    if componentlist:
        # Bug fix: the found branch previously reported raw_response
        # 'Componentlist entry not found' (copy-paste from the else branch).
        return_outputs(readable_output='Componentlist found', outputs={
            'DigitalGuardian.Componentlist.Found': True}, raw_response='Componentlist entry found')
    else:
        return_outputs(readable_output='Componentlist not found', outputs={
            'DigitalGuardian.Componentlist.Found': False}, raw_response='Componentlist entry not found')
def rm_entry_from_componentlist():
    """
    Remove componentlist_entry from the component list named componentlist_name.

    Failures (missing args, missing list, HTTP error) abort via return_error.
    """
    componentlist_name = demisto.args().get('componentlist_name', None)
    componentlist_entry = demisto.args().get('componentlist_entry', None)
    if componentlist_name is None or componentlist_entry is None:
        # Consistency fix: sibling commands say "both"; "either ... and" was a typo.
        return_error('Please provide both componentlist_name and componentlist_entry')
    list_id = get_list_id(componentlist_name, 'component_list')
    full_url = ARC_URL + '/remediation/lists/'
    CLIENT_HEADERS['Content-Type'] = 'application/json'
    list_entry_json = '{"items":["' + componentlist_entry + '"]}'
    r = requests.post(url=full_url + list_id + '/delete', headers=CLIENT_HEADERS, data=list_entry_json,
                      verify=VERIFY_CERT)
    if 200 <= r.status_code <= 299:
        demisto.results('removed componentlist entry ({}) from componentlist name ({})'.format(componentlist_entry,
                                                                                              componentlist_name))
    else:
        return_error(
            'Failed to remove componentlist entry({}) from componentlist name ({}). The response failed with '
            'status code {}. The response was: {}'.format(componentlist_entry, componentlist_name, r.status_code,
                                                          r.text))
def add_entry_to_watchlist():
    """Append watchlist_entry to the watchlist named watchlist_name."""
    watchlist_name = demisto.args().get('watchlist_name', None)
    watchlist_entry = demisto.args().get('watchlist_entry', None)
    if watchlist_name is None or watchlist_entry is None:
        return_error('Please provide both watchlist_name and watchlist_entry')
    watchlist_id = get_watchlist_id(watchlist_name)
    # ARC expects a JSON array of value objects
    payload = '[{"value_name":"' + watchlist_entry + '"}]'
    response = requests.post(url=ARC_URL + '/watchlists/' + watchlist_id + '/values/', data=payload,
                             headers=CLIENT_HEADERS, verify=VERIFY_CERT)
    if 200 <= response.status_code <= 299:
        demisto.results('added watchlist entry ({}) to watchlist name ({})'.format(watchlist_entry, watchlist_name))
    else:
        return_error(
            'Failed to add watchlist entry({}) to watchlist name ({}). The response failed with status code {}. '
            'The response was: {}'.format(watchlist_entry, watchlist_name, response.status_code, response.text))
def check_watchlist_entry():
    """
    Check whether watchlist_entry exists in the watchlist named watchlist_name
    and set the DigitalGuardian.Watchlist.Found flag accordingly.
    """
    watchlist_name = demisto.args().get('watchlist_name', None)
    watchlist_entry = demisto.args().get('watchlist_entry', None)
    if watchlist_name is None or watchlist_entry is None:
        return_error('Please provide both watchlist_name and watchlist_entry')
    watchlist_entry_id = get_watchlist_entry_id(watchlist_name, watchlist_entry)
    # Bug fix: get_watchlist_entry_id stringifies its result, so a missing
    # entry can come back as the truthy literal 'None'; treat that as absent.
    if watchlist_entry_id and watchlist_entry_id != 'None':
        return_outputs(readable_output='Watchlist found', outputs={'DigitalGuardian.Watchlist.Found': True},
                       raw_response='Watchlist found')
    else:
        return_outputs(readable_output='Watchlist not found', outputs={
            'DigitalGuardian.Watchlist.Found': False}, raw_response='Watchlist not found')
def rm_entry_from_watchlist():
    """Delete watchlist_entry from the watchlist named watchlist_name."""
    watchlist_name = demisto.args().get('watchlist_name', None)
    watchlist_entry = demisto.args().get('watchlist_entry', None)
    if watchlist_name is None or watchlist_entry is None:
        return_error('Please provide both watchlist_name and watchlist_entry')
    watchlist_id = get_watchlist_id(watchlist_name)
    watchlist_entry_id = get_watchlist_entry_id(watchlist_name, watchlist_entry)
    demisto.debug('wli= ' + str(watchlist_entry_id) + ' wld=' + str(watchlist_id))
    response = requests.delete(url=ARC_URL + '/watchlists/' + watchlist_id + '/values/' + watchlist_entry_id,
                               headers=CLIENT_HEADERS, verify=VERIFY_CERT)
    if 200 <= response.status_code <= 299:
        demisto.results(
            'removed watchlist entry ({}) from watchlist name ({})'.format(watchlist_entry, watchlist_name))
    else:
        return_error(
            'Failed to remove watchlist entry({}) from watchlist name ({}). The response failed with status code {}. '
            'The response was: {}'.format(watchlist_entry, watchlist_name, response.status_code, response.text))
def get_items_request():
    """
    Pull the next batch of events from the configured ARC export profile
    (the export_and_ack endpoint acknowledges the batch server-side) and
    convert qualifying rows into incident dicts via create_artifacts.

    :return: list of incident dicts (possibly empty)
    """
    incident_list = []
    oldname = ''  # last incident name produced; used to skip consecutive duplicates
    export_profile = demisto.params().get('export_profile', None)
    if export_profile is None:
        return_error('Export Profile parameter is required')
    full_url = ARC_URL + '/export_profiles/' + export_profile + '/export_and_ack'
    r = requests.post(url=full_url, headers=CLIENT_HEADERS, verify=VERIFY_CERT)
    json_text = json.loads(r.text)
    if r.status_code == 200:
        # The API returns column metadata under 'fields' and positional row
        # values under 'data'; rebuild each row as {column_name: value}.
        header_field = []
        for field in json_text['fields']:
            header_field.append(field['name'])
        exportdata = []
        if json_text['total_hits'] == 0:
            DEBUG('found no data')
        else:
            DEBUG('found data')
            for data in json_text['data']:
                entry_line = {}
                header_position = 0
                for dataValue in data:
                    entry_line[header_field[header_position]] = dataValue
                    header_position += 1
                exportdata.append(entry_line)
            for items in exportdata:
                # NOTE(review): parses as (not source == 'alert') and dg_tags
                # truthy, i.e. only non-'alert' detections that carry tags
                # become incidents — confirm this is the intended filter.
                if not (items['dg_alert.dg_detection_source']) == 'alert' and items['dg_tags']:
                    # Incident name: alarm name up to the first comma (first
                    # 100 chars when no comma) suffixed with the unique guid.
                    comm = items['dg_alarm_name'].find(',')
                    if comm == -1:
                        comm = 100
                    name = '{alarm_name}-{id}'.format(alarm_name=items['dg_alarm_name'][0:comm], id=items['dg_guid'])
                    DEBUG(name + " != " + oldname)
                    if name != oldname:
                        DEBUG("create_artifacts...")
                        artifacts_creation_msg = create_artifacts(alert=items)
                        if artifacts_creation_msg:
                            incident_list.append(artifacts_creation_msg)
                        oldname = name
        return incident_list
    else:
        return_error('DigitalGuardian ARC Export Failed '
                     'Please check authentication related parameters. ' + json.dumps(r.json(), indent=4,
                                                                                     sort_keys=True))
def convert_to_demisto_severity(dg_severity: str) -> int:
    """Map a DigitalGuardian severity label onto the demisto severity scale.

    :param dg_severity: 'Critical', 'High' or 'Medium'; anything else maps to 1 (Low).
    :return: demisto severity as an int in 1..4
    """
    if dg_severity == 'Critical':
        return 4
    if dg_severity == 'High':
        return 3
    if dg_severity == 'Medium':
        return 2
    return 1
def convert_to_demisto_class(was_classified: str) -> int:
    """Map the DigitalGuardian 'was classified' flag to an int.

    :param was_classified: the literal 'Yes' means classified.
    :return: 1 when classified, 0 otherwise
    """
    return 1 if was_classified == 'Yes' else 0
def convert_to_demisto_sensitivity(dg_classification: str) -> str:
    """Map a DigitalGuardian classification to a demisto sensitivity label.

    The decision is taken on the last three characters of the classification
    string; unknown suffixes (and empty/None input) map to 'none'.

    :param dg_classification: DigitalGuardian classification name
    :return: 'Critical', 'High', 'Medium', 'Low' or 'none'
    """
    if not dg_classification:
        return 'none'
    suffix_to_sensitivity = {
        'ext': 'Critical',
        'IGH': 'High',
        'MED': 'Medium',
        'LOW': 'Low',
    }
    return suffix_to_sensitivity.get(dg_classification[-3:], 'none')
def DEBUG(msg: str):
    """Emit *msg* to the demisto debug log wrapped in an eye-catching banner."""
    banner = "-----=====***** {} *****=====-----".format(msg)
    demisto.debug(banner)
def create_artifacts(alert):
    """
    Build a single demisto incident dict from one ARC export row.

    The CEF-style payload assembled from specific_alert_mapping is serialized
    into the incident's rawJSON field.

    :param alert: dict of ARC export columns for one event
    :return: incident dict; empty dict when the global CATEGORY has no mapping
    """
    artifacts_list = {}  # NOTE(review): despite the name, a single incident dict
    # CATEGORY -> {incident field: (ARC export column, demisto indicator types)}
    specific_alert_mapping = {
        'alarm': {
            'Alarm_Name': ('dg_alarm_name', []),
            'Alarm_Severity': ('dg_alarm_sev', []),
            'Threat_Type': ('dg_tags', []),
            'Detection_Name': ('dg_det_name', []),
            'Alert_Category': ('dg_alert.dg_category_name', []),
            'Policy_Name': ('dg_alert.dg_alert.dg_alert.dg_policy.dg_name', []),
            'Action_Was_Blocked': ('dg_alert.dg_hc', []),
            'File_Name': ('dg_src_file_name', ['fileName']),
            'File_Size': ('dg_alert.dg_total_size', ['fileSize']),
            'File_Was_Classified': ('dg_hc', []),
            'Classification': ('dg_class.dg_name', []),
            'File_Type': ('dg_src_file_ext', []),
            'File_Path': ('dg_alert.uad_sp', []),
            'Destination_File_Path': ('dg_alert.uad_dp', []),
            'Process_Name': ('dg_proc_file_name', []),
            'Parent_Process_Name': ('dg_parent_name', []),
            'Process_Path': ('pi_fp', []),
            'Command_Line': ('pi_cmdln', []),
            'MD5': ('dg_md5', ['filehash']),
            'SHA1': ('dg_sha1', ['filehash']),
            'SHA256': ('dg_sha256', ['filehash']),
            'VirusTotal_Status': ('dg_vt_status', []),
            'Attachment_File_Name': ('dg_attachments.dg_src_file_name', []),
            'Attachment_Was_Classified': ('dg_attachments.uad_sfc', []),
            'Email_Subject': ('ua_msb', []),
            'Email_Sender': ('ua_ms', []),
            'Email_Recipient': ('dg_recipients.uad_mr', []),
            'Email_Recipient_Domain': ('dg_recipients.dg_rec_email_domain', []),
            'Destination_Address': ('ua_ra', []),
            'Request_URL': ('ua_up', []),
            'Destination_DNS_Domain': ('ua_hn', []),
            'Remote_Port': ('ua_rp', []),
            'Computer_Name': ('dg_machine_name', []),
            'Computer_Type': ('dg_machine_type', []),
            'Source_Host_Name': ('dg_shn', []),
            'Source_IP': ('ua_sa', []),
            'Source_Address': ('ua_sa', []),
            'User_Name': ('dg_user', []),
            'NTDomain': ('ua_dn', []),
            'dgarcUID': ('dg_guid', []),
            'dg_process_time': ('dg_process_time', []),
            'Activity': ('dg_utype', []),
            'os_version': ('os_version', []),
            'Policy': ('dg_alert.dg_policy.dg_name', []),
            'Printer_Name': ('uad_pn', []),
            'os': ('os', []),
            'browser': ('browser', []),
            'App_Category': ('appcategory', []),
        }
    }
    DEBUG("before alert")
    DEBUG(json.dumps(alert))
    if CATEGORY in specific_alert_mapping:
        temp_dict: Dict[Union[str, Any], Union[Union[str, int], Any]] = {}
        cef: Dict[Union[str, Any], Union[Union[str, int], Any]] = {}
        cef_types = {}  # NOTE(review): populated below but never read — confirm it is intentional
        cef['Vendor ID'] = 'DG'
        cef['Vendor Product'] = 'Digital Guardian'
        cef['severity'] = convert_to_demisto_severity(alert['dg_alarm_sev'])
        cef['sensitivity'] = convert_to_demisto_sensitivity(alert['dg_class.dg_name'])
        DEBUG("cef: " + json.dumps(cef))
        # Copy every mapped column that is present (and truthy) in the alert.
        for artifact_key, artifact_tuple in specific_alert_mapping.get(CATEGORY).items():  # type: ignore
            if alert.get(artifact_tuple[0]):
                cef[artifact_key] = alert[artifact_tuple[0]]
                cef_types[artifact_key] = artifact_tuple[1]
        if cef:
            # Incident name: alarm name up to the first comma (first 100 chars
            # when no comma) suffixed with the unique guid.
            comm = alert['dg_alarm_name'].find(',')
            if comm == -1:
                comm = 100
            name = '{alarm_name}-{id}'.format(alarm_name=alert['dg_alarm_name'][0:comm], id=alert['dg_guid'])
            temp_dict['name'] = name
            temp_dict['severity'] = convert_to_demisto_severity(alert['dg_alarm_sev'])
            temp_dict['type'] = alert['dg_tags']
            temp_dict['occurred'] = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
            temp_dict['rawJSON'] = json.dumps(cef)
            artifacts_list.update(temp_dict)
    return artifacts_list
def fetch_incidents():
    """
    Fetch incidents from the ARC export profile and deliver them to the
    server via demisto.incidents().
    :return: None
    """
    incidents = []  # type: List
    demisto.debug(incidents)  # NOTE(review): logs the still-empty list before fetching
    incidents = get_items_request()
    DEBUG('fetching incidents')
    demisto.debug(incidents)
    demisto.incidents(incidents)
def main():
    """Route the invoked demisto command to its handler.

    Looks the command up in the dispatch table, refreshes the ARC API token,
    then runs the handler; any exception is surfaced via return_error.
    """
    commands = {
        'test-module': test_module,
        'fetch-incidents': fetch_incidents,
        'digitalguardian-add-watchlist-entry': add_entry_to_watchlist,
        'digitalguardian-check-watchlist-entry': check_watchlist_entry,
        'digitalguardian-remove-watchlist-entry': rm_entry_from_watchlist,
        'digitalguardian-add-componentlist-entry': add_entry_to_componentlist,
        'digitalguardian-check-componentlist-entry': check_componentlist_entry,
        'digitalguardian-remove-componentlist-entry': rm_entry_from_componentlist,
    }
    try:
        handle_proxy()
        command = demisto.command()
        LOG(f'Command being called is {command}')
        if command not in commands:
            return_error(f'Command "{command}" not implemented')
        # Authenticate once, then dispatch straight from the table.
        request_api_token()
        commands[command]()
    except Exception as e:
        return_error(e)
# Demisto invokes integration code with __name__ set to '__builtin__' (py2)
# or 'builtins' (py3) rather than '__main__', so all three are checked.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | 497fee97740d7483ed7d5f87a3c7d98e | 37.487985 | 120 | 0.584238 | 3.738241 | false | false | false | false |
demisto/content | Packs/Ansible_Powered_Integrations/Integrations/CiscoNXOS/CiscoNXOS.py | 2 | 18209 | import json
import traceback
from typing import Dict, cast
import ansible_runner
import demistomock as demisto # noqa: F401
import ssh_agent_setup
from CommonServerPython import * # noqa: F401
# Dict to Markdown Converter adapted from https://github.com/PolBaladas/torsimany/
def dict2md(json_block, depth=0):
    """Render a nested dict/list structure as Markdown text.

    Dicts become header/value sections and lists become bullet items; any
    other top-level value yields an empty string.
    """
    if isinstance(json_block, dict):
        return parseDict(json_block, depth)
    if isinstance(json_block, list):
        return parseList(json_block, depth)
    return ""
def parseDict(d, depth):
    """Render dict *d* as Markdown: nested containers get a header plus a
    recursively rendered body, scalars render as 'key: value' bullets."""
    parts = []
    for key in d:
        value = d[key]
        if isinstance(value, (dict, list)):
            parts.append(addHeader(key, depth))
            parts.append(dict2md(value, depth + 1))
        else:
            parts.append(buildValueChain(key, value, depth))
    return "".join(parts)
def parseList(rawlist, depth):
    """Render list *rawlist* as Markdown.

    Scalars become 'index: value' bullets; nested dicts/lists are rendered
    recursively.

    Fixes two defects of the previous implementation:
    * ``rawlist.index(value)`` returned the index of the *first* equal
      element, so duplicate values were all labelled with the same index
      (and made the loop O(n^2)); ``enumerate`` yields the true position.
    * nested containers were always passed to ``parseDict``, which iterates
      dict-style and breaks on lists; dispatching through ``dict2md``
      handles both shapes.
    """
    markdown = ""
    for index, value in enumerate(rawlist):
        if isinstance(value, (dict, list)):
            markdown += dict2md(value, depth)
        else:
            markdown += buildValueChain(index, value, depth)
    return markdown
def buildHeaderChain(depth):
    """Return the Markdown header template for *depth*, e.g. '# value #\\n';
    non-zero depths are prefixed with a list bullet."""
    hashes = '#' * (depth + 1)
    bullet = '* ' if depth else ''
    return '{}{} value {}\n'.format(bullet, hashes, hashes)
def buildValueChain(key, value, depth):
    """Return a Markdown bullet 'key: value'; indented by one level for
    every depth except depth == 1."""
    indent = "  " if depth - 1 else ""
    return "{}* {}: {}\n".format(indent, key, value)
def addHeader(value, depth):
    """Return a Markdown header line for *value* (title-cased) at *depth*."""
    template = buildHeaderChain(depth)
    return template.replace('value', value.title())
# Remove ansible branding from results
def rec_ansible_key_strip(obj):
    """Recursively remove the 'ansible_' branding from dict keys.

    Non-dict values (including lists) are returned unchanged.
    """
    if not isinstance(obj, dict):
        return obj
    return {key.replace('ansible_', ''): rec_ansible_key_strip(value)
            for key, value in obj.items()}
# COMMAND FUNCTIONS
def generic_ansible(integration_name, command, args: Dict[str, Any]) -> CommandResults:
    """Run the Ansible module *command* against the host(s) in *args* and
    wrap the runner events into a CommandResults.

    :param integration_name: prefix for the context output path
    :param command: name of the Ansible (nxos_*) module to execute
    :param args: demisto args; 'host' (list or csv string) and 'concurrency'
        are consumed here, all remaining args are forwarded as module args
    :return: CommandResults with a markdown summary and per-host outputs
    """
    readable_output = ""
    sshkey = ""
    fork_count = 1   # default to executing against 1 host at a time
    if args.get('concurrency'):
        fork_count = cast(int, args.get('concurrency'))
    # Build an in-memory Ansible inventory with every requested host.
    inventory: Dict[str, dict] = {}
    inventory['all'] = {}
    inventory['all']['hosts'] = {}
    if type(args['host']) is list:
        # host arg can be a array of multiple hosts
        hosts = args['host']
    else:
        # host arg could also be csv
        hosts = [host.strip() for host in args['host'].split(',')]
    for host in hosts:
        new_host = {}
        new_host['ansible_host'] = host
        # NOTE(review): a ':' is treated as a host:port separator — an IPv6
        # literal address would be mis-split here; confirm only IPv4/DNS
        # hosts are expected.
        if ":" in host:
            address = host.split(':')
            new_host['ansible_port'] = address[1]
            new_host['ansible_host'] = address[0]
        else:
            new_host['ansible_host'] = host
            if demisto.params().get('port'):
                new_host['ansible_port'] = demisto.params().get('port')
        # Cisco NXOS
        # Different credential options
        # SSH Key saved in credential manager selection
        sshkey = ""
        if demisto.params().get('creds', {}).get('credentials').get('sshkey'):
            username = demisto.params().get('creds', {}).get('credentials').get('user')
            sshkey = demisto.params().get('creds', {}).get('credentials').get('sshkey')
            new_host['ansible_user'] = username
        # Password saved in credential manager selection
        elif demisto.params().get('creds', {}).get('credentials').get('password'):
            username = demisto.params().get('creds', {}).get('credentials').get('user')
            password = demisto.params().get('creds', {}).get('credentials').get('password')
            new_host['ansible_user'] = username
            new_host['ansible_password'] = password
        # username/password individually entered
        else:
            username = demisto.params().get('creds', {}).get('identifier')
            password = demisto.params().get('creds', {}).get('password')
            new_host['ansible_user'] = username
            new_host['ansible_password'] = password
        # NXOS devices are driven over network_cli with privilege escalation
        # via 'enable'.
        new_host['ansible_connection'] = 'network_cli'
        new_host['ansible_network_os'] = 'nxos'
        new_host['ansible_become'] = 'yes'
        new_host['ansible_become_method'] = 'enable'
        inventory['all']['hosts'][host] = new_host
    module_args = ""
    # build module args list
    for arg_key, arg_value in args.items():
        # skip hardcoded host arg, as it doesn't related to module
        if arg_key == 'host':
            continue
        # NOTE(review): values are interpolated verbatim — a value containing
        # '"' would break the generated key="value" pair; confirm inputs.
        module_args += "%s=\"%s\" " % (arg_key, arg_value)
    r = ansible_runner.run(inventory=inventory, host_pattern='all', module=command, quiet=True,
                           omit_event_data=True, ssh_key=sshkey, module_args=module_args, forks=fork_count)
    results = []
    for each_host_event in r.events:
        # Troubleshooting
        # demisto.log("%s: %s\n" % (each_host_event['event'], each_host_event))
        if each_host_event['event'] in ["runner_on_ok", "runner_on_unreachable", "runner_on_failed"]:
            # parse results: stdout looks like 'host | STATUS => {json...}'
            result = json.loads('{' + each_host_event['stdout'].split('{', 1)[1])
            host = each_host_event['stdout'].split('|', 1)[0].strip()
            status = each_host_event['stdout'].replace('=>', '|').split('|', 3)[1]
            # if successful build outputs
            if each_host_event['event'] == "runner_on_ok":
                if 'fact' in command:
                    result = result['ansible_facts']
                else:
                    if result.get(command) is not None:
                        result = result[command]
                    else:
                        result.pop("ansible_facts", None)
                result = rec_ansible_key_strip(result)
                if host != "localhost":
                    readable_output += "# %s - %s\n" % (host, status)
                else:
                    # this integration is not host based
                    readable_output += "# %s\n" % status
                readable_output += dict2md(result)
                # add host and status to result
                result['host'] = host
                result['status'] = status
                results.append(result)
            if each_host_event['event'] == "runner_on_unreachable":
                msg = "Host %s unreachable\nError Details: %s" % (host, result)
                return_error(msg)
            if each_host_event['event'] == "runner_on_failed":
                msg = "Host %s failed running command\nError Details: %s" % (host, result)
                return_error(msg)
    return CommandResults(
        readable_output=readable_output,
        outputs_prefix=integration_name + '.' + command,
        outputs_key_field='',
        outputs=results
    )
# MAIN FUNCTION
def main() -> None:
    """main function, parses params and runs command functions

    Every ``nxos-*`` integration command maps 1:1 onto the Ansible module of
    the same name with dashes replaced by underscores, so the former
    74-branch elif chain is collapsed into a lookup against the set of
    supported commands (mirroring the dispatch-table style used elsewhere
    in this content pack). Unknown commands fall through silently, exactly
    as the old elif chain did.

    :return:
    :rtype:
    """
    # All supported integration commands; each runs the Ansible module
    # command.replace('-', '_') via generic_ansible().
    nxos_commands = {
        'nxos-aaa-server', 'nxos-aaa-server-host', 'nxos-acl', 'nxos-acl-interface',
        'nxos-banner', 'nxos-bfd-global', 'nxos-bfd-interfaces', 'nxos-bgp',
        'nxos-bgp-af', 'nxos-bgp-neighbor', 'nxos-bgp-neighbor-af', 'nxos-command',
        'nxos-config', 'nxos-evpn-global', 'nxos-evpn-vni', 'nxos-facts',
        'nxos-feature', 'nxos-gir', 'nxos-gir-profile-management', 'nxos-hsrp',
        'nxos-igmp', 'nxos-igmp-interface', 'nxos-igmp-snooping', 'nxos-install-os',
        'nxos-interface-ospf', 'nxos-interfaces', 'nxos-l2-interfaces',
        'nxos-l3-interfaces', 'nxos-lacp', 'nxos-lacp-interfaces',
        'nxos-lag-interfaces', 'nxos-lldp', 'nxos-lldp-global', 'nxos-logging',
        'nxos-ntp', 'nxos-ntp-auth', 'nxos-ntp-options', 'nxos-nxapi', 'nxos-ospf',
        'nxos-ospf-vrf', 'nxos-overlay-global', 'nxos-pim', 'nxos-pim-interface',
        'nxos-pim-rp-address', 'nxos-ping', 'nxos-reboot', 'nxos-rollback',
        'nxos-rpm', 'nxos-smu', 'nxos-snapshot', 'nxos-snmp-community',
        'nxos-snmp-contact', 'nxos-snmp-host', 'nxos-snmp-location',
        'nxos-snmp-traps', 'nxos-snmp-user', 'nxos-static-route', 'nxos-system',
        'nxos-telemetry', 'nxos-udld', 'nxos-udld-interface', 'nxos-user',
        'nxos-vlans', 'nxos-vpc', 'nxos-vpc-interface', 'nxos-vrf', 'nxos-vrf-af',
        'nxos-vrf-interface', 'nxos-vrrp', 'nxos-vtp-domain', 'nxos-vtp-password',
        'nxos-vtp-version', 'nxos-vxlan-vtep', 'nxos-vxlan-vtep-vni',
    }
    # SSH Key integration requires ssh_agent to be running in the background
    ssh_agent_setup.setup()
    try:
        command = demisto.command()
        if command == 'test-module':
            # This is the call made when pressing the integration Test button.
            return_results('ok')
        elif command in nxos_commands:
            return_results(generic_ansible('cisconx-os', command.replace('-', '_'), demisto.args()))
    # Log exceptions and return errors
    except Exception as e:
        demisto.error(traceback.format_exc())  # print the traceback
        return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
# ENTRY POINT
# Demisto invokes integration code with __name__ set to '__builtin__' (py2)
# or 'builtins' (py3) rather than '__main__', so all three are checked.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | f08a8d5f21941c237e20696d92898eed | 46.792651 | 107 | 0.59866 | 3.603602 | false | false | false | false |
demisto/content | Packs/FeedSOCRadarThreatFeed/Integrations/FeedSOCRadarThreatFeed/FeedSOCRadarThreatFeed_test.py | 2 | 9049 | import io
import json
import pytest
from CommonServerPython import DemistoException, CommandResults, FeedIndicatorType
SOCRADAR_API_ENDPOINT = 'https://platform.socradar.com/api'
def util_load_json(path):
    """Read the UTF-8 encoded JSON file at *path* and return the parsed object."""
    with io.open(path, mode='r', encoding='utf-8') as json_file:
        return json.load(json_file)
def test_test_module(requests_mock):
    """Tests the test_module validation command.
    """
    from FeedSOCRadarThreatFeed import Client, test_module
    mock_socradar_api_key = "APIKey"
    # Mock the auth-check endpoint that test_module hits first.
    auth_suffix = f'threat/intelligence/check/auth?key={mock_socradar_api_key}'
    mock_response = util_load_json('test_data/check_auth_response.json')
    requests_mock.get(f'{SOCRADAR_API_ENDPOINT}/{auth_suffix}', json=mock_response)
    collection_name_list = ['MockCollectionName']
    # test_module also fetches a single indicator (limit=1, offset=0) to
    # validate the configured collection name, so mock that endpoint too.
    indicator_suffix = f'threat/intelligence/socradar_collections?key={mock_socradar_api_key}' \
                       f'&collection_names={collection_name_list[0]}' \
                       f'&limit=1' \
                       f'&offset=0'
    mock_response = util_load_json('test_data/get_indicators_response.json')
    requests_mock.get(f'{SOCRADAR_API_ENDPOINT}/{indicator_suffix}', json=mock_response)
    client = Client(
        base_url=SOCRADAR_API_ENDPOINT,
        api_key=mock_socradar_api_key,
        tlp_color="",
        tags="",
        verify=False,
        proxy=False
    )
    response = test_module(client, collection_name_list)
    assert response == 'ok'
def test_test_module_handles_authorization_error(requests_mock):
    """Tests the test_module validation command authorization error.
    """
    from FeedSOCRadarThreatFeed import Client, test_module, MESSAGES
    mock_socradar_api_key = "WrongAPIKey"
    suffix = f'threat/intelligence/check/auth?key={mock_socradar_api_key}'
    mock_response = util_load_json('test_data/check_auth_response_auth_error.json')
    # A 401 from the auth endpoint must surface as the AUTHORIZATION_ERROR message.
    requests_mock.get(f'{SOCRADAR_API_ENDPOINT}/{suffix}', json=mock_response, status_code=401)
    client = Client(
        base_url=SOCRADAR_API_ENDPOINT,
        api_key=mock_socradar_api_key,
        tlp_color="",
        tags="",
        verify=False,
        proxy=False
    )
    with pytest.raises(DemistoException, match=MESSAGES['AUTHORIZATION_ERROR']):
        test_module(client, [])
def test_fetch_indicators(requests_mock):
    """Tests the fetch-indicators function.
    Configures requests_mock instance to generate the appropriate
    SOCRadar Threat Intelligence Collections API response, loaded from a local JSON file. Checks
    the output of the command function with the expected output.
    """
    from FeedSOCRadarThreatFeed import Client, fetch_indicators
    mock_socradar_api_key = "APIKey"
    mock_response = util_load_json('test_data/fetch_indicators_response.json')
    suffix = f'threat/intelligence/socradar_collections?key={mock_socradar_api_key}' \
             f'&collection_names=MockCollectionName'
    requests_mock.get(f'{SOCRADAR_API_ENDPOINT}/{suffix}', json=mock_response)
    client = Client(
        base_url=SOCRADAR_API_ENDPOINT,
        api_key=mock_socradar_api_key,
        tlp_color="GREEN",
        tags=["TEST"],
        verify=False,
        proxy=False
    )
    collections_to_fetch = ['MockCollectionName']
    indicators = fetch_indicators(
        client=client,
        collections_to_fetch=collections_to_fetch,
        limit=1
    )
    # The fixture pair encodes the full expected transformation of one indicator.
    expected_output = util_load_json('test_data/fetch_indicators_expected_output.json')
    assert indicators == expected_output
    assert len(indicators) == 1
def test_fetch_indicators_handles_error(requests_mock):
    """Tests the fetch_indicators function.
    Configures requests_mock instance to generate the appropriate
    SOCRadar SOCRadar Threat Intelligence Collections API response, loaded from a local JSON file. Checks
    the output of the command function with the expected output.
    """
    from FeedSOCRadarThreatFeed import Client, fetch_indicators
    mock_socradar_api_key = "APIKey"
    # Error fixture: the API reports a failure, so no indicators should be produced.
    mock_response = util_load_json('test_data/fetch_indicators_response_error.json')
    suffix = f'threat/intelligence/socradar_collections?key={mock_socradar_api_key}' \
             f'&collection_names=MockCollectionName'
    requests_mock.get(f'{SOCRADAR_API_ENDPOINT}/{suffix}', json=mock_response)
    client = Client(
        base_url=SOCRADAR_API_ENDPOINT,
        api_key=mock_socradar_api_key,
        tlp_color="GREEN",
        tags=["TEST"],
        verify=False,
        proxy=False
    )
    collections_to_fetch = ['MockCollectionName']
    indicators = fetch_indicators(
        client=client,
        collections_to_fetch=collections_to_fetch,
        limit=1
    )
    assert len(indicators) == 0
def test_get_indicators_command(requests_mock):
    """Tests the get_indicators_command function.
    Configures requests_mock instance to generate the appropriate
    SOCRadar Threat Intelligence Collections API response, loaded from a local JSON file. Checks
    the output of the command function with the expected output.
    """
    from FeedSOCRadarThreatFeed import Client, get_indicators_command
    mock_socradar_api_key = "APIKey"
    mock_response = util_load_json('test_data/get_indicators_response.json')
    suffix = f'threat/intelligence/socradar_collections?key={mock_socradar_api_key}' \
             f'&collection_names=MockCollectionName'
    requests_mock.get(f'{SOCRADAR_API_ENDPOINT}/{suffix}', json=mock_response)
    client = Client(
        base_url=SOCRADAR_API_ENDPOINT,
        api_key=mock_socradar_api_key,
        tlp_color="GREEN",
        tags=["TEST"],
        verify=False,
        proxy=False
    )
    mock_args = {
        'limit': 1,
        'collections_to_fetch': 'MockCollectionName'
    }
    result = get_indicators_command(client, mock_args)
    # Validate readable output, structured context and raw response together.
    expected_output = util_load_json('test_data/get_indicators_expected_output.json')
    expected_context = util_load_json('test_data/get_indicators_expected_context.json')
    assert isinstance(result, CommandResults)
    assert 'Indicators from SOCRadar ThreatFeed Collections (MockCollectionName):' in result.readable_output
    assert result.outputs == expected_context
    assert result.raw_response == expected_output
def test_get_indicators_command_handles_error(requests_mock):
    """Tests the get_indicators_command function.
    Configures requests_mock instance to generate the appropriate
    SOCRadar SOCRadar Threat Intelligence Collections API response, loaded from a local JSON file. Checks
    the output of the command function with the expected output.
    """
    from FeedSOCRadarThreatFeed import Client, get_indicators_command
    mock_socradar_api_key = "APIKey"
    # Error fixture: the API reports a failure, so outputs must stay empty.
    mock_response = util_load_json('test_data/get_indicators_response_error.json')
    suffix = f'threat/intelligence/socradar_collections?key={mock_socradar_api_key}' \
             f'&collection_names=MockCollectionName'
    requests_mock.get(f'{SOCRADAR_API_ENDPOINT}/{suffix}', json=mock_response)
    client = Client(
        base_url=SOCRADAR_API_ENDPOINT,
        api_key=mock_socradar_api_key,
        tlp_color="GREEN",
        tags=["TEST"],
        verify=False,
        proxy=False
    )
    mock_args = {
        'limit': 1,
        'collections_to_fetch': 'MockCollectionName'
    }
    result = get_indicators_command(client, mock_args)
    assert isinstance(result, CommandResults)
    assert len(result.outputs) == 0
def test_date_string_to_iso_format_parsing():
    """Tests the date_string_to_iso_format_parsing function.
    """
    from FeedSOCRadarThreatFeed import date_string_to_iso_format_parsing
    # A syntactically valid date string must parse to a truthy result.
    parsed_date = date_string_to_iso_format_parsing("1111-11-11 11:11:11")
    assert parsed_date
def test_build_entry_context():
    """Tests the build_entry_context function.
    """
    from FeedSOCRadarThreatFeed import build_entry_context
    mock_indicators = util_load_json('test_data/build_entry_context_input.json')
    actual_entry = build_entry_context(mock_indicators)
    # The fixture file holds the exact expected context entry.
    assert actual_entry == util_load_json('test_data/build_entry_context_expected_entry.json')
def test_reset_last_fetch_dict():
    """Tests the reset_last_fetch_dict function.
    """
    from FeedSOCRadarThreatFeed import reset_last_fetch_dict
    command_result = reset_last_fetch_dict()
    assert isinstance(command_result, CommandResults)
    assert 'Fetch history has been successfully deleted!' in command_result.readable_output
# (SOCRadar feed type, expected demisto FeedIndicatorType) pairs used below.
CONVERT_DEMISTO_INDICATOR_TYPE_INPUTS = [
    ('hostname', FeedIndicatorType.Domain), ('url', FeedIndicatorType.URL), ('ip', FeedIndicatorType.IP),
    ('hash', FeedIndicatorType.File)
]
@pytest.mark.parametrize('socradar_indicator_type, demisto_indicator_type', CONVERT_DEMISTO_INDICATOR_TYPE_INPUTS)
def test_convert_to_demisto_indicator_type(socradar_indicator_type, demisto_indicator_type):
    """Each SOCRadar feed type must map onto the expected demisto indicator type."""
    from FeedSOCRadarThreatFeed import convert_to_demisto_indicator_type
    assert convert_to_demisto_indicator_type(socradar_indicator_type) == demisto_indicator_type
| mit | 21dc6a0366f3e2e5e521e14ff89fe9b1 | 34.347656 | 114 | 0.700298 | 3.506005 | false | true | false | false |
demisto/content | Packs/CommonScripts/Scripts/ScheduleGenericPolling/ScheduleGenericPolling.py | 2 | 2748 | import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
# Returns a comma-separated string representation of a list
# Possible inputs: null, int, str, bytes, ["","",...], [int, int], 'a,b,...', '"a","b",...', '["","",...]'
def parseIds(idsArg):
    """Normalize the `ids` argument into a comma-separated string.

    Accepts None (returned as-is), a list of ids (ints or strings), a
    comma-separated string/bytes value, or any other scalar (e.g. a single
    int).

    Fixes the Python-2-only implementation: on Python 3 the old code
    referenced the undefined name ``unicode`` for non-str scalars and
    joined ``bytes`` items into a str, raising TypeError.

    :param idsArg: raw `ids` argument value
    :return: comma-separated string of ids, or None when idsArg is None
    """
    if idsArg is None:
        return None
    if isinstance(idsArg, list):
        # str() covers both int and string items.
        return ','.join(str(item) for item in idsArg)
    if isinstance(idsArg, bytes):
        idsArg = idsArg.decode('utf-8')
    if isinstance(idsArg, str):
        return ','.join(argToList(idsArg))
    return str(idsArg)
def get_arg_and_encode(arg_name):
    """Fetch a command argument and utf-8 encode it unless it is an int."""
    value = demisto.getArg(arg_name)
    if type(value) != int:
        return value.encode('utf-8')
    return value
# --- Read and normalize the script arguments; text args are utf-8 encoded via
# get_arg_and_encode so the generated command string stays consistent ---
ids = parseIds(demisto.getArg('ids'))
dt = get_arg_and_encode('dt')
pollingCommand = demisto.getArg('pollingCommand')
pollingCommandArgName = demisto.getArg('pollingCommandArgName')
tag = get_arg_and_encode('tag')
# Optional playbookId is pre-rendered as a ' playbookId="..."' fragment (empty id when absent).
playbookId = ' playbookId="{}"'.format(demisto.getArg('playbookId') if 'playbookId' in demisto.args() else '')
interval = int(demisto.getArg('interval'))
timeout = int(demisto.getArg('timeout'))
args_names = demisto.getArg('additionalPollingCommandArgNames').strip()
args_values = get_arg_and_encode('additionalPollingCommandArgValues').strip()
if interval <= 0 or timeout <= 0:
    return_error("Interval and timeout must be positive numbers")
# Verify correct dt path (does not verify condition!)
if not demisto.dt(demisto.context(), dt):
    # Retry with any parenthesized filter stripped, to distinguish a bad path
    # from a condition that currently matches nothing.
    if not demisto.dt(demisto.context(), re.sub('\(.*\)', '', dt)):
        return_error("Incorrect dt path: no ids found")
    demisto.results("Warning: no ids matching the dt condition were found.\nVerify that the condition is correct and "
                    "that all ids have finished running.")
# Build the scheduled polling command line; double quotes inside ids/dt are
# escaped so the generated command stays parseable. {4} (pendingIds) carries the
# dt expression used to detect still-pending ids on each poll.
command_string = '''!GenericPollingScheduledTask pollingCommand="{0}" pollingCommandArgName="{1}"{2} ids="{3}" \
pendingIds="{4}" interval="{5}" timeout="{6}" tag="{7}" additionalPollingCommandArgNames="{8}" \
additionalPollingCommandArgValues="{9}"'''.format(pollingCommand, pollingCommandArgName, playbookId,
                                                  ids.replace('"', r'\"'), dt.replace('"', r'\"'),
                                                  interval, timeout, tag, args_names, args_values)
# Schedule a single run of the polling task every `interval` minutes.
res = demisto.executeCommand("ScheduleCommand",
                             {
                                 'command': command_string,
                                 'cron': '*/{} * * * *'.format(interval),
                                 'times': 1
                             })
if isError(res[0]):
    return_error(res)
| mit | e2e7bfa16e3516c831c540d6f9400861 | 48.963636 | 120 | 0.599709 | 3.892351 | false | false | false | false |
demisto/content | Packs/ServiceNow/Integrations/ServiceNow/ServiceNow.py | 2 | 52303 | import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
import re
import requests
import json
from datetime import datetime
import shutil
# disable insecure warnings
requests.packages.urllib3.disable_warnings()
if not demisto.params().get('proxy', False):
    # Proxy is disabled for this instance: drop any proxy environment variables
    # so `requests` connects directly. pop() with a default avoids the KeyError
    # that a bare `del os.environ[...]` raises when a variable is not set.
    os.environ.pop('HTTP_PROXY', None)
    os.environ.pop('HTTPS_PROXY', None)
    os.environ.pop('http_proxy', None)
    os.environ.pop('https_proxy', None)
def get_server_url():
    """Return the configured instance URL without trailing slashes.

    The previous pattern '/[\\/]+$/' was a JavaScript-style regex literal that
    never matches in Python (the surrounding slashes are treated as literal
    characters), so only a single trailing slash was ever removed by the second
    substitution. A single r'/+$' sub strips any run of trailing slashes.
    """
    url = demisto.params()['url']
    url = re.sub(r'/+$', '', url)
    return url
''' GLOBAL VARIABLES '''
# Fallback values used when the corresponding integration param/argument is absent.
DEFAULTS = {
    'limit': 10,
    'offset': 0,
    'fetch_limit': 10,
    'fetch_time': '10 minutes',
    'ticket_type': 'incident'
}
# Basic-auth credentials from the instance configuration.
USERNAME = demisto.params()['credentials']['identifier']
PASSWORD = demisto.params()['credentials']['password']
# 'insecure' param disables TLS certificate verification.
VERIFY_SSL = not demisto.params().get('insecure', False)
API = '/api/now/'
VERSION = demisto.params().get('api_version')
PARAMS_TICKET_TYPE = demisto.params().get('ticket_type', DEFAULTS['ticket_type'])
FETCH_TIME = demisto.params().get('fetch_time').strip()
SYSPARM_QUERY = demisto.params().get('sysparm_query')
SYSPARM_LIMIT = demisto.params().get('fetch_limit', DEFAULTS['fetch_limit'])
TIMESTAMP_FIELD = demisto.params().get('timestamp_field', 'opened_at')
TICKET_TYPE = demisto.params().get('ticket_type', DEFAULTS['ticket_type'])
GET_ATTACHMENTS = demisto.params().get('get_attachments', False)
# When an explicit API version is configured, pin it into the REST path.
if VERSION:
    API += VERSION + '/'
SERVER_URL = get_server_url() + API
# Human-readable state labels per ticket table, keyed by ServiceNow's numeric
# state value (kept as strings, as returned by the API).
TICKET_STATES = {
    'incident': {
        '1': '1 - New',
        '2': '2 - In Progress',
        '3': '3 - On Hold',
        '4': '4 - Awaiting Caller',
        '5': '5 - Awaiting Evidence',
        '6': '6 - Resolved',
        '7': '7 - Closed',
        '8': '8 - Canceled'
    },
    'problem': {
        '1': '1 - Open',
        '2': '2 - Known Error',
        '3': '3 - Pending Change',
        '4': '4 - Closed/Resolved'
    },
    'change_request': {
        '-5': '-5 - New',
        '-4': '-4 - Assess',
        '-3': '-3 - Authorize',
        '-2': '-2 - Scheduled',
        '-1': '-1 - Implement',
        '0': '0 - Review',
        '3': '3 - Closed',
        '4': '4 - Canceled'
    },
    'sc_task': {
        '-5': '-5 - Pending',
        '1': '1 - Open',
        '2': '2 - Work In Progress',
        '3': '3 - Closed Complete',
        '4': '4 - Closed Incomplete',
        '7': '7 - Closed Skipped'
    },
    'sc_request': {
        '1': '1 - Approved',
        '3': '3 - Closed',
        '4': '4 - Rejected'
    }
}
# Display labels for severity/impact/urgency, keyed by numeric code.
TICKET_SEVERITY = {
    '1': '1 - High',
    '2': '2 - Medium',
    '3': '3 - Low'
}
# Display labels for ticket priority, keyed by numeric code.
TICKET_PRIORITY = {
    '1': '1 - Critical',
    '2': '2 - High',
    '3': '3 - Moderate',
    '4': '4 - Low',
    '5': '5 - Planning'
}
# install_status display labels for computer CI records.
COMPUTER_STATUS = {
    '1': 'In use',
    '2': 'On order',
    '3': 'On maintenance',
    '6': 'In stock/In transit',
    '7': 'Retired',
    '100': 'Missing'
}
# Map SNOW severity to Demisto severity for incident creation
SEVERITY_MAP = {
    '1': 3,
    '2': 2,
    '3': 1
}
# Ticket fields accepted as command arguments (consumed by get_ticket_fields).
# NOTE(review): 'impact', 'incident_state' and 'state' appear more than once --
# harmless for membership iteration, but could be deduplicated.
SNOW_ARGS = ['active', 'activity_due', 'opened_at', 'short_description', 'additional_assignee_list', 'approval_history',
             'approval_set', 'assigned_to', 'assignment_group',
             'business_duration', 'business_service', 'business_stc', 'calendar_duration', 'calendar_stc', 'caller_id',
             'caused_by', 'close_code', 'close_notes',
             'closed_at', 'closed_by', 'cmdb_ci', 'comments', 'comments_and_work_notes', 'company', 'contact_type',
             'correlation_display', 'correlation_id',
             'delivery_plan', 'delivery_task', 'description', 'due_date', 'expected_start', 'follow_up', 'group_list',
             'hold_reason', 'impact', 'incident_state',
             'knowledge', 'location', 'made_sla', 'notify', 'order', 'parent', 'parent_incident', 'priority',
             'problem_id', 'resolved_at', 'resolved_by', 'rfc',
             'severity', 'sla_due', 'state', 'subcategory', 'sys_tags', 'time_worked', 'urgency', 'user_input',
             'watch_list', 'work_end', 'work_notes', 'work_notes_list',
             'work_start', 'impact', 'incident_state', 'title', 'type', 'change_type', 'category', 'state', 'caller']
# Every table in ServiceNow should have those fields
DEFAULT_RECORD_FIELDS = {
    'sys_id': 'ID',
    'sys_updated_by': 'UpdatedBy',
    'sys_updated_on': 'UpdatedAt',
    'sys_created_by': 'CreatedBy',
    'sys_created_on': 'CreatedAt'
}
# Legacy command names kept for backward compatibility; output mapping is
# skipped for these (see create_ticket_context).
DEPRECATED_COMMANDS = ['servicenow-get', 'servicenow-incident-get',
                       'servicenow-create', 'servicenow-incident-create',
                       'servicenow-update', 'servicenow-query',
                       'servicenow-incidents-query', 'servicenow-incident-update']
''' HELPER FUNCTIONS '''
def send_request(path, method='get', body=None, params=None, headers=None, file=None):
    """Perform an HTTP request against the ServiceNow REST API.

    path: endpoint relative to SERVER_URL (e.g. 'table/incident').
    method: HTTP verb (lowercase by convention throughout this file).
    body: dict, serialized to JSON (sent as form data when uploading a file).
    file: optional {'id': entry_id, 'name': file_name} war-room file to upload.

    Returns the parsed JSON reply ('' for an empty body). Raises on API errors
    and non-2xx status codes, except 'No Record found' which yields an empty
    result list.
    """
    body = body if body is not None else {}
    params = params if params is not None else {}
    url = '{}{}'.format(SERVER_URL, path)
    if not headers:
        headers = {
            'Accept': 'application/json',
            'Content-Type': 'application/json'
        }
    if file:
        # Not supported in v2
        url = url.replace('v2', 'v1')
        try:
            file_entry = file['id']
            file_name = file['name']
            # Copy the war-room file into the working directory so it can be
            # streamed to the attachment endpoint under its display name.
            shutil.copy(demisto.getFilePath(file_entry)['path'], file_name)
            with open(file_name, 'rb') as f:
                files = {'file': f}
                res = requests.request(method, url, headers=headers, params=params, data=body, files=files,
                                       auth=(USERNAME, PASSWORD), verify=VERIFY_SSL)
            # NOTE(review): cleanup targets getFilePath(...)['name'] rather than
            # the copied path above -- verify this removes the intended file.
            shutil.rmtree(demisto.getFilePath(file_entry)['name'], ignore_errors=True)
        except Exception as e:
            raise Exception('Failed to upload file - ' + str(e))
    else:
        res = requests.request(method, url, headers=headers, data=json.dumps(body) if body else {}, params=params,
                               auth=(USERNAME, PASSWORD), verify=VERIFY_SSL)
    try:
        obj = res.json()
    except Exception as e:
        # An empty body (e.g. DELETE) is not an error.
        if not res.content:
            return ''
        raise Exception('Error parsing reply - {} - {}'.format(res.content, str(e)))
    if 'error' in obj:
        message = obj.get('error', {}).get('message')
        details = obj.get('error', {}).get('detail')
        if message == 'No Record found':
            return {
                # Return an empty results array
                'result': []
            }
        raise Exception('ServiceNow Error: {}, details: {}'.format(message, details))
    if res.status_code < 200 or res.status_code >= 300:
        raise Exception('Got status code {} with url {} with body {} with headers {}'
                        .format(str(res.status_code), url, str(res.content), str(res.headers)))
    return obj
def get_table_name(ticket_type=None):
    """Resolve the ServiceNow table to act on.

    Precedence: explicit ticket_type argument, then the instance's configured
    ticket type, then the 'incident' table.
    """
    if ticket_type:
        return ticket_type
    return PARAMS_TICKET_TYPE or 'incident'
def create_ticket_context(data, ticket_type):
    """Map a raw ServiceNow ticket dict to the entry-context shape.

    Reference fields (closed_by/opened_by/assigned_to) are flattened to their
    system IDs. Priority and state are translated to display labels, except for
    the deprecated commands which keep raw values for backward compatibility.
    """
    context = {
        'ID': data.get('sys_id'),
        'Summary': data.get('short_description'),
        'Number': data.get('number'),
        'CreatedOn': data.get('sys_created_on'),
        'Active': data.get('active'),
        'AdditionalComments': data.get('comments'),
        'CloseCode': data.get('close_code'),
        'OpenedAt': data.get('opened_at')
    }
    # These fields refer to records in the database, the value is their system ID.
    if 'closed_by' in data:
        context['ResolvedBy'] = data['closed_by']['value'] if 'value' in data['closed_by'] else ''
    if 'opened_by' in data:
        context['OpenedBy'] = data['opened_by']['value'] if 'value' in data['opened_by'] else ''
        context['Creator'] = data['opened_by']['value'] if 'value' in data['opened_by'] else ''
    if 'assigned_to' in data:
        context['Assignee'] = data['assigned_to']['value'] if 'value' in data['assigned_to'] else ''
    # Try to map fields
    if 'priority' in data:
        # Backward compatibility
        if demisto.command() in DEPRECATED_COMMANDS:
            context['Priority'] = data['priority']
        else:
            context['Priority'] = TICKET_PRIORITY.get(data['priority'], data['priority'])
    if 'state' in data:
        mapped_state = data['state']
        # Backward compatibility
        if demisto.command() not in DEPRECATED_COMMANDS:
            if ticket_type in TICKET_STATES:
                mapped_state = TICKET_STATES[ticket_type].get(data['state'], mapped_state)
        context['State'] = mapped_state
    return createContext(context, removeNull=True)
def get_ticket_context(data, ticket_type):
    """Build entry context for a single ticket dict or a list of them."""
    if isinstance(data, list):
        return [create_ticket_context(single, ticket_type) for single in data]
    return create_ticket_context(data, ticket_type)
def get_ticket_human_readable(tickets, ticket_type):
    """Map raw ticket dict(s) to rows for the war-room markdown table.

    Numeric impact/urgency/severity/priority/state codes are replaced with
    their display labels where a mapping exists.
    """
    if not isinstance(tickets, list):
        tickets = [tickets]
    result = []
    for ticket in tickets:
        hr = {
            'Number': ticket.get('number'),
            'System ID': ticket['sys_id'],
            'Created On': ticket.get('sys_created_on'),
            'Created By': ticket.get('sys_created_by'),
            'Active': ticket.get('active'),
            'Close Notes': ticket.get('close_notes'),
            'Close Code': ticket.get('close_code'),
            'Description': ticket.get('description'),
            'Opened At': ticket.get('opened_at'),
            'Due Date': ticket.get('due_date'),
            # This field refers to a record in the database, the value is its system ID.
            'Resolved By': ticket.get('closed_by', {}).get('value') if isinstance(ticket.get('closed_by'), dict)
            else ticket.get('closed_by'),
            'Resolved At': ticket.get('resolved_at'),
            'SLA Due': ticket.get('sla_due'),
            'Short Description': ticket.get('short_description'),
            'Additional Comments': ticket.get('comments')
        }
        # Try to map the fields
        if 'impact' in ticket:
            hr['Impact'] = TICKET_SEVERITY.get(ticket['impact'], ticket['impact'])
        if 'urgency' in ticket:
            hr['Urgency'] = TICKET_SEVERITY.get(ticket['urgency'], ticket['urgency'])
        if 'severity' in ticket:
            hr['Severity'] = TICKET_SEVERITY.get(ticket['severity'], ticket['severity'])
        if 'priority' in ticket:
            hr['Priority'] = TICKET_PRIORITY.get(ticket['priority'], ticket['priority'])
        if 'state' in ticket:
            mapped_state = ticket['state']
            if ticket_type in TICKET_STATES:
                mapped_state = TICKET_STATES[ticket_type].get(ticket['state'], mapped_state)
            hr['State'] = mapped_state
        result.append(hr)
    return result
def get_ticket_fields(template, ticket_type):
    """Build the ticket create/update body from demisto args plus an optional template.

    Display labels supplied by the user (e.g. '1 - Critical') are converted back
    to their numeric ServiceNow codes; template values fill in any SNOW_ARGS
    fields the user did not supply.
    """
    # Inverse the keys and values of those dictionaries to map the arguments to their corresponding values in ServiceNow
    args = unicode_to_str_recur(demisto.args())
    inv_severity = {v: k for k, v in TICKET_SEVERITY.iteritems()}
    inv_priority = {v: k for k, v in TICKET_PRIORITY.iteritems()}
    states = TICKET_STATES.get(ticket_type)
    inv_states = {v: k for k, v in states.iteritems()} if states else {}
    body = {}
    for arg in SNOW_ARGS:
        input_arg = args.get(arg)
        if input_arg:
            if arg in ['impact', 'urgency', 'severity']:
                body[arg] = inv_severity.get(input_arg, input_arg)
            elif arg == 'priority':
                body[arg] = inv_priority.get(input_arg, input_arg)
            elif arg == 'state':
                body[arg] = inv_states.get(input_arg, input_arg)
            else:
                body[arg] = input_arg
        elif template and arg in template:
            body[arg] = template[arg]
    return body
def get_body(fields, custom_fields):
    """Merge regular and custom fields into a single request body.

    Custom field names are prefixed with 'u_' unless they already carry it.
    """
    body = {}
    if fields:
        body.update(fields)
    if custom_fields:
        for name in custom_fields:
            # custom fields begin with "u_"
            key = name if name.startswith('u_') else 'u_' + name
            body[key] = custom_fields[name]
    return body
def split_fields(fields):
    """Parse a 'key1=val1;key2=val2' string into a dict.

    Segments without an '=' are skipped; only the first value after '=' is kept.
    """
    parsed = {}
    if fields:
        # As received by the command
        for pair in fields.split(';'):
            parts = pair.split('=')
            if len(parts) > 1:
                parsed[parts[0]] = parts[1]
    return parsed
# Converts unicode elements of obj (incl. dictionary and list) to string recursively
def unicode_to_str_recur(obj):
    """Recursively utf-8 encode every unicode element of obj (dicts, lists, scalars)."""
    if isinstance(obj, dict):
        return {unicode_to_str_recur(key): unicode_to_str_recur(val) for key, val in obj.iteritems()}
    if isinstance(obj, list):
        return map(unicode_to_str_recur, obj)
    if isinstance(obj, unicode):
        return obj.encode('utf-8')
    return obj
# Converts to an str
def convert_to_str(obj):
    """Best-effort conversion to str; unicode is utf-8 encoded, objects whose
    str() raises ValueError are returned unchanged."""
    if isinstance(obj, unicode):
        return obj.encode('utf-8')
    try:
        return str(obj)
    except ValueError:
        return obj
''' FUNCTIONS '''
def get_template(name):
    """Fetch a sys_template record by name and parse it into a field dict.

    ServiceNow stores the template as 'field=value^field=value^...'; the
    trailing segment after the last '^' is ignored, and segments without '='
    are skipped.

    Raises ValueError when no template matches the given name.
    """
    query_params = {'sysparm_limit': 1, 'sysparm_query': 'name=' + name}
    ticket_type = 'sys_template'
    path = 'table/' + ticket_type
    # send_request's signature is (path, method, ...) -- the previous call
    # passed them swapped ('GET' as the path), which sent the request to the
    # wrong URL with an invalid HTTP verb.
    res = send_request(path, 'get', params=query_params)
    if len(res['result']) == 0:
        raise ValueError("Incorrect template name")
    template = res['result'][0]['template'].split('^')
    dic_template = {}
    for i in range(len(template) - 1):
        template_value = template[i].split('=')
        if len(template_value) > 1:
            dic_template[template_value[0]] = template_value[1]
    return dic_template
def get_ticket_command():
    """servicenow-get-ticket: fetch one ticket by system ID or ticket number.

    When the get_attachments argument is truthy, the ticket's attachments are
    downloaded as additional file entries.
    """
    args = unicode_to_str_recur(demisto.args())
    ticket_type = get_table_name(args.get('ticket_type'))
    ticket_id = args.get('id')
    number = args.get('number')
    get_attachments = args.get('get_attachments', 'false')
    res = get(ticket_type, ticket_id, number)
    if not res or 'result' not in res:
        return 'Cannot find ticket'
    if isinstance(res['result'], list):
        if len(res['result']) == 0:
            return 'Cannot find ticket'
        ticket = res['result'][0]
    else:
        ticket = res['result']
    entries = []  # type: List[Dict]
    if get_attachments.lower() != 'false':
        entries = get_ticket_attachment_entries(ticket['sys_id'])
    hr = get_ticket_human_readable(ticket, ticket_type)
    context = get_ticket_context(ticket, ticket_type)
    headers = ['System ID', 'Number', 'Impact', 'Urgency', 'Severity', 'Priority', 'State', 'Created On', 'Created By',
               'Active', 'Close Notes', 'Close Code',
               'Description', 'Opened At', 'Due Date', 'Resolved By', 'Resolved At', 'SLA Due', 'Short Description',
               'Additional Comments']
    entry = {
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('ServiceNow ticket', hr, headers=headers, removeNull=True),
        'EntryContext': {
            'Ticket(val.ID===obj.ID)': context,
            'ServiceNow.Ticket(val.ID===obj.ID)': context
        }
    }
    entries.append(entry)
    return entries
def get_record_command():
    """servicenow-get-record: fetch one record from an arbitrary table.

    With the fields argument, output is projected to those fields (sys_id is
    always included and renamed to ID); otherwise only the default system
    fields are shown.
    """
    args = unicode_to_str_recur(demisto.args())
    table_name = args['table_name']
    record_id = args['id']
    fields = args.get('fields')
    res = get(table_name, record_id)
    if not res or 'result' not in res:
        return 'Cannot find record'
    if isinstance(res['result'], list):
        if len(res['result']) == 0:
            return 'Cannot find record'
        record = res['result'][0]
    else:
        record = res['result']
    entry = {
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json']
    }
    if fields:
        fields = argToList(fields)
        if 'sys_id' not in fields:
            # ID is added by default
            fields.append('sys_id')
        # filter the record for the required fields
        record = dict(filter(lambda kv_pair: kv_pair[0] in fields, record.items()))
        for k, v in record.iteritems():
            if isinstance(v, dict):
                # For objects that refer to a record in the database, take their value(system ID).
                record[k] = v.get('value', record[k])
        record['ID'] = record.pop('sys_id')
        entry['ReadableContentsFormat'] = formats['markdown']
        entry['HumanReadable'] = tableToMarkdown('ServiceNow record', record, removeNull=True)
        entry['EntryContext'] = {
            'ServiceNow.Record(val.ID===obj.ID)': createContext(record)
        }
    else:
        mapped_record = {DEFAULT_RECORD_FIELDS[k]: record[k] for k in DEFAULT_RECORD_FIELDS if k in record}
        entry['ReadableContentsFormat'] = formats['markdown']
        entry['HumanReadable'] = tableToMarkdown('ServiceNow record' + record_id, mapped_record, removeNull=True)
        entry['EntryContext'] = {
            'ServiceNow.Record(val.ID===obj.ID)': createContext(mapped_record)
        }
    return entry
def get(table_name, record_id, number=None):
    """Fetch a single record: by system ID when given, otherwise by ticket number.

    Raises ValueError when neither identifier is supplied.
    """
    query_params = {}  # type: Dict
    if record_id:
        path = 'table/' + table_name + '/' + record_id
    elif number:
        path = 'table/' + table_name
        query_params['number'] = number
    else:
        # Only in cases where the table is of type ticket
        raise ValueError('servicenow-get-ticket requires either ticket ID (sys_id) or ticket number')
    return send_request(path, 'get', params=query_params)
def get_ticket_attachments(ticket_id):
    """List attachment metadata records belonging to the given ticket system ID."""
    return send_request('attachment', 'get',
                        params={'sysparm_query': 'table_sys_id=' + ticket_id})
def get_ticket_attachment_entries(ticket_id):
    """Download every attachment of a ticket and return them as war-room file entries."""
    entries = []
    links = []  # type: List[Tuple[str, str]]
    attachments_res = get_ticket_attachments(ticket_id)
    if 'result' in attachments_res and len(attachments_res['result']) > 0:
        attachments = attachments_res['result']
        links = [(attachment['download_link'], attachment['file_name']) for attachment in attachments]
    for link in links:
        # Each download is a separate authenticated request against the raw link.
        file_res = requests.get(link[0], auth=(USERNAME, PASSWORD), verify=VERIFY_SSL)
        if file_res is not None:
            entries.append(fileResult(link[1], file_res.content))
    return entries
def update_ticket_command():
    """servicenow-update-ticket: patch a ticket from args, an optional template,
    and ';'-separated custom fields."""
    args = unicode_to_str_recur(demisto.args())
    custom_fields = split_fields(args.get('custom_fields'))
    template = args.get('template')
    ticket_type = get_table_name(args.get('ticket_type'))
    ticket_id = args['id']
    if template:
        template = get_template(template)
    fields = get_ticket_fields(template, ticket_type)
    res = update(ticket_type, ticket_id, fields, custom_fields)
    if not res or 'result' not in res:
        return_error('Unable to retrieve response')
    hr = get_ticket_human_readable(res['result'], ticket_type)
    context = get_ticket_context(res['result'], ticket_type)
    entry = {
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('ServiceNow ticket updated successfully\nTicket type: ' + ticket_type,
                                         hr, removeNull=True),
        'EntryContext': {
            'ServiceNow.Ticket(val.ID===obj.ID)': context
        }
    }
    return entry
def update_record_command():
    """servicenow-update-record: patch a record in an arbitrary table.

    fields/custom_fields arrive as 'k1=v1;k2=v2' strings and are parsed with
    split_fields.
    """
    args = unicode_to_str_recur(demisto.args())
    table_name = args['table_name']
    record_id = args['id']
    fields = args.get('fields', {})
    custom_fields = args.get('custom_fields')
    if fields:
        fields = split_fields(fields)
    if custom_fields:
        custom_fields = split_fields(custom_fields)
    res = update(table_name, record_id, fields, custom_fields)
    if not res or 'result' not in res:
        return 'Could not retrieve record'
    result = res['result']
    mapped_record = {DEFAULT_RECORD_FIELDS[k]: result[k] for k in DEFAULT_RECORD_FIELDS if k in result}
    entry = {
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('ServiceNow record updated successfully', mapped_record, removeNull=True),
        'EntryContext': {
            'ServiceNow.Record(val.ID===obj.ID)': createContext(mapped_record)
        }
    }
    return entry
def update(table_name, record_id, fields, custom_fields):
    """PATCH a record with the merged regular + custom field body."""
    return send_request('table/' + table_name + '/' + record_id, 'patch',
                        body=get_body(fields, custom_fields))
def create_ticket_command():
    """servicenow-create-ticket: create a ticket from args, an optional template,
    and ';'-separated custom fields."""
    args = unicode_to_str_recur(demisto.args())
    custom_fields = split_fields(args.get('custom_fields'))
    template = args.get('template')
    ticket_type = get_table_name(args.get('ticket_type'))
    if template:
        template = get_template(template)
    fields = get_ticket_fields(template, ticket_type)
    res = create(ticket_type, fields, custom_fields)
    if not res or 'result' not in res:
        return_error('Unable to retrieve response')
    hr = get_ticket_human_readable(res['result'], ticket_type)
    context = get_ticket_context(res['result'], ticket_type)
    headers = ['System ID', 'Number', 'Impact', 'Urgency', 'Severity', 'Priority', 'State', 'Created On', 'Created By',
               'Active', 'Close Notes', 'Close Code',
               'Description', 'Opened At', 'Due Date', 'Resolved By', 'Resolved At', 'SLA Due', 'Short Description',
               'Additional Comments']
    entry = {
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('ServiceNow ticket created successfully', hr,
                                         headers=headers, removeNull=True),
        'EntryContext': {
            'Ticket(val.ID===obj.ID)': context,
            'ServiceNow.Ticket(val.ID===obj.ID)': context
        }
    }
    return entry
def create_record_command():
    """servicenow-create-record: create a record in an arbitrary table.

    fields/custom_fields arrive as 'k1=v1;k2=v2' strings and are parsed with
    split_fields.
    """
    args = unicode_to_str_recur(demisto.args())
    table_name = args['table_name']
    fields = args.get('fields')
    custom_fields = args.get('custom_fields')
    if fields:
        fields = split_fields(fields)
    if custom_fields:
        custom_fields = split_fields(custom_fields)
    res = create(table_name, fields, custom_fields)
    if not res or 'result' not in res:
        return 'Could not retrieve record'
    result = res['result']
    mapped_record = {DEFAULT_RECORD_FIELDS[k]: result[k] for k in DEFAULT_RECORD_FIELDS if k in result}
    entry = {
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('ServiceNow record created successfully', mapped_record, removeNull=True),
        'EntryContext': {
            'ServiceNow.Record(val.ID===obj.ID)': createContext(mapped_record)
        }
    }
    return entry
def create(table_name, fields, custom_fields):
    """POST a new record to the given table, built from regular + custom fields."""
    return send_request('table/' + table_name, 'post',
                        body=get_body(fields, custom_fields))
def delete_ticket_command():
    """servicenow-delete-ticket: delete a ticket by system ID and return a confirmation entry."""
    command_args = unicode_to_str_recur(demisto.args())
    sys_id = command_args['id']
    table = get_table_name(command_args.get('ticket_type'))
    raw_response = delete(table, sys_id)
    return {
        'Type': entryTypes['note'],
        'Contents': raw_response,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['text'],
        'HumanReadable': 'Ticket with ID ' + sys_id + ' was successfully deleted.'
    }
def delete_record_command():
    """servicenow-delete-record: delete a record from an arbitrary table and confirm."""
    command_args = unicode_to_str_recur(demisto.args())
    sys_id = command_args['id']
    table = command_args.get('table_name')
    raw_response = delete(table, sys_id)
    return {
        'Type': entryTypes['note'],
        'Contents': raw_response,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['text'],
        'HumanReadable': 'Record with ID ' + sys_id + ' was successfully deleted.'
    }
def delete(table_name, record_id):
    """Issue a DELETE for a single record in the given table."""
    return send_request('table/' + table_name + '/' + record_id, 'delete')
def add_link_command():
    """servicenow-add-link: attach an HTML link to a ticket as a comment or work note."""
    args = unicode_to_str_recur(demisto.args())
    ticket_id = args['id']
    # 'post-as-comment' chooses the public comments stream over internal work notes.
    key = 'comments' if args.get('post-as-comment', 'false').lower() == 'true' else 'work_notes'
    text = args.get('text', args['link'])
    link = '[code]<a class="web" target="_blank" href="' + args['link'] + '" >' + text + '</a>[/code]'
    ticket_type = get_table_name(args.get('ticket_type'))
    res = add_link(ticket_id, ticket_type, key, link)
    if not res or 'result' not in res:
        return_error('Unable to retrieve response')
    headers = ['System ID', 'Number', 'Impact', 'Urgency', 'Severity', 'Priority', 'State', 'Created On', 'Created By',
               'Active', 'Close Notes', 'Close Code',
               'Description', 'Opened At', 'Due Date', 'Resolved By', 'Resolved At', 'SLA Due', 'Short Description',
               'Additional Comments']
    hr = get_ticket_human_readable(res['result'], ticket_type)
    entry = {
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('Link successfully added to ServiceNow ticket', hr,
                                         headers=headers, removeNull=True)
    }
    return entry
def add_link(ticket_id, ticket_type, key, link):
    """PATCH the ticket, writing the HTML link into the given journal field
    ('comments' or 'work_notes')."""
    return send_request('table/' + ticket_type + '/' + ticket_id, 'patch',
                        body={key: link})
def add_comment_command():
    """servicenow-add-comment: post text to a ticket as a comment or work note."""
    args = unicode_to_str_recur(demisto.args())
    ticket_id = args['id']
    # 'post-as-comment' chooses the public comments stream over internal work notes.
    key = 'comments' if args.get('post-as-comment', 'false').lower() == 'true' else 'work_notes'
    text = args['comment']
    ticket_type = get_table_name(args.get('ticket_type'))
    res = add_comment(ticket_id, ticket_type, key, text)
    if not res or 'result' not in res:
        return_error('Unable to retrieve response')
    headers = ['System ID', 'Number', 'Impact', 'Urgency', 'Severity', 'Priority', 'State', 'Created On', 'Created By',
               'Active', 'Close Notes', 'Close Code',
               'Description', 'Opened At', 'Due Date', 'Resolved By', 'Resolved At', 'SLA Due', 'Short Description',
               'Additional Comments']
    hr = get_ticket_human_readable(res['result'], ticket_type)
    entry = {
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('Comment successfully added to ServiceNow ticket', hr,
                                         headers=headers, removeNull=True)
    }
    return entry
def add_comment(ticket_id, ticket_type, key, text):
    """PATCH the ticket, writing text into the given journal field
    ('comments' or 'work_notes')."""
    return send_request('table/' + ticket_type + '/' + ticket_id, 'patch',
                        body={key: text})
def get_ticket_notes_command():
    """servicenow-get-ticket-notes: list a ticket's comments and work notes
    from the sys_journal_field table."""
    args = unicode_to_str_recur(demisto.args())
    ticket_id = args['id']
    limit = args.get('limit')
    offset = args.get('offset')
    # Journal entries of both kinds for this ticket (^OR is ServiceNow query syntax).
    comments_query = 'element_id=' + ticket_id + '^element=comments^ORelement=work_notes'
    res = query('sys_journal_field', limit, offset, comments_query)
    if not res or 'result' not in res:
        return 'No results found'
    headers = ['Value', 'CreatedOn', 'CreatedBy', 'Type']
    mapped_notes = [{
        'Value': n.get('value'),
        'CreatedOn': n.get('sys_created_on'),
        'CreatedBy': n.get('sys_created_by'),
        'Type': 'Work Note' if n.get('element', '') == 'work_notes' else 'Comment'
    } for n in res['result']]
    if not mapped_notes:
        return 'No results found'
    ticket = {
        'ID': ticket_id,
        'Note': mapped_notes
    }
    entry = {
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('ServiceNow notes for ticket ' + ticket_id, mapped_notes, headers=headers,
                                         headerTransform=pascalToSpace, removeNull=True),
        'EntryContext': {
            'ServiceNow.Ticket(val.ID===obj.ID)': createContext(ticket, removeNull=True)
        }
    }
    return entry
def query_tickets_command():
    """servicenow-query-tickets: search tickets with an encoded sysparm query."""
    args = unicode_to_str_recur(demisto.args())
    sysparm_limit = args.get('limit', DEFAULTS['limit'])
    sysparm_query = args.get('query')
    sysparm_offset = args.get('offset', DEFAULTS['offset'])
    if not sysparm_query:
        # backward compatibility
        sysparm_query = args.get('sysparm_query')
    ticket_type = get_table_name(args.get('ticket_type'))
    res = query(ticket_type, sysparm_limit, sysparm_offset, sysparm_query)
    if not res or 'result' not in res or len(res['result']) == 0:
        return 'No results found'
    hr = get_ticket_human_readable(res['result'], ticket_type)
    context = get_ticket_context(res['result'], ticket_type)
    headers = ['System ID', 'Number', 'Impact', 'Urgency', 'Severity', 'Priority', 'State', 'Created On', 'Created By',
               'Active', 'Close Notes', 'Close Code',
               'Description', 'Opened At', 'Due Date', 'Resolved By', 'Resolved At', 'SLA Due', 'Short Description',
               'Additional Comments']
    entry = {
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('ServiceNow tickets', hr, headers=headers, removeNull=True),
        'EntryContext': {
            'Ticket(val.ID===obj.ID)': context,
            'ServiceNow.Ticket(val.ID===obj.ID)': context
        }
    }
    return entry
def query_table_command():
    """servicenow-query-table: search an arbitrary table.

    With the fields argument, each record is projected to those fields
    (sys_id always included, renamed to ID); otherwise only the default
    system fields are shown.
    """
    args = unicode_to_str_recur(demisto.args())
    table_name = args['table_name']
    sysparm_limit = args.get('limit', DEFAULTS['limit'])
    sysparm_query = args.get('query')
    sysparm_offset = args.get('offset', DEFAULTS['offset'])
    fields = args.get('fields')
    res = query(table_name, sysparm_limit, sysparm_offset, sysparm_query)
    if not res or 'result' not in res or len(res['result']) == 0:
        return 'No results found'
    entry = {
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json']
    }
    result = res['result']
    if fields:
        fields = argToList(fields)
        if 'sys_id' not in fields:
            # ID is added by default
            fields.append('sys_id')
        # Filter the records according to the given fields
        records = [dict(filter(lambda kv_pair: kv_pair[0] in fields, r.iteritems())) for r in res['result']]
        for r in records:
            r['ID'] = r.pop('sys_id')
            for k, v in r.iteritems():
                if isinstance(v, dict):
                    # For objects that refer to a record in the database, take their value (system ID).
                    r[k] = v.get('value', v)
        entry['ReadableContentsFormat'] = formats['markdown']
        entry['HumanReadable'] = tableToMarkdown('ServiceNow records', records, removeNull=True)
        entry['EntryContext'] = {
            'ServiceNow.Record(val.ID===obj.ID)': createContext(records)
        }
    else:
        mapped_records = [{DEFAULT_RECORD_FIELDS[k]: r[k] for k in DEFAULT_RECORD_FIELDS if k in r} for r in result]
        entry['ReadableContentsFormat'] = formats['markdown']
        entry['HumanReadable'] = tableToMarkdown('ServiceNow records', mapped_records, removeNull=True)
        entry['EntryContext'] = {
            'ServiceNow.Record(val.ID===obj.ID)': createContext(mapped_records)
        }
    return entry
def query(table_name, sysparm_limit, sysparm_offset, sysparm_query):
    """GET records from a table with paging; sysparm_query is sent only when truthy."""
    params = {
        'sysparm_limit': sysparm_limit,
        'sysparm_offset': sysparm_offset
    }
    if sysparm_query:
        params['sysparm_query'] = sysparm_query
    return send_request('table/' + table_name, 'get', params=params)
def upload_file_command():
    """servicenow-upload-file: attach a war-room file to a ticket.

    The file name is resolved from the File (or InfoFile) context when not
    given explicitly via the file_name argument.
    """
    args = unicode_to_str_recur(demisto.args())
    ticket_type = get_table_name(args.get('ticket_type'))
    ticket_id = args['id']
    file_id = args['file_id']
    file_name = args.get('file_name', demisto.dt(demisto.context(), "File(val.EntryID=='" + file_id + "').Name"))
    # in case of info file
    if not file_name:
        file_name = demisto.dt(demisto.context(), "InfoFile(val.EntryID=='" + file_id + "').Name")
    if not file_name:
        return_error('Could not find the file')
    file_name = file_name[0] if isinstance(file_name, list) else file_name
    res = upload_file(ticket_id, file_id, file_name, ticket_type)
    if not res or 'result' not in res or not res['result']:
        return_error('Unable to retrieve response')
    hr = {
        'Filename': res['result'].get('file_name'),
        'Download link': res['result'].get('download_link'),
        'System ID': res['result'].get('sys_id')
    }
    context = {
        'ID': ticket_id,
        'File': {}
    }
    context['File']['Filename'] = res['result'].get('file_name')
    context['File']['Link'] = res['result'].get('download_link')
    context['File']['SystemID'] = res['result'].get('sys_id')
    entry = {
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('File uploaded successfully', hr),
        'EntryContext': {
            'ServiceNow.Ticket(val.ID===obj.ID)': context,
            'Ticket(val.ID===obj.ID)': context
        }
    }
    return entry
def upload_file(ticket_id, file_id, file_name, ticket_type):
    """POST a war-room file to the attachment upload endpoint for the given ticket."""
    request_body = {
        'table_name': ticket_type,
        'table_sys_id': ticket_id,
        'file_name': file_name
    }
    return send_request('attachment/upload', 'post',
                        headers={'Accept': 'application/json'},
                        body=request_body,
                        file={'id': file_id, 'name': file_name})
# Deprecated
def get_computer_command():
    """Deprecated: look up a computer CI by its u_code and return its details.

    NOTE(review): assumes the cmdb_ci_computer records carry the custom
    'u_code' field and flat 'support_group'/'os'/'comments' values -- a record
    missing any of these raises KeyError. Verify against the target instance.
    """
    args = unicode_to_str_recur(demisto.args())
    table_name = 'cmdb_ci_computer'
    computer_name = args['computerName']
    res = query(table_name, None, 0, 'u_code=' + computer_name)
    if not res or 'result' not in res:
        return 'Cannot find computer'
    elif isinstance(res['result'], list):
        if len(res['result']) == 0:
            return 'Cannot find computer'
        computer = res['result'][0]
    else:
        computer = res['result']
    # Guard against partial matches returned by the query.
    if computer['u_code'] != computer_name:
        return 'Computer not found'
    hr = {
        'ID': computer['sys_id'],
        'u_code (computer name)': computer['u_code'],
        'Support group': computer['support_group'],
        'Operating System': computer['os'],
        'Comments': computer['comments']
    }
    ec = createContext(computer, removeNull=True)
    if 'support_group' in computer:
        ec['support_group'] = computer['support_group']['value'] if 'value' in computer['support_group'] else ''
    entry = {
        'Type': entryTypes['note'],
        'Contents': computer,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('ServiceNow Computer', hr),
        'EntryContext': {
            'ServiceNowComputer(val.sys_id==obj.sys_id)': ec,
        }
    }
    return entry
def query_computers_command():
    """Search cmdb_ci_computer records and return a war-room entry.

    Lookup priority: computer_id (direct get) > computer_name > asset_tag >
    free-form `query` argument. Results go into `ServiceNow.Computer` context.
    """
    args = unicode_to_str_recur(demisto.args())
    table_name = 'cmdb_ci_computer'
    computer_id = args.get('computer_id')
    computer_name = args.get('computer_name')
    asset_tag = args.get('asset_tag')
    # NOTE(review): default is {} but the value is used as a sysparm query string -
    # confirm callers always pass a string when name/asset_tag are absent.
    computer_query = args.get('query', {})
    offset = args.get('offset', DEFAULTS['offset'])
    limit = args.get('limit', DEFAULTS['limit'])
    if computer_id:
        res = get(table_name, computer_id)
    else:
        if computer_name:
            computer_query = 'name=' + computer_name
        elif asset_tag:
            computer_query = 'asset_tag=' + asset_tag
        res = query(table_name, limit, offset, computer_query)
    if not res or 'result' not in res:
        return 'No computers found'
    computers = res['result']
    # A direct get returns a single record; normalize to a list.
    if not isinstance(computers, list):
        computers = [computers]
    if len(computers) == 0:
        return 'No computers found'
    headers = ['ID', 'AssetTag', 'Name', 'DisplayName', 'SupportGroup', 'OperatingSystem', 'Company', 'AssignedTo',
               'State', 'Cost', 'Comments']
    # Reference fields (company/assigned_to) may arrive as {'value': ...} objects
    # or plain strings, hence the isinstance checks.
    mapped_computers = [{
        'ID': computer.get('sys_id'),
        'AssetTag': computer.get('asset_tag'),
        'Name': computer.get('name'),
        'DisplayName': '{} - {}'.format(computer.get('asset_tag', ''), computer.get('name', '')),
        'SupportGroup': computer.get('support_group'),
        'OperatingSystem': computer.get('os'),
        'Company': computer.get('company', {}).get('value')
        if isinstance(computer.get('company'), dict) else computer.get('company'),
        'AssignedTo': computer.get('assigned_to', {}).get('value')
        if isinstance(computer.get('assigned_to'), dict) else computer.get('assigned_to'),
        'State': COMPUTER_STATUS.get(computer.get('install_status', ''), computer.get('install_status')),
        'Cost': '{} {}'.format(computer.get('cost', ''), computer.get('cost_cc', '')).rstrip(),
        'Comments': computer.get('comments')
    } for computer in computers]
    entry = {
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('ServiceNow Computers', mapped_computers, headers=headers,
                                         removeNull=True, headerTransform=pascalToSpace),
        'EntryContext': {
            'ServiceNow.Computer(val.ID===obj.ID)': createContext(mapped_computers, removeNull=True),
        }
    }
    return entry
def query_groups_command():
    """Search sys_user_group records and return a war-room entry.

    Lookup priority: group_id (direct get) > group_name > free-form `query`
    argument. Results are written to the `ServiceNow.Group` context path.
    """
    args = unicode_to_str_recur(demisto.args())
    table_name = 'sys_user_group'
    group_id = args.get('group_id')
    group_name = args.get('group_name')
    group_query = args.get('query', {})
    offset = args.get('offset', DEFAULTS['offset'])
    limit = args.get('limit', DEFAULTS['limit'])
    if group_id:
        res = get(table_name, group_id)
    else:
        if group_name:
            group_query = 'name=' + group_name
        res = query(table_name, limit, offset, group_query)
    if not res or 'result' not in res:
        return 'No groups found'
    # A direct get returns a single record; normalize to a list.
    groups = res['result'] if isinstance(res['result'], list) else [res['result']]
    if not groups:
        return 'No groups found'
    headers = ['ID', 'Description', 'Name', 'Active', 'Manager', 'Updated']
    mapped_groups = []
    for group in groups:
        # The manager field may be a reference object ({'value': ...}) or a string.
        manager = group.get('manager', {})
        mapped_groups.append({
            'ID': group.get('sys_id'),
            'Description': group.get('description'),
            'Name': group.get('name'),
            'Active': group.get('active'),
            'Manager': manager.get('value') if isinstance(manager, dict) else manager,
            'Updated': group.get('sys_updated_on'),
        })
    human_readable = tableToMarkdown('ServiceNow Groups', mapped_groups, headers=headers,
                                     removeNull=True, headerTransform=pascalToSpace)
    return {
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': human_readable,
        'EntryContext': {
            'ServiceNow.Group(val.ID===obj.ID)': createContext(mapped_groups, removeNull=True),
        }
    }
def query_users_command():
    """Search sys_user records and return a war-room entry.

    Lookup priority: user_id (direct get) > user_name > free-form `query`
    argument. Results go into the `ServiceNow.User` context path.
    """
    args = unicode_to_str_recur(demisto.args())
    table_name = 'sys_user'
    user_id = args.get('user_id')
    user_name = args.get('user_name')
    # NOTE(review): default is {} but the value is used as a sysparm query string -
    # confirm callers always pass a string when user_name is absent.
    user_query = args.get('query', {})
    offset = args.get('offset', DEFAULTS['offset'])
    limit = args.get('limit', DEFAULTS['limit'])
    if user_id:
        res = get(table_name, user_id)
    else:
        if user_name:
            user_query = 'user_name=' + user_name
        res = query(table_name, limit, offset, user_query)
    if not res or 'result' not in res:
        return 'No users found'
    # Normalize unicode values to str (Python 2) before mapping.
    res = unicode_to_str_recur(res)
    users = res['result']
    # A direct get returns a single record; normalize to a list.
    if not isinstance(users, list):
        users = [users]
    if len(users) == 0:
        return 'No users found'
    headers = ['ID', 'Name', 'UserName', 'Email', 'Created', 'Updated']
    mapped_users = [{
        'ID': user.get('sys_id'),
        'Name': '{} {}'.format(user.get('first_name', ''), user.get('last_name', '')).rstrip(),
        'UserName': user.get('user_name'),
        'Email': user.get('email'),
        'Created': user.get('sys_created_on'),
        'Updated': user.get('sys_updated_on'),
    } for user in users]
    mapped_users = unicode_to_str_recur(mapped_users)
    entry = {
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('ServiceNow Users', mapped_users, headers=headers, removeNull=True,
                                         headerTransform=pascalToSpace),
        'EntryContext': {
            'ServiceNow.User(val.ID===obj.ID)': createContext(mapped_users, removeNull=True),
        }
    }
    return entry
# Deprecated
def get_groups_command():
    """Deprecated. Return sys_user_group records whose name matches exactly.

    Reads `name` from the command arguments and returns a war-room entry with
    the matching groups plus `ServiceNowGroups` context.
    """
    args = unicode_to_str_recur(demisto.args())
    table_name = 'sys_user_group'
    group_name = args['name']
    res = query(table_name, None, 0, 'name=' + group_name)
    if not res or 'result' not in res:
        return 'No groups found'
    hr_groups = []
    context_groups = []
    for group in res['result']:
        # Require an exact name match; use .get() so records missing optional
        # fields do not raise KeyError.
        if group.get('name') == group_name:
            hr_groups.append({
                'ID': group.get('sys_id'),
                'Name': group.get('name'),
                'Description': group.get('description'),
                'Email': group.get('email'),
                'Active': group.get('active'),
                # Bug fix: this previously produced the literal list ['manager']
                # instead of the group's manager field.
                'Manager': group.get('manager')
            })
            context_groups.append({
                'GroupId': group.get('sys_id'),
                'GroupName': group.get('name')
            })
    entry = {
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('ServiceNow Group', hr_groups),
        'EntryContext': {
            'ServiceNowGroups(val.GroupId==obj.GroupId)': context_groups,
        }
    }
    return entry
def list_table_fields_command():
    """List the field (column) names of a ServiceNow table.

    Fetches one record from `table_name` and reports its keys as the table's
    fields under the `ServiceNow.Field` context path.
    """
    args = unicode_to_str_recur(demisto.args())
    table_name = args['table_name']
    res = get_table_fields(table_name)
    if not res or 'result' not in res:
        return 'Cannot find table'
    if len(res['result']) == 0:
        return 'Table contains no records'
    # Only the keys (field names) are needed; iterating the record directly
    # avoids the Python-2-only iteritems() call and the unused value binding.
    fields = [{'Name': field} for field in res['result'][0]]
    entry = {
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('ServiceNow Table fields - ' + table_name, fields),
        'EntryContext': {
            'ServiceNow.Field': createContext(fields),
        }
    }
    return entry
def get_table_fields(table_name):
    """Fetch a single record from *table_name*; its keys reveal the table's fields.

    Returns:
        (dict): Raw API response containing at most one record.
    """
    # Limiting to one record keeps the response small - only the keys matter.
    return send_request('table/' + table_name + '?sysparm_limit=1', 'GET')
def get_table_name_command():
    """Find ServiceNow tables whose label matches the `label` argument.

    Queries sys_db_object and returns a war-room entry with the matching
    tables under the `ServiceNow.Table` context path.
    """
    args = unicode_to_str_recur(demisto.args())
    label = args['label']
    offset = args.get('offset', DEFAULTS['offset'])
    limit = args.get('limit', DEFAULTS['limit'])
    res = query('sys_db_object', limit, offset, 'label=' + label)
    if not res or 'result' not in res:
        return 'Cannot find table'
    tables = res['result']
    if not tables:
        return 'Cannot find table'
    headers = ['ID', 'Name', 'SystemName']
    mapped_tables = []
    for table in tables:
        mapped_tables.append({
            'ID': table.get('sys_id'),
            'Name': table.get('name'),
            'SystemName': table.get('sys_name')
        })
    human_readable = tableToMarkdown('ServiceNow Tables for label - ' + label, mapped_tables,
                                     headers=headers, headerTransform=pascalToSpace)
    return {
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': human_readable,
        'EntryContext': {
            'ServiceNow.Table(val.ID===obj.ID)': createContext(mapped_tables),
        }
    }
def fetch_incidents():
    """Fetch ServiceNow tickets created since the last run and create XSOAR incidents.

    Uses demisto.getLastRun()/setLastRun() with the last seen timestamp as a
    cursor, ordering results by TIMESTAMP_FIELD so the newest processed
    record's timestamp becomes the next run's starting point.
    """
    query_params = {}
    incidents = []
    if FETCH_TIME:
        fetch_time = FETCH_TIME
    else:
        fetch_time = DEFAULTS['fetch_time']
    last_run = demisto.getLastRun()
    if 'time' not in last_run:
        # First run: derive the starting timestamp from the configured look-back window.
        snow_time, _ = parse_date_range(fetch_time, '%Y-%m-%d %H:%M:%S')
    else:
        snow_time = last_run['time']
    # NOTE(review): this local `query` string shadows the module-level query()
    # helper within this function's scope.
    query = ''
    if SYSPARM_QUERY:
        query += SYSPARM_QUERY + '^'
    # Order by the timestamp field and only take records newer than the cursor.
    query += 'ORDERBY{0}^{0}>{1}'.format(TIMESTAMP_FIELD, snow_time)
    if query:
        query_params['sysparm_query'] = query
    query_params['sysparm_limit'] = SYSPARM_LIMIT
    path = 'table/' + TICKET_TYPE
    res = send_request(path, 'get', params=query_params)
    count = 0
    parsed_snow_time = datetime.strptime(snow_time, '%Y-%m-%d %H:%M:%S')
    for result in res.get('result', []):
        labels = []
        if TIMESTAMP_FIELD not in result:
            raise ValueError("The timestamp field [{}]"
                             " does not exist in the ticket".format(TIMESTAMP_FIELD))
        # NOTE(review): `count > SYSPARM_LIMIT` lets one extra record through
        # (off-by-one) and compares an int against SYSPARM_LIMIT, which may be a
        # string parameter - confirm intended behavior.
        if count > SYSPARM_LIMIT:
            break
        try:
            # Defensive re-filter: skip records older than the cursor timestamp.
            if datetime.strptime(result[TIMESTAMP_FIELD], '%Y-%m-%d %H:%M:%S') < parsed_snow_time:
                continue
        except Exception:
            pass
        # Expose every ticket field as an incident label; non-strings are JSON-encoded.
        for k, v in result.iteritems():
            if isinstance(v, basestring):
                labels.append({
                    'type': k,
                    'value': v
                })
            else:
                labels.append({
                    'type': k,
                    'value': json.dumps(v)
                })
        severity = SEVERITY_MAP.get(result.get('severity', ''), 0)
        file_names = []
        if GET_ATTACHMENTS:
            # Download ticket attachments so they can be attached to the incident.
            file_entries = get_ticket_attachment_entries(result['sys_id'])
            for file_result in file_entries:
                if file_result['Type'] == entryTypes['error']:
                    raise Exception('Error getting attachment: ' + str(file_result['Contents']))
                file_names.append({
                    'path': file_result['FileID'],
                    'name': file_result['File']
                })
        incidents.append({
            'name': 'ServiceNow Incident ' + result.get('number'),
            'labels': labels,
            'details': json.dumps(result),
            'severity': severity,
            'attachment': file_names,
            'rawJSON': json.dumps(result)
        })
        count += 1
        # Advance the cursor to the newest processed record's timestamp.
        snow_time = result[TIMESTAMP_FIELD]
    demisto.incidents(incidents)
    demisto.setLastRun({'time': snow_time})
def test_module():
    """Integration test-button handler: validate config and API connectivity.

    Checks that FETCH_TIME parses, that the configured ticket table is readable,
    and (when fetching is enabled) that TIMESTAMP_FIELD exists on a sample ticket.
    """
    # parse_date_range raises a descriptive error when FETCH_TIME is invalid.
    parse_date_range(FETCH_TIME, '%Y-%m-%d %H:%M:%S')
    res = send_request('table/' + TICKET_TYPE + '?sysparm_limit=1', 'GET')
    if 'result' not in res:
        return_error('ServiceNow error: ' + str(res))
    ticket = res['result']
    if ticket and demisto.params().get('isFetch'):
        sample = ticket[0] if isinstance(ticket, list) else ticket
        if TIMESTAMP_FIELD not in sample:
            raise ValueError("The timestamp field [{}]"
                             " does not exist in the ticket".format(TIMESTAMP_FIELD))
# Command dispatch - runs at module top level on every integration invocation.
LOG('Executing command ' + demisto.command())
raise_exception = False
try:
    command = demisto.command()
    if command == 'test-module':
        test_module()
        demisto.results('ok')
    elif command == 'fetch-incidents':
        # For fetch, re-raise so the server records the fetch failure.
        raise_exception = True
        fetch_incidents()
    # Bug fix: the get branch previously listed 'servicenow-incident-update'
    # (a copy-paste of the update branch), which routed that command to
    # get_ticket_command and made its update branch unreachable.
    elif command in ('servicenow-get', 'servicenow-incident-get', 'servicenow-get-ticket'):
        demisto.results(get_ticket_command())
    elif command in ('servicenow-update', 'servicenow-incident-update', 'servicenow-update-ticket'):
        demisto.results(update_ticket_command())
    elif command in ('servicenow-create', 'servicenow-incident-create', 'servicenow-create-ticket'):
        demisto.results(create_ticket_command())
    elif command == 'servicenow-delete-ticket':
        demisto.results(delete_ticket_command())
    elif command in ('servicenow-add-link', 'servicenow-incident-add-link'):
        demisto.results(add_link_command())
    elif command in ('servicenow-add-comment', 'servicenow-incident-add-comment'):
        demisto.results(add_comment_command())
    elif command in ('servicenow-query', 'servicenow-incidents-query', 'servicenow-query-tickets'):
        demisto.results(query_tickets_command())
    elif command in ('servicenow-upload-file', 'servicenow-incident-upload-file'):
        demisto.results(upload_file_command())
    elif command == 'servicenow-query-table':
        demisto.results(query_table_command())
    elif command == 'servicenow-get-computer':
        demisto.results(get_computer_command())
    elif command == 'servicenow-query-computers':
        demisto.results(query_computers_command())
    elif command == 'servicenow-query-groups':
        demisto.results(query_groups_command())
    elif command == 'servicenow-query-users':
        demisto.results(query_users_command())
    elif command == 'servicenow-get-groups':
        demisto.results(get_groups_command())
    elif command == 'servicenow-get-record':
        demisto.results(get_record_command())
    elif command == 'servicenow-update-record':
        demisto.results(update_record_command())
    elif command == 'servicenow-create-record':
        demisto.results(create_record_command())
    elif command == 'servicenow-delete-record':
        demisto.results(delete_record_command())
    # Consistency: these were bare `if` statements; `elif` keeps one chain.
    elif command == 'servicenow-list-table-fields':
        demisto.results(list_table_fields_command())
    elif command == 'servicenow-get-table-name':
        demisto.results(get_table_name_command())
    elif command == 'servicenow-get-ticket-notes':
        demisto.results(get_ticket_notes_command())
except Exception as e:
    LOG(e)
    LOG.print_log()
    if not raise_exception:
        return_error(str(e))
    else:
        raise
| mit | 2aa10857d998b1445834800bc5b21c77 | 32.853074 | 120 | 0.584632 | 3.698416 | false | false | false | false |
demisto/content | Packs/dnstwist/Integrations/dnstwist/dnstwist.py | 2 | 3329 | import json
import subprocess
from CommonServerPython import *
# Path to the dnstwist executable bundled in the integration's docker image.
TWIST_EXE = '/dnstwist/dnstwist.py'
if demisto.command() == 'dnstwist-domain-variations':
    # dnstwist output fields that are surfaced in the markdown table/context.
    KEYS_TO_MD = ["whois_updated", "whois_created", "dns_a", "dns_mx", "dns_ns"]
    DOMAIN = demisto.args()['domain']
    # Maximum number of variation domains to put into context.
    LIMIT = int(demisto.args()['limit'])
    WHOIS = demisto.args().get('whois')
def get_dnstwist_result(domain, include_whois):
args = [TWIST_EXE, '-f', 'json']
if include_whois:
args.append('-w')
args.append(domain)
res = subprocess.check_output(args, stderr=subprocess.DEVNULL)
return json.loads(res)
def get_domain_to_info_map(dns_twist_result):
results = []
for x in dns_twist_result:
temp = {} # type: dict
for k, v in x.items():
if k in KEYS_TO_MD:
if x["domain"] not in temp:
temp["domain-name"] = x["domain"]
if k == "dns_a":
temp["IP Address"] = v
else:
temp[k] = v
if temp:
results.append(temp)
return results
    # Run dnstwist and reduce its output to the fields we surface.
    dnstwist_result = get_dnstwist_result(DOMAIN, WHOIS == 'yes')
    new_result = get_domain_to_info_map(dnstwist_result)
    md = tableToMarkdown('dnstwist for domain - ' + DOMAIN, new_result,
                         headers=["domain-name", "IP Address", "dns_mx", "dns_ns", "whois_updated", "whois_created"])
    domain_context = new_result[0]  # The requested domain for variations
    domains_context_list = new_result[1:LIMIT + 1]  # The variations domains
    # Re-key each variation into the context field names (Name/IP/DNS-MX/...).
    domains = []
    for item in domains_context_list:
        temp = {"Name": item["domain-name"]}
        if "IP Address" in item:
            temp["IP"] = item["IP Address"]
        if "dns_mx" in item:
            temp["DNS-MX"] = item["dns_mx"]
        if "dns_ns" in item:
            temp["DNS-NS"] = item["dns_ns"]
        if "whois_updated" in item:
            temp["WhoisUpdated"] = item["whois_updated"]
        if "whois_created" in item:
            temp["WhoisCreated"] = item["whois_created"]
        domains.append(temp)
    # Context for the requested domain itself; its variations nest under Domains.
    ec = {"Domains": domains}
    if "domain-name" in domain_context:
        ec["Name"] = domain_context["domain-name"]
    if "IP Address" in domain_context:
        ec["IP"] = domain_context["IP Address"]
    if "dns_mx" in domain_context:
        ec["DNS-MX"] = domain_context["dns_mx"]
    if "dns_ns" in domain_context:
        ec["DNS-NS"] = domain_context["dns_ns"]
    if "whois_updated" in domain_context:
        ec["WhoisUpdated"] = domain_context["whois_updated"]
    if "whois_created" in domain_context:
        ec["WhoisCreated"] = domain_context["whois_created"]
    entry_result = {
        'Type': entryTypes['note'],
        'ContentsFormat': formats['json'],
        'Contents': dnstwist_result,
        'HumanReadable': md,
        'ReadableContentsFormat': formats['markdown'],
        'EntryContext': {'dnstwist.Domain(val.Name == obj.Name)': ec}
    }
    demisto.results(entry_result)
if demisto.command() == 'test-module':
    # This is the call made when pressing the integration test button.
    # Running `dnstwist -h` verifies the executable exists and is runnable.
    subprocess.check_output([TWIST_EXE, '-h'], stderr=subprocess.STDOUT)
    demisto.results('ok')
    sys.exit(0)
| mit | 66d2581633ae2595e86ded61ddc3b160 | 35.582418 | 117 | 0.567738 | 3.460499 | false | false | false | false |
demisto/content | Utils/github_workflow_scripts/send_slack_message.py | 2 | 6814 | #!/usr/bin/env python3
from typing import List
from slack_sdk import WebClient
from blessings import Terminal
from utils import get_env_var
import json
import requests
from github import Github, File, PullRequest
import urllib3
from pprint import pformat
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
GREEN_COLOR = "#6eb788"
SLACK_CHANNEL_TO_SEND_PR_TO = 'contribution-reviews'
def get_metadata_file(file: File.File) -> dict:
    """Download and parse the raw content of a pack metadata file.

    Args:
        file (File): The pull-request file object to fetch content for

    Returns:
        (dict): Content of the metadata file
    """
    response = requests.get(file.raw_url, verify=False)
    try:
        return response.json()
    except ValueError:
        raise Exception(f'{file.filename} is not a well-formatted metadata.json file')  # pylint: disable=W0707
def create_slack_markdown(text: str) -> dict:
    """Wrap *text* in a slack block-kit markdown ("mrkdwn") element.

    Args:
        text (str): String to appear in the entry

    Returns:
        (dict): markdown entry for the slack block-kit
    """
    return {"type": "mrkdwn", "text": text}
def create_slack_fields(text_fields: List) -> dict:
    """Build a block-kit section whose fields are bolded key/value markdown entries.

    Args:
        text_fields (List): list of (key, value) tuples

    Returns:
        (dict): section entry for the slack block-kit
    """
    fields = []
    for key, value in text_fields:
        fields.append(create_slack_markdown(f'*{key}*:\n {value}'))
    return {"type": "section", "fields": fields}
def create_slack_section(key: str, value: str) -> dict:
    """Build a block-kit section with a single code-formatted key/value line.

    Args:
        key (str): pack related key (example pack version)
        value (str): pack related value (example 1.0.0)

    Returns:
        (dict): section entry for the slack block-kit
    """
    section_text = create_slack_markdown(f'```{key}: {value}\n```')
    return {"type": "section", "text": section_text}
def create_individual_pack_segment(metadata_obj: dict) -> List[dict]:
    """Render one pack's name/support/version as slack blocks, ending with a divider.

    Args:
        metadata_obj (dict): metadata information dictionary

    Returns:
        (List): List of slack blocks representing the pack information
    """
    return [
        create_slack_section('Pack Name', metadata_obj.get('name', '')),
        create_slack_section('Support Type', metadata_obj.get('support', '')),
        create_slack_section('Version', metadata_obj.get('currentVersion', '')),
        {"type": "divider"},
    ]
def create_packs_segment(metadata_files: list) -> List[dict]:
    """Concatenate the pack-information blocks for every metadata file.

    Args:
        metadata_files (List): List of File objects representing metadata files

    Returns:
        (List): List of slack blocks representing all packs information
    """
    blocks: List[dict] = []
    for metadata_file in metadata_files:
        blocks.extend(create_individual_pack_segment(get_metadata_file(metadata_file)))
    return blocks
def create_pull_request_segment(pr: PullRequest.PullRequest) -> List[dict]:
    """Summarize the PR (assignees, contributor, changed files, labels, URL) as blocks.

    Args:
        pr (PullRequest): object that represents the pull request.

    Returns:
        (List): Slack blocks with the PR information
    """
    fields_block = create_slack_fields([
        ('Assignees', ','.join(assignee.login for assignee in pr.assignees)),
        ('Contributor', pr.user.login),
        ('Changed Files', pr.changed_files),
        ('Labels', ','.join(label.name for label in pr.labels)),
    ])
    url_block = {
        'type': 'section',
        'text': create_slack_markdown(f'*URL:* `{pr.html_url}`'),
    }
    return [fields_block, url_block]
def create_pr_title(pr: PullRequest.PullRequest) -> List[dict]:
    """Build the slack header block holding the PR title.

    Args:
        pr (PullRequest): object that represents the pull request.

    Returns:
        (List): List containing the header block
    """
    title_text = {
        "type": "plain_text",
        "text": f"{pr.title}",
        "emoji": True
    }
    return [{"type": "header", "text": title_text}]
def slack_post_message(client: WebClient, message_blocks: List):
    """Post the given blocks to the contribution-reviews slack channel.

    Args:
        client (WebClient): Slack web-client object.
        message_blocks (List): List of blocks representing the message blocks.
    """
    attachment = {
        "color": GREEN_COLOR,
        "blocks": message_blocks,
    }
    client.chat_postMessage(
        channel=SLACK_CHANNEL_TO_SEND_PR_TO,
        attachments=[attachment])
def main():
    """Build and send a Slack notification summarizing a contribution PR.

    Reads the GitHub event payload from EVENT_PAYLOAD, fetches the PR and its
    pack metadata files, and posts a block-kit message to Slack - unless the
    PR was opened by xsoar-bot (UI contributions are skipped).
    """
    t = Terminal()
    payload_str = get_env_var('EVENT_PAYLOAD')
    print(f'{t.cyan}Starting the slack notifier{t.normal}')
    payload = json.loads(payload_str)
    pr_number = payload.get('pull_request', {}).get('number')
    # Get the PR information in order to get information like metadata
    org_name = 'demisto'
    repo_name = 'content'
    gh = Github(get_env_var('CONTENTBOT_GH_ADMIN_TOKEN'), verify=False)
    content_repo = gh.get_repo(f'{org_name}/{repo_name}')
    pr = content_repo.get_pull(pr_number)
    metadata_files = [file for file in pr.get_files() if file.filename.endswith('_metadata.json')]
    # We don't want to notify about community PRs made through the UI
    if pr.user.login == 'xsoar-bot':
        print(f'{t.cyan}PR was created using the XSOAR-UI, support will be community. Not sending a slack message ')
    else:
        # Build all blocks of the message
        header = create_pr_title(pr)
        pull_request_segment = create_pull_request_segment(pr)
        packs_segment = create_packs_segment(metadata_files)
        blocks = header + pull_request_segment + packs_segment
        print(f'{t.yellow}Finished preparing message: \n{pformat(blocks)}{t.normal}')
        # Send message
        slack_token = get_env_var('CORTEX_XSOAR_SLACK_TOKEN')
        client = WebClient(token=slack_token)
        slack_post_message(client, blocks)
        print(f'{t.cyan}Slack message sent successfully{t.normal}')
if __name__ == "__main__":
    main()
| mit | 80961f277d5a70e8578072eff9edaff6 | 29.832579 | 116 | 0.624596 | 3.918344 | false | false | false | false |
demisto/content | Packs/MalwareInvestigationAndResponse/Scripts/ReadProcessesFileXDR/ReadProcessesFileXDR.py | 2 | 2893 | import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
from typing import Union
import re
CONTEXT_PATH_TO_READ_PROCESS_FILE_NAME_FROM_XDR_DATA = "PaloAltoNetworksXDR.ScriptResult"
def read_xdr_context():
    """Return the XDR script results stored in the investigation context.

    Reads the value at CONTEXT_PATH_TO_READ_PROCESS_FILE_NAME_FROM_XDR_DATA
    from demisto.context(); may be a list, a dict, or None when absent.
    """
    return demisto.get(demisto.context(), CONTEXT_PATH_TO_READ_PROCESS_FILE_NAME_FROM_XDR_DATA)
def entries_to_markdown(entry_list: List[str]):
    """
    Args:
        entry_list (List[str]): the _return_value array from demisto context
    Returns:
        str: a markdown table to be displayed on the layout ('' when empty).
    """
    if not entry_list:
        return ''
    # Each entry looks like "Name: <n>, CPU: <c>, Memory: <m>"; non-matching
    # entries are silently skipped.
    pattern = re.compile(r"Name: (?P<name>.*), CPU: (?P<cpu>.*), Memory: (?P<memory>.*)\b")
    rows = []
    for entry in entry_list:
        match = pattern.search(entry)
        if match:
            rows.append({
                'Name': match.group('name'),
                'CPU': match.group('cpu'),
                'Memory': match.group('memory'),
            })
    return tableToMarkdown('', rows, ['Name', 'CPU', 'Memory'])
def detect_process_field(entry: str):
    """Return True when *entry* contains all process-list markers (Name/Memory/CPU)."""
    return all(marker in entry for marker in ('Name', 'Memory', 'CPU'))
def find_last_process_list_script(script_results: Union[list, dict]):
    """
    Scan the XDR script results from newest to oldest and return the most
    recent `_return_value` list that looks like a process listing.

    Args:
        script_results (List | dict): script results after running XDRIR script

    Returns:
        list | None: the matching _return_value list, or None when nothing matches
    """
    if not script_results:
        return None
    # Normalize a single result dict to a one-element list.
    normalized = script_results if isinstance(script_results, list) else [script_results]
    for script_result in reversed(normalized):
        results = script_result.get('results', [])
        if not results:
            continue
        if not isinstance(results, list):
            results = [results]
        for result in reversed(results):
            return_value = result.get('_return_value', [])
            # Probe the first element to distinguish the process-list script's
            # output from other scripts stored under the same context entry.
            if return_value and detect_process_field(return_value[0]):
                return return_value
    return None
def main():
    """Read XDR process-list results from context and render them as a table."""
    script_results = read_xdr_context()
    # Pick the most recent script result that looks like a process listing.
    _return_value = find_last_process_list_script(script_results)
    markdown = entries_to_markdown(_return_value)
    if markdown:
        return_results(CommandResults(readable_output=markdown))
    else:
        return_results(CommandResults(readable_output="No data to present"))
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | 5ff0c4bde4e9f7f0ef184cb5fb60e4bf | 28.520408 | 105 | 0.6215 | 3.941417 | false | false | false | false |
nylas/nylas-python | nylas/client/scheduler_models.py | 1 | 1698 | from nylas.client.restful_models import RestfulModel
class SchedulerTimeSlot(RestfulModel):
    """A bookable time slot exposed by the Nylas Scheduler availability API."""
    # API attribute names deserialized onto the model instance.
    attrs = ["account_id", "calendar_id", "host_name", "emails"]
    # Timestamp fields converted to datetimes: API field name -> attribute name.
    datetime_attrs = {"start": "start", "end": "end"}
    def __init__(self, api):
        RestfulModel.__init__(self, SchedulerTimeSlot, api)
class SchedulerBookingConfirmation(RestfulModel):
    """A booking confirmation returned by the Nylas Scheduler API."""
    # API attribute names deserialized onto the model instance.
    attrs = [
        "id",
        "account_id",
        "additional_field_values",
        "calendar_event_id",
        "calendar_id",
        "edit_hash",
        "is_confirmed",
        "location",
        "recipient_email",
        "recipient_locale",
        "recipient_name",
        "recipient_tz",
        "title",
    ]
    # Timestamp fields converted to datetimes: API field name -> attribute name.
    datetime_attrs = {"start_time": "start_time", "end_time": "end_time"}
    def __init__(self, api):
        RestfulModel.__init__(self, SchedulerBookingConfirmation, api)
class SchedulerBookingRequest(RestfulModel):
    """Payload used to book a time slot through the Nylas Scheduler API."""
    # API attribute names serialized to/from the request payload.
    attrs = [
        "additional_values",
        "additional_emails",
        "email",
        "locale",
        "name",
        "page_hostname",
        "replaces_booking_hash",
        "timezone",
        "slot",
    ]
    def __init__(self, api):
        RestfulModel.__init__(self, SchedulerBookingRequest, api)
    def as_json(self, enforce_read_only=True):
        """Serialize to a dict, defaulting required keys and flattening the slot.

        NOTE(review): enforce_read_only is accepted for signature compatibility
        but is not forwarded to RestfulModel.as_json - confirm this is intentional.
        """
        dct = RestfulModel.as_json(self)
        # The API requires these keys to be present, so default them when unset.
        if "additional_values" not in dct or dct["additional_values"] is None:
            dct["additional_values"] = {}
        if "additional_emails" not in dct or dct["additional_emails"] is None:
            dct["additional_emails"] = []
        # Serialize a nested SchedulerTimeSlot model into its JSON representation.
        if "slot" in dct and isinstance(dct["slot"], SchedulerTimeSlot):
            dct["slot"] = dct["slot"].as_json()
        return dct
| mit | e95f0342805b467785586e0a42c47151 | 27.779661 | 78 | 0.574794 | 3.798658 | false | false | false | false |
demisto/content | Packs/Workday/Integrations/WorkdayIAMEventsGenerator/WorkdayIAMEventsGenerator.py | 2 | 28056 | # noqa: F401
from flask import Flask, jsonify
from gevent.pywsgi import WSGIServer
from CommonServerPython import *
FIRST_RUN_REPORT = {
"Report_Entry": [
{
"Employee_Type": "Regular",
"Leadership": "Yes-HQ",
"Work_Country_Code": "840",
"Street_Address": "3000 Tannery Way",
"Employment_Status": "Active",
"VP_Flag": "N",
"Mgr_ID": "115069",
"Cost_Center_Description": "Channel Sales",
"GDPR_Country_Flag": "0",
"Director_Flag": "Y",
"Email_-_Primary_Home": "ronnyrahardjo@test.com",
"First_Name": "Ronny",
"Last_Hire_Date": "10/05/2020",
"People_Manager_Flag": "N",
"Department": "Sales NAM:NAM Channel Sales",
"Workday_ID": "5aa443c785ff10461ac83e5a6be32e1e",
"Postal_Code": "95054",
"Rehired_Employee": "Yes",
"Org_Level_1": "Sales",
"Org_Level_3": "NAM Channel Sales",
"Country_Name": "United States Of America",
"Org_Level_2": "Sales NAM",
"Emp_ID": "100122",
"Job_Family": "Product Management",
"User_Name": "rrahardjo@test.com",
"Preferred_Name_-_First_Name": "Ronny",
"Prehire_Flag": "False",
"Management_Level_1": "Nikesh Arora",
"Work_Country_Abbrev": "US",
"Management_Level_2": "Timmy Turner",
"Email_Address": "rrahardjo@test.com",
"Title": "Dir, Product Line Manager",
"City": "Santa Clara",
"Work_State_US_Only": "California",
"Job_Code": "2245",
"PAN_CF_Okta_Location_Region": "Americas",
"Last_Name": "Rahardjo",
"Job_Function": "Product Management Function",
"State": "California",
"Exec_Admin_Flag": "N",
"Preferred_Name": "Ronny Rahardjo",
"Regular_Employee_Flag": "Y",
"Preferred_Name_-_Last_Name": "Rahardjo",
"Cost_Center_Code": "120100",
"Location": "Office - USA - CA - Headquarters",
"Last_Day_of_Work": "02/15/2021",
"Termination_Date": "02/15/2021",
"Hire_Date": "01/01/2010"
},
{
"Employee_Type": "Regular",
"Leadership": "No",
"Work_Country_Code": "840",
"Street_Address": "WeWork Embarcadero Center",
"Employment_Status": "Active",
"VP_Flag": "N",
"Mgr_ID": "115069",
"Cost_Center_Description": "Magnifier Sales Inc",
"GDPR_Country_Flag": "0",
"Public_Work_Mobile_Phone_Number": "+44 7900-160-819",
"Director_Flag": "N",
"Email_-_Primary_Home": "stevearnoldtstc@test.com",
"First_Name": "Stephen",
"Last_Hire_Date": "10/01/2020",
"People_Manager_Flag": "N",
"Department": "WW Sales Functions:Cortex Sales",
"Workday_ID": "5aa443c785ff10461a941c31a173e459",
"Postal_Code": "94111",
"Rehired_Employee": "Yes",
"Org_Level_1": "Sales",
"Org_Level_3": "Cortex Sales",
"Country_Name": "United States Of America",
"Org_Level_2": "WW Sales Functions",
"Emp_ID": "101351",
"Job_Family": "Software Engineering",
"User_Name": "sarnold@test.com",
"Preferred_Name_-_First_Name": "Stephen",
"Prehire_Flag": "False",
"Management_Level_1": "Nikesh Arora",
"Work_Country_Abbrev": "US",
"Management_Level_2": "Timmy Turner",
"Email_Address": "sarnold@test.com",
"Title": "Mgr, SW Engineering",
"City": "San Francisco",
"Work_State_US_Only": "California",
"Job_Code": "2163",
"PAN_CF_Okta_Location_Region": "Americas",
"Last_Name": "Arnold",
"Job_Function": "Engineering Function",
"State": "California",
"Exec_Admin_Flag": "N",
"Preferred_Name": "Stephen Arnold",
"Regular_Employee_Flag": "Y",
"Preferred_Name_-_Last_Name": "Arnold",
"Cost_Center_Code": "101100",
"Location": "Office - USA - CA - San Francisco",
"Last_Day_of_Work": "02/15/2021",
"Termination_Date": "02/15/2021",
"Hire_Date": "01/01/2010"
},
{
"Employee_Type": "Regular",
"Leadership": "No",
"Work_Country_Code": "840",
"Street_Address": "3000 Tannery Way",
"Employment_Status": "Active",
"VP_Flag": "N",
"Mgr_ID": "115069",
"Cost_Center_Description": "IoT - Engineering",
"GDPR_Country_Flag": "0",
"Director_Flag": "N",
"Email_-_Primary_Home": "test37@testing.com",
"First_Name": "Tooth",
"Last_Hire_Date": "06/15/2020",
"People_Manager_Flag": "N",
"Department": "Enterprise R&D:FWaaP",
"Workday_ID": "9aa7e309929e01ebec7923080803461b",
"Postal_Code": "95054",
"Rehired_Employee": "No",
"Org_Level_1": "All R&D",
"Org_Level_3": "FWaaP",
"Country_Name": "United States Of America",
"Org_Level_2": "Enterprise R&D",
"Emp_ID": "115104",
"Job_Family": "Software Engineering",
"Preferred_Name_-_First_Name": "Tooth",
"Prehire_Flag": "False",
"Management_Level_1": "Nikesh Arora",
"Work_Country_Abbrev": "US",
"Management_Level_2": "Timmy Turner",
"Email_Address": "tfairy@test.com",
"Title": "Staff Engineer SW",
"City": "Santa Clara",
"Work_State_US_Only": "California",
"Job_Code": "5162",
"PAN_CF_Okta_Location_Region": "Americas",
"Last_Name": "Fairy_Updated",
"Job_Function": "Engineering Function",
"State": "California",
"Exec_Admin_Flag": "N",
"Preferred_Name": "Tooth Fairy_Updated",
"Regular_Employee_Flag": "Y",
"Preferred_Name_-_Last_Name": "Fairy_Updated",
"Cost_Center_Code": "613116",
"Location": "Office - USA - CA - Headquarters",
"Last_Day_of_Work": "02/15/2021",
"Termination_Date": "02/15/2021",
"Hire_Date": "01/01/2010"
},
{
"Employee_Type": "Regular",
"Leadership": "No",
"Work_Country_Code": "840",
"Street_Address": "3000 Tannery Way",
"Employment_Status": "Active",
"VP_Flag": "N",
"Mgr_ID": "115069",
"Cost_Center_Description": "Consulting Systems Engineering",
"GDPR_Country_Flag": "0",
"Director_Flag": "N",
"Email_-_Primary_Home": "test26@testing.com",
"First_Name": "Remy",
"Last_Hire_Date": "06/15/2020",
"People_Manager_Flag": "N",
"Department": "WW Sales Functions:WW SE Sales",
"Workday_ID": "9aa7e309929e01830c041f1c08039323",
"Postal_Code": "95054",
"Rehired_Employee": "No",
"Org_Level_1": "Sales",
"Org_Level_3": "WW SE Sales",
"Country_Name": "United States Of America",
"Org_Level_2": "WW Sales Functions",
"Emp_ID": "115094",
"Job_Family": "Software Engineering",
"User_Name": "rbuxaplenty@test.com",
"Preferred_Name_-_First_Name": "Remy",
"Prehire_Flag": "False",
"Management_Level_1": "Nikesh Arora",
"Work_Country_Abbrev": "US",
"Management_Level_2": "Timmy Turner",
"Email_Address": "rbuxaplenty@test.com",
"Title": "Staff Engineer Software",
"City": "Santa Clara",
"Work_State_US_Only": "California",
"Job_Code": "5162",
"PAN_CF_Okta_Location_Region": "Americas",
"Last_Name": "Buxaplenty",
"Job_Function": "Engineering Function",
"State": "California",
"Exec_Admin_Flag": "N",
"Preferred_Name": "Remy Buxaplenty",
"Regular_Employee_Flag": "Y",
"Preferred_Name_-_Last_Name": "Buxaplenty",
"Cost_Center_Code": "310100",
"Location": "Office - USA - CA - Headquarters",
"Last_Day_of_Work": "02/15/2021",
"Termination_Date": "02/15/2021",
"Hire_Date": "01/01/2010"
},
{
"Employee_Type": "Regular",
"Leadership": "No",
"Work_Country_Code": "840",
"Street_Address": "3000 Tannery Way",
"Employment_Status": "Active",
"VP_Flag": "N",
"Mgr_ID": "115069",
"Cost_Center_Description": "IoT - PM",
"GDPR_Country_Flag": "0",
"Director_Flag": "N",
"Email_-_Primary_Home": "test24@testing.com",
"First_Name": "Norm",
"Last_Hire_Date": "06/15/2020",
"People_Manager_Flag": "N",
"Department": "Enterprise R&D:FWaaP",
"Workday_ID": "9aa7e309929e0125823a032108030b25",
"Postal_Code": "95054",
"Rehired_Employee": "No",
"Org_Level_1": "All R&D",
"Org_Level_3": "FWaaP",
"Country_Name": "United States Of America",
"Org_Level_2": "Enterprise R&D",
"Emp_ID": "115092",
"Job_Family": "Product Management",
"User_Name": "ngenie@test.com",
"Preferred_Name_-_First_Name": "Norm",
"Prehire_Flag": "False",
"Management_Level_1": "Nikesh Arora",
"Work_Country_Abbrev": "US",
"Management_Level_2": "Timmy Turner",
"Email_Address": "ngenie@test.com",
"Title": "Sr Prod Mgr",
"City": "Santa Clara",
"Work_State_US_Only": "California",
"Job_Code": "5224",
"PAN_CF_Okta_Location_Region": "Americas",
"Last_Name": "Genie",
"Job_Function": "Product Management Function",
"State": "California",
"Exec_Admin_Flag": "N",
"Preferred_Name": "Norm Genie",
"Regular_Employee_Flag": "Y",
"Preferred_Name_-_Last_Name": "Genie",
"Cost_Center_Code": "651116",
"Location": "Office - USA - CA - Headquarters",
"Last_Day_of_Work": "02/15/2021",
"Termination_Date": "02/15/2021",
"Hire_Date": "01/01/2010"
},
{
"Employee_Type": "Regular",
"Leadership": "No",
"Work_Country_Code": "840",
"Street_Address": "3000 Tannery Way",
"Employment_Status": "Active",
"VP_Flag": "N",
"Mgr_ID": "115069",
"Cost_Center_Description": "IoT - PM",
"GDPR_Country_Flag": "0",
"Director_Flag": "N",
"Email_-_Primary_Home": "test23@testing.com",
"First_Name": "Santa",
"Last_Hire_Date": "06/15/2020",
"People_Manager_Flag": "N",
"Department": "Enterprise R&D:FWaaP",
"Workday_ID": "9aa7e309929e01b392c9a5220803c825",
"Postal_Code": "95054",
"Rehired_Employee": "No",
"Org_Level_1": "All R&D",
"Org_Level_3": "FWaaP",
"Country_Name": "United States Of America",
"Org_Level_2": "Enterprise R&D",
"Emp_ID": "115091",
"Job_Family": "Technical Writing",
"Preferred_Name_-_First_Name": "Santa",
"Prehire_Flag": "False",
"Management_Level_1": "Nikesh Arora",
"Work_Country_Abbrev": "US",
"Management_Level_2": "Timmy Turner",
"Email_Address": "sclaus@test.com",
"Title": "Sr Technical Writer",
"City": "Santa Clara",
"Work_State_US_Only": "California",
"Job_Code": "5314",
"PAN_CF_Okta_Location_Region": "Americas",
"Last_Name": "Claus",
"Job_Function": "Product Management Function",
"State": "California",
"Exec_Admin_Flag": "N",
"Preferred_Name": "Santa Claus",
"Regular_Employee_Flag": "Y",
"Preferred_Name_-_Last_Name": "Claus",
"Cost_Center_Code": "651116",
"Location": "Office - USA - CA - Headquarters",
"Last_Day_of_Work": "02/15/2021",
"Termination_Date": "02/15/2021",
"Hire_Date": "01/01/2010"
},
{
"Employee_Type": "Regular",
"Leadership": "No",
"Work_Country_Code": "840",
"Street_Address": "3000 Tannery Way",
"Employment_Status": "Active",
"VP_Flag": "N",
"Mgr_ID": "115069",
"Cost_Center_Description": "IoT - PM",
"GDPR_Country_Flag": "0",
"Director_Flag": "N",
"Email_-_Primary_Home": "test20@testing.com",
"First_Name": "Dolores",
"Last_Hire_Date": "06/15/2020",
"People_Manager_Flag": "N",
"Department": "Enterprise R&D:FWaaP",
"Workday_ID": "9aa7e309929e0188f4eb6b2a08031228",
"Postal_Code": "95054",
"Rehired_Employee": "No",
"Org_Level_1": "All R&D",
"Org_Level_3": "FWaaP",
"Country_Name": "United States Of America",
"Org_Level_2": "Enterprise R&D",
"Emp_ID": "115088",
"Job_Family": "Software Engineering",
"Preferred_Name_-_First_Name": "Dolores",
"Prehire_Flag": "False",
"Management_Level_1": "Nikesh Arora",
"Work_Country_Abbrev": "US",
"Management_Level_2": "Timmy Turner",
"Email_Address": "dcrocker@test.com",
"Title": "Sr Mgr, UX Design",
"City": "Santa Clara",
"Work_State_US_Only": "California",
"Job_Code": "2164",
"PAN_CF_Okta_Location_Region": "Americas",
"Last_Name": "Crocker",
"Job_Function": "Engineering Function",
"State": "California",
"Exec_Admin_Flag": "N",
"Preferred_Name": "Dolores Crocker",
"Regular_Employee_Flag": "Y",
"Preferred_Name_-_Last_Name": "Crocker",
"Cost_Center_Code": "651116",
"Location": "Office - USA - CA - Headquarters",
"Last_Day_of_Work": "02/15/2021",
"Termination_Date": "02/15/2021",
"Hire_Date": "01/01/2010"
},
{
"Employee_Type": "Regular",
"Leadership": "No",
"Work_Country_Code": "840",
"Street_Address": "3000 Tannery Way",
"Employment_Status": "Active",
"VP_Flag": "N",
"Mgr_ID": "115069",
"Cost_Center_Description": "IoT - Engineering",
"GDPR_Country_Flag": "0",
"Director_Flag": "N",
"Email_-_Primary_Home": "test19@testing.com",
"First_Name": "Crash",
"Last_Hire_Date": "06/15/2020",
"People_Manager_Flag": "N",
"Department": "Enterprise R&D:FWaaP",
"Workday_ID": "9aa7e309929e014a0d78ca2c08030629",
"Postal_Code": "95054",
"Rehired_Employee": "No",
"Org_Level_1": "All R&D",
"Org_Level_3": "FWaaP",
"Country_Name": "United States Of America",
"Org_Level_2": "Enterprise R&D",
"Emp_ID": "115087",
"Job_Family": "Software Engineering",
"Preferred_Name_-_First_Name": "Crash",
"Prehire_Flag": "False",
"Management_Level_1": "Nikesh Arora",
"Work_Country_Abbrev": "US",
"Management_Level_2": "Timmy Turner",
"Email_Address": "cnebula@test.com",
"Title": "Staff Engineer Software",
"City": "Santa Clara",
"Work_State_US_Only": "California",
"Job_Code": "5162",
"PAN_CF_Okta_Location_Region": "Americas",
"Last_Name": "Nebula",
"Job_Function": "Engineering Function",
"State": "California",
"Exec_Admin_Flag": "N",
"Preferred_Name": "Crash Nebula",
"Regular_Employee_Flag": "Y",
"Preferred_Name_-_Last_Name": "Nebula",
"Cost_Center_Code": "613116",
"Location": "Office - USA - CA - Headquarters",
"Last_Day_of_Work": "02/15/2021",
"Termination_Date": "02/15/2021",
"Hire_Date": "01/01/2010"
},
{
"Employee_Type": "Regular",
"Leadership": "No",
"Work_Country_Code": "840",
"Street_Address": "3000 Tannery Way",
"Employment_Status": "Active",
"VP_Flag": "N",
"Mgr_ID": "115069",
"Cost_Center_Description": "IoT - Engineering",
"GDPR_Country_Flag": "0",
"Director_Flag": "N",
"Email_-_Primary_Home": "test18@testing.com",
"First_Name": "Trixie",
"Last_Hire_Date": "06/15/2020",
"People_Manager_Flag": "N",
"Department": "Enterprise R&D:FWaaP",
"Workday_ID": "9aa7e309929e01eb443ce92e08031f2a",
"Postal_Code": "95054",
"Rehired_Employee": "No",
"Org_Level_1": "All R&D",
"Org_Level_3": "FWaaP",
"Country_Name": "United States Of America",
"Org_Level_2": "Enterprise R&D",
"Emp_ID": "115086",
"Job_Family": "Software Engineering",
"Preferred_Name_-_First_Name": "Trixie",
"Prehire_Flag": "False",
"Management_Level_1": "Nikesh Arora",
"Work_Country_Abbrev": "US",
"Management_Level_2": "Timmy Turner",
"Email_Address": "ttang@test.com",
"Title": "Principal Engineer Software",
"City": "Santa Clara",
"Work_State_US_Only": "California",
"Job_Code": "5164",
"PAN_CF_Okta_Location_Region": "Americas",
"Last_Name": "Tang",
"Job_Function": "Engineering Function",
"State": "California",
"Exec_Admin_Flag": "N",
"Preferred_Name": "Trixie Tang",
"Regular_Employee_Flag": "Y",
"Preferred_Name_-_Last_Name": "Tang",
"Cost_Center_Code": "613116",
"Location": "Office - USA - CA - Headquarters",
"Last_Day_of_Work": "02/15/2021",
"Termination_Date": "02/15/2021",
"Hire_Date": "01/01/2010"
}
]
}
# Template record used by generate_new_hire_reports(). The placeholder values
# ('first_name', 'last_name', 'user_email') are overwritten with the command
# arguments before the record is appended to the integration context.
# Fixes vs. previous revision:
#   * the key "Nikesh Arora" holding "False" was a mangled "Prehire_Flag" entry
#     (every other report record carries Prehire_Flag in this position);
#   * 'lsat_name' was a typo for the 'last_name' placeholder.
NEW_HIRE_REPORT = {
    "Report_Entry": [
        {
            "Employee_Type": "Regular",
            "Leadership": "No",
            "Work_Country_Code": "840",
            "Street_Address": "3000 Tannery Way",
            "Employment_Status": "Active",
            "VP_Flag": "N",
            "Mgr_ID": "115069",
            "Cost_Center_Description": "IoT - PM",
            "GDPR_Country_Flag": "0",
            "Director_Flag": "N",
            "Email_-_Primary_Home": "test6@testing.com",
            "First_Name": 'first_name',
            "Last_Hire_Date": "06/15/2020",
            "People_Manager_Flag": "N",
            "Department": "Enterprise R&D:FWaaP",
            "Workday_ID": "9aa7e309929e013ff3c6e3440803b833",
            "Postal_Code": "95054",
            "Rehired_Employee": "No",
            "Org_Level_1": "All R&D",
            "Org_Level_3": "FWaaP",
            "Country_Name": "United States Of America",
            "Org_Level_2": "Enterprise R&D",
            "Emp_ID": "115074",
            "Job_Family": "Product Management",
            "Preferred_Name_-_First_Name": 'first_name',
            "Prehire_Flag": "False",
            "Management_Level_1": "Nikesh Arora",
            "Work_Country_Abbrev": "US",
            "Management_Level_2": "Timmy Turner",
            "Email_Address": 'user_email',
            "Title": "Product Line Manager",
            "City": "Santa Clara",
            "Work_State_US_Only": "California",
            "Job_Code": "5225",
            "PAN_CF_Okta_Location_Region": "Americas",
            "Last_Name": 'last_name',
            "Job_Function": "Product Management Function",
            "State": "California",
            "Exec_Admin_Flag": "N",
            "Preferred_Name": "Chester McBadbat",
            "Regular_Employee_Flag": "Y",
            "Preferred_Name_-_Last_Name": 'last_name',
            "Cost_Center_Code": "651116",
            "Location": "Office - USA - CA - Headquarters",
            "Last_Day_of_Work": "02/15/2021",
            "Termination_Date": "02/15/2021",
            "Hire_Date": "01/01/2010"
        }
    ]
}
APP: Flask = Flask('xsoar-workday')


@APP.route('/', methods=['GET'])
def get_full_reports():
    """Serve the full report collection currently stored in the integration context."""
    return jsonify(get_integration_context())
def get_full_report():
    """Reset the integration context to the seed data and return its first report entry."""
    set_integration_context(FIRST_RUN_REPORT)
    return get_integration_context()['Report_Entry'][0]
def test_module():
    """Validate the integration configuration for the long-running mock server.

    Requires both a numeric, non-zero long-running port and the long-running
    checkbox to be set; then verifies that the seed report can be loaded.
    """
    params = demisto.params()
    # Bug fix: int('') on a missing port raised ValueError instead of
    # producing the friendly configuration error below.
    try:
        port_ok = bool(int(params.get('longRunningPort', '')))
    except (TypeError, ValueError):
        port_ok = False
    if port_ok and params.get("longRunning"):
        user_report = get_full_report()
        if user_report:
            demisto.results('ok')
        else:
            return_error('Could not connect to the long running server. Please make sure everything is configured.')
    else:
        return_error('Please make sure the long running port is filled and the long running checkbox is marked.')
def get_employee_id():
    """
    Get the maximum employee id number and increase it by one.
    This function is used to avoid duplication while creating a new hire report.
    Returns: (str) Employee ID number.
    """
    integration_context = get_integration_context()
    # The values are already ints, so the previous int(max(...)) wrapper was redundant.
    employee_ids = [int(report.get('Emp_ID')) for report in integration_context['Report_Entry']]
    return str(max(employee_ids) + 1)
def generate_new_hire_reports():
    """Create a new hire report entry from the command arguments.

    Raises:
        Exception: if a report with the same email address already exists.
    """
    user_email = demisto.args().get('user_email')
    first_name = demisto.args().get('first_name', '')
    last_name = demisto.args().get('last_name', '')
    integration_context = get_integration_context()
    # Bug fix: work on a copy of the template. The previous code appended the
    # module-level dict itself, so every generated hire aliased the same object
    # and later calls silently rewrote earlier entries. All values are flat
    # strings, so a shallow copy is sufficient.
    new_report = dict(NEW_HIRE_REPORT['Report_Entry'][0])
    for report in integration_context['Report_Entry']:
        email_address = report.get('Email_Address')
        if user_email == email_address:
            raise Exception(f'User "{user_email}" already exist. Please try another user email.')
    new_report['Email_Address'] = user_email
    new_report['First_Name'] = first_name
    new_report['Last_Name'] = last_name
    new_report['Preferred_Name'] = f'{first_name} {last_name}'
    new_report['Preferred_Name_-_First_Name'] = first_name
    new_report['Preferred_Name_-_Last_Name'] = last_name
    new_report['Emp_ID'] = get_employee_id()
    integration_context['Report_Entry'].append(new_report)
    set_integration_context(integration_context)
    return_results('Successfully generated the new hire event.')
def generate_terminate_report():
    """Mark the user identified by the 'user_email' argument as terminated."""
    args = demisto.args()
    user_email = args.get('user_email')
    integration_context = get_integration_context()
    current_date = datetime.now().strftime("%m/%d/%Y")
    # Keep the original semantics: the last matching report wins.
    matches = [r for r in integration_context['Report_Entry'] if r['Email_Address'] == user_email]
    user_report = matches[-1] if matches else None
    if not user_report:
        raise Exception(f'The user email {user_email} does not exist. Please try one of the followings: '
                        f'ttang@test.com, rrahardjo@test.com, sarnold@test.com')
    already_gone = (user_report.get('Employment_Status') == 'Terminated'
                    and user_report.get('Rehired_Employee') == 'No')
    if already_gone:
        raise Exception(f'The user {user_email} is already terminated.')
    user_report['Employment_Status'] = 'Terminated'
    user_report['Last_Day_of_Work'] = args.get('last_day_of_work', str(current_date))
    user_report['Termination_Date'] = args.get('termination_date', str(current_date))
    set_integration_context(integration_context)
    return_results('Successfully generated the Terminate user event.')
def generate_update_report():
    """Update mutable profile fields for the user identified by 'user_email'."""
    args = demisto.args()
    user_email = args.get('user_email')
    integration_context = get_integration_context()
    # Keep the original semantics: the last matching report wins.
    matches = [r for r in integration_context['Report_Entry'] if r['Email_Address'] == user_email]
    user_report = matches[-1] if matches else None
    if not user_report:
        raise Exception(f'The user email {user_email} does not exist. Please try one of the followings: '
                        f'ttang@test.com, rrahardjo@test.com, sarnold@test.com')
    # Only overwrite the fields that were actually supplied as arguments.
    field_map = (('title', 'Title'),
                 ('city', 'City'),
                 ('street_address', 'Street_Address'),
                 ('last_day_of_work', 'Last_Day_of_Work'))
    for arg_name, report_key in field_map:
        value = args.get(arg_name)
        if value:
            user_report[report_key] = value
    set_integration_context(integration_context)
    return_results('Successfully generated the Update user event.')
def generate_rehire_report():
    """Mark a previously terminated user (by 'user_email') as rehired."""
    user_email = demisto.args().get('user_email')
    integration_context = get_integration_context()
    # Keep the original semantics: the last matching report wins.
    matches = [r for r in integration_context['Report_Entry'] if r['Email_Address'] == user_email]
    user_report = matches[-1] if matches else None
    if not user_report:
        raise Exception(f'The user email {user_email} does not exist. Please try one of the followings: '
                        f'ttang@test.com, rrahardjo@test.com, sarnold@test.com')
    still_active = user_report.get('Employment_Status') == 'Active'
    already_rehired = user_report.get('Rehired_Employee') == 'Yes'
    if still_active or already_rehired:
        raise Exception(f'The user {user_email} is not terminated. Either he is still active or was already '
                        f'rehired.')
    user_report['Rehired_Employee'] = 'Yes'
    user_report['Prehire_Flag'] = 'True'
    set_integration_context(integration_context)
    return_results('Successfully generated the rehire user event.')
def main():
    """Dispatch the incoming XSOAR command to its handler."""
    command = demisto.command()
    if command == 'test-module':
        test_module()
    elif command == 'long-running-execution':
        # Seed the context on first run, then serve the Flask app forever.
        if not get_integration_context():
            set_integration_context(FIRST_RUN_REPORT)
        while True:
            port = int(demisto.params().get('longRunningPort', ''))
            server = WSGIServer(('0.0.0.0', port), APP)
            server.serve_forever()
    elif command == 'workday-generate-hire-event':
        generate_new_hire_reports()
    elif command == 'workday-generate-update-event':
        generate_update_report()
    elif command == 'workday-generate-rehire-event':
        generate_rehire_report()
    elif command == 'workday-generate-terminate-event':
        generate_terminate_report()
    elif command == 'initialize-context':
        set_integration_context(FIRST_RUN_REPORT)
        return_results('The integration context has been initialized.')
# XSOAR executes integration code under the builtins module name.
if __name__ in ('__builtin__', 'builtins'):
    main()
| mit | c81b6cbadc20b0f4c0791dcd26d7eac7 | 39.897959 | 116 | 0.524772 | 3.41688 | false | true | false | false |
demisto/content | Packs/ExpanseV2/Integrations/ExpanseV2/ExpanseV2.py | 2 | 110763 | """Cortex XSOAR Integration for Expanse Expander and Behavior
"""
import demistomock as demisto
from CommonServerPython import * # noqa # pylint: disable=unused-wildcard-import
from CommonServerUserPython import * # noqa
import urllib3
import copy
import json
import base64
import re
from typing import (
Any, Dict, Optional, Iterator,
Tuple, Union, cast, Set, List
)
from itertools import islice
from dateparser import parse
from datetime import datetime, timezone
from collections import defaultdict
import ipaddress
# Disable insecure warnings
urllib3.disable_warnings()  # pylint: disable=no-member
""" CONSTANTS """
# Lifetime (seconds) of an Expanse JWT obtained via /v1/IdToken.
TOKEN_DURATION = 7200
DEFAULT_RESULTS = 20  # default results per search
MAX_RESULTS = 5000  # max results per search
MAX_PAGE_SIZE = 1000  # max results per page
MAX_INCIDENTS = 100  # max incidents per fetch
MAX_UPDATES = 100  # max updates received
DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
DEFAULT_FIRST_FETCH = "3 days"  # default parameter for first fetch
# Allowed values for issue filters (mirroring the Expanse API enums).
ISSUE_PROGRESS_STATUS = ['New', 'Investigating', 'InProgress', 'AcceptableRisk', 'Resolved']
ISSUE_PROGRESS_STATUS_CLOSED = ['AcceptableRisk', 'Resolved', 'NoRisk']
ISSUE_ACTIVITY_STATUS = ['Active', 'Inactive']
ISSUE_PRIORITY = ['Critical', 'High', 'Medium', 'Low']
CLOUD_MANAGEMENT_STATUS = ['ManagedCloud', 'UnmanagedCloud', 'NotApplicable']
# A leading '-' selects descending order for the given sort key.
ISSUE_SORT_OPTIONS = ['created', '-created', 'modified', '-modified', 'assigneeUsername',
                      '-assigneeUsername', 'priority', '-priority', 'progressStatus', '-progressStatus',
                      'activityStatus', '-activityStatus', 'headline', '-headline']
SERVICE_DISCOVERY_TYPE = ["ColocatedOnIp", "DirectlyDiscovered"]
SERVICE_SORT_OPTIONS = ['firstObserved', '-firstObserved', 'lastObserved', '-lastObserved', 'name', '-name']
# Map Expanse resolved statuses to the XSOAR close-reason vocabulary.
EXPANSE_RESOLVEDSTATUS_TO_XSOAR = {
    'Resolved': 'Resolved',
    'AcceptableRisk': 'Other'
}
# Column orders for human-readable (markdown) command outputs.
EXPANSE_ISSUE_READABLE_HEADER_LIST = [
    'id', 'headline', 'issueType', 'category', 'ip', 'portProtocol', 'portNumber', 'domain', 'certificate', 'priority',
    'progressStatus', 'activityStatus', 'providers', 'assigneeUsername', 'businessUnits', 'created', 'modified',
    'annotations', 'assets', 'helpText'
]
EXPANSE_SERVICE_READABLE_HEADER_LIST = [
    'id', 'name', 'ips', 'domains', 'portNumber', 'activityStatus', 'businessUnits', 'certificates', 'tlsVersions',
    'classifications', 'firstObserved', 'lastObserved', 'annotations', 'assets', 'discoveryInfo'
]
EXPANSE_POC_READABLE_HEADER_LIST = [
    'id', 'email', 'firstName', 'lastName', 'phone', 'role', 'created', 'modified'
]
# Integration parameter value -> XSOAR mirroring direction flag.
MIRROR_DIRECTION = {
    'None': None,
    'Incoming': 'In',
    'Outgoing': 'Out',
    'Both': 'Both'
}
# Asset type name -> API endpoint path segment for tag/contact assignment.
TAGGABLE_ASSET_TYPE_MAP = {
    'Domain': 'domains',
    'Certificate': 'certificates',
    'CloudResource': 'cloud-resources',
    'IpRange': 'ip-range'
}
ASSET_TAG_OPERATIONS = ['ASSIGN', 'UNASSIGN']
ASSET_POC_OPERATIONS = ['ASSIGN', 'UNASSIGN']
# Friendly update-type name -> Expanse issue-update API field.
ISSUE_UPDATE_TYPES = {
    'Assignee': 'assigneeUsername',
    'Comment': 'comment',
    'Priority': 'priority',
    'ProgressStatus': 'progressStatus',
    'ActivityStatus': 'activityStatus'
}
# Expanse priority <-> XSOAR numeric severity, in both directions.
PRIORITY_SEVERITY_MAP = {
    'Unknown': 0,  # unknown
    'Low': 1,  # low severity
    'Medium': 2,  # medium severity
    'High': 3,  # high severity
    'Critical': 4  # critical severity
}
SEVERITY_PRIORITY_MAP = {v: k for k, v in PRIORITY_SEVERITY_MAP.items()}
IPRANGE_INCLUDE_OPTIONS = ["none", "annotations", "severityCounts", "attributionReasons",
                           "relatedRegistrationInformation", "locationInformation"]
POC_EMAIL_PATTERN = r"^\S+@\S+$"
DEPRECATED_COMMANDS = {"expanse-get-risky-flows", "expanse-list-risk-rules"}
""" CLIENT CLASS """
class Client(BaseClient):
"""Client class to interact with the Expanse API"""
def __init__(
self, base_url: str, api_key: str, verify: bool, proxy: bool, **kwargs
):
self.api_key = api_key
hdr = {
"Content-Type": "application/json",
"Accept": "application/json",
"User-Agent": "Expanse_XSOAR/1.10.0",
}
super().__init__(base_url, verify=verify, proxy=proxy, headers=hdr, **kwargs)
def _paginate(self, method: str, url_suffix: str,
params: Optional[Dict[str, Any]]) -> Iterator[Any]:
next_url: Optional[str] = None
while True:
result = self._http_request(
method=method,
url_suffix=url_suffix,
full_url=next_url,
params=params,
raise_on_status=True
)
data = result.get('data', [])
if data is not None:
yield from data
pagination = result.get('pagination', None)
if pagination is None:
break
next_url = pagination.get('next', None)
if next_url is None:
break
params = None
    def _get_utcnow(self) -> datetime:
        """
        Return the current UTC time as a naive datetime.
        Kept as a separate method so unit tests can mock "now".
        """
        return datetime.utcnow()
    def authenticate(self) -> None:
        """
        Set the Authorization header for subsequent API calls.

        Reuses a JWT cached in the integration context when it has not yet
        expired; otherwise exchanges the long-lived API key for a fresh JWT
        via /v1/IdToken and caches it with its expiry timestamp.

        Raises:
            ValueError: if the token endpoint does not return a token.
        """
        current_utc_timestamp = int(self._get_utcnow().timestamp())
        stored_token = demisto.getIntegrationContext()
        # Reuse the cached token only if it is well-formed and not expired.
        if (
            isinstance(stored_token, dict)
            and "token" in stored_token
            and "expires" in stored_token
            and current_utc_timestamp < int(stored_token["expires"])
        ):
            self._headers['Authorization'] = f'JWT {stored_token["token"]}'
        else:
            # fetch new token: the API key is only used here, as a Bearer
            # credential on the token endpoint.
            hdr = self._headers.copy()
            hdr["Authorization"] = f"Bearer {self.api_key}"
            r = self._http_request('GET', "/v1/IdToken", headers=hdr)
            if isinstance(r, dict) and r.get("token", None) is None:
                raise ValueError("Authorization failed")
            token_expiration = current_utc_timestamp + TOKEN_DURATION
            self._headers['Authorization'] = f'JWT {r["token"]}'
            # Cache the new token so other invocations can reuse it.
            demisto.setIntegrationContext(
                {"token": r["token"], "expires": token_expiration}
            )
def get_issue_count(self) -> int:
r = self._http_request(
method='GET', url_suffix='/v1/issues/issues/count',
)
if not isinstance(r, dict) or 'count' not in r:
raise DemistoException(f'Error determining issue count. Response from server: {str(r)}')
return int(r['count'])
def get_issues(self,
limit: int,
content_search: Optional[str] = None,
provider: Optional[str] = None,
business_units: Optional[str] = None,
assignee: Optional[str] = None,
issue_type: Optional[str] = None,
inet_search: Optional[str] = None,
domain_search: Optional[str] = None,
port_number: Optional[str] = None,
progress_status: Optional[str] = None,
activity_status: Optional[str] = None,
priority: Optional[str] = None,
tags: Optional[str] = None,
created_before: Optional[str] = None,
created_after: Optional[str] = None,
modified_before: Optional[str] = None,
modified_after: Optional[str] = None,
cloud_management_status: Optional[str] = None,
sort: Optional[str] = None
) -> Iterator[Any]:
params = {
'limit': limit,
'contentSearch': content_search,
'providerName': provider if provider else None,
'businessUnitName': business_units if business_units else None,
'assigneeUsername': assignee if assignee else None,
'issueTypeName': issue_type if issue_type else None,
'inetSearch': inet_search,
'domainSearch': domain_search,
'portNumber': port_number if port_number else None,
'progressStatus': progress_status if progress_status else None,
'activityStatus': activity_status if activity_status else None,
'priority': priority if priority else None,
'tagName': tags if tags else None,
'createdBefore': created_before,
'createdAfter': created_after,
'modifiedBefore': modified_before,
'modifiedAfter': modified_after,
'cloudManagementStatus': cloud_management_status if cloud_management_status else None,
'sort': sort
}
return self._paginate(
method='GET', url_suffix="/v1/issues/issues", params=params
)
def get_issue_by_id(self, issue_id: str) -> Dict[str, Any]:
return self._http_request(
method='GET', url_suffix=f'/v1/issues/issues/{issue_id}')
def get_issue_updates(self, issue_id: str, update_types: Optional[List],
created_after: Optional[str], limit: int = DEFAULT_RESULTS) -> Iterator[Any]:
updates = self._paginate(
method='GET', url_suffix=f'/v1/issues/issues/{issue_id}/updates',
params=dict(limit=limit))
after = datestring_to_timestamp_us(created_after) if created_after else None
for u in updates:
if after and 'created' in u and datestring_to_timestamp_us(u['created']) <= after:
continue
if update_types and 'updateType' in u and u['updateType'] not in update_types:
continue
yield u
def get_services(self,
limit: int,
content_search: Optional[str] = None,
provider: Optional[str] = None,
business_units: Optional[str] = None,
service_type: Optional[str] = None,
inet_search: Optional[str] = None,
domain_search: Optional[str] = None,
port_number: Optional[str] = None,
activity_status: Optional[str] = None,
discovery_type: Optional[str] = None,
country_code: Optional[str] = None,
tags: Optional[str] = None,
cloud_management_status: Optional[str] = None,
sort: Optional[str] = None
) -> Iterator[Any]:
params = {
'limit': limit,
'contentSearch': content_search,
'providerName': provider if provider else None,
'businessUnitName': business_units if business_units else None,
'classificationId': service_type if service_type else None,
'ipSearch': inet_search,
'domainSearch': domain_search,
'portNumber': port_number if port_number else None,
'countryCode': country_code if country_code else None,
'activityStatus': activity_status if activity_status else None,
'discoveryType': discovery_type if discovery_type else None,
'tagName': tags if tags else None,
'cloudManagementStatus': cloud_management_status if cloud_management_status else None,
'sort': sort
}
return self._paginate(
method='GET', url_suffix="/v1/services/services", params=params
)
def get_service_by_id(self, service_id: str) -> Dict[str, Any]:
return self._http_request(
method='GET', url_suffix=f'/v1/services/services/{service_id}')
def list_businessunits(self, limit: int = DEFAULT_RESULTS) -> Iterator[Any]:
params = dict(limit=limit)
return self._paginate(
method='GET',
url_suffix='/v1/issues/businessUnits',
params=params
)
def list_providers(self, limit: int = DEFAULT_RESULTS) -> Iterator[Any]:
params = dict(limit=limit)
return self._paginate(
method='GET',
url_suffix='/v1/issues/providers',
params=params
)
def list_pocs(self, limit: int = DEFAULT_RESULTS) -> Iterator[Any]:
params = dict(limit=limit)
return self._paginate(
method='GET',
url_suffix='/v2/annotation/point-of-contact',
params=params
)
def create_poc(self, email: str, first_name: Optional[str], last_name: Optional[str], role: Optional[str],
phone: Optional[str]) -> Dict[str, Any]:
data: Dict = {
'email': email,
'firstName': first_name,
'lastName': last_name,
'phone': phone,
'role': role
}
return self._http_request(
method='POST',
url_suffix='/v2/annotation/point-of-contact',
data=json.dumps(data)
)
def list_tags(self, limit: int = DEFAULT_RESULTS) -> Iterator[Any]:
params = dict(limit=limit)
return self._paginate(
method='GET',
url_suffix='/v3/annotations/tags',
params=params
)
def create_tag(self, name: str, description: Optional[str]) -> Dict[str, Any]:
data: Dict = {
'name': name,
'description': description
}
return self._http_request(
method='POST',
url_suffix='/v3/annotations/tags',
data=json.dumps(data)
)
def get_asset_details(self, asset_type: str, asset_id: str,
include: str = 'annotations,attributionReasons') -> Dict[str, Any]:
data: Dict = {}
if asset_type == 'IpRange':
data = self.get_iprange_by_id(
iprange_id=asset_id,
include=include
)
elif asset_type == 'Certificate':
data = self.get_certificate_by_md5_hash(asset_id)
elif asset_type == 'Domain':
data = self.get_domain_by_domain(domain=asset_id)
else:
demisto.debug(f'get_asset_details: unsupported asset type {asset_type}')
return data
def manage_asset_tags(self, asset_type: str, operation_type: str, asset_id: str,
tag_ids: List[str]) -> Dict[str, Any]:
endpoint_base = asset_type if asset_type == "ip-range" else f"assets/{asset_type}"
data: Dict = {"operations": [{
'operationType': operation_type,
'tagIds': tag_ids,
'assetId': asset_id
}]}
return self._http_request(
method='POST',
url_suffix=f'/v2/{endpoint_base}/tag-assignments/bulk',
json_data=data
)
def manage_asset_pocs(self, asset_type: str, operation_type: str, asset_id: str, poc_ids: List[str]) -> Dict[str, Any]:
endpoint_base = asset_type if asset_type == "ip-range" else f"assets/{asset_type}"
data: Dict = {"operations": [{
'operationType': operation_type,
'contactIds': poc_ids,
'assetId': asset_id
}]}
return self._http_request(
method='POST',
url_suffix=f'/v2/{endpoint_base}/contact-assignments/bulk',
json_data=data
)
def update_issue(self, issue_id: str, update_type: str, value: str) -> Dict[str, Any]:
data: Dict = {
'updateType': update_type,
'value': value
}
return self._http_request(
method='POST',
url_suffix=f'/v1/issues/issues/{issue_id}/updates',
data=json.dumps(data)
)
def get_iprange_by_id(self, iprange_id: str, include: str) -> Dict[str, Any]:
try:
result: Dict = self._http_request(
method='GET',
url_suffix=f'/v2/ip-range/{iprange_id}',
raise_on_status=True,
params={
'include': include
}
)
except DemistoException as e:
if str(e).startswith('Error in API call [404]') or str(e).startswith('Error in API call [400]'):
return {}
raise e
return result
def get_domain_by_domain(self, domain: str, last_observed_date: Optional[str] = None) -> Dict[str, Any]:
params = {}
if last_observed_date is not None:
params['minRecentIpLastObservedDate'] = last_observed_date
try:
result = self._http_request(
method='GET',
url_suffix=f'/v2/assets/domains/{domain}',
raise_on_status=True,
params=params
)
except DemistoException as e:
if str(e).startswith('Error in API call [404]') or str(e).startswith('Error in API call [400]'):
return {}
raise e
return result
def get_certificate_by_md5_hash(self, md5_hash: str, last_observed_date: Optional[str] = None) -> Dict[str, Any]:
params = {}
if last_observed_date is not None:
params['minRecentIpLastObservedDate'] = last_observed_date
try:
result: Dict = self._http_request(
method='GET',
url_suffix=f'/v2/assets/certificates/{md5_hash}',
raise_on_status=True,
params=params
)
except DemistoException as e:
if str(e).startswith('Error in API call [404]') or str(e).startswith('Error in API call [400]'):
return {}
raise e
return result
def get_ipranges(self, params: Dict[str, Any]) -> Iterator[Any]:
return self._paginate(
method='GET',
url_suffix='/v2/ip-range',
params=params
)
def get_domains(self, params: Dict[str, Any]) -> Iterator[Any]:
return self._paginate(
method='GET',
url_suffix='/v2/assets/domains',
params=params
)
def get_certificates(self, params: Dict[str, Any]) -> Iterator[Any]:
return self._paginate(
method='GET',
url_suffix='/v2/assets/certificates',
params=params
)
def get_ips(self, params: Dict[str, Any]) -> Iterator[Any]:
return self._paginate(
method='GET',
url_suffix='/v2/assets/ips',
params=params
)
def get_cloud_resources(self, params: Dict[str, Any]) -> Iterator[Any]:
return self._paginate(
method='GET',
url_suffix='/v2/assets/cloud-resources',
params=params
)
def get_cloud_resource(self, asset_id: str) -> Dict[str, Any]:
try:
result: Dict = self._http_request(
method='GET',
url_suffix=f'/v2/assets/cloud-resources/{asset_id}',
raise_on_status=True,
)
except DemistoException as e:
if str(e).startswith('Error in API call [404]') or str(e).startswith('Error in API call [400]'):
demisto.info("Cloud resource with ID: {asset_id} was not found. Error message from API: {str(e)}")
return {}
raise e
return result
def parse_asset_data(self, issue: Dict[str, Any],
                     fetch_details: Optional[bool] = False) -> Tuple[List[Dict[str, Any]], List[str], bool]:
    """Normalize an issue's ``assets`` list and collect ML feature strings.

    :param issue: issue dict as returned by the Expanse API.
    :param fetch_details: when True, call ``get_asset_details`` per asset and
        merge tags, attribution reasons, and ML feature fields into the copy.
    :return: ``(assets, ml_feature_list, changed)`` — a deep copy of the
        issue's assets (possibly rewritten), the collected ML feature
        strings, and whether any asset entry was modified.
    """
    assets: List[Dict[str, Any]] = []
    changed = False
    ml_feature_list: List[str] = []
    if issue.get('assets') and isinstance(issue['assets'], list):
        # Work on a deep copy so the caller's issue dict is not mutated here.
        assets = copy.deepcopy(issue['assets'])
        for n, a in enumerate(assets):
            if not isinstance(a, dict) or 'assetType' not in a:
                continue
            # Handle conversion of IP ranges to CIDRs for AutoExtract
            if (
                a['assetType'] == 'IpRange'
                and 'displayName' in a
                and isinstance(dn := a['displayName'], str)
                and len(r := dn.split('-')) == 2
            ):
                # displayName looks like "<start>-<end>"; replace it with a
                # comma-joined CIDR list.
                assets[n]['displayName'] = ','.join(range_to_cidrs(r[0], r[1]))
                changed = True
            if not fetch_details or 'assetKey' not in a:
                continue
            # Fetch additional details for assets
            details = self.get_asset_details(a['assetType'], a['assetKey'])
            if not isinstance(details, dict):
                continue
            # Replace asset ID with the real asset ID (the ID shown in asset is a reference of the association table)
            if real_id := details.get('id', None):
                assets[n]['id'] = real_id
                changed = True
            # Handle Tags
            if (tags := details.get('annotations', {}).get('tags')) and isinstance(tags, list):
                assets[n]['tags'] = '\n'.join(sorted(t['name'] for t in tags if 'name' in t))
                changed = True
            # Handle Attribution reasons
            if (ar := details.get('attributionReasons')) and isinstance(ar, list):
                assets[n]['attributionReasons'] = '\n'.join(
                    sorted(
                        str(a.get('reason')) for a in ar if isinstance(a, dict) and a.get('reason')
                    )
                )
                changed = True
            # Handle ML fields
            if a.get('assetType') == 'IpRange':
                # for IP Range collect relatedRegistrarInformation.registryEntities.formattedName
                # NOTE(review): the walrus name 're' shadows the 're' module
                # inside this scope.
                if (
                    (rri := details.get('relatedRegistrationInformation'))
                    and isinstance(rri, list)
                    and isinstance(rri[0], dict)
                    and (re := rri[0].get('registryEntities'))
                    and isinstance(re, list)
                ):
                    ml_feature_list.extend(set(r['formattedName']
                                               for r in re if 'formattedName' in r))  # pylint: disable=E1133
            elif a.get('assetType') == "Certificate":
                # for Certificate collect issuerOrg, issuerName,
                # subjectName, subjectAlternativeNames, subjectOrg, subjectOrgUnit
                if (cert := details.get('certificate')) and isinstance(cert, dict):
                    for f in ['issuerOrg', 'issuerName', 'subjectOrg', 'subjectName', 'subjectOrgUnit']:
                        if (x := cert.get(f)):
                            ml_feature_list.append(x)
                    # SANs appear to be space-separated in the API payload.
                    if (san := cert.get('subjectAlternativeNames')) and isinstance(san, str):
                        ml_feature_list.extend(san.split(' '))
            elif a.get('assetType') == "Domain":
                # for Domain collect domain, name servers, registrant and admin name/organization
                if (
                    (whois := details.get('whois'))
                    and isinstance(whois, list)
                    and isinstance(whois[0], dict)
                ):
                    if (x := whois[0].get('domain')):
                        ml_feature_list.append(x)
                    # nameServers
                    if (
                        (ns := whois[0].get('nameServers'))
                        and isinstance(ns, list)
                    ):
                        ml_feature_list.extend(ns)
                    # admin
                    if (admin := whois[0].get('admin')):
                        for f in ['name', 'organization']:
                            if (x := admin.get(f)):
                                ml_feature_list.append(x)
                    # registrant
                    if (reg := whois[0].get('registrant')):
                        for f in ['name', 'organization']:
                            if (x := reg.get(f)):
                                ml_feature_list.append(x)
    # Collecting any ML feature counts as a change to the issue payload.
    if len(ml_feature_list) > 0:
        changed = True
    return assets, ml_feature_list, changed
class DeprecatedCommandException(BaseException):
    """Raised when a command removed from the Xpanse API is invoked.

    NOTE(review): derives from BaseException, so it bypasses broad
    ``except Exception`` handlers — presumably intentional; confirm
    before changing the base class.
    """

    def __init__(self):
        command_name = demisto.command()
        super().__init__(
            f'The {command_name} command is no longer supported by the Xpanse API, and has been deprecated.'
        )
""" HELPER FUNCTIONS """
class DBotScoreOnlyIndicator(Common.Indicator):
    """Generic indicator wrapper used solely to surface a DBotScore.

    It carries no indicator fields of its own; context generation is
    delegated entirely to the wrapped DBotScore object.
    """

    def __init__(self, dbot_score: Common.DBotScore):
        self.dbot_score = dbot_score

    def to_context(self):
        """Return the context dict of the wrapped DBotScore."""
        context = self.dbot_score.to_context()
        return context
def calculate_limits(limit: Any) -> Tuple[int, int]:
    """Normalize a user-supplied limit into ``(total_results, max_page_size)``.

    Falls back to DEFAULT_RESULTS when the limit is missing/zero and caps
    the total at MAX_RESULTS; the page size is capped at MAX_PAGE_SIZE.
    """
    total_results = check_int(limit, 'limit', None, None, False) or DEFAULT_RESULTS
    total_results = min(total_results, MAX_RESULTS)
    max_page_size = min(total_results, MAX_PAGE_SIZE)
    return (total_results, max_page_size)
def handle_iprange_include(arg: Optional[str], arg_name: Optional[str]) -> str:
    """Validate and join the ip-range 'include' options into a CSV string.

    Returns '' when the list is empty or any entry contains 'none'.
    :raises ValueError: on options outside IPRANGE_INCLUDE_OPTIONS.
    """
    include = argToList(arg)
    if not include or any('none' in i for i in include):
        return ''
    for option in include:
        if option not in IPRANGE_INCLUDE_OPTIONS:
            raise ValueError(f'{arg_name} must contain the following options: {", ".join(IPRANGE_INCLUDE_OPTIONS)}')
    return ','.join(include)
def range_to_cidrs(start: str, end: str) -> Iterator[str]:
    """Yield the CIDR blocks that exactly cover the IPv4 range [start, end].

    :raises ValueError: when either endpoint is not a valid IPv4 address.
    """
    try:
        first_address = ipaddress.IPv4Address(start)
        last_address = ipaddress.IPv4Address(end)
        for network in ipaddress.summarize_address_range(first_address, last_address):
            yield str(network)
    except ipaddress.AddressValueError as e:
        raise ValueError(f'Invalid IP address in range: {str(e)}')
def check_int(arg: Any, arg_name: str, min_val: Optional[int] = None, max_val: Optional[int] = None,
              required: bool = False) -> Optional[int]:
    """Converts a string argument to a Python int
    This function is used to quickly validate an argument provided and convert
    it into an ``int`` type. It will throw a ValueError if the input is invalid
    or outside the optional range. If the input is None, it will throw a ValueError
    if required is ``True``, or return ``None`` if required is ``False``.

    :param arg: value to convert (``str``, ``int`` or ``None``).
    :param arg_name: argument name used in error messages.
    :param min_val: inclusive lower bound, enforced when not ``None``.
    :param max_val: inclusive upper bound, enforced when not ``None``.
    :param required: when ``True``, a ``None`` input raises instead of
        returning ``None``.
    """
    # check if argument is mandatory
    if arg is None:
        if required is True:
            raise ValueError(f'Missing argument "{arg_name}"')
        return None
    i: Optional[int] = None
    if isinstance(arg, str):
        # isdigit() also rejects negatives and signs; only plain non-negative
        # decimal strings are accepted on this path.
        if not arg.isdigit():
            raise ValueError(f'Integer invalid: "{arg_name}"="{arg}"')
        try:
            i = int(arg)
        except ValueError:
            raise ValueError(f'Integer invalid: "{arg_name}"="{arg}"')
    elif isinstance(arg, int):
        i = arg
    else:
        raise ValueError(f'Invalid number: "{arg_name}"')
    # range check
    # BUG FIX: compare bounds against None explicitly; the previous truthiness
    # test ("if min_val and ...") silently skipped the check when the bound
    # was 0 (e.g. port ranges 0-65535).
    if min_val is not None and i < min_val:
        raise ValueError(f'Integer outside minimum range: "{arg_name}"="{arg}" ("min={min_val}")')
    if max_val is not None and i > max_val:
        raise ValueError(f'Integer outside maximum range: "{arg_name}"="{arg}" ("max={max_val}")')
    return i
def convert_priority_to_xsoar_severity(priority: str) -> int:
    """Maps Expanse priority to Cortex XSOAR severity

    Converts the Expanse issue priority ('Low', 'Medium', 'High',
    'Critical') to Cortex XSOAR incident severity (1 to 4) for mapping;
    anything unrecognized falls back to the 'Unknown' mapping.

    :type priority: ``str``
    :param priority: priority as returned from the Expanse API (str)
    :return: Cortex XSOAR Severity (1 to 4)
    :rtype: ``int``
    """
    return PRIORITY_SEVERITY_MAP.get(priority, PRIORITY_SEVERITY_MAP['Unknown'])
def datestring_to_timestamp_us(ds: str) -> int:
    """Convert a date string to a microsecond-precision epoch timestamp.

    :param ds: date string understood by ``parse``.
    :raises ValueError: when the date string cannot be parsed.
    """
    dt = parse(ds)
    # BUG FIX: was a bare `assert`, which is stripped under `python -O`;
    # raise an explicit error instead so a bad date never slips through.
    if dt is None:
        raise ValueError(f'could not parse date string: {ds}')
    ts = int(dt.timestamp()) * 1000000 + dt.microsecond
    return ts
def timestamp_us_to_datestring_utc(ts: int, date_format: str = DATE_FORMAT) -> str:
    """Convert a microsecond-precision epoch timestamp to a UTC date string."""
    seconds, microseconds = divmod(ts, 1000000)
    moment = datetime.fromtimestamp(seconds, timezone.utc).replace(microsecond=microseconds)
    return moment.strftime(date_format)
def format_cidr_data(cidrs: List[Dict[str, Any]]) -> List[CommandResults]:
    """Build CommandResults for IP ranges: one CIDR indicator per entry
    plus a final Expanse.IPRange context entry.

    Entries missing startAddress/endAddress are skipped.
    """
    excluded_fields = ('startAddress', 'endAddress')
    cidr_data_list: List[Dict[str, Any]] = []
    command_results: List[CommandResults] = []
    for cidr_data in cidrs:
        if 'startAddress' in cidr_data and 'endAddress' in cidr_data:
            cidr_data['cidr'] = ','.join(range_to_cidrs(cidr_data['startAddress'], cidr_data['endAddress']))
        else:
            cidr_data['cidr'] = None
        if not cidr_data['cidr']:
            continue
        # Drop the raw range endpoints from the context output.
        cidr_data_list.append({k: v for k, v in cidr_data.items() if k not in excluded_fields})
        dbot = Common.DBotScore(
            indicator=cidr_data['cidr'],
            indicator_type=DBotScoreType.CIDR,
            integration_name="ExpanseV2",
            score=Common.DBotScore.NONE,
            reliability=demisto.params().get('integrationReliability')
        )
        cidr_standard_context = DBotScoreOnlyIndicator(dbot_score=dbot)
        command_results.append(CommandResults(
            readable_output=tableToMarkdown("New CIDR indicator was found", cidr_standard_context.to_context()),
            indicator=cidr_standard_context
        ))
    command_results.append(CommandResults(
        outputs_prefix='Expanse.IPRange',
        outputs_key_field='id',
        outputs=cidr_data_list if cidr_data_list else None,
    ))
    return command_results
def find_indicator_md5_by_hash(h: str) -> Optional[str]:
    """Look up the MD5 of an existing Certificate indicator by another hash.

    The hash type is inferred from the input length (40 -> sha1,
    64 -> sha256, 128 -> sha512); returns None on unknown lengths or
    when no matching indicator with an MD5 is found.
    """
    hash_fields = {
        40: 'sha1',
        64: 'sha256',
        128: 'sha512'
    }
    field = hash_fields.get(len(h))
    if field is None:
        return None
    search_indicators = IndicatorsSearcher()
    fetched_iocs = search_indicators.search_indicators_by_version(
        query=f'{field}:{h} and type:Certificate and -md5:""', size=1  # we just take the first one
    ).get('iocs')
    if not fetched_iocs:
        return None
    custom_fields = fetched_iocs[0].get('CustomFields')
    if custom_fields is None:
        return None
    return custom_fields.get('md5')
def format_domain_data(domains: List[Dict[str, Any]]) -> List[CommandResults]:
    """Build CommandResults for domain assets: one Domain/DomainGlob
    indicator per entry plus a final Expanse.Domain context entry.

    :param domains: list of domain asset dicts from the Expanse API.
    :return: one CommandResults per indicator plus one for the context.
    """
    domain_data_list: List[Dict[str, Any]] = []
    command_results = []
    for domain_data in domains:
        if not isinstance(domain_data, dict) or 'domain' not in domain_data:
            continue
        domain = domain_data['domain']
        whois_args = {}
        if (w := domain_data.get('whois')) and isinstance(w, list):
            whois = w[0]
            admin = whois.get('admin', {})
            # BUG FIX: 'admin' was the only section not type-checked like
            # 'registrar'/'registrant'; a non-dict value crashed on .get().
            if not isinstance(admin, dict):
                admin = {}
            registrar = whois.get('registrar', {})
            if not isinstance(registrar, dict):
                registrar = {}
            registrant = whois.get('registrant', {})
            if not isinstance(registrant, dict):
                registrant = {}
            domain_statuses = whois.get('domainStatuses', [])
            if not isinstance(domain_statuses, list):
                domain_statuses = []
            whois_args = assign_params(
                creation_date=whois.get('creationDate'),
                updated_date=whois.get('updatedDate'),
                expiration_date=whois.get('registryExpiryDate'),
                name_servers=whois.get('nameServers'),
                # NOTE(review): only the first status is propagated — confirm
                # this is intended before changing it.
                domain_status=domain_statuses[0] if domain_statuses else [],
                organization=admin.get('organization'),
                admin_name=admin.get('name'),
                admin_email=admin.get('emailAddress'),
                admin_phone=admin.get('phoneNumber'),
                admin_country=admin.get('country'),
                registrar_name=registrar.get('name'),
                registrant_email=registrant.get('emailAddress'),
                registrant_name=registrant.get('name'),
                registrant_phone=registrant.get('phoneNumber'),
                registrant_country=registrant.get('country')
            )
        domain_standard_context: Common.Domain
        # Wildcard entries become DomainGlob indicators.
        if domain.startswith('*.'):
            indicator_type = DBotScoreType.DOMAINGLOB
        else:
            indicator_type = DBotScoreType.DOMAIN
        domain_standard_context = Common.Domain(
            domain=domain,
            dbot_score=Common.DBotScore(
                indicator=domain,
                indicator_type=indicator_type,
                integration_name="ExpanseV2",
                score=Common.DBotScore.NONE,
                reliability=demisto.params().get('integrationReliability')
            ),
            **whois_args
        )
        command_results.append(CommandResults(
            readable_output=tableToMarkdown("New Domain indicator was found", domain_standard_context.to_context()),
            indicator=domain_standard_context
        ))
        domain_context_excluded_fields: List[str] = []
        domain_data_list.append({
            k: domain_data[k]
            for k in domain_data if k not in domain_context_excluded_fields
        })
    readable_output = tableToMarkdown(
        'Expanse Domain List', domain_data_list) if len(domain_data_list) > 0 else "## No Domains found"
    command_results.append(CommandResults(
        readable_output=readable_output,
        outputs_prefix='Expanse.Domain',
        outputs_key_field='domain',
        outputs=domain_data_list if len(domain_data_list) > 0 else None,
    ))
    return command_results
def format_certificate_data(certificates: List[Dict[str, Any]]) -> List[CommandResults]:
    """Build CommandResults for certificate assets: one Certificate
    indicator per entry plus a final Expanse.Certificate context entry.

    Entries without a 'certificate' key, or without a pemSha256 hash,
    are skipped.
    """
    certificate_data_list: List[Dict[str, Any]] = []
    certificate_context_excluded_fields: List[str] = []
    command_results = []
    for certificate in certificates:
        expanse_certificate = certificate.get('certificate')
        if expanse_certificate is None:
            continue
        # Standard Context (Common.Certificate + DBotScore)
        # they are prefixed with PEM but they really the hashes of DER (like it should be)
        ec_sha256 = expanse_certificate.get('pemSha256')
        if ec_sha256 is None:
            demisto.debug('SHA-256 not found!!!')
            continue
        # API hashes arrive URL-safe base64 encoded; indicators use lowercase hex.
        indicator_value = base64.urlsafe_b64decode(ec_sha256).hex()
        ec_md5 = expanse_certificate.get('md5Hash')
        ec_sha1 = expanse_certificate.get('pemSha1')
        ec_spki = expanse_certificate.get('publicKeySpki')
        ec_modulus = expanse_certificate.get('publicKeyModulus')
        ec_publickey = None
        if (pktemp := expanse_certificate.get('publicKey')) is not None:
            ec_publickey = base64.urlsafe_b64decode(pktemp).hex()
        ec_san = expanse_certificate.get('subjectAlternativeNames')
        pem = None
        if (details := certificate.get('details')) is not None:
            if (base64Encoded := details.get('base64Encoded')) is not None and base64Encoded != '':
                # Re-wrap the raw base64 body at 64 columns, as PEM requires.
                pem_lines = '\n'.join([base64Encoded[i:i + 64] for i in range(0, len(base64Encoded), 64)])
                pem = f"-----BEGIN CERTIFICATE-----\n{pem_lines}\n-----END CERTIFICATE-----"
        certificate_standard_context = Common.Certificate(
            serial_number=expanse_certificate.get('serialNumber'),
            subject_dn=expanse_certificate.get('subject'),
            issuer_dn=expanse_certificate.get('issuer'),
            md5=base64.urlsafe_b64decode(ec_md5).hex() if ec_md5 else None,
            sha1=base64.urlsafe_b64decode(ec_sha1).hex() if ec_sha1 else None,
            sha256=indicator_value,
            publickey=Common.CertificatePublicKey(
                algorithm=expanse_certificate.get('publicKeyAlgorithm'),
                length=expanse_certificate.get('publicKeyBits'),
                # hex bytes joined with ':' (openssl-style presentation)
                modulus=':'.join([ec_modulus[i:i + 2] for i in range(0, len(ec_modulus), 2)]) if ec_modulus else None,
                exponent=expanse_certificate.get('publicKeyRsaExponent'),
                publickey=':'.join(
                    [ec_publickey[i:i + 2] for i in range(0, len(ec_publickey), 2)]) if ec_publickey else None
            ),
            spki_sha256=base64.urlsafe_b64decode(ec_spki).hex() if ec_spki else None,
            signature_algorithm=expanse_certificate.get('signatureAlgorithm'),
            subject_alternative_name=[san for san in ec_san.split() if len(san) != 0] if ec_san else None,
            validity_not_after=expanse_certificate.get('validNotAfter'),
            validity_not_before=expanse_certificate.get('validNotBefore'),
            pem=pem,
            dbot_score=Common.DBotScore(
                indicator=indicator_value,
                indicator_type=DBotScoreType.CERTIFICATE,
                integration_name="ExpanseV2",
                score=Common.DBotScore.NONE,
                reliability=demisto.params().get('integrationReliability')
            )
        )
        command_results.append(CommandResults(
            readable_output=tableToMarkdown("New Certificate indicator was found",
                                            certificate_standard_context.to_context()),
            indicator=certificate_standard_context,
        ))
        # Expanse Context
        certificate_data_list.append({
            k: certificate[k]
            for k in certificate if k not in certificate_context_excluded_fields
        })
    readable_output = tableToMarkdown(
        'Expanse Certificate List', certificate_data_list) if len(
        certificate_data_list) > 0 else "## No Certificates found"
    command_results.append(CommandResults(
        readable_output=readable_output,
        outputs_prefix='Expanse.Certificate',
        outputs_key_field='id',
        outputs=certificate_data_list if len(certificate_data_list) > 0 else None,
        ignore_auto_extract=True,
    ))
    return command_results
def format_cloud_resource_data(cloud_resources: List[Dict[str, Any]]) -> List[CommandResults]:
    """Build CommandResults for cloud resources: one IP indicator per
    entry (first IP of the resource) plus a final Expanse.CloudResource
    context entry.
    """
    context_outputs: List[Dict[str, Any]] = []
    command_results: List[CommandResults] = []
    readable_rows = []
    for resource in cloud_resources:
        context_outputs.append(resource)
        first_ip = resource['ips'][0]
        indicator = DBotScoreOnlyIndicator(
            dbot_score=Common.DBotScore(
                indicator=first_ip,
                indicator_type=DBotScoreType.IP,
                integration_name="ExpanseV2",
                score=Common.DBotScore.NONE,
                reliability=demisto.params().get('integrationReliability')
            )
        )
        command_results.append(CommandResults(
            readable_output=tableToMarkdown("New IP indicator was found", {"IP": first_ip}),
            indicator=indicator
        ))
        readable_rows.append({
            "ID": resource.get("id"),
            "IP": resource.get("ips"),
            "Domain": resource.get("domain"),
            "Cloud Provider": resource.get("provider", {}).get("name"),
            "Asset Type": resource.get("type"),
            "Instance ID": resource.get("instanceId"),
            "Region": resource.get("region"),
            "Source": resource.get("sourceDetails"),
        })
    if readable_rows:
        readable_output = tableToMarkdown('Expanse Cloud Resource List', readable_rows)
    else:
        readable_output = "## No Cloud Resources found"
    command_results.append(CommandResults(
        outputs_prefix='Expanse.CloudResource',
        outputs_key_field='id',
        outputs=context_outputs if context_outputs else None,
        readable_output=readable_output,
        raw_response=cloud_resources
    ))
    return command_results
""" COMMAND FUNCTIONS """
def test_module(client: Client) -> str:
    """Tests API connectivity and authentication'

    Returning 'ok' indicates that the integration works like it is supposed to.
    Connection to the service is successful.
    Raises exceptions if something goes wrong.

    :type client: ``Client``
    :param Client: client to use

    :return: 'ok' if test passed, anything else will fail the test.
    :rtype: ``str``
    """
    try:
        client.get_issue_count()
    except DemistoException as e:
        message = str(e)
        if "Forbidden" in message or "Authorization failed" in message:
            return "Authorization Error: make sure API Key is correctly set"
        raise e
    return "ok"
def get_issues_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Search Expanse issues using the filters supplied in ``args``."""
    def _validated_csv(name, values, allowed) -> str:
        # Join a multi-value argument into CSV, rejecting unknown options.
        if values and not all(v in allowed for v in values):
            raise ValueError(f'{name} must include: {", ".join(allowed)}')
        return ','.join(values)

    def _date_arg(name) -> Optional[str]:
        # Parse an optional date argument into the API date format.
        d = args.get(name, None)
        return parse(d).strftime(DATE_FORMAT) if d else None  # type: ignore

    total_results, max_page_size = calculate_limits(args.get('limit', None))
    provider = ','.join(argToList(args.get('provider')))
    business_units = ','.join(argToList(args.get('business_unit')))
    assignee = ','.join(argToList(args.get('assignee')))
    issue_type = ','.join(argToList(args.get('issue_type')))
    tags = ','.join(argToList(args.get('tag')))
    content_search = args.get('content_search')
    inet_search = args.get('inet_search')
    domain_search = args.get('domain_search')
    port_list = argToList(args.get('port_number'))
    # this will trigger exceptions if the port provided isn't a valid port number 0-65535
    all(check_int(i, 'port_number', 0, 65535, True) for i in port_list)
    port_number = ','.join(port_list)
    progress_status = _validated_csv('progress_status', argToList(args.get('progress_status')), ISSUE_PROGRESS_STATUS)
    activity_status = _validated_csv('activity_status', argToList(args.get('activity_status')), ISSUE_ACTIVITY_STATUS)
    priority = _validated_csv('priority', argToList(args.get('priority')), ISSUE_PRIORITY)
    cloud_management_status = _validated_csv(
        'cloud_management_status', argToList(args.get('cloud_management_status')), CLOUD_MANAGEMENT_STATUS)
    sort = _validated_csv('sort', argToList(args.get('sort')), ISSUE_SORT_OPTIONS)
    created_before = _date_arg('created_before')
    created_after = _date_arg('created_after')
    modified_before = _date_arg('modified_before')
    modified_after = _date_arg('modified_after')
    issues = list(
        islice(
            client.get_issues(limit=max_page_size, content_search=content_search, provider=provider,
                              business_units=business_units, assignee=assignee, issue_type=issue_type,
                              inet_search=inet_search, domain_search=domain_search, port_number=port_number,
                              progress_status=progress_status, activity_status=activity_status, priority=priority,
                              tags=tags, created_before=created_before, created_after=created_after,
                              modified_before=modified_before, modified_after=modified_after,
                              cloud_management_status=cloud_management_status, sort=sort),
            total_results
        )
    )
    if not issues:
        return CommandResults(readable_output='No Issues Found')
    readable_output = tableToMarkdown(
        name='Expanse Issues',
        t=issues,
        headers=EXPANSE_ISSUE_READABLE_HEADER_LIST,
        headerTransform=pascalToSpace
    )
    return CommandResults(
        readable_output=readable_output, outputs_prefix="Expanse.Issue", outputs_key_field="id", outputs=issues
    )
def get_services_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Search Expanse services using the filters supplied in ``args``."""
    def _validated_csv(name, values, allowed) -> str:
        # Join a multi-value argument into CSV, rejecting unknown options.
        if values and not all(v in allowed for v in values):
            raise ValueError(f'{name} must include: {", ".join(allowed)}')
        return ','.join(values)

    total_results, max_page_size = calculate_limits(args.get('limit', None))
    provider = ','.join(argToList(args.get('provider')))
    business_units = ','.join(argToList(args.get('business_unit')))
    service_type = ','.join(argToList(args.get('service_type')))
    tags = ','.join(argToList(args.get('tag')))
    content_search = args.get('content_search')
    inet_search = args.get('inet_search')
    domain_search = args.get('domain_search')
    port_list = argToList(args.get('port_number'))
    # this will trigger exceptions if data is invalid
    all(check_int(i, 'port_number', 0, 65535, True) for i in port_list)
    port_number = ','.join(port_list)
    country_list = argToList(args.get('country_code'))
    # A country code must be a two character alpha string (ISO-3166).
    if country_list and not all(c.isalpha() and len(c) == 2 for c in country_list):
        raise ValueError('country_code must be an ISO-3166 two character country code')
    country_code = ','.join(c.upper() for c in country_list)
    activity_status = _validated_csv('activity_status', argToList(args.get('activity_status')), ISSUE_ACTIVITY_STATUS)
    discovery_type = _validated_csv('discovery_type', argToList(args.get('discovery_type')), SERVICE_DISCOVERY_TYPE)
    cloud_management_status = _validated_csv(
        'cloud_management_status', argToList(args.get('cloud_management_status')), CLOUD_MANAGEMENT_STATUS)
    sort = args.get('sort')
    if sort and sort not in SERVICE_SORT_OPTIONS:
        raise ValueError(f'sort must include: {", ".join(SERVICE_SORT_OPTIONS)}')
    services = list(
        islice(
            client.get_services(limit=max_page_size, content_search=content_search, provider=provider,
                                business_units=business_units, service_type=service_type,
                                inet_search=inet_search, domain_search=domain_search, port_number=port_number,
                                activity_status=activity_status, discovery_type=discovery_type,
                                tags=tags, cloud_management_status=cloud_management_status,
                                country_code=country_code, sort=sort),
            total_results
        )
    )
    if not services:
        return CommandResults(readable_output='No Services Found')
    # reduce some nested objects for the human readable table
    hr_services = copy.deepcopy(services)
    for service in hr_services:
        service["classifications"] = [c.get("name") for c in service.get("classifications", [])]
        service["tlsVersions"] = [f'version: {t.get("tlsVersion")} - cipher_suite: {t.get("cipherSuite")}'
                                  for t in service.get("tlsVersions", [])]
        service["certificates"] = [f'subject_name: {c.get("certificate", {}).get("subjectName")}'
                                   for c in service.get("certificates", [])]
    readable_output = tableToMarkdown(
        name='Expanse Services',
        t=hr_services,
        headers=EXPANSE_SERVICE_READABLE_HEADER_LIST,
        headerTransform=pascalToSpace
    )
    return CommandResults(
        readable_output=readable_output,
        outputs_prefix="Expanse.Service",
        outputs_key_field="id",
        outputs=services
    )
def get_service_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Retrieve a single Expanse service by its ID."""
    service_id = args.get('service_id')
    if not service_id:
        raise ValueError('service_id not specified')
    service = client.get_service_by_id(service_id=service_id)
    # reduce some nested objects for the human readable table
    hr_service = copy.deepcopy(service)
    if hr_service is not None:
        hr_service["classifications"] = [c.get("name") for c in hr_service.get("classifications", [])]
        hr_service["tlsVersions"] = [
            f'version: {t.get("tlsVersion")} - cipher_suite: {t.get("cipherSuite")}'
            for t in hr_service.get("tlsVersions", [])
        ]
        hr_service["certificates"] = [
            f'subject_name: {c.get("certificate", {}).get("subjectName")}'
            for c in hr_service.get("certificates", [])
        ]
    readable_output = tableToMarkdown(
        name='Expanse Services',
        t=hr_service,
        headers=EXPANSE_SERVICE_READABLE_HEADER_LIST,
        headerTransform=pascalToSpace
    )
    return CommandResults(
        readable_output=readable_output,
        outputs_prefix="Expanse.Service",
        outputs_key_field="id",
        outputs=service
    )
def get_issue_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Retrieve a single Expanse issue by its ID."""
    issue_id = args.get('issue_id')
    if not issue_id:
        raise ValueError('issue_id not specified')
    issue = client.get_issue_by_id(issue_id=issue_id)
    readable_output = tableToMarkdown(
        name='Expanse Issues',
        t=issue,
        headers=EXPANSE_ISSUE_READABLE_HEADER_LIST,
        headerTransform=pascalToSpace
    )
    return CommandResults(
        readable_output=readable_output,
        outputs_prefix="Expanse.Issue",
        outputs_key_field="id",
        outputs=issue
    )
def get_issue_updates_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """List the updates of an issue, sorted chronologically."""
    total_results, max_page_size = calculate_limits(args.get('limit', None))
    issue_id = args.get('issue_id')
    if not issue_id:
        raise ValueError('issue_id not specified')
    update_types = argToList(args.get('update_types'))
    if update_types and not all(i in ISSUE_UPDATE_TYPES.keys() for i in update_types):
        raise ValueError(f'Invalid update_type: {update_types}. Must include: {",".join(ISSUE_UPDATE_TYPES.keys())}')
    d = args.get('created_after')
    created_after = parse(d).strftime(DATE_FORMAT) if d else None  # type: ignore
    raw_updates = islice(
        client.get_issue_updates(
            issue_id=issue_id,
            limit=max_page_size,
            update_types=update_types,
            created_after=created_after
        ),
        total_results
    )
    # Attach the issue id to each update so the context is self-describing.
    issue_updates = [
        {**u, "issueId": issue_id}
        for u in sorted(raw_updates, key=lambda u: u['created'])
    ]
    return CommandResults(
        outputs_prefix="Expanse.IssueUpdate", outputs_key_field="id", outputs=issue_updates
    )
def get_issue_comments_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """List the comments of an issue, sorted chronologically."""
    total_results, max_page_size = calculate_limits(args.get('limit', None))
    issue_id = args.get('issue_id')
    if not issue_id:
        raise ValueError('issue_id not specified')
    d = args.get('created_after')
    created_after = parse(d).strftime(DATE_FORMAT) if d else None  # type: ignore
    raw_comments = islice(
        client.get_issue_updates(
            issue_id=issue_id,
            limit=max_page_size,
            update_types=['Comment'],
            created_after=created_after
        ),
        total_results
    )
    # Attach the issue id to each comment so the context is self-describing.
    issue_comments = [
        {**c, "issueId": issue_id}
        for c in sorted(raw_comments, key=lambda c: c['created'])
    ]
    # Flatten the user object down to the plain username for readability.
    for n, c in enumerate(issue_comments):
        if (u := c.get('user')) and isinstance(u, dict) and 'username' in u:
            issue_comments[n]['user'] = u['username']
    md = tableToMarkdown(
        name='Expanse Issue Comments',
        t=issue_comments,
        headers=['user', 'value', 'created'],
        headerTransform=pascalToSpace,
        removeNull=True
    )
    return CommandResults(
        outputs_prefix="Expanse.IssueComment",
        outputs_key_field="id",
        outputs=issue_comments,
        readable_output=md
    )
def update_issue_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Apply a single update (comment, assignee, status, ...) to an issue."""
    issue_id = args.get('issue_id')
    if not issue_id:
        raise ValueError('issue_id not specified')
    update_type = args.get("update_type")
    if not update_type or update_type not in ISSUE_UPDATE_TYPES:
        raise ValueError(f'update_type must be one of: {",".join(ISSUE_UPDATE_TYPES.keys())}')
    value = args.get('value')
    if not value:
        raise ValueError('value must be specified')
    issue_update = client.update_issue(issue_id, update_type, value)
    # Attach the issue id so the context entry is self-describing.
    return CommandResults(
        outputs_prefix="Expanse.IssueUpdate",
        outputs_key_field="id",
        outputs={**issue_update, "issueId": issue_id}
    )
def fetch_incidents(client: Client, max_incidents: int,
                    last_run: Dict[str, Union[Optional[int], Optional[str]]], first_fetch: Optional[int],
                    priority: Optional[str], activity_status: Optional[str],
                    progress_status: Optional[str], business_units: Optional[str], issue_types: Optional[str],
                    tags: Optional[str], cloud_management_status: Optional[str],
                    mirror_direction: Optional[str], sync_tags: Optional[List[str]],
                    fetch_details: Optional[bool]
                    ) -> Tuple[Dict[str, Union[Optional[int], Optional[str]]], List[dict]]:
    """This function retrieves new alerts every interval (default is 1 minute).

    This function has to implement the logic of making sure that incidents are
    fetched only onces and no incidents are missed. By default it's invoked by
    XSOAR every minute. It will use last_run to save the timestamp of the last
    incident it processed. If last_run is not provided, it should use the
    integration parameter first_fetch to determine when to start fetching
    the first time. Uses "createdAfter" in the Expanse API for timestamp.

    :return:
        A tuple containing two elements:
            next_run (``Dict[str, int]``): Contains the timestamp that will be
                used in ``last_run`` on the next fetch, and the last issue id.
            incidents (``List[dict]``): List of incidents that will be created in XSOAR
    :rtype: ``Tuple[Dict[str, Union[Optional[int], Optional[str]]], List[dict]]``
    """
    # Resume from the stored microsecond timestamp; on the very first run
    # fall back to first_fetch.
    last_fetch = last_run.get('last_fetch')
    if last_fetch is None:
        last_fetch = cast(int, first_fetch)
    else:
        last_fetch = cast(int, last_fetch)
    latest_created_time = last_fetch
    last_issue_id = last_run.get('last_issue_id')
    latest_issue_id: Optional[str] = None
    incidents: List[Dict[str, Any]] = []
    # Validate each comma-separated filter parameter against its option list.
    arg_list = argToList(priority)
    if arg_list and not all(i in ISSUE_PRIORITY for i in arg_list):
        raise ValueError(f'priority must include: {", ".join(ISSUE_PRIORITY)}')
    _priority = ','.join(arg_list)
    arg_list = argToList(progress_status)
    if arg_list and not all(i in ISSUE_PROGRESS_STATUS for i in arg_list):
        raise ValueError(f'progressStatus must include: {", ".join(ISSUE_PROGRESS_STATUS)}')
    _progress_status = ','.join(arg_list)
    arg_list = argToList(activity_status)
    if arg_list and not all(i in ISSUE_ACTIVITY_STATUS for i in arg_list):
        raise ValueError(f'activityStatus must include: {", ".join(ISSUE_ACTIVITY_STATUS)}')
    _activity_status = ','.join(arg_list)
    arg_list = argToList(cloud_management_status)
    if arg_list and not all(i in CLOUD_MANAGEMENT_STATUS for i in arg_list):
        raise ValueError(f'cloudManagementStatus must include: {", ".join(CLOUD_MANAGEMENT_STATUS)}')
    _cloud_management_status = ','.join(arg_list)
    created_after = timestamp_us_to_datestring_utc(latest_created_time, DATE_FORMAT)
    r = client.get_issues(
        limit=max_incidents if not last_issue_id else max_incidents + 1,  # workaround to avoid unnecessary API calls
        priority=_priority, business_units=business_units,
        progress_status=_progress_status, activity_status=_activity_status, tags=tags,
        issue_type=issue_types, cloud_management_status=_cloud_management_status,
        created_after=created_after, sort='created'
    )
    # Dedup window: when a last_issue_id is stored, skip issues up to and
    # including that id (or until the creation time moves past created_after).
    # 'broken' flips to True once the window has been passed.
    broken = False
    issues: List = []
    skip = cast(str, last_issue_id)
    for i in r:
        if skip and not broken:
            if 'id' not in i or 'created' not in i:
                continue
            # fix created time to make sure precision is the same to microsecond with no rounding
            i['created'] = timestamp_us_to_datestring_utc(datestring_to_timestamp_us(i['created']), DATE_FORMAT)
            if i['created'] != created_after:
                issues.append(i)
                broken = True
            elif i['id'] == skip:
                broken = True
        else:
            issues.append(i)
        if len(issues) == max_incidents:
            break
    for issue in issues:
        ml_feature_list: List[str] = []
        if 'created' not in issue or 'id' not in issue:
            continue
        incident_created_time = datestring_to_timestamp_us(issue.get('created'))
        if last_fetch:
            if incident_created_time < last_fetch:
                continue
        incident_name = issue.get('headline') if 'headline' in issue else issue.get('id')
        # Mirroring
        issue['xsoar_mirroring'] = {
            'mirror_direction': mirror_direction,
            'mirror_id': issue.get('id'),
            'mirror_instance': demisto.integrationInstance(),
            'sync_tags': sync_tags
        }
        issue['xsoar_severity'] = convert_priority_to_xsoar_severity(issue.get('priority', 'Unknown'))
        # Handle asset information
        issue['assets'], ml_feature_list, _ = client.parse_asset_data(issue, fetch_details)
        # add issue specific information to ml key
        if (
            (provider := issue.get('providers'))
            and isinstance(provider, list)
            and 'name' in provider[0]
        ):
            ml_feature_list.append(provider[0].get('name'))
        if (
            (latest_evidence := issue.get('latestEvidence'))
            and isinstance(latest_evidence, dict)
        ):
            if (
                (geolocation := latest_evidence.get('geolocation'))
                and isinstance(geolocation, dict)
            ):
                for f in ['countryCode', 'city']:
                    if (x := geolocation.get(f)):
                        ml_feature_list.append(x)
        # dedup, sort and join ml feature list
        issue['ml_features'] = ' '.join(sorted(list(set(ml_feature_list))))
        incident = {
            'name': incident_name,
            'details': issue.get('helpText'),
            'occurred': issue.get('created'),
            'rawJSON': json.dumps(issue),
            'severity': issue.get('xsoar_severity')
        }
        latest_issue_id = issue.get('id')
        incidents.append(incident)
        if incident_created_time > latest_created_time:
            latest_created_time = incident_created_time
    # Persist the newest timestamp and issue id for the next fetch cycle.
    next_run = {
        'last_fetch': latest_created_time,
        'last_issue_id': latest_issue_id if latest_issue_id else last_issue_id}
    return next_run, incidents
def get_modified_remote_data_command(client: Client, args: Dict[str, Any]) -> GetModifiedRemoteDataResponse:
    """Return the IDs of Expanse issues modified since the last mirror update.

    Used by XSOAR incident mirroring to decide which incidents need a
    subsequent get-remote-data run.

    :param client: Expanse API client.
    :param args: raw command args; ``lastUpdate`` is parsed by
        GetModifiedRemoteDataArgs.
    :return: GetModifiedRemoteDataResponse wrapping the modified issue IDs.
    """
    remote_args = GetModifiedRemoteDataArgs(args)
    last_update = remote_args.last_update  # In the first run, this value will be set to 1 minute earlier
    demisto.debug(f'Performing get-modified-remote-data command. Last update is: {last_update}')
    last_update_utc = dateparser.parse(last_update, settings={'TIMEZONE': 'UTC'})
    assert last_update_utc is not None, f'could not parse {last_update}'
    modified_after = last_update_utc.strftime(DATE_FORMAT)
    # only the IDs are needed here; XSOAR fetches full details per incident later
    modified_incident_ids = [
        raw_incident.get('id')
        for raw_incident in client.get_issues(limit=100, modified_after=modified_after)
    ]
    return GetModifiedRemoteDataResponse(modified_incident_ids)
def get_remote_data_command(client: Client, args: Dict[str, Any], sync_owners: bool = False,
                            incoming_tags: Optional[List[str]] = [],
                            mirror_details: bool = False) -> GetRemoteDataResponse:
    """Mirror-in command: pull Expanse issue updates into the XSOAR incident.

    Fetches the issue's update stream since the last mirror run, sorted by
    creation time, and translates each update into incident field changes
    and/or war-room entries (comments, assignee changes, close/reopen,
    priority and other field updates). Optionally refreshes asset details.

    :param client: Expanse API client.
    :param args: raw command args, parsed by GetRemoteDataArgs.
    :param sync_owners: when True, mirror Expanse assignee changes to the
        XSOAR incident owner.
    :param incoming_tags: tags applied to mirrored comment entries.
        NOTE: the mutable default is safe here — the list is never mutated.
    :param mirror_details: when True, also re-fetch and mirror asset details.
    :return: GetRemoteDataResponse with field updates and new entries.
    """
    parsed_args = GetRemoteDataArgs(args)
    demisto.debug(f'Performing get-remote-data command with incident id: {parsed_args.remote_incident_id}')
    issue_updates: List[Dict[str, Any]] = sorted(
        islice(
            client.get_issue_updates(
                issue_id=parsed_args.remote_incident_id,
                limit=MAX_UPDATES,
                update_types=None,
                created_after=parsed_args.last_update
            ),
            MAX_UPDATES
        ),
        key=lambda k: k.get('created')  # type: ignore
    )
    new_entries: List = []
    incident_updates: Dict[str, Any] = {}
    latest_comment: Dict[str, Any] = {}  # used for closing comment
    for update in issue_updates:
        update_type = update.get('updateType')
        if not update_type or update_type not in ISSUE_UPDATE_TYPES:
            # BUGFIX: was a plain string literal, so {update_type} was never interpolated
            demisto.debug(f'Skipping unknown Expanse incoming update type: {update_type}')
            continue
        if not (new_value := update.get('value')):
            continue
        updated_field = ISSUE_UPDATE_TYPES[update_type]
        previous_value = update.get('previousValue')
        update_user = update['user']['username'] \
            if ('user' in update and isinstance(update['user'], dict)
                and 'username' in update['user']) else 'Unknown user'
        # handle incoming comment
        if update_type == 'Comment':
            new_entries.append({
                'Type': EntryType.NOTE,
                'Contents': f'{update_user} added a comment: [{new_value}]',
                'ContentsFormat': EntryFormat.TEXT,
                'Note': True,
                'Tags': incoming_tags
            })
            latest_comment = update
        # handle incoming ownership change
        elif update_type == 'Assignee':
            incident_updates[updated_field] = new_value
            new_entries.append({
                'Type': EntryType.NOTE,
                'Contents': f'Mirroring: {update_user} changed assignee from [{previous_value}] to [{new_value}]',
                'ContentsFormat': EntryFormat.TEXT,
                'Note': False
            })
            if not sync_owners:
                continue
            # handle unassignment
            if new_value == 'Unassigned':
                incident_updates['xsoar_owner'] = ''
                continue
            # new user assignment
            user_info = demisto.findUser(email=new_value)
            if user_info:
                incident_updates['xsoar_owner'] = user_info.get('username')
            else:
                demisto.debug(f'The user assigned to Expanse incident {parsed_args.remote_incident_id} [{new_value}]'
                              f'is not registered on XSOAR, cannot change owner')
        elif update_type == 'ProgressStatus':
            # handle issue closure
            if previous_value not in ISSUE_PROGRESS_STATUS_CLOSED and new_value in ISSUE_PROGRESS_STATUS_CLOSED:
                close_reason = EXPANSE_RESOLVEDSTATUS_TO_XSOAR.get(new_value, 'Other')
                resolve_comment = latest_comment.get('value', '')
                demisto.debug(f'Closing Expanse issue {parsed_args.remote_incident_id}')
                new_entries.append({
                    'Type': EntryType.NOTE,
                    'Contents': {
                        'dbotIncidentClose': True,
                        'closeReason': close_reason,
                        'closeNotes': resolve_comment,
                    },
                    'ContentsFormat': EntryFormat.JSON,
                })
                incident_updates['closeReason'] = close_reason
                incident_updates['closeNotes'] = resolve_comment
            # handle issue reopening
            elif previous_value in ISSUE_PROGRESS_STATUS_CLOSED and new_value not in ISSUE_PROGRESS_STATUS_CLOSED:
                demisto.debug(f'Reopening Expanse issue {parsed_args.remote_incident_id}')
                new_entries.append({
                    'Type': EntryType.NOTE,
                    'Contents': {
                        'dbotIncidentReopen': True,
                    },
                    'ContentsFormat': EntryFormat.JSON,
                })
                incident_updates['closeReason'] = None
                incident_updates['closeNotes'] = None
            incident_updates[updated_field] = new_value
            new_entries.append({
                'Type': EntryType.NOTE,
                'Contents': f'Mirroring: {update_user} updated field [{updated_field}] from [{previous_value}] to [{new_value}]',
                'ContentsFormat': EntryFormat.TEXT,
                'Note': False
            })
        # handle everything else
        else:
            incident_updates[updated_field] = new_value
            if update_type == 'Priority':
                incident_updates['xsoar_severity'] = convert_priority_to_xsoar_severity(new_value)
            new_entries.append({
                'Type': EntryType.NOTE,
                'Contents': f'Mirroring: {update_user} updated field [{updated_field}] from [{previous_value}] to [{new_value}]',
                'ContentsFormat': EntryFormat.TEXT,
                'Note': False
            })
    # update_assets
    if mirror_details:
        issue_details: Dict[str, Any] = client.get_issue_by_id(issue_id=parsed_args.remote_incident_id)
        assets, ml_feature_list, changed = client.parse_asset_data(issue_details, mirror_details)
        if changed:
            incident_updates['assets'] = assets
            # dedup, sort and join ml feature list
            incident_updates['ml_features'] = ' '.join(sorted(set(ml_feature_list)))
    # process incident updates only if anything has changed
    if len(incident_updates) > 0 or len(new_entries) > 0:
        incident_updates['id'] = parsed_args.remote_incident_id
    return GetRemoteDataResponse(incident_updates, new_entries)
def update_remote_system_command(client: Client, args: Dict[str, Any], sync_owners: bool = False) -> str:
    """Mirror-out command: push XSOAR incident changes to the Expanse issue.

    Chat entries become Expanse comments; owner, severity and progress-status
    changes found in the incident delta are translated into Expanse issue
    updates; closing the incident (inc_status == 2) resolves the issue.

    :param client: Expanse API client used to send the updates.
    :param args: raw command args, parsed with UpdateRemoteSystemArgs.
    :param sync_owners: when True, also mirror XSOAR owner changes to Expanse.
    :return: the remote (Expanse) incident id.
    """
    remote_args = UpdateRemoteSystemArgs(args)
    remote_incident_id = remote_args.remote_incident_id
    entries: List = remote_args.entries if remote_args.entries else []
    for e in entries:
        # forward war-room chat entries as Expanse issue comments
        if 'contents' in e and 'category' in e and e.get('category') == 'chat':
            client.update_issue(
                issue_id=remote_incident_id,
                update_type='Comment',
                value=e.get('contents')
            )
    if remote_args.delta and remote_args.incident_changed:
        delta = remote_args.delta
        # handle ownership change
        if sync_owners and 'owner' in delta:
            owner_email: Optional[str] = None
            owner_user = delta.get('owner')
            if owner_user:
                user_info = demisto.findUser(username=owner_user)
                if user_info and isinstance(user_info, dict) and 'email' in user_info:
                    owner_email = user_info.get('email')
            if owner_email:  # change the owner in Expanse only if the XSOAR user has a valid email
                client.update_issue(
                    issue_id=remote_incident_id,
                    update_type='Assignee',
                    value=owner_email
                )
            else:  # change to unassigned
                client.update_issue(
                    issue_id=remote_incident_id,
                    update_type='Assignee',
                    value='Unassigned'
                )
        # handle severity
        if 'severity' in delta and delta.get('severity') in SEVERITY_PRIORITY_MAP:
            client.update_issue(
                issue_id=remote_incident_id,
                update_type='Priority',
                value=SEVERITY_PRIORITY_MAP[delta.get('severity')]
            )
        # handle issue closing
        if remote_args.inc_status == 2:
            close_reason = remote_args.data.get('closeReason', None)
            close_notes = remote_args.data.get('closeNotes', None)
            close_reason_comment: str = f'XSOAR Incident Close Reason: {close_reason}\n' if close_reason else ''
            close_notes_comment: str = f'XSOAR Incident Close Notes: {close_notes}\n' if close_notes else ''
            # post close reason/notes as a comment first, then resolve the issue
            client.update_issue(
                issue_id=remote_incident_id,
                update_type='Comment',
                value=f'Issue closed in Cortex XSOAR.\n{close_reason_comment}{close_notes_comment}'
            )
            client.update_issue(
                issue_id=remote_incident_id,
                update_type='ProgressStatus',
                value='Resolved'
            )
        # handle Progress Status change
        elif 'expanseprogressstatus' in delta and delta.get('expanseprogressstatus') in ISSUE_PROGRESS_STATUS:
            client.update_issue(
                issue_id=remote_incident_id,
                update_type='ProgressStatus',
                value=delta.get('expanseprogressstatus')
            )
    return remote_incident_id
def list_businessunits_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """List Expanse Business Units, bounded by the optional `limit` argument."""
    result_cap, page_size = calculate_limits(args.get('limit'))
    units = list(islice(client.list_businessunits(limit=page_size), result_cap))
    no_results = len(units) == 0
    return CommandResults(
        outputs_prefix="Expanse.BusinessUnit",
        outputs_key_field="id",
        outputs=None if no_results else units,
        readable_output="## No Business Units found" if no_results else None
    )
def list_providers_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """List the providers known to Expanse, bounded by the optional `limit` argument."""
    result_cap, page_size = calculate_limits(args.get('limit'))
    providers = list(islice(client.list_providers(limit=page_size), result_cap))
    no_results = len(providers) == 0
    return CommandResults(
        outputs_prefix="Expanse.Provider",
        outputs_key_field="id",
        outputs=None if no_results else providers,
        readable_output="## No Providers found" if no_results else None
    )
def list_pocs_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """List Expanse Points of Contact, rendered as a markdown table, bounded by `limit`."""
    result_cap, page_size = calculate_limits(args.get('limit'))
    pocs = list(islice(client.list_pocs(limit=page_size), result_cap))
    table = tableToMarkdown(
        name='Expanse Points of Contact',
        t=pocs,
        headers=EXPANSE_POC_READABLE_HEADER_LIST,
        headerTransform=pascalToSpace
    )
    no_results = len(pocs) == 0
    return CommandResults(
        outputs_prefix="Expanse.PointOfContact",
        outputs_key_field="id",
        outputs=None if no_results else pocs,
        readable_output="## No Point Of Contacts found" if no_results else table
    )
def create_poc_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Create a new Point of Contact in Expanse after validating the input fields."""
    email: str = args.get('email', '')
    # email is mandatory and must match the expected address pattern
    if not email or not re.match(POC_EMAIL_PATTERN, email):
        raise ValueError('Point of Contact email needs to be a valid email')
    first_name: str = args.get('first_name', '')
    if len(first_name) > 64:
        raise ValueError('Point of Contact first_name needs to be less than 64 characters')
    last_name: str = args.get('last_name', '')
    if len(last_name) > 64:
        raise ValueError('Point of Contact last_name needs to be less than 64 characters')
    phone: str = args.get('phone', '')
    # phone is optional, but when present must contain only digits
    if phone and not phone.isnumeric():
        raise ValueError('Point of Contact phone needs to be a numeric string')
    role: str = args.get('role', '')
    if len(role) > 64:
        raise ValueError('Point of Contact role needs to be less than 64 characters')
    try:
        poc = client.create_poc(email, first_name, last_name, role, phone)
    except DemistoException as e:
        # the API answers 409 when a POC with this email already exists
        if str(e).startswith('Error in API call [409]'):
            return CommandResults(readable_output='Point of Contact email already exists')
        raise e
    return CommandResults(
        outputs_prefix="Expanse.PointOfContact",
        outputs_key_field="id",
        outputs=poc,
        readable_output=f"New POC created for {email}"
    )
def list_tags_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """List Expanse annotation tags, bounded by the optional `limit` argument."""
    result_cap, page_size = calculate_limits(args.get('limit'))
    tags = list(islice(client.list_tags(limit=page_size), result_cap))
    no_results = len(tags) == 0
    return CommandResults(
        outputs_prefix="Expanse.Tag",
        outputs_key_field="id",
        outputs=None if no_results else tags,
        readable_output="## No Tags found" if no_results else None
    )
def create_tag_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Create a new annotation tag in Expanse.

    :param client: Expanse API client.
    :param args: command arguments; `name` (mandatory, 1-127 characters) and
        `description` (optional, up to 511 characters).
    :return: CommandResults with the created tag, or a notice when the tag
        already exists (the API answers 409).
    :raises ValueError: when name or description fail validation.
    """
    name: str = args.get('name', '')
    # `not name` already covers the empty case, so the old `len(name) < 1`
    # check was redundant; the message now also mentions the mandatory part
    if not name or len(name) > 127:
        raise ValueError('Tag name must be provided and less than 128 characters long')
    description: str = args.get('description', '')
    if description and len(description) > 511:
        raise ValueError('Tag description must be less than 512 characters long')
    try:
        tag = client.create_tag(name, description)
    except DemistoException as e:
        # a 409 conflict means a tag with this name already exists
        if str(e).startswith('Error in API call [409]'):
            return CommandResults(readable_output='Tag already exists')
        raise e
    return CommandResults(
        outputs_prefix="Expanse.Tag",
        outputs_key_field="id",
        outputs=tag
    )
def manage_asset_tags_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Assign or unassign annotation tags on an Expanse asset.

    :param client: Expanse API client.
    :param args: `operation_type` (one of ASSET_TAG_OPERATIONS), `asset_type`
        (key of TAGGABLE_ASSET_TYPE_MAP), `asset_id`, and tags given either
        as IDs (`tags`) or names (`tag_names`).
    :return: CommandResults with a completion message.
    :raises ValueError: on any invalid or missing argument.
    """
    operation_type = args.get('operation_type')
    if operation_type not in ASSET_TAG_OPERATIONS:
        raise ValueError(f'Operation type must be one of {",".join(ASSET_TAG_OPERATIONS)}')
    asset_type = args.get('asset_type')
    if not asset_type or asset_type not in TAGGABLE_ASSET_TYPE_MAP:
        raise ValueError(f'Asset type must be one of {",".join(TAGGABLE_ASSET_TYPE_MAP.keys())}')
    mapped_asset_type = TAGGABLE_ASSET_TYPE_MAP[asset_type]
    asset_id = args.get('asset_id')
    if not asset_id:
        raise ValueError('Asset id must be provided')
    tag_ids = argToList(args.get('tags'))
    tag_names = argToList(args.get('tag_names'))
    if len(tag_names) > 0:
        # resolve tag names to IDs; was a side-effect list comprehension
        # (`[tag_ids.append(...) for ...]`) which builds a throwaway list of Nones
        tag_ids.extend(t['id'] for t in client.list_tags() if t['name'] in tag_names)
    tags: List[str] = list(set(tag_ids))  # dedup before calling the API
    if len(tags) < 1:
        raise ValueError('Must provide valid tag IDs or names')
    client.manage_asset_tags(mapped_asset_type, operation_type, asset_id, tags)
    return CommandResults(
        readable_output='Operation complete'
    )
def manage_asset_pocs_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Assign or unassign Points of Contact on an Expanse asset."""
    operation_type = args.get('operation_type')
    if operation_type not in ASSET_POC_OPERATIONS:
        raise ValueError(f'Operation type must be one of {",".join(ASSET_POC_OPERATIONS)}')
    asset_type = args.get('asset_type')
    if not asset_type or asset_type not in TAGGABLE_ASSET_TYPE_MAP:
        raise ValueError(f'Asset type must be one of {",".join(TAGGABLE_ASSET_TYPE_MAP.keys())}')
    mapped_asset_type = TAGGABLE_ASSET_TYPE_MAP[asset_type]
    asset_id = args.get('asset_id')
    if not asset_id:
        raise ValueError('Asset id must be provided')
    poc_ids = argToList(args.get('pocs'))
    poc_emails = argToList(args.get('poc_emails'))
    if poc_emails:
        # resolve emails to POC IDs by scanning the full POC listing
        poc_ids.extend(p['id'] for p in client.list_pocs() if p.get('email') in poc_emails)
    pocs: List[str] = list(set(poc_ids))
    if not pocs:
        raise ValueError('Must provide valid Point of Contact IDs or emails')
    client.manage_asset_pocs(mapped_asset_type, operation_type, asset_id, pocs)
    return CommandResults(
        readable_output=f'Operation complete ({operation_type} {poc_emails or poc_ids} to {asset_id})'
    )
def get_iprange_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
    """Retrieve Expanse IP ranges: a single one by id, or a filtered listing."""
    include = handle_iprange_include(args.pop('include', None), 'include')
    id_: Optional[str] = args.pop('id', None)
    # id may only be combined with the include argument
    if id_ is not None and len(args) != 0:
        raise ValueError("You can only use [id] only with [include] parameter")
    total_results, max_page_size = calculate_limits(args.get('limit'))
    outputs: List[Dict[str, Any]]
    if id_ is not None:
        outputs = [client.get_iprange_by_id(id_, include)]
    else:
        params: Dict = {
            "include": include,
            "limit": max_page_size
        }
        if (business_units := argToList(args.get('business_units'))):
            params['business-units'] = ','.join(business_units)
        if (business_unit_names := argToList(args.get('business_unit_names'))):
            params['business-unit-names'] = ','.join(business_unit_names)
        if (inet := args.get('inet')) is not None:
            params['inet'] = inet
        if (tags := argToList(args.get('tags'))):
            params['tags'] = ','.join(tags)
        if (tag_names := argToList(args.get('tag_names'))):
            params['tag-names'] = ','.join(tag_names)
        outputs = list(islice(client.get_ipranges(params=params), total_results))
    return format_cidr_data(outputs)
def get_domain_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
    """Look up a single domain by name, or search domains with the given filters."""
    domain: Optional[str] = args.pop('domain', None)
    last_observed_date: Optional[str] = args.pop('last_observed_date', None)
    if domain is not None and len(args) != 0:
        raise ValueError("The only argument allowed with domain is last_observed_date")
    total_results, max_page_size = calculate_limits(args.get('limit'))
    if domain is not None:
        # direct lookup by domain name
        output = client.get_domain_by_domain(domain=domain, last_observed_date=last_observed_date)
        if output and isinstance(output, dict) and 'domain' not in output:
            # make sure the queried name appears in the result
            output['domain'] = domain
        return format_domain_data([output])
    # search mode: assemble query parameters from the optional filters
    params: Dict[str, Any] = {
        "limit": max_page_size
    }
    if (domain_search := args.get('search')) is not None:
        params['domainSearch'] = domain_search
    for arg_name, param_name in (
            ('providers', 'providerId'),
            ('provider_names', 'providerName'),
            ('business_units', 'businessUnitId'),
            ('business_unit_names', 'businessUnitName'),
            ('tags', 'tagId'),
            ('tag_names', 'tagName')):
        if (values := argToList(args.get(arg_name))):
            params[param_name] = ','.join(values)
    if (dns_resolution_status := args.get('has_dns_resolution')) is not None:
        params['dnsResolutionStatus'] = "HAS_DNS_RESOLUTION" if argToBoolean(
            dns_resolution_status) else "NO_DNS_RESOLUTION"
    if (service_status := args.get('has_active_service')) is not None:
        params['serviceStatus'] = "HAS_ACTIVE_SERVICE" if argToBoolean(service_status) else "NO_ACTIVE_SERVICE"
    if (has_related_cloud_resources := args.get('has_related_cloud_resources')) is not None:
        params['hasRelatedCloudResources'] = "true" if argToBoolean(has_related_cloud_resources) else "false"
    if last_observed_date is not None:
        params['minLastObservedDate'] = last_observed_date
    domain_data = list(islice(client.get_domains(params=params), total_results))
    return format_domain_data(domain_data)
def get_cloud_resource_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
    """Fetch one cloud resource by id, or search cloud resources with filters."""
    asset_id: Optional[str] = args.pop('id', None)
    if asset_id is not None:
        # direct lookup by asset id
        return format_cloud_resource_data([client.get_cloud_resource(asset_id=asset_id)])
    total_results, max_page_size = calculate_limits(args.get('limit'))
    # search mode: assemble query parameters from the optional filters
    params: Dict[str, Any] = {
        "limit": max_page_size
    }
    if (domain_search := args.get('domain')) is not None:
        params['domainSearch'] = domain_search
    if (ip_search := args.get('ip')) is not None:
        params['inetSearch'] = ip_search
    for arg_name, param_name in (
            ('providers', 'providerId'),
            ('provider_names', 'providerName'),
            ('business_units', 'businessUnitId'),
            ('business_unit_names', 'businessUnitName'),
            ('tags', 'tagId'),
            ('tag_names', 'tagName'),
            ('types', 'type'),
            ('regions', 'region')):
        if (values := argToList(args.get(arg_name))):
            params[param_name] = ','.join(values)
    if (last_observed_date := args.pop('last_observed_date', None)) is not None:
        params['minLastObservedDate'] = last_observed_date
    cloud_resource_data = list(islice(client.get_cloud_resources(params=params), total_results))
    return format_cloud_resource_data(cloud_resource_data)
def get_certificate_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
    """Fetch certificates: one by MD5 hash, or a filtered search.

    When `md5_hash` is given, only `last_observed_date` may accompany it; the
    hash is accepted hex-encoded (converted to base64) or already base64.
    Otherwise a paginated search is performed using the optional filters.

    :param client: Expanse API client.
    :param args: md5_hash, last_observed_date, search, providers /
        provider_names, business_units / business_unit_names, tags /
        tag_names, has_certificate_advertisement, has_active_service,
        has_related_cloud_resources, limit.
    :return: list of CommandResults built by format_certificate_data.
    """
    md5_hash: Optional[str] = args.pop('md5_hash', None)
    last_observed_date: Optional[str] = args.pop('last_observed_date', None)
    if md5_hash is not None and len(args) != 0:
        raise ValueError("The only argument allowed with md5_hash is last_observed_date")
    total_results, max_page_size = calculate_limits(args.get('limit'))
    if md5_hash is not None:
        # we try to convert it as hex, and if we fail we trust it's a base64 encoded
        # MD5 hash
        try:
            if len(ba_hash := bytearray.fromhex(md5_hash)) == 16:
                md5_hash = base64.urlsafe_b64encode(ba_hash).decode('ascii')
        except ValueError:
            pass
        output = client.get_certificate_by_md5_hash(
            md5_hash=md5_hash,
            last_observed_date=last_observed_date
        )
        return format_certificate_data(certificates=[output])
    # search mode: assemble query parameters from the optional filters
    params: Dict[str, Any] = {
        "limit": max_page_size
    }
    cn_search: Optional[str] = args.get('search')
    if cn_search is not None:
        params['commonNameSearch'] = cn_search
    provider_id = argToList(args.get('providers'))
    if len(provider_id) > 0:
        params['providerId'] = ','.join(provider_id)
    provider_name = argToList(args.get('provider_names'))
    if len(provider_name) > 0:
        params['providerName'] = ','.join(provider_name)
    business_unit_id = argToList(args.get('business_units'))
    if len(business_unit_id) > 0:
        params['businessUnitId'] = ','.join(business_unit_id)
    business_unit_name = argToList(args.get('business_unit_names'))
    if len(business_unit_name) > 0:
        params['businessUnitName'] = ','.join(business_unit_name)
    tag_id = argToList(args.get('tags'))
    if len(tag_id) > 0:
        params['tagId'] = ','.join(tag_id)
    tag_name = argToList(args.get('tag_names'))
    if len(tag_name) > 0:
        params['tagName'] = ','.join(tag_name)
    certificate_advertisement_status = args.get('has_certificate_advertisement')
    if certificate_advertisement_status is not None:
        if argToBoolean(certificate_advertisement_status):
            params['certificateAdvertisementStatus'] = "HAS_CERTIFICATE_ADVERTISEMENT"
        else:
            params['certificateAdvertisementStatus'] = "NO_CERTIFICATE_ADVERTISEMENT"
    service_status = args.get('has_active_service')
    if service_status is not None:
        params['serviceStatus'] = "HAS_ACTIVE_SERVICE" if argToBoolean(service_status) else "NO_ACTIVE_SERVICE"
    has_related_cloud_resources = args.get('has_related_cloud_resources')
    if has_related_cloud_resources is not None:
        params['hasRelatedCloudResources'] = "true" if argToBoolean(has_related_cloud_resources) else "false"
    if last_observed_date is not None:
        params['minLastObservedDate'] = last_observed_date
    cert_data = list(
        islice(
            client.get_certificates(params=params),
            total_results
        )
    )
    return format_certificate_data(certificates=cert_data)
def get_associated_domains_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
    """Find domains associated with a certificate common name or an IP address.

    There is no direct certificate->domain link in Expanse, so the
    correlation is indirect: certificates matching `common_name` are resolved
    to their recently-observed IPs, and those IPs (or the `ip` argument
    directly) are then searched for DOMAIN assets.

    :param client: Expanse API client.
    :param args: exactly one of `common_name` or `ip` must be provided;
        `limit` bounds the certificate search and `domains_limit` the
        per-IP domain search.
    :return: one CommandResults per discovered Domain indicator, plus a final
        CommandResults carrying the aggregated Expanse.AssociatedDomain context.
    """
    cn_search = args.get('common_name')
    ip_search = args.get('ip')
    command_results = []
    if ip_search is not None and cn_search is not None:
        raise ValueError("only one of common_name and ip arguments should be specified")
    if cn_search is None and ip_search is None:
        raise ValueError("one of common_name or ip arguments should be specified")
    max_certificates, certificates_max_page_size = calculate_limits(args.get('limit'))
    max_domains, domains_max_page_size = calculate_limits(args.get('domains_limit'))
    ips_base_params: Dict[str, Any] = {
        "limit": domains_max_page_size
    }
    certificates_search_params = {
        "commonNameSearch": cn_search,
        "limit": certificates_max_page_size
    }
    # domain name -> {'name', 'IP' list, 'certificate' md5 list}
    matching_domains: Dict[str, Dict[str, Any]] = {}
    # IP address -> set of certificate MD5 hashes observed on that IP
    ips_to_query: Dict[str, Set[str]] = defaultdict(set)
    if ip_search is not None:
        ips_to_query[ip_search].clear()  # create an empty set
    if cn_search is not None:
        certificates = islice(client.get_certificates(certificates_search_params), max_certificates)
        for certificate in certificates:
            md5_hash = certificate.get('certificate', {}).get('md5Hash')
            if md5_hash is None:
                continue
            certificate_details = client.get_certificate_by_md5_hash(md5_hash=md5_hash)
            if certificate_details is None:
                continue
            # collect every IP this certificate was recently seen on
            for recent_ip in certificate_details.get('details', {}).get('recentIps', []):
                ip_address = recent_ip.get('ip')
                if ip_address is None:
                    continue
                ips_to_query[ip_address].add(md5_hash)
    for ip2q in ips_to_query.keys():
        ips_search_params: Dict[str, Any] = {
            'inetSearch': ip2q,
            'assetType': 'DOMAIN'
        }
        ips_search_params.update(ips_base_params)
        # search DOMAIN assets observed on this IP and aggregate per domain
        for ipdomain in islice(client.get_ips(ips_search_params), max_domains):
            if (domain := ipdomain.get('domain')) is None:
                continue
            if domain not in matching_domains:
                matching_domains[domain] = {
                    'name': domain,
                    'IP': [],
                    'certificate': []
                }
            matching_domains[domain]['IP'].append(ip2q)
            matching_domains[domain]['certificate'].extend(list(ips_to_query[ip2q]))
    readable_output = tableToMarkdown(
        f"Expanse Domains matching Certificate Common Name: {cn_search}",
        list(matching_domains.values()) if len(matching_domains) > 0 else "## No Domains found",
        headers=['name', 'IP', 'certificate']
    )
    for d in matching_domains.keys():
        indicator = Common.Domain(d, Common.DBotScore(d, DBotScoreType.DOMAIN, "ExpanseV2", Common.DBotScore.NONE))
        command_results.append(CommandResults(
            readable_output=tableToMarkdown("New Domain indicator was found.", indicator.to_context()),
            indicator=indicator
        ))
    command_results.append(CommandResults(
        readable_output=readable_output,
        outputs_prefix='Expanse.AssociatedDomain',
        outputs_key_field='name',
        outputs=list(
            matching_domains.values()) if len(matching_domains) > 0 else None, ))
    return command_results
def certificate_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
    """Generic reputation command for certificates (!certificate).

    Accepts one or more certificate hashes (hex or base64 encoded), looks
    each up in Expanse and, unless `set_expanse_fields` is false, also calls
    demisto.createIndicators to stamp the Expanse custom fields onto the
    matching Certificate indicators.

    :param client: Expanse API client.
    :param args: `certificate` (hash list) and `set_expanse_fields` flag.
    :return: list of CommandResults built by format_certificate_data.
    """
    hashes = argToList(args.get('certificate'))
    if len(hashes) == 0:
        raise ValueError('certificate hash(es) not specified')
    set_expanse_fields = argToBoolean(args.get('set_expanse_fields', 'true'))
    # trim down the list to the max number of supported results
    if len(hashes) > MAX_RESULTS:
        hashes = hashes[:MAX_RESULTS]
    certificate_data: List[Dict[str, Any]] = []
    for curr_hash in hashes:
        # we try to convert it as hex, and if we fail we trust it's a base64 encoded
        try:
            ba_hash = bytearray.fromhex(curr_hash)
            if len(ba_hash) == 16:
                # MD5 hash
                curr_hash = base64.urlsafe_b64encode(ba_hash).decode('ascii')
            else:  # maybe a different hash? let's look for an indicator with a corresponding hash
                result_hash = find_indicator_md5_by_hash(ba_hash.hex())
                if result_hash is None:
                    continue
                curr_hash = base64.urlsafe_b64encode(bytearray.fromhex(result_hash)).decode('ascii')
        except ValueError:
            pass
        c = client.get_certificate_by_md5_hash(md5_hash=curr_hash)
        if not c or not isinstance(c, dict):
            continue
        certificate_data.append(c)
    result = format_certificate_data(certificate_data)
    # XXX - this is a workaround to the lack of the possibility of extending mapper
    # of standard Indicator Types. We need to call createIndicator to set custom fields
    if not set_expanse_fields or (result and result[0].outputs is None):
        return result
    indicators: List[Dict[str, Any]] = []
    result_outputs = cast(List[Dict[str, Any]], result[0].outputs)  # we keep mypy happy
    for certificate in result_outputs:
        ec_sha256 = certificate.get('certificate', {}).get('pemSha256')
        if ec_sha256 is None:
            continue
        # the indicator value is the hex form of the base64-encoded pemSha256
        indicator_value = base64.urlsafe_b64decode(ec_sha256).hex()
        if find_indicator_md5_by_hash(indicator_value) is None:
            demisto.debug(f'Update: Indicator {indicator_value} not found')
            continue
        annotations = certificate.get('annotations', {})
        tags = []
        if 'tags' in annotations:
            tags = [tag['name'] for tag in annotations['tags']]
        provider_name: Optional[str] = None
        providers = certificate.get('providers')
        if isinstance(providers, list) and len(providers) > 0:
            provider_name = providers[0].get('name')
        tenant_name: Optional[str] = None
        tenant = certificate.get('tenant')
        if tenant is not None:
            tenant_name = tenant.get('name')
        business_unit_names: List[str] = []
        business_units = certificate.get("businessUnits", [])
        for bu in business_units:
            if 'name' not in bu:
                continue
            business_unit_names.append(bu['name'])
        # map Expanse certificate attributes onto the custom indicator fields
        indicator: Dict[str, Any] = {
            'type': 'Certificate',
            'value': indicator_value,
            'score': Common.DBotScore.NONE,
            'source': 'ExpanseV2',
            'fields': {
                'expansedateadded': certificate.get('dateAdded'),
                'expansefirstobserved': certificate.get('firstObserved'),
                'firstseenbysource': certificate.get('firstObserved'),
                'expanselastobserved': certificate.get('lastObserved'),
                'lastseenbysource': certificate.get('lastObserved'),
                'expansecertificateadvertisementstatus': certificate.get('certificateAdvertisementStatus'),
                'expansetags': tags,
                'expanseproperties': '\n'.join(certificate.get('properties', [])),
                'expanseservicestatus': certificate.get('serviceStatus'),
                'expanseprovidername': provider_name,
                'expansetenantname': tenant_name,
                'expansebusinessunits': business_unit_names
            }
        }
        indicators.append(indicator)
    demisto.createIndicators(indicators)
    return result
def domain_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
    """Generic reputation command for domains (!domain)."""
    # XXX - implement feed related indicators using recentIPs
    domains = argToList(args.get('domain'))
    if not domains:
        raise ValueError('domain(s) not specified')
    domain_data: List[Dict[str, Any]] = []
    # cap the lookups at the maximum number of supported results
    for name in domains[:MAX_RESULTS]:
        details = client.get_domain_by_domain(domain=name)
        if not isinstance(details, dict) or not details:
            continue
        # ensure the queried name appears in the result
        details.setdefault('domain', name)
        domain_data.append(details)
    return format_domain_data(domain_data)
def ip_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
    """Generic reputation command for IP addresses (!ip).

    Looks each address up in the Expanse IPs endpoint, falling back to the
    IP-ranges endpoint, and emits one standard IP indicator per hit plus an
    aggregated Expanse.IP context entry.

    :param client: Expanse API client.
    :param args: `ip` holds the list of addresses to look up.
    :return: one CommandResults per indicator plus the aggregated context.
    """
    ips = argToList(args.get('ip'))
    if len(ips) == 0:
        raise ValueError('ip(s) not specified')
    # trim down the list to the max number of supported results
    if len(ips) > MAX_RESULTS:
        ips = ips[:MAX_RESULTS]
    ip_data_list: List[Dict[str, Any]] = []
    command_results = []
    for ip in ips:
        ip_data = next(client.get_ips(params={'inetSearch': f"{ip}", "limit": 1}), None)
        if ip_data is None:
            # If we don't get anything back from the ips endpoint, we can return
            # details from IP Ranges
            ip_data = next(client.get_ipranges(params={'inet': f"{ip}", "limit": 1}), None)
        if ip_data is None:
            continue
        ip_data['ip'] = ip
        ip_standard_context = Common.IP(
            ip=ip,
            dbot_score=Common.DBotScore(
                indicator=ip,
                indicator_type=DBotScoreType.IP,
                integration_name="ExpanseV2",
                score=Common.DBotScore.NONE,
                reliability=demisto.params().get('integrationReliability')
            ),
            hostname=ip_data.get('domain', None)
        )
        command_results.append(CommandResults(
            readable_output=tableToMarkdown("New IP indicator was found", {"IP": ip, "Domain": ip_data.get('domain')}),
            indicator=ip_standard_context
        ))
        # currently empty, so every field is copied into context —
        # presumably a hook for filtering fields later (TODO confirm)
        ip_context_excluded_fields: List[str] = []
        ip_data_list.append({
            k: ip_data[k]
            for k in ip_data if k not in ip_context_excluded_fields
        })
    readable_output = tableToMarkdown(
        'Expanse IP List', ip_data_list) if len(ip_data_list) > 0 else "## No IPs found"
    command_results.append(CommandResults(
        readable_output=readable_output,
        outputs_prefix='Expanse.IP',
        outputs_key_field=['ip', 'type', 'assetKey', 'assetType'],
        outputs=ip_data_list if len(ip_data_list) > 0 else None,
    ))
    return command_results
def cidr_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
    """Reputation command for CIDRs: look up each block as an Expanse IP range."""
    cidrs = argToList(args.get('cidr'))
    if not cidrs:
        raise ValueError('cidr(s) not specified')
    include = handle_iprange_include(args.get('include'), 'include')
    cidr_data: List[Dict[str, Any]] = []
    # cap the lookups at the maximum number of supported results
    for cidr in cidrs[:MAX_RESULTS]:
        match = next(client.get_ipranges(params={'inet': cidr, 'include': include, 'limit': 1}), None)
        if isinstance(match, dict) and match:
            cidr_data.append(match)
    return format_cidr_data(cidr_data)
def list_risk_rules_command(client: Client, args: Dict[str, Any]):
    """Deprecated command stub: always raises DeprecatedCommandException."""
    raise DeprecatedCommandException()
def get_risky_flows_command(client: Client, args: Dict[str, Any]):
    """Deprecated command stub: always raises DeprecatedCommandException."""
    raise DeprecatedCommandException()
def domains_for_certificate_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """
    Returns all domains that have resolved to IP addresses a certificate has been seen on. There is no direct way to
    correlate between certificates and domains in Expanse; this does so indirectly.

    :param client: Expanse API client.
    :param args: command arguments; `common_name` is the certificate search term.
    :return: CommandResults with the aggregated Expanse.IPDomains context, or a
        "No data found" message when nothing matches.
    """
    search = args.get('common_name')
    cert_params = {
        "commonNameSearch": search
    }
    matching_domains: List[Dict[str, Any]] = []
    # materialize the search results (was an identity list comprehension, C416)
    certificates = list(client.get_certificates(params=cert_params))
    for certificate in certificates:
        certificate_details = client.get_certificate_by_md5_hash(
            md5_hash=certificate.get('certificate', {}).get('md5Hash'))
        # each IP the certificate was recently seen on may host DOMAIN assets;
        # use a dedicated dict rather than shadowing the certificate params
        for ip in certificate_details.get('details', {}).get('recentIps', []):
            ip_params = {
                'inetSearch': ip.get('ip'),
                'assetType': 'DOMAIN'
            }
            matching_domains += client.get_ips(params=ip_params)
    if len(matching_domains) == 0:
        return CommandResults(readable_output="No data found")
    context = get_expanse_certificate_to_domain_context(common_name=search, data=matching_domains)
    context_copy = context.copy()
    del context_copy['DomainList']  # Remove full objects from human readable response
    human_readable = tableToMarkdown(f"Expanse Domains matching Certificate Common Name: {search}",
                                     context_copy)
    return CommandResults(
        outputs_prefix="Expanse.IPDomains",
        readable_output=human_readable,
        raw_response=matching_domains,
        outputs_key_field="SearchTerm",
        outputs=context,
    )
def get_expanse_certificate_to_domain_context(common_name, data):
    """
    Build the custom context for domains looked up via a certificate common name.

    :param common_name: The original search parameter.
    :param data: The list of domain objects returned from the API query.
    :return: A dict of aggregated domain details.
    """
    flat_domains = [entry.get('domain') for entry in data]
    context = {
        "SearchTerm": common_name,
        "TotalDomainCount": len(data),
        "FlatDomainList": flat_domains,
        "DomainList": data,
    }
    return context
""" MAIN FUNCTION """
def main() -> None:
"""main function, parses params and runs command functions
"""
params = demisto.params()
args = demisto.args()
command = demisto.command()
api_key = params.get("apikey")
base_url = urljoin(params.get("url", "").rstrip("/"), "/api")
verify_certificate = not params.get("insecure", False)
proxy = params.get("proxy", False)
try:
client = Client(
api_key=api_key, base_url=base_url, verify=verify_certificate, proxy=proxy
)
client.authenticate()
if command == "test-module":
result = test_module(client)
return_results(result)
elif command == 'fetch-incidents':
max_incidents = check_int(arg=params.get('max_fetch'), arg_name='max_fetch',
min_val=None, max_val=None, required=False)
if not max_incidents or max_incidents > MAX_INCIDENTS:
max_incidents = MAX_INCIDENTS
ff = params.get('first_fetch', DEFAULT_FIRST_FETCH)
if not ff:
raise ValueError('firstFetch not specified')
first_fetch = datestring_to_timestamp_us(ff)
priority = params.get('priority')
activity_status = params.get('activity_status')
progress_status = params.get('progress_status')
business_units = argToList(params.get('business_unit'))
issue_types = argToList(params.get('issue_type'))
tags = argToList(params.get('tag'))
cloud_management_status = params.get('cloud_management_status')
sync_tags = argToList(params.get('sync_tags'))
fetch_details: bool = True # forced to True to retrieve the proper asset IDs
mirror_direction = MIRROR_DIRECTION.get(params.get('mirror_direction', 'None'))
next_run, incidents = fetch_incidents(
client=client,
max_incidents=max_incidents,
last_run=demisto.getLastRun(),
first_fetch=first_fetch,
priority=priority,
activity_status=activity_status,
progress_status=progress_status,
business_units=business_units,
tags=tags,
cloud_management_status=cloud_management_status,
issue_types=issue_types,
mirror_direction=mirror_direction,
sync_tags=sync_tags,
fetch_details=fetch_details
)
demisto.setLastRun(next_run)
demisto.incidents(incidents)
# To be compatible with 6.1
elif command == "get-modified-remote-data":
return_results(get_modified_remote_data_command(client, args))
elif command == "get-remote-data":
sync_owners = argToBoolean(params.get('sync_owners'))
# XXX: mirror_details forced to be disabled to reduce API calls in the backend.
# Will be reviewed in next versions to use XSOAR 6.1 mirroring enhancements.
mirror_details = False
# mirror_details = argToBoolean(params.get('mirror_details'))
incoming_tags = argToList(params.get('incoming_tags'))
return_results(get_remote_data_command(client, args, sync_owners, incoming_tags, mirror_details))
elif command == "update-remote-system":
sync_owners = argToBoolean(params.get('sync_owners'))
return_results(update_remote_system_command(client, args, sync_owners))
elif command == "expanse-get-issues":
return_results(get_issues_command(client, args))
elif command == "expanse-get-issue":
return_results(get_issue_command(client, args))
elif command == "expanse-get-issue-updates":
return_results(get_issue_updates_command(client, args))
elif command == "expanse-update-issue":
return_results(update_issue_command(client, args))
elif command == "expanse-get-issue-comments":
return_results(get_issue_comments_command(client, args))
elif command == "expanse-list-businessunits":
return_results(list_businessunits_command(client, args))
elif command == "expanse-list-providers":
return_results(list_providers_command(client, args))
elif command == "expanse-list-tags":
return_results(list_tags_command(client, args))
elif command == "expanse-get-iprange":
return_results(get_iprange_command(client, args))
elif command == "expanse-create-tag":
return_results(create_tag_command(client, args))
elif command == "expanse-assign-tags-to-asset":
args['operation_type'] = 'ASSIGN'
return_results(manage_asset_tags_command(client, args))
elif command == "expanse-unassign-tags-from-asset":
args['operation_type'] = 'UNASSIGN'
return_results(manage_asset_tags_command(client, args))
elif command == "expanse-assign-tags-to-iprange":
args['operation_type'] = 'ASSIGN'
args['asset_type'] = 'IpRange'
return_results(manage_asset_tags_command(client, args))
elif command == "expanse-unassign-tags-from-iprange":
args['operation_type'] = 'UNASSIGN'
args['asset_type'] = 'IpRange'
return_results(manage_asset_tags_command(client, args))
elif command == "expanse-assign-tags-to-certificate":
args['operation_type'] = 'ASSIGN'
args['asset_type'] = 'Certificate'
return_results(manage_asset_tags_command(client, args))
elif command == "expanse-unassign-tags-from-certificate":
args['operation_type'] = 'UNASSIGN'
args['asset_type'] = 'Certificate'
return_results(manage_asset_tags_command(client, args))
elif command == "expanse-assign-tags-to-domain":
args['operation_type'] = 'ASSIGN'
args['asset_type'] = 'Domain'
return_results(manage_asset_tags_command(client, args))
elif command == "expanse-unassign-tags-from-domain":
args['operation_type'] = 'UNASSIGN'
args['asset_type'] = 'Domain'
return_results(manage_asset_tags_command(client, args))
elif command == "expanse-get-domain":
return_results(get_domain_command(client, args))
elif command == "expanse-get-certificate":
return_results(get_certificate_command(client, args))
elif command == "expanse-get-associated-domains":
return_results(get_associated_domains_command(client, args))
elif command == "certificate":
return_results(certificate_command(client, args))
elif command == "domain":
return_results(domain_command(client, args))
elif command == "ip":
return_results(ip_command(client, args))
elif command == "cidr":
return_results(cidr_command(client, args))
elif command == "expanse-get-cloud-resources":
return_results(get_cloud_resource_command(client, args))
elif command == "expanse-get-cloud-resource":
return_results(get_cloud_resource_command(client, args))
elif command == "expanse-get-risky-flows":
get_risky_flows_command(client, args) # deprecated
elif command == "expanse-list-risk-rules":
list_risk_rules_command(client, args) # deprecated
elif command == "expanse-get-services":
return_results(get_services_command(client, args))
elif command == "expanse-get-service":
return_results(get_service_command(client, args))
elif command == "expanse-list-pocs":
return_results(list_pocs_command(client, args))
elif command == "expanse-create-poc":
return_results(create_poc_command(client, args))
elif command == "expanse-assign-pocs-to-asset":
args['operation_type'] = 'ASSIGN'
return_results(manage_asset_pocs_command(client, args))
elif command == "expanse-unassign-pocs-from-asset":
args['operation_type'] = 'UNASSIGN'
return_results(manage_asset_pocs_command(client, args))
elif command == "expanse-assign-pocs-to-iprange":
args['operation_type'] = 'ASSIGN'
args['asset_type'] = 'IpRange'
return_results(manage_asset_pocs_command(client, args))
elif command == "expanse-unassign-pocs-from-iprange":
args['operation_type'] = 'UNASSIGN'
args['asset_type'] = 'IpRange'
return_results(manage_asset_pocs_command(client, args))
elif command == "expanse-assign-pocs-to-certificate":
args['operation_type'] = 'ASSIGN'
args['asset_type'] = 'Certificate'
return_results(manage_asset_pocs_command(client, args))
elif command == "expanse-unassign-pocs-from-certificate":
args['operation_type'] = 'UNASSIGN'
args['asset_type'] = 'Certificate'
return_results(manage_asset_pocs_command(client, args))
elif command == "expanse-assign-pocs-to-domain":
args['operation_type'] = 'ASSIGN'
args['asset_type'] = 'Domain'
return_results(manage_asset_pocs_command(client, args))
elif command == "expanse-unassign-pocs-from-domain":
args['operation_type'] = 'UNASSIGN'
args['asset_type'] = 'Domain'
return_results(manage_asset_pocs_command(client, args))
elif command == 'expanse-get-domains-for-certificate':
return_results(domains_for_certificate_command(client, args))
else:
raise NotImplementedError(f'Command {command} is not implemented.')
# Log exceptions and return errors
except Exception as e:
# To be compatible with 6.1
if 'not implemented' in str(e):
raise e
return_error(
f"Failed to execute {command} command.\nError:\n{str(e)}"
)
""" ENTRY POINT """
if __name__ in ("__main__", "__builtin__", "builtins"):
main()
| mit | 9779ed37bf99925da08d5629198ade5f | 38.543734 | 129 | 0.594256 | 3.915373 | false | false | false | false |
demisto/content | Packs/CiscoEmailSecurity/Integrations/CiscoEmailSecurity/CiscoEmailSecurity.py | 2 | 27604 | import demistomock as demisto
from CommonServerPython import *
import urllib3
from typing import Any, Dict
# Disable insecure warnings
urllib3.disable_warnings()
# Hard cap applied to every user-supplied 'limit' argument (see set_limit()).
MAX_MESSAGES_TO_GET = 20
class Client(BaseClient):
    """Cisco Email Security (SMA) v2.0 API client.

    Construction performs a login: the configured credentials are exchanged for a
    JWT token which is attached as a header to every subsequent request.

    All endpoint wrappers funnel through _call() so the configured timeout is
    applied consistently (previously the same _http_request boilerplate was
    repeated in every method).
    """

    def __init__(self, params):
        self.username = params.get('credentials').get('identifier')
        self.password = params.get('credentials').get('password')
        self.timeout = int(params.get('timeout'))
        super().__init__(base_url=params.get('base_url'), verify=not params.get('insecure', False),
                         ok_codes=tuple(), proxy=params.get('proxy', False))
        self._jwt_token = self._generate_jwt_token()
        self._headers = {
            'Content-Type': 'application/json',
            'Accept': 'application/json',
            'jwtToken': self._jwt_token
        }

    def _generate_jwt_token(self) -> str:
        """Exchange the credentials (base64-encoded, per the API contract) for a JWT token."""
        headers = {
            'Content-Type': 'application/json',
            'Accept': 'application/json'
        }
        data = {
            "data":
                {
                    "userName": base64.b64encode(self.username.encode('ascii')).decode('utf-8'),
                    "passphrase": base64.b64encode(self.password.encode('ascii')).decode('utf-8')
                }
        }
        response_token = self._http_request('POST', '/sma/api/v2.0/login', json_data=data, headers=headers)
        return response_token.get('data').get('jwtToken')

    def _call(self, method, url_suffix, json_data=None):
        """Issue a single API request with the configured timeout applied."""
        return self._http_request(method=method, url_suffix=url_suffix, json_data=json_data,
                                  timeout=self.timeout)

    def list_report(self, url_params) -> Dict[str, Any]:
        return self._call('GET', '/sma/api/v2.0/reporting' + url_params)

    def list_messages(self, url_params) -> Dict[str, Any]:
        return self._call('GET', '/sma/api/v2.0/message-tracking/messages' + url_params)

    def list_get_message_details(self, url_params):
        return self._call('GET', '/sma/api/v2.0/message-tracking/details' + url_params)

    def list_get_dlp_details(self, url_params):
        return self._call('GET', '/sma/api/v2.0/message-tracking/dlp-details' + url_params)

    def list_get_amp_details(self, url_params):
        return self._call('GET', '/sma/api/v2.0/message-tracking/amp-details' + url_params)

    def list_get_url_details(self, url_params):
        return self._call('GET', '/sma/api/v2.0/message-tracking/url-details' + url_params)

    def list_spam_quarantine(self, url_params):
        return self._call('GET', '/sma/api/v2.0/quarantine/messages' + url_params)

    def list_quarantine_get_details(self, message_id):
        return self._call('GET', f'/sma/api/v2.0/quarantine/messages/details?mid={message_id}&quarantineType=spam')

    def list_delete_quarantine_messages(self, request_body):
        return self._call('DELETE', '/sma/api/v2.0/quarantine/messages', json_data=request_body)

    def list_release_quarantine_messages(self, request_body):
        return self._call('POST', '/sma/api/v2.0/quarantine/messages', json_data=request_body)

    def list_entries_get(self, url_params, list_type):
        return self._call('GET', f"/sma/api/v2.0/quarantine/{list_type}" + url_params)

    def list_entries_add(self, list_type, request_body):
        return self._call('POST', f"/sma/api/v2.0/quarantine/{list_type}", json_data=request_body)

    def list_entries_delete(self, list_type, request_body):
        return self._call('DELETE', f"/sma/api/v2.0/quarantine/{list_type}", json_data=request_body)
def parse_dates_to_ces_format(date_str):
    """Convert an ISO-8601 timestamp to the form the Cisco API expects.

    Seconds and fractional seconds are zeroed out, e.g.
    '2021-01-01T12:34:56.789' -> '2021-01-01T12:34:00.000Z'.
    """
    day_part, time_part = date_str.split('T')
    hours_and_minutes = time_part.split('.')[0][:-3]
    return f'{day_part}T{hours_and_minutes}:00.000Z'
def get_dates_for_test_module():
    """Return a (start, end) pair covering the day before yesterday to yesterday,
    formatted for the Cisco API."""
    now = datetime.now()
    raw_start = (now - timedelta(days=2)).isoformat()
    raw_end = (now - timedelta(days=1)).isoformat()
    return parse_dates_to_ces_format(raw_start), parse_dates_to_ces_format(raw_end)
def test_module(client: Client) -> str:
    """Validate connectivity and authorization by running a minimal message search."""
    start_date, end_date = get_dates_for_test_module()
    suffix_url = (f"?startDate={start_date}&endDate={end_date}&ciscoHost=All_Hosts"
                  f"&searchOption=messages&offset=0&limit=20")
    try:
        client.list_messages(suffix_url)
    except DemistoException as exc:
        if 'Forbidden' not in str(exc):
            raise
        return 'Authorization Error: make sure API Key or Service URL are correctly set'
    return 'ok'
def date_to_cisco_date(date):
    """
    Format a date according to the Cisco Email Security standard.

    Args:
        date: a date string in the form YYYY-MM-DD hh:mm:ss.

    Returns:
        The same moment formatted as YYYY-MM-DDThh:mm:ss.000Z.
    """
    return '{}.000Z'.format(date.replace(' ', 'T'))
def set_limit(limit):
    """Coerce a user-supplied limit to an int capped at MAX_MESSAGES_TO_GET.

    Falsy/missing values fall back to the cap. Avoids the double int()
    conversion of the previous implementation.
    """
    if not limit:
        return MAX_MESSAGES_TO_GET
    return min(int(limit), MAX_MESSAGES_TO_GET)
def message_ids_to_list_of_integers(args):
    """Parse the comma-separated 'messages_ids' argument into a list of ints."""
    return [int(raw_id) for raw_id in args.get('messages_ids').split(',')]
def build_url_params_for_list_report(args, report_counter):
    """Build the path/query-string suffix for the reporting endpoint."""
    return (
        f"/{report_counter}"
        f"?startDate={date_to_cisco_date(args.get('start_date'))}"
        f"&endDate={date_to_cisco_date(args.get('end_date'))}"
        f"&device_type=esa"
        f"&device_group_name={args.get('device_group_name')}"
    )
def set_var_to_output_prefix(counter):
    """
    Convert a snake_case counter name to the CamelCase outputs-prefix form.

    Args:
        counter: report counter, e.g. mail_incoming_traffic_summary.

    Returns:
        The counter in outputs-prefix form, e.g. MailIncomingTrafficSummary.

    Replaces the previous manual accumulation loop with an equivalent
    str-method chain.
    """
    return counter.replace('_', ' ').title().replace(' ', '')
def list_report_command(client: Client, args: Dict[str, Any]):
    """Fetch a single report counter and return it as CommandResults."""
    counter = args.get('counter')
    response = client.list_report(build_url_params_for_list_report(args, counter))
    prefix = set_var_to_output_prefix(counter)
    return CommandResults(
        readable_output=f'{response}',
        outputs_prefix=f'CiscoEmailSecurity.Report.{prefix}',
        outputs_key_field=prefix,
        outputs=response.get('data', {}).get('resultSet'),
    )
def build_url_params_for_list_messages(args):
    """Build the query string for the message-tracking search endpoint.

    Optional filters are appended in the insertion order of the (None-stripped)
    args, so the relative parameter order is caller-dependent.

    Fix: the macro-file-types branch previously emitted '¯oFileTypesDetected'
    (HTML-entity mojibake of '&macr' + 'o...'), which dropped the '&' separator
    and glued the parameter onto the previous value; restored to
    '&macroFileTypesDetected'.
    """
    start_date = date_to_cisco_date(args.get('start_date'))
    end_date = date_to_cisco_date(args.get('end_date'))
    limit = set_limit(args.get('limit'))
    offset = int(args.get('offset', '0'))
    url_params = f'?startDate={start_date}&endDate={end_date}&searchOption=messages&ciscoHost=All_Hosts' \
                 f'&offset={offset}&limit={limit}'
    arguments = assign_params(**args)
    for key, value in arguments.items():
        if key == 'attachment_name_value':
            attachment_name_operator = arguments.get('attachment_name_operator', 'is')
            url_params += f'&attachmentNameOperator={attachment_name_operator}&attachmentNameValue={value}'
        elif key == 'recipient_filter_value':
            recipient_operator = arguments.get('recipient_filter_operator', 'is')
            url_params += f'&envelopeRecipientfilterOperator={recipient_operator}&envelopeRecipientfilterValue={value}'
        elif key == 'sender_filter_value':
            sender_filter_operator = arguments.get('sender_filter_operator', 'is')
            url_params += f'&envelopeSenderfilterOperator={sender_filter_operator}&envelopeSenderfilterValue={value}'
        elif key == 'subject_filter_value':
            subject_filter_operator = arguments.get('subject_filter_operator', 'is')
            url_params += f'&subjectfilterOperator={subject_filter_operator}&subjectfilterValue={value}'
        elif key == 'domain_name_value':
            domain_name_operator = arguments.get('domain_name_operator', 'is')
            url_params += f'&domainNameOperator={domain_name_operator}&domainNameValue={value}'
        elif key == 'spam_positive' and value == 'True':
            url_params += f'&spamPositive={argToBoolean(value)}'
        elif key == 'quarantined_as_spam' and value == 'True':
            url_params += f'&quarantinedAsSpam={argToBoolean(value)}'
        elif key == 'virus_positive' and value == 'True':
            url_params += f'&virusPositive={argToBoolean(value)}'
        elif key == 'contained_malicious_urls' and value == 'True':
            url_params += f'&containedMaliciousUrls={argToBoolean(value)}'
        elif key == 'contained_neutral_urls' and value == 'True':
            url_params += f'&containedNeutralUrls={argToBoolean(value)}'
        elif key == 'file_hash':
            url_params += f'&fileSha256={value}'
        elif key == 'message_id':
            url_params += f'&messageIdHeader={int(value)}'
        elif key == 'cisco_id':
            url_params += f'&ciscoMid={int(value)}'
        elif key == 'sender_ip':
            url_params += f'&senderIp={value}'
        elif key == 'message_direction':
            url_params += f'&messageDirection={value}'
        elif key == 'quarantine_status':
            url_params += f'&quarantineStatus={value}'
        elif key == 'url_reputation':
            url_params += f'&urlReputation={value}'
        elif key == 'macro_file_types_detected':
            url_params += f'&macroFileTypesDetected={value}'
    return url_params
def messages_to_human_readable(messages):
    """Render a markdown table summarising tracked messages."""
    rows = []
    for message in messages:
        def attr(field):
            return dict_safe_get(message, ['attributes', field], None)
        rows.append(assign_params(message_id=attr('mid'),
                                  cisco_id=attr('icid'),
                                  sender=attr('sender'),
                                  sender_ip=attr('senderIp'),
                                  subject=attr('subject'),
                                  serial_number=attr('serialNumber'),
                                  timestamp=attr('timestamp')))
    headers = ['timestamp', 'message_id', 'cisco_id', 'sender', 'sender_ip', 'subject', 'serial_number']
    return tableToMarkdown('CiscoEmailSecurity Messages', rows, headers, removeNull=True)
def list_search_messages_command(client, args):
    """Search tracked messages and return them as CommandResults.

    Flattens each message's 'mid' field in place before rendering.
    """
    url_params = build_url_params_for_list_messages(args)
    messages_response_data = client.list_messages(url_params)
    messages_data = messages_response_data.get('data')
    for message in messages_data:
        message_details = message.get('attributes', {})
        # The API appears to return 'mid' as a single-element list; unwrap it so
        # the context key is a scalar. TODO(review): confirm 'mid' is always a list.
        message_id = message_details.get('mid', [None])[0]
        message['attributes']['mid'] = message_id
    human_readable = messages_to_human_readable(messages_data)
    return CommandResults(
        readable_output=human_readable,
        outputs_prefix='CiscoEmailSecurity.Message',
        outputs_key_field='attributes.mid',
        outputs=messages_data
    )
def build_url_params_for_get_details(args):
    """Build the query string shared by the message/DLP/AMP/URL details endpoints."""
    start_date = date_to_cisco_date(args.get('start_date'))
    end_date = date_to_cisco_date(args.get('end_date'))
    return (f"?startDate={start_date}&endDate={end_date}"
            f"&mid={args.get('message_id')}&icid={args.get('cisco_id')}"
            f"&serialNumber={args.get('appliance_serial_number')}")
def details_get_to_human_readable(message):
    """Render a one-row markdown table for a single message's details."""
    headers = ['message_id', 'direction', 'sender', 'recipient', 'subject', 'timestamp']
    row = assign_params(
        message_id=message.get('mid'),
        direction=message.get('direction'),
        sender=message.get('sender'),
        recipient=message.get('recipient'),
        subject=message.get('subject'),
        timestamp=message.get('timestamp'),
    )
    return tableToMarkdown('CiscoEmailSecurity Messages', row, headers, removeNull=True)
def response_data_to_context_and_human_readable(response_data):
    """Extract the message object from a details response and render it.

    Returns a (context, human_readable) tuple.
    """
    context_data = response_data.get('data')
    context_message = context_data.get('messages')
    # 'mid' appears to arrive as a single-element list; unwrap it to a scalar.
    # TODO(review): confirm the API always returns a list here.
    context_message['mid'] = context_message.get('mid', [None])[0]
    human_readable = details_get_to_human_readable(context_message)
    return context_message, human_readable
def list_get_message_details_command(client, args):
    """Fetch tracking details for a single message and return them as CommandResults."""
    response = client.list_get_message_details(build_url_params_for_get_details(args))
    context, human_readable = response_data_to_context_and_human_readable(response)
    return CommandResults(
        outputs_prefix='CiscoEmailSecurity.Message',
        outputs_key_field='mid',
        outputs=context,
        readable_output=human_readable,
    )
def list_get_dlp_details_command(client, args):
    """Fetch DLP details for a single message and return them as CommandResults."""
    response = client.list_get_dlp_details(build_url_params_for_get_details(args))
    context, human_readable = response_data_to_context_and_human_readable(response)
    return CommandResults(
        outputs_prefix='CiscoEmailSecurity.DLP',
        outputs_key_field='mid',
        outputs=context,
        readable_output=human_readable,
    )
def list_get_amp_details_command(client, args):
    """Fetch AMP details for a single message and return them as CommandResults."""
    response = client.list_get_amp_details(build_url_params_for_get_details(args))
    context, human_readable = response_data_to_context_and_human_readable(response)
    return CommandResults(
        outputs_prefix='CiscoEmailSecurity.AMP',
        outputs_key_field='mid',
        outputs=context,
        readable_output=human_readable,
    )
def list_get_url_details_command(client, args):
    """Fetch URL details for a single message and return them as CommandResults."""
    response = client.list_get_url_details(build_url_params_for_get_details(args))
    context, human_readable = response_data_to_context_and_human_readable(response)
    return CommandResults(
        outputs_prefix='CiscoEmailSecurity.URL',
        outputs_key_field='mid',
        outputs=context,
        readable_output=human_readable,
    )
def build_url_params_for_spam_quarantine(args):
    """Build the query string for searching the spam quarantine.

    Optional filters are appended in the insertion order of the (None-stripped)
    args, so the relative parameter order is caller-dependent.
    """
    start_date = date_to_cisco_date(args.get('start_date'))
    end_date = date_to_cisco_date(args.get('end_date'))
    limit = set_limit(args.get('limit'))
    offset = int(args.get('offset', '0'))
    url_params = f'?startDate={start_date}&endDate={end_date}&quarantineType=spam&offset={offset}&limit={limit}'
    arguments = assign_params(**args)
    for key, value in arguments.items():
        if key == 'order_by':
            order_dir = arguments.get('order_dir', 'asc')
            url_params += f'&orderBy={value}&orderDir={order_dir}'
        elif key == 'recipient_value':
            recipient_operator = arguments.get('recipient_operator', 'is')
            url_params += f'&envelopeRecipientfilterOperator={recipient_operator}&envelopeRecipientfilterValue={value}'
        elif key == 'filter_value':
            filter_operator = arguments.get('filter_operator', 'is')
            url_params += f'&filterOperator={filter_operator}&filterValue={value}'
    return url_params
def spam_quarantine_to_human_readable(spam_quarantine):
    """Render a markdown table summarising quarantined messages.

    None-valued fields are dropped by assign_params and hidden via removeNull.
    """
    spam_quarantine_readable_outputs = []
    for message in spam_quarantine:
        # NOTE(review): 'mid' is read from the top level here while every other
        # field comes from 'attributes' — confirm this matches the quarantine
        # API response shape.
        readable_output = assign_params(message_id=message.get('mid'),
                                        recipient=dict_safe_get(message, ['attributes', 'envelopeRecipient'], None),
                                        to_address=dict_safe_get(message, ['attributes', 'toAddress'], None),
                                        subject=dict_safe_get(message, ['attributes', 'subject'], None),
                                        date=dict_safe_get(message, ['attributes', 'date'], None),
                                        from_address=dict_safe_get(message, ['attributes', 'fromAddress'], None))
        spam_quarantine_readable_outputs.append(readable_output)
    headers = ['message_id', 'recipient', 'to_address', 'from_address', 'subject', 'date']
    human_readable = tableToMarkdown('CiscoEmailSecurity The Quarantine Messages', spam_quarantine_readable_outputs,
                                     headers, removeNull=True)
    return human_readable
def list_search_spam_quarantine_command(client, args):
    """Search the spam quarantine and return matching messages as CommandResults."""
    response = client.list_spam_quarantine(build_url_params_for_spam_quarantine(args))
    quarantine_messages = response.get('data')
    return CommandResults(
        outputs_prefix='CiscoEmailSecurity.SpamQuarantine',
        outputs_key_field='mid',
        outputs=quarantine_messages,
        readable_output=spam_quarantine_to_human_readable(quarantine_messages),
    )
def quarantine_message_details_data_to_human_readable(message):
    """Render a one-row markdown table for a quarantined message's details."""
    headers = ['recipient', 'to_address', 'from_address', 'subject', 'date']
    row = assign_params(
        recipient=message.get('envelopeRecipient'),
        date=message.get('date'),
        to_address=message.get('toAddress'),
        subject=message.get('subject'),
        from_address=message.get('fromAddress'),
    )
    return tableToMarkdown('CiscoEmailSecurity QuarantineMessageDetails', row, headers,
                           removeNull=True)
def list_get_quarantine_message_details_command(client, args):
    """Fetch details for a single quarantined message and return CommandResults."""
    response = client.list_quarantine_get_details(args.get('message_id'))
    details = response.get('data')
    return CommandResults(
        outputs_prefix='CiscoEmailSecurity.QuarantineMessageDetail',
        outputs_key_field='mid',
        outputs=details,
        readable_output=quarantine_message_details_data_to_human_readable(details.get('attributes')),
    )
def list_delete_quarantine_messages_command(client, args):
    """Delete the given messages from the spam quarantine."""
    payload = {
        "quarantineType": "spam",
        "mids": message_ids_to_list_of_integers(args)
    }
    response = client.list_delete_quarantine_messages(payload)
    deleted_count = dict_safe_get(response, ['data', 'totalCount'], None)
    return CommandResults(
        readable_output=f'{deleted_count} messages successfully deleted from quarantine list',
    )
def list_release_quarantine_messages_command(client, args):
    """Release the given messages from the spam quarantine."""
    payload = {
        "action": "release",
        "quarantineType": "spam",
        "mids": message_ids_to_list_of_integers(args)
    }
    response = client.list_release_quarantine_messages(payload)
    released_count = dict_safe_get(response, ['data', 'totalCount'], None)
    return CommandResults(
        readable_output=f'{released_count} messages successfully released from quarantine list',
    )
def build_url_filter_for_get_list_entries(args):
    """Build the query string for viewing quarantine safelist/blocklist entries."""
    return ("?action=view"
            f"&limit={set_limit(args.get('limit'))}"
            f"&offset={int(args.get('offset', '0'))}"
            "&quarantineType=spam&orderDir=desc"
            f"&viewBy={args.get('view_by')}"
            f"&orderBy={args.get('order_by')}")
def list_entries_get_command(client, args):
    """List quarantine safelist/blocklist entries of the requested type."""
    list_type = args.get('list_type')
    response = client.list_entries_get(build_url_filter_for_get_list_entries(args), list_type)
    entries = response.get('data', [None])
    prefix = list_type.title()
    return CommandResults(
        outputs_prefix=f'CiscoEmailSecurity.ListEntry.{prefix}',
        outputs_key_field=prefix,
        outputs=entries,
        readable_output=entries,
    )
def build_request_body_for_add_list_entries(args):
    """Build the JSON body for adding safelist/blocklist entries.

    Uses truthiness checks (consistent with build_request_body_for_delete_list_entries)
    so a key that is present but empty/None no longer raises on .split().
    """
    request_body = {
        "action": args.get('action'),
        "quarantineType": "spam",
        "viewBy": args.get('view_by')
    }
    # Map of optional comma-separated args to their API body keys.
    optional_csv_fields = (
        ('recipient_addresses', 'recipientAddresses'),
        ('recipient_list', 'recipientList'),
        ('sender_addresses', 'senderAddresses'),
        ('sender_list', 'senderList'),
    )
    for arg_name, body_key in optional_csv_fields:
        value = args.get(arg_name)
        if value:
            request_body[body_key] = value.split(',')
    return request_body
def set_outputs_key_for_list_recipient_and_sender(args):
    """
    Return whichever of recipient_list / sender_list was supplied.

    Args:
        args: command arguments holding the recipient list or the sender list.

    Returns:
        The recipient list if present (and truthy), otherwise the sender list.
    """
    recipient_list = args.get('recipient_list')
    if recipient_list:
        return recipient_list
    return args.get('sender_list')
def list_entries_add_command(client, args):
    """Add entries to the quarantine safelist/blocklist."""
    list_type = args.get('list_type')
    response = client.list_entries_add(list_type, build_request_body_for_add_list_entries(args))
    return CommandResults(
        readable_output=response.get('data'),
        outputs_prefix=f'CiscoEmailSecurity.listEntry.{list_type.title()}',
        outputs_key_field=set_outputs_key_for_list_recipient_and_sender(args),
    )
def build_request_body_for_delete_list_entries(args):
    """Build the JSON body for deleting safelist/blocklist entries."""
    body = {
        "quarantineType": "spam",
        "viewBy": args.get('view_by')
    }
    recipient_list = args.get('recipient_list')
    if recipient_list:
        body["recipientList"] = recipient_list.split(',')
    sender_list = args.get('sender_list')
    if sender_list:
        body["senderList"] = sender_list.split(',')
    return body
def list_entries_delete_command(client, args):
    """Delete entries from the quarantine safelist/blocklist."""
    list_type = args.get('list_type')
    response = client.list_entries_delete(list_type, build_request_body_for_delete_list_entries(args))
    return CommandResults(
        readable_output=response.get('data'),
        outputs_prefix=f'CiscoEmailSecurity.listEntry.{list_type.title()}',
        outputs_key_field=set_outputs_key_for_list_recipient_and_sender(args),
    )
def main() -> None:
    """Parse integration params / command args and dispatch the invoked command.

    All errors are caught at this level and reported via return_error.
    """
    params = demisto.params()
    args = demisto.args()
    demisto.debug(f'Command being called is {demisto.command()}')
    try:
        # Client construction performs the login / JWT exchange.
        client = Client(params)
        if demisto.command() == 'test-module':
            result = test_module(client)
            return_results(result)
        elif demisto.command() == 'cisco-email-security-report-get':
            return_results(list_report_command(client, args))
        elif demisto.command() == 'cisco-email-security-messages-search':
            return_results(list_search_messages_command(client, args))
        elif demisto.command() == 'cisco-email-security-message-details-get':
            return_results(list_get_message_details_command(client, args))
        elif demisto.command() == 'cisco-email-security-spam-quarantine-search':
            return_results(list_search_spam_quarantine_command(client, args))
        elif demisto.command() == 'cisco-email-security-spam-quarantine-message-details-get':
            return_results(list_get_quarantine_message_details_command(client, args))
        elif demisto.command() == 'cisco-email-security-spam-quarantine-messages-delete':
            return_results(list_delete_quarantine_messages_command(client, args))
        elif demisto.command() == 'cisco-email-security-spam-quarantine-messages-release':
            return_results(list_release_quarantine_messages_command(client, args))
        elif demisto.command() == 'cisco-email-security-dlp-details-get':
            return_results(list_get_dlp_details_command(client, args))
        elif demisto.command() == 'cisco-email-security-amp-details-get':
            return_results(list_get_amp_details_command(client, args))
        elif demisto.command() == 'cisco-email-security-url-details-get':
            return_results(list_get_url_details_command(client, args))
        elif demisto.command() == 'cisco-email-security-list-entries-get':
            return_results(list_entries_get_command(client, args))
        elif demisto.command() == 'cisco-email-security-list-entry-add':
            return_results(list_entries_add_command(client, args))
        elif demisto.command() == 'cisco-email-security-list-entry-delete':
            return_results(list_entries_delete_command(client, args))
    # Log exceptions and return errors
    except Exception as e:
        return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
''' ENTRY POINT '''
# Run under XSOAR script runners: __main__ (direct), __builtin__ (py2) or builtins (py3).
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | e0684391683c1bf5f4f5f63136afb324 | 41.015221 | 120 | 0.641429 | 3.69482 | false | false | false | false |
demisto/content | Packs/PrismaCloud/Scripts/PrismaCloudAttribution/PrismaCloudAttribution_test.py | 2 | 2207 | from PrismaCloudAttribution import attribution_command
import demistomock as demisto # noqa # pylint: disable=unused-wildcard-import
import json
import io
MOCK_RESULT = [
{
"accountId": "123456",
"accountName": "aws-user-personal",
"cloudType": "aws",
"fqdn": [
"application-lb-123456.us-east-1.elb.amazonaws.com"
],
"hasAlert": False,
"id": "arn:aws:elasticloadbalancing:us-east-1:123456:loadbalancer/app/application-lb/1398164320221c02",
"ip": None,
"regionId": "us-east-1",
"resourceName": "application-lb",
"resourceType": "Managed Load Balancer",
"rrn": ("rrn::managedLb:us-east-1:123456:b38d940663c047b02c2116be49695cf353976dff:arn%3Aaws"
"%3Aelasticloadbalancing%3Aus-east-1%3A123456%3Aloadbalancer%2Fapp%2Fapplication-lb"
"%2F1398164320221c02"),
"service": "Amazon Elastic Load Balancing"
},
{
"accountId": "123456",
"accountName": "aws-user-personal",
"cloudType": "aws",
"fqdn": [
"ec2-35-180-1-1.eu-west-3.compute.amazonaws.com"
],
"hasAlert": False,
"id": "i-654321b",
"ip": [
"35.180.1.1"
],
"regionId": "eu-west-3",
"resourceName": "testvm",
"resourceType": "Instance",
"rrn": "rrn::instance:eu-west-3:123456:9db2db5fdba47606863c8da86d3ae594fb5aee2b:i-654321b",
"service": "Amazon EC2"
}
]
def util_load_json(path):
    """Load the UTF-8 encoded JSON file at *path* and return the parsed object."""
    # Built-in open() is an alias of io.open() on Python 3, and json.load()
    # parses directly from the file object instead of materialising the whole
    # text with f.read() first.
    with open(path, encoding='utf-8') as f:
        return json.load(f)
def test_match_ip_in_cidr_indicators(mocker):
    """
    Given:
        - list of prisma cloud assets as returned by redlock-search-config
    When
        - extracting a subset of fields from Prisma Cloud asset list
    Then
        - the expected subset of fields is returned to Context
    """
    # NOTE(review): the function name suggests a CIDR-matching test, but the
    # body exercises attribution_command, and `mocker` is unused — presumably
    # copied from a template. Confirm before renaming.
    mock_assets = util_load_json('test_data/assets.json')
    result = attribution_command({
        'assets': mock_assets
    })
    # attribution_command returns a CommandResults keyed on the asset 'rrn'.
    assert result.outputs_prefix == "PrismaCloud.Attribution"
    assert result.outputs_key_field == "rrn"
    assert result.outputs == MOCK_RESULT
| mit | a38a79f0d2a944cec1795d84f4ca6fa8 | 31.455882 | 111 | 0.606253 | 3.25997 | false | false | false | false |
demisto/content | Packs/HealthCheck/Scripts/HealthCheckDockerLog/HealthCheckDockerLog.py | 2 | 5467 | import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
import re
# Format strings for actionable-item descriptions:
#   index 0 — used by image_analytics() for the container-count warning,
#   index 1 — CPU-usage counter in main(), index 2 — RAM-usage counter in main().
DESCRIPTION = [
    "Too many running containers: There are {} containers running on the server",
    "There are {} containers that are running with over 10% CPU Usage - Please check docker.log",
    "There are {} containers that are running with over 10% RAM Usage - Please check docker.log",
]
# Remediation link attached to every Docker actionable item.
RESOLUTION = 'Docker containers overloaded: https://docs.paloaltonetworks.com/cortex/cortex-xsoar/6-0/' \
             'cortex-xsoar-admin/cortex-xsoar-overview/performance-tuning-of-cortex-xsoar-server'
def container_analytics(containers):
    """Return a high-severity actionable item for each container whose CPU usage exceeds 80%.

    Each element of *containers* is a dict produced by main(), holding at least
    'containerid' and 'cpu_usage' (a numeric string from `docker stats`).
    """
    return [
        {
            "category": "Docker",
            "severity": "High",
            "description": "Container {} uses more then 80% of the CPU".format(entry['containerid']),
            "resolution": RESOLUTION,
        }
        for entry in containers
        if float(entry['cpu_usage']) > 80.0
    ]
def image_analytics(images):
    """Return a medium-severity actionable item when more than 200 entries were parsed.

    *images* is the list of parsed `docker images` rows built in main(); the
    threshold and message come from DESCRIPTION[0].
    """
    total = len(images)
    if total <= 200:
        return []
    return [{
        "category": "Docker",
        "severity": "Medium",
        "description": DESCRIPTION[0].format(total),
        "resolution": RESOLUTION,
    }]
def main(args):
    """Parse an uploaded docker.log bundle and return Docker health actionable items.

    Expects args['entryID'] to reference a war-room file containing the output
    of `docker images`, `docker stats` and `docker info`. Extracted image and
    container tables are rendered to the war room, host/docker facts are pushed
    onto the incident via setIncident, and a CommandResults with the list of
    actionable items is returned.
    """
    # get the file path from the given entry ID
    path = demisto.getFilePath(args['entryID'])['path']
    # Matches `docker images` rows: repository, tag, image id, age, size.
    getimages = re.compile(
        r'(?P<repository>[\w*/.\-<>]*)\s+(?P<tag>\d[.\d]*|\blatest\b|\b<none>\b|\b1\.\d-alpine\b)'
        r'\s+(?P<ImageID>\w+)\s+(?P<Created>\d{0,2}\s(?:\byears\b|\bmonths\b|weeks\b) ago)\s+(?P<size>\d+.*B)')
    # Matches `docker stats` rows: id, name, cpu%, mem used/limit, mem%, net IO, block IO, pids.
    getcontainers = re.compile(
        r'^(?P<container>[\w]+)\s+(?P<name>[\w\d.-]+)\s+(?P<cpu>[\d.]+)%\s+(?P<memusage>[\d.]+(?:MiB|GiB))\s+/\s+'
        r'(?P<memlimit>[\d.]+(?:MiB|GiB))\s+(?P<mempercent>[\d.]+)%\s+(?P<netI>[\d.]+(?:B|kB|MB))\s+/\s+'
        r'(?P<netO>[\d.]+(?:B|kB|MB))\s+(?P<blockI>[\d.]+(?:B|kB|MB))\s+/\s+(?P<blockO>[\d.]+(?:B|kB|MB))\s+'
        r'(?P<pids>\d+)',
        re.MULTILINE)
    # Any "NN.NN%" value — CPU and memory percentages alternate in the log.
    usage = re.compile(r'(\d+\.\d+)%')
    # "Key: value" lines from `docker info` (e.g. "Server Version: 19.03").
    config = re.compile(r'([ \w]+): ([\d\w .,-]+)')
    image_array = []
    res = []
    container_array = []
    dataset = {}
    try:
        with open(path, 'r') as f:
            all_lines = f.read()
        # fetch all data items and create a dataset
    except UnicodeDecodeError:
        # NOTE(review): plain-string return here, CommandResults below —
        # callers see inconsistent result types. Confirm before changing.
        return 'Could not read file'
    all_images = [m.groups() for m in getimages.finditer(all_lines)]
    for item in all_images:
        image_array.append({"image": item[0], "version": item[1], 'imageid': item[2],
                            'last_update': item[3], 'size': item[4]})
    all_containers = [m.groups() for m in getcontainers.finditer(all_lines)]
    for item in all_containers:
        if len(item) == 11:
            container_array.append({"containerid": item[0], "name": item[1], 'cpu_usage': item[2],
                                    'mem_used': item[3], 'mem_limit': item[4],
                                    'mem_usage': item[5], 'net_in': item[6],
                                    'net_out': item[7], 'block_in': item[8],
                                    'block_out': item[9], 'pids': item[10],
                                    })
    return_outputs(readable_output=tableToMarkdown("Containers", container_array,
                                                   ['containerid', 'name', 'cpu_usage', 'mem_usage']))
    return_outputs(readable_output=tableToMarkdown("Images", image_array, [
        'imageid', 'image', 'version', 'last_update', 'size']))
    getconfig = [m.groups() for m in config.finditer(all_lines)]
    for m in getconfig:
        dataset.update({m[0].lstrip(): m[1].strip()})
    usage_all = [m.groups() for m in usage.finditer(all_lines)]
    countCPU = 0
    countMEM = 0
    count = 0
    # Percentages alternate CPU, MEM per stats row: even positions are CPU,
    # odd positions are memory. Count how many of each exceed 10%.
    for m in usage_all:
        if float(m[0]) > 10.0:
            if (count % 2) == 0:
                countCPU += 1
            elif (count % 2) != 0:
                countMEM += 1
        count += 1
    if countCPU:
        res.append({"category": "Docker", "severity": "Medium",
                    "description": DESCRIPTION[1].format(countCPU)})
    if countMEM:
        res.append({"category": "Docker", "severity": "Medium",
                    "description": DESCRIPTION[2].format(countMEM)})
    res = res + image_analytics(image_array)
    res = res + container_analytics(container_array)
    # 'Operating System' only appears when the `docker info` section was parsed.
    if 'Operating System' in dataset:
        demisto.executeCommand("setIncident", {
            'xsoarcpu': dataset['CPUs'],
            'xsoaros': dataset['Operating System'],
            'xsoarmemory': dataset['Total Memory'],
            'healthcheckdockercontainers': dataset['Containers'],
            'healthcheckdockerrunning': dataset['Running'],
            'healthcheckdockerpaused': dataset['Paused'],
            'healthcheckdockerstop': dataset['Stopped'],
            'healthcheckdockerversion': dataset['Server Version'],
            'healthcheckdockercontainersstats': container_array,
            'healthcheckdockerimages': image_array
        })
    return CommandResults(
        readable_output="HealthCheckDockerLog Done",
        outputs_prefix="HealthCheck.ActionableItems",
        outputs=res)
if __name__ in ('__main__', '__builtin__', 'builtins'): # pragma: no cover
return_results(main(demisto.args()))
| mit | bf80690c76ffa0cbb3696d90a50635a7 | 39.798507 | 114 | 0.550576 | 3.618134 | false | false | false | false |
demisto/content | Packs/AWS-EC2/Scripts/AwsEC2GetPublicSGRules/AwsEC2GetPublicSGRules.py | 2 | 4157 | import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
import json
import copy
def get_dict_value(data, key):
    """Return the value stored under *key* in *data*, matching the key case-insensitively.

    Returns None when no key matches.
    """
    wanted = key.lower()
    for name, value in data.items():
        if name.lower() == wanted:
            return value
    return None
def get_ec2_sg_public_rules(group_id, ip_permissions, checked_protocol=None, checked_from_port=None,
                            checked_to_port=None, region=None, include_ipv6='no'):
    """
    Return the ingress rules of a security group that are open to the internet.

    :param group_id: the security group id (used to build the output rules).
    :param ip_permissions: one rule dict or a list of rule dicts as returned by
        the AWS EC2 integration (keys may vary in case, e.g. 'IpRanges' vs 'ipv4Ranges').
    :param checked_protocol: protocol to match ('tcp'/'udp'/...); AWS protocol '-1'
        ("all protocols") always matches.
    :param checked_from_port: low end of the port range considered public.
    :param checked_to_port: high end of the port range considered public.
    :param region: optional region copied into each returned rule.
    :param include_ipv6: 'yes' to also inspect IPv6 ranges for '::/0'.
    :return: list of rule dicts suitable for aws-ec2-revoke-security-group-ingress-rule.
    """
    def lookup(data, key):
        # Case-insensitive dict lookup — the integration is inconsistent about
        # key casing (e.g. 'IpRanges' vs 'ipv4Ranges').
        wanted = key.lower()
        for name, value in data.items():
            if name.lower() == wanted:
                return value
        return None

    # A single rule may arrive as a bare dict; normalize to a one-element list.
    if isinstance(ip_permissions, dict):
        ip_permissions = [ip_permissions]
    public_rules = []
    for rule in ip_permissions:
        protocol = lookup(rule, 'IpProtocol')
        if protocol != '-1':  # '-1' means "all protocols" and always matches
            if checked_protocol.lower() != protocol.lower():
                continue
        bad_rule = {
            'groupId': group_id,
            'ipProtocol': protocol
        }
        if region:
            bad_rule.update(region=region)
        from_port = lookup(rule, 'FromPort')
        to_port = lookup(rule, 'ToPort')
        # Compare against None explicitly: port 0 is a valid FromPort but is
        # falsy, so a plain truthiness check would wrongly skip the port filter
        # (and drop the fromPort/toPort keys) for rules starting at port 0.
        if from_port is not None and to_port is not None:
            # Skip rules whose whole range lies outside the checked range.
            if from_port < checked_from_port and to_port < checked_from_port:
                continue
            elif from_port > checked_to_port and to_port > checked_to_port:
                continue
            bad_rule.update({
                'fromPort': from_port,
                'toPort': to_port
            })
        # IPv4: a rule is public when any range is 0.0.0.0/0.
        ip_ranges = lookup(rule, 'ipv4Ranges') or lookup(rule, 'IpRanges')
        if ip_ranges:
            for ip_range in ip_ranges:
                if lookup(ip_range, 'CidrIp') == '0.0.0.0/0':
                    open_rule = copy.copy(bad_rule)
                    open_rule['cidrIp'] = '0.0.0.0/0'
                    public_rules.append(open_rule)
        # IPv6: same check against ::/0, only when requested.
        if include_ipv6 == 'yes':
            ip_ranges = lookup(rule, 'Ipv6Ranges')
            if ip_ranges:
                for ip_range in ip_ranges:
                    if lookup(ip_range, 'CidrIpv6') == '::/0':
                        open_rule = copy.copy(bad_rule)
                        open_rule['cidrIp'] = '::/0'
                        public_rules.append(open_rule)
    return public_rules
def main(args):
    """Collect public security-group rules and return them to the war room and context.

    Args come from demisto.args(): groupId, ipPermissions (dict/list or JSON
    string), protocol, optional fromPort/toPort (default 0-65535), region and
    includeIPv6. Results are written under AWS.EC2.SecurityGroup.PublicRules.
    """
    ip_perms = args.get('ipPermissions')
    # ipPermissions may arrive as a JSON string from the CLI; parse it.
    if isinstance(ip_perms, str):
        try:
            ip_perms = json.loads(ip_perms)
        except json.JSONDecodeError:
            return_error('Unable to parse ipPermissions. Invalid JSON string.')
    # If checked from_port or to_port is not specified
    # it will default to 0-65535 (all ports)
    if args.get('fromPort'):
        from_port = int(args.get('fromPort'))
    else:
        from_port = 0
    if args.get('toPort'):
        to_port = int(args.get('toPort'))
    else:
        to_port = 65535
    public_rules = get_ec2_sg_public_rules(
        group_id=args.get('groupId'),
        ip_permissions=ip_perms,
        checked_protocol=args.get('protocol'),
        checked_from_port=from_port,
        checked_to_port=to_port,
        region=args.get('region'),
        include_ipv6=args.get('includeIPv6')
    )
    readable_output = tableToMarkdown('Public Security Group Rules', public_rules,
                                      ['groupId', 'ipProtocol', 'fromPort', 'toPort', 'cidrIp', 'region']
                                      )
    context = {
        'AWS': {
            'EC2': {
                'SecurityGroup': {
                    'PublicRules': public_rules
                }
            }
        }
    }
    return_outputs(readable_output, context, raw_response=public_rules)
# XSOAR server runs scripts with __name__ == 'builtins' (Py3) / '__builtin__' (Py2).
# NOTE(review): '__main__' is not included, so executing the file directly is a
# no-op — confirm this is intentional.
if __name__ in ('builtins', '__builtin__'):
    main(demisto.args())
| mit | 47c716bd4bf77fa6d9e22163cceb646d | 29.792593 | 105 | 0.533317 | 3.888681 | false | false | false | false |
demisto/content | Packs/Campaign/Scripts/CollectCampaignRecipients/CollectCampaignRecipients.py | 2 | 2180 |
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
# Sentinel value in the incident selection meaning "every campaign incident".
ALL_INCIDENTS = 'All'
# Incident custom field that receives the collected recipient list (CSV).
CAMPAIGN_EMAIL_TO_FIELD = 'campaignemailto'
def get_campaign_incidents():
    """
    Get the campaign incidents form the incident's context

    :rtype: ``list``
    :return: list of campaign incidents
    """
    incident = demisto.incidents()[0]
    # Depending on server version the running incident exposes 'id' or
    # 'investigationId' — use whichever is present.
    incident_id = incident.get('id') or incident.get('investigationId')
    res = demisto.executeCommand('getContext', {'id': incident_id})
    if isError(res):
        return_error(f'Error occurred while trying to get the incident context: {get_error(res)}')
    # The campaign playbook stores its incidents under EmailCampaign.incidents.
    return demisto.get(res[0], 'Contents.context.EmailCampaign.incidents')
def collect_campaign_recipients(args):
    """
    Collect the campaign unique recipients from all the campaign incidents

    :type args: ``dict``
    :param args: args from demisto; args['new'] holds the ids selected in the
        incident-selection field (may contain the 'All' sentinel)

    :rtype: ``str``
    :return: unique recipients in CSV
    """
    try:
        selected_ids = args['new']
        if not selected_ids:
            return ''
        incidents = get_campaign_incidents()
        # 'All' short-circuits filtering; otherwise keep only selected incidents.
        if ALL_INCIDENTS not in selected_ids:
            incidents = filter(lambda incident: incident['id'] in selected_ids, incidents)
        # Deduplicate recipients across all selected incidents.
        recipient_set = {recipient for incident in incidents for recipient in incident['recipients']}
        return ','.join(recipient_set)
    except KeyError as e:
        raise DemistoException(f'Missing required arg: {str(e)}') from e
def update_campaign_email_to_field(recipients):
    """
    Update the campaignemailto field with the collected recipients

    :type recipients: ``str``
    :param recipients: CSV string of unique recipient addresses
    """
    incident_id = demisto.incidents()[0]['id']
    demisto.executeCommand('setIncident', {'id': incident_id, 'customFields': {CAMPAIGN_EMAIL_TO_FIELD: recipients}})
def main():
    """Field-change trigger: collect recipients for the selected incidents and store them."""
    try:
        args = demisto.args()
        recipients = collect_campaign_recipients(args)
        update_campaign_email_to_field(recipients)
    except Exception as e:
        # NOTE(review): `traceback` is not imported in the visible source —
        # presumably provided by CommonServerPython's wildcard import; verify.
        return_error(f'Failed to execute CollectCampaignRecipients. Error: {str(e)}', error=traceback.format_exc())
# XSOAR executes scripts with __name__ set to one of these values.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | 6abcb9b0ad5fe8254ab3e2e04c2cb22b | 30.142857 | 117 | 0.651376 | 3.720137 | false | false | false | false |
demisto/content | Packs/CommonScripts/Scripts/ExtractIndicatorsFromWordFile/ExtractIndicatorsFromWordFile.py | 2 | 5641 | import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
import subprocess
from docx import Document
from typing import List, Dict
from docx.opc.constants import RELATIONSHIP_TYPE as RT
from docx.opc.exceptions import PackageNotFoundError
class WordParser:
    """Extracts all text (paragraphs, tables, core properties, hyperlinks) from a
    doc/docx war-room file so indicators can be pulled from it.

    Workflow: parse_word() resolves the uploaded file, converts legacy .doc to
    .docx via LibreOffice when needed, then concatenates every text source into
    self.all_data. Errors are collected as war-room error entries in self.res.
    """

    def __init__(self):
        # Result entry (converted-file fileResult or an error entry).
        self.res = []  # type: List[Dict[str, str]]
        # Template error entry; 'Contents' is filled in on failure.
        self.errEntry = {
            "Type": entryTypes["error"],
            "ContentsFormat": formats["text"],
            "Contents": ""
        }
        self.file_path = ""
        self.file_name = ""
        self.file_type = ""
        # Accumulated text from every part of the document.
        self.all_data = ""

    def get_file_details(self):
        """Resolve path, name and detected type of the file referenced by args['entryID']."""
        file_path_data = demisto.getFilePath(demisto.args().get("entryID"))
        self.file_path = file_path_data.get('path')
        self.file_name = file_path_data.get('name')
        # Look up the File context entry to get the sniffed file type.
        file_entry = demisto.dt(self.get_context(), "File(val.EntryID === '{}')".format(demisto.args().get('entryID')))
        if isinstance(file_entry, list):
            file_entry = file_entry[0]
        self.file_type = file_entry.get("Type")

    def convert_doc_to_docx(self):
        """Convert a legacy .doc file to .docx with headless LibreOffice and upload the result."""
        output = subprocess.check_output(
            ['soffice', '--headless', '-env:UserInstallation=file:///tmp/.config/extractindicators', '--convert-to',
             'docx', self.file_path], stderr=subprocess.STDOUT)
        demisto.debug("soffice output: [{}]".format(str(output)))
        # Requires office-utils docker image
        output_file_name = self.file_name[0:self.file_name.rfind('.')] + '.docx'
        # soffice writes the converted file next to the input with a .docx suffix.
        self.file_path = self.file_path + ".docx"
        try:
            with open(self.file_path, 'rb') as f:
                f_data = f.read()
            self.res = fileResult(output_file_name, f_data)
        except IOError:
            return_error("Error: was not able to convert the input file to docx format.")

    def extract_indicators(self):
        """Populate self.all_data with every text source of the docx; record errors in self.res."""
        try:
            document = Document(self.file_path)
            self.all_data = self.get_paragraphs(document)
            self.all_data += self.get_tables(document)
            self.all_data += self.get_core_properties(document)
            self.all_data += self.get_hyperlinks(document)
        except PackageNotFoundError:
            self.errEntry["Contents"] = "Input file is not a valid docx/doc file."
            self.res = self.errEntry  # type: ignore
        except BaseException as e:
            self.errEntry["Contents"] = "Error occurred while parsing input file.\nException info: " + str(e)
            self.res = self.errEntry  # type: ignore

    def get_paragraphs(self, document):
        """Return all paragraph text joined with newlines."""
        return '\n'.join([para.text for para in document.paragraphs])

    def get_tables(self, document):
        """Return the text of every table cell, whitespace-normalized into one string."""
        all_cells_txt = ""
        if document.tables:
            for table in document.tables:
                for row in table.rows:
                    for cell in row.cells:
                        for para in cell.paragraphs:
                            all_cells_txt += (" " + para.text)
        return " ".join(all_cells_txt.split())  # Removes extra whitespaces

    def get_core_properties(self, document):
        """Return the document's core metadata fields (author, category, ...) as one string."""
        all_properties_txt = document.core_properties.author + " " + \
            document.core_properties.category + " " + \
            document.core_properties.comments + " " + \
            document.core_properties.identifier + " " + \
            document.core_properties.keywords + " " + \
            document.core_properties.subject + " " + \
            document.core_properties.title + " "
        return " " + " ".join(all_properties_txt.split()) + " "

    def get_hyperlinks(self, document):
        """Return all hyperlink targets in the document, space-separated."""
        all_hyperlinks = ""
        rels = document.part.rels
        for rel in rels:
            if rels[rel].reltype == RT.HYPERLINK:
                # _target is a private python-docx attribute holding the URL.
                all_hyperlinks += (rels[rel]._target + " ")
        return all_hyperlinks

    def parse_word(self):
        """Entry point: dispatch by extension/sniffed type, converting .doc first."""
        self.get_file_details()
        if self.file_name.lower().endswith(".doc") or "Composite Document File V2 Document" in self.file_type:
            self.convert_doc_to_docx()
            self.extract_indicators()
        elif self.file_name.lower().endswith(".docx") or "Microsoft Word 2007+" in self.file_type:
            self.extract_indicators()
        else:
            return_error("Input file is not a doc file.")

    def get_context(self):
        """Return the current incident's full context object."""
        incident_id = demisto.incident()['id']
        res = execute_command('getContext', {'id': incident_id})
        return demisto.get(res, 'context')
def main():
    """Parse the uploaded Word file, extract indicators from its text and return both."""
    # Parsing:
    parser = WordParser()
    try:
        parser.parse_word()
    except subprocess.CalledProcessError as perr:
        # soffice conversion failed — surface its exit code and output.
        return_error("ProcessError: exit code: {}. Output: {}".format(perr.returncode, perr.output))
    except Exception as e:
        return_error(str(e))
    # Returning Indicators:
    indicators_hr = demisto.executeCommand("extractIndicators", {
        'text': parser.all_data})[0]['Contents']
    demisto.results({
        'Type': entryTypes['note'],
        'ContentsFormat': formats['text'],
        'Contents': indicators_hr,
        'HumanReadable': indicators_hr
    })
    # Returning all parsed data:
    demisto.results(parser.all_data.encode('utf-8'))
    # Returning error:
    if parser.res:  # If there was an error:
        contents = parser.res["Contents"]  # type: ignore
        if "Error occurred while parsing input" in contents or "Input file is not a valid" in contents:
            demisto.results(parser.res)  # Return error too
# python2 uses __builtin__ python3 uses builtins
if __name__ == "__builtin__" or __name__ == "builtins":
main()
| mit | 1e28db7cabef7e97821d1d8e3243a0f4 | 38.447552 | 119 | 0.594221 | 3.821816 | false | false | false | false |
demisto/content | Packs/fireeye/Scripts/FireEyeDetonateFile/FireEyeDetonateFile.py | 2 | 4649 | import json
from time import sleep
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
# Maximum number of seconds to poll FireEye for submission results.
TIMEOUT = 960
# Seconds to wait between consecutive status polls.
INTERVAL = 10
def detonate_file(args):
    """Submit a file to FireEye for detonation under each requested profile.

    On success, hands the map of profile -> submission key to poll_stage() for
    polling. If the fireeye integration is unavailable or any submit fails, an
    error entry is returned and polling is skipped.
    """
    should_continue = True
    file = demisto.get(args, 'file')
    feDone = False
    # profile name -> FireEye submission key
    feSubmissionKeys = {}
    # profiles = ['win10x64','win7-sp1','win7x64-sp1','winxp-sp3']
    profiles = argToList(args['profiles'])
    analysistype = args.get('analysistype', 0)
    prefetch = args.get('prefetch', 1)
    # Make sure fireeye available
    if demisto.executeCommand('IsIntegrationAvailable', {'brandname': 'fireeye'})[0]['Contents'] != 'yes':
        feDone = True
    # Upload file and get submission_key
    if not feDone:
        bArgs = {'analysistype': analysistype,
                 'application': '0',
                 'force': "true",
                 'prefetch': prefetch,
                 'priority': "1",
                 'timeout': "180",
                 'upload': file}
        for profile in profiles:
            bArgs['profiles'] = profile
            resp = demisto.executeCommand('fe-submit', bArgs)
            if isError(resp[0]):
                demisto.results(resp)
                should_continue = False
                break
            else:
                feSubmissionKey = demisto.get(resp[0], 'Contents')
                # Contents may arrive JSON-encoded; normalize to a list of dicts.
                if isinstance(feSubmissionKey, str):
                    feSubmissionKey = json.loads(feSubmissionKey)
                feSubmissionKeys[profile] = demisto.get(feSubmissionKey[0], 'ID')
    else:
        demisto.results({"Type": entryTypes["error"], "ContentsFormat": formats["text"],
                         "Contents": 'FireEye: Integration not available.'})
        should_continue = False
    if should_continue:
        poll_stage(feDone, feSubmissionKeys, profiles, file)
def poll_stage(feDone, feSubmissionKeys, profiles, file):
    """Poll each submission's status every INTERVAL seconds until all are done
    (or failed) or TIMEOUT is reached, then hand off to get_results().

    NOTE(review): 'stauses' is a typo for 'statuses' kept here (and in
    get_results' signature) to leave the code byte-identical.
    """
    should_continue = True
    status = None
    sec = 0
    # profile name -> last reported submissionStatus
    stauses = {}
    while sec < TIMEOUT and feSubmissionKeys:
        if not feDone:
            status = "Done"
            # Get status
            for profile in profiles:
                resp = demisto.executeCommand('fe-submit-status', {'submission_Key': feSubmissionKeys[profile]})
                if isError(resp[0]):
                    demisto.results(resp)
                    should_continue = False
                    break
                stauses[profile] = demisto.get(resp[0], 'Contents.submissionStatus')
                # Any profile still in progress keeps the whole poll alive.
                if stauses[profile] in ["In Progress"]:
                    status = "In Progress"
            if not should_continue:
                break
            # find status
            if status in ["In Progress"]:
                sec += INTERVAL
                sleep(INTERVAL)  # pylint: disable=sleep-exists
                # continue loop
            else:
                # loop done failed
                feDone = True
        else:
            break
    if should_continue:
        get_results(feDone, profiles, stauses, feSubmissionKeys, file)
def get_results(feDone, profiles, stauses, feSubmissionKeys, file):
    """Fetch and render detonation results per profile; report failures/timeouts.

    feDone=False here means polling ended without completion (timeout).
    """
    if not feDone:
        demisto.results({"Type": entryTypes["error"], "ContentsFormat": formats["text"],
                         "Contents": 'Could not retrieve results from FireEye (may be due to timeout).'})
    for profile in profiles:
        status = stauses[profile]
        if status in ["Done"]:
            # NOTE(review): the trailing space in 'fe-submit-result ' looks like
            # a typo but may be load-bearing for command lookup — confirm
            # against the fireeye integration before changing.
            resp = demisto.executeCommand('fe-submit-result ', {'submission_Key': feSubmissionKeys[profile]})
            if isError(resp[0]):
                demisto.results(resp)
            else:
                data = demisto.get(resp[0], "Contents.alerts.alert")
                if data:
                    # Normalize single alert to a list, flatten cells for markdown.
                    data = data if isinstance(data, list) else [data]
                    data = [{k: formatCell(row[k]).replace("\n", "<br>") for k in row} for row in data]
                    data = tblToMd(profile, data)
                    demisto.results({"ContentsFormat": formats["markdown"], "Type": entryTypes["note"], "Contents": data})
                else:
                    demisto.results("No results.")
        else:
            demisto.results({"Type": entryTypes["error"], "ContentsFormat": formats["text"],
                             "Contents": 'FireEye: Failed to detonate file {0}, exit status = {1}'.format(file, status)})
def main():  # pragma: no cover
    """Run the detonation flow, converting unexpected exceptions into script errors."""
    args = demisto.args()
    try:
        detonate_file(args)
    except Exception as e:
        err_msg = f'Encountered an error while running the script: [{e}]'
        return_error(err_msg, error=e)
# XSOAR executes scripts with __name__ set to one of these values.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | a1731600f3899ffc985b7a62f24b4c8c | 35.896825 | 122 | 0.550441 | 4.188288 | false | false | false | false |
demisto/content | Packs/ShiftManagement/Scripts/GetOnCallHoursPerUser/GetOnCallHoursPerUser.py | 2 | 2350 | import operator
from functools import reduce
from typing import Dict, List
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
def count_hours_in_shift(shift: Dict) -> int:
    """Return the total number of on-call hours covered by a single shift definition.

    *shift* holds 'fromDay'/'toDay' (inclusive day indices) and
    'fromHour'/'toHour'; all missing keys default to 0.
    """
    start_day = shift.get('fromDay', 0)
    end_day = shift.get('toDay', 0)
    start_hour = shift.get('fromHour', 0)
    end_hour = shift.get('toHour', 0)
    if end_day < start_day:
        # Matches the day-by-day accumulation, which covers zero days here.
        return 0
    # Closed form of summing hour-by-hour per day: 24h for each day strictly
    # between the endpoints, minus the lead-in before start_hour on the first
    # day, plus the hours elapsed up to end_hour on the last day.
    return (end_day - start_day) * 24 - start_hour + end_hour
def main():
    """Aggregate on-call hours per user across all role shifts and render a bar widget."""
    get_roles_response: List = demisto.executeCommand('getRoles', {})
    if is_error(get_roles_response):
        demisto.error(f'Failed to get roles: {str(get_error(get_roles_response))}')
    else:
        # username -> accumulated on-call hours across all roles
        hours_per_user: Dict[str, int] = {}
        get_users_response: List = demisto.executeCommand('getUsers', {})
        if is_error(get_users_response):
            demisto.error(f'Failed to get users: {str(get_error(get_users_response))}')
        else:
            users = get_users_response[0]['Contents']
            roles = get_roles_response[0]['Contents']
            for role in roles:
                role_on_call_hours = 0
                shifts = role.get('shifts') or []
                for shift in shifts:
                    role_on_call_hours += count_hours_in_shift(shift)
                # Users belonging to this role: a user's 'roles' dict maps
                # sources to role-name lists, concatenated here with reduce.
                role_users = map(
                    lambda role_user: role_user.get('name', ''),
                    filter(lambda u: role.get('name') in reduce(operator.add, u.get('roles', {}).values()), users)
                )
                for username in role_users:
                    if username in hours_per_user:
                        hours_per_user[username] += role_on_call_hours
                    else:
                        hours_per_user[username] = role_on_call_hours
            bar_widget = BarColumnPieWidget()
            for user, number_of_hours in hours_per_user.items():
                bar_widget.add_category(name=user, number=number_of_hours)
            return_results(bar_widget)
# XSOAR executes scripts with __name__ set to one of these values.
if __name__ in ('__builtin__', 'builtins', '__main__'):
    main()
| mit | ed33c7e5e4590f84c337e63de5b76b13 | 35.71875 | 114 | 0.552766 | 3.71248 | false | false | false | false |
demisto/content | Packs/Campaign/Scripts/GetCampaignLowSimilarityIncidentsInfo/GetCampaignLowSimilarityIncidentsInfo.py | 2 | 5796 | import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
import copy
"""
This Script is a duplicate of Packs/Campaign/Scripts/GetCampaignIncidentsInfo with the only change of the context field
the data is taken from. The reason is that dynamic section in layout cannot use arguments in the scripts they use.
"""
# Default columns for the incidents table when the context supplies none.
DEFAULT_HEADERS = ['id', 'name', 'emailfrom', 'recipients', 'severity', 'status', 'created']
# Incident keys refreshed via GetIncidentsByQuery before rendering.
KEYS_FETCHED_BY_QUERY = ['status', 'severity']
NO_CAMPAIGN_INCIDENTS_MSG = 'There is no Campaign Incidents in the Context'
# Markdown link template that opens the incident details page.
LINKABLE_ID_FORMAT = '[{incident_id}](#/Details/{incident_id})'
# Numeric incident status -> display name.
STATUS_DICT = {
    0: "Pending",
    1: "Active",
    2: "Closed",
    3: "Archive",
}
# Placeholder values written into empty campaign custom fields so the
# dynamic layout sections render (see update_empty_fields()).
DEFAULT_CUSTOM_FIELDS = {
    'campaignclosenotes': 'Notes explaining why the incident was closed',
    'campaignemailsubject': 'Campaign detected',
    'campaignemailbody': 'Fill here message for the recipients',
    'selectcampaignincidents': ['All']
}
# Numeric severity -> display name (0.5 is XSOAR's 'Info' level).
SEVERITIES = {
    4: 'Critical',
    3: 'High',
    2: 'Medium',
    1: 'Low',
    0.5: 'Info',
    0: 'Unknown'
}
def update_incident_with_required_keys(incidents, required_keys):
    """
    Update the given incident dicts (from context) in place with fresh values
    retrieved by the GetIncidentsByQuery command.

    :type incidents: ``list``
    :param incidents: campaign incidents from the context

    :type required_keys: ``list``
    :param required_keys: keys that need to be updated
    """
    ids = [str(incident['id']) for incident in incidents]
    # One query for all campaign incidents at once.
    res = demisto.executeCommand('GetIncidentsByQuery', {
        'query': "id:({})".format(' '.join(ids))
    })
    if isError(res):
        return_error(f'Error occurred while trying to get incidents by query: {get_error(res)}')
    incidents_from_query = json.loads(res[0]['Contents'])
    id_to_updated_incident_map = {incident['id']: incident for incident in incidents_from_query}
    for incident in incidents:
        updated_incident = id_to_updated_incident_map[incident['id']]
        for key in required_keys:
            incident[key] = updated_incident.get(key)
def convert_incident_to_hr(incident):
    """
    Return a human-readable copy of *incident*: status/severity mapped to
    display names, id turned into a markdown link, similarity rounded/truncated.
    The input incident itself is not modified (a deep copy is converted).

    :type incident: ``dict``
    :param incident: the incident to convert

    :rtype: ``dict``
    :return: the converted copy
    """
    converted_incident = copy.deepcopy(incident)
    for key in converted_incident.keys():
        if key == 'status':
            converted_incident[key] = STATUS_DICT.get(converted_incident.get(key))
        if key == 'id':
            converted_incident[key] = LINKABLE_ID_FORMAT.format(incident_id=converted_incident.get(key))
        if key == 'severity':
            converted_incident[key] = SEVERITIES.get(converted_incident.get(key), '')
        if key == 'similarity':
            # Display rule: scores starting with '1' collapse to '1'; longer
            # fractions are rounded to 3 places then trimmed to 4 characters.
            if str(converted_incident[key])[0] == '1':
                converted_incident[key] = '1'
            elif len(str(converted_incident[key])) > 4:
                converted_incident[key] = str(round(converted_incident[key], 3))
                converted_incident[key] = converted_incident[key][:-1] if len(converted_incident[key]) > 4 \
                    else converted_incident[key]
            else:
                converted_incident[key] = str(converted_incident[key])
        # NOTE(review): for keys without '_' this re-reads the same key (no-op);
        # for keys containing '_' it overwrites the value with the
        # underscore-less variant's value (possibly None). Presumably aligns
        # snake_case context keys with flat incident field names — confirm.
        converted_incident[key] = converted_incident.get(key.replace('_', ''))
    return converted_incident
def get_campaign_incidents_from_context():
    """Return the lower-similarity campaign incidents stored in the incident context."""
    return demisto.get(demisto.context(), 'EmailCampaign.LowerSimilarityIncidents')
def get_incidents_info_md(incidents, fields_to_display=None):
    """
    Get the campaign incidents relevant info in MD table

    :type incidents: ``list``
    :param incidents: the campaign incidents to collect the info from

    :type fields_to_display: ``list``
    :param fields_to_display: list of result headers; falls back to DEFAULT_HEADERS

    :rtype: ``str``
    :return: the MD table str, or None when there are no incidents
    """
    if incidents:
        if not fields_to_display:
            headers = DEFAULT_HEADERS
        else:
            headers = fields_to_display
        converted_incidents = [convert_incident_to_hr(incident) for incident in incidents]
        return tableToMarkdown(
            name='',
            t=converted_incidents,
            headerTransform=string_to_table_header,
            headers=headers,
            removeNull=True,
        )
    return None
def update_empty_fields():
    """
    Update the campaign dynamic section empty field with default values in order for them to appear in the page
    """
    incident = demisto.incidents()[0]
    custom_fields = incident.get('customFields', {})
    # Only fill fields that are currently empty/falsy; existing values win.
    for field in DEFAULT_CUSTOM_FIELDS.keys():
        if not custom_fields.get(field):
            custom_fields[field] = DEFAULT_CUSTOM_FIELDS[field]
    demisto.executeCommand('setIncident', {'id': incident['id'], 'customFields': custom_fields})
def main():
    """Dynamic-section entry: render the lower-similarity campaign incidents table."""
    try:
        incidents = get_campaign_incidents_from_context()
        fields_to_display = demisto.get(demisto.context(), 'EmailCampaign.fieldsToDisplay')
        if incidents:
            # Refresh volatile fields before rendering and seed empty layout fields.
            update_incident_with_required_keys(incidents, KEYS_FETCHED_BY_QUERY)
            update_empty_fields()
            readable_output = get_incidents_info_md(incidents, fields_to_display)
        else:
            readable_output = NO_CAMPAIGN_INCIDENTS_MSG
        return_results(CommandResults(readable_output=readable_output, raw_response=readable_output))
    except Exception as err:
        return_error(str(err))
# XSOAR executes scripts with __name__ set to one of these values.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | 8d6d9d1bae4dd3fe99fe76984b5bc7c1 | 32.310345 | 119 | 0.636646 | 3.768531 | false | false | false | false |
demisto/content | Packs/RiskSense/Scripts/RiskSenseGetRansomewareCVEScript/RiskSenseGetRansomewareCVEScript_test.py | 2 | 3156 | EXPECTED_RANSOMWARE_CVES = [
{
"Cve": "CVE-0000-0000",
"CVSS": 7.5,
"VRR": 5.67,
"ThreatCount": 1,
"Trending": "false",
"VulnLastTrendingOn": "2018-05-01",
"Description": "remote code execution",
"Threats": [{
"Title": "Hunter Exploit Kit",
"Category": "Ransomware",
"Severity": "null",
"Description": "",
"Cve": [
"CVE-0000-0000"
],
"Source": "MCAFEE",
"Published": "2017-08-03T00:00:00",
"Updated": "2019-08-16T15:50:04",
"ThreatLastTrendingOn": "2018-02-23",
"Trending": "false"
}]
}
]
HOST_FINDING_DATA = [{
"HostID": 3569982,
"Vulnerability": [
{
"Cve": "CVE-0000-0000",
"BaseScore": 7.5,
"ThreatCount": 0,
"AttackVector": "Network",
"AccessComplexity": "Low",
"Authentication": "None",
"ConfidentialityImpact": "Partial",
"Integrity": "Partial",
"AvailabilityImpact": "Partial",
"Trending": "false",
"VulnLastTrendingOn": "2018-05-01",
"Description": "remote code execution"
}
],
"ThreatCount": 0,
"Threat": [
{
"Title": "Hunter Exploit Kit",
"Category": "Ransomware",
"Severity": "null",
"Description": "",
"Details": "",
"Cve": [
"CVE-0000-0000"
],
"Source": "MCAFEE",
"Published": "2017-08-03T00:00:00",
"Updated": "2019-08-16T15:50:04",
"ThreatLastTrendingOn": "2018-02-23",
"Trending": "false",
"Link": ""
}
],
"RiskRating": 5.67
}]
def test_header_transform():
    """header_transform maps raw context keys to display column names (identity otherwise)."""
    from RiskSenseGetRansomewareCVEScript import header_transform
    assert header_transform('CVSS') == 'CVSS Score'
    assert header_transform('VRR') == 'VRR Score'
    assert header_transform('ThreatCount') == 'Threat Count'
    assert header_transform('VulnLastTrendingOn') == 'Last Trending On Date'
    assert header_transform('Trending') == 'Trending'
def test_get_ransomware_cves():
    """get_ransomware_cves extracts ransomware-related CVE records from host findings."""
    from RiskSenseGetRansomewareCVEScript import get_ransomware_cves
    ransomware_cves = get_ransomware_cves(HOST_FINDING_DATA)
    assert ransomware_cves == EXPECTED_RANSOMWARE_CVES
def test_display_ransomware_trending_cve_results():
    """Trending results are returned under the RansomwareTrendingCves context path."""
    from RiskSenseGetRansomewareCVEScript import display_ransomware_trending_cve_results
    result = display_ransomware_trending_cve_results(EXPECTED_RANSOMWARE_CVES)
    assert result.outputs_prefix == 'RiskSense.RansomwareTrendingCves'
    assert result.outputs_key_field == 'Cve'
def test_display_ransomware_cve_results():
    """All ransomware CVEs are returned unchanged under the RansomwareCves context path."""
    from RiskSenseGetRansomewareCVEScript import display_ransomware_cve_results
    result = display_ransomware_cve_results(EXPECTED_RANSOMWARE_CVES)
    assert result.outputs_prefix == 'RiskSense.RansomwareCves'
    assert result.outputs_key_field == 'Cve'
    assert result.outputs == EXPECTED_RANSOMWARE_CVES
| mit | e2503172be8290fc2e0c658c77bbf7a2 | 31.536082 | 88 | 0.569708 | 3.263702 | false | false | false | false |
demisto/content | Packs/Base/Scripts/CheckDockerImageAvailable/CheckDockerImageAvailable_test.py | 2 | 3306 | from CheckDockerImageAvailable import docker_auth, main, docker_min_layer, parse_www_auth
import demistomock as demisto
import json
import pytest
RETURN_ERROR_TARGET = 'CheckDockerImageAvailable.return_error'
@pytest.mark.filterwarnings('ignore::urllib3.exceptions.InsecureRequestWarning')
def test_auth():
    """Live-network test: fetches a real auth token from Docker Hub for demisto/python."""
    token = docker_auth('demisto/python', verify_ssl=False)
    assert token is not None
def test_parse_www_auth():
    """parse_www_auth extracts (realm, service) from a Bearer WWW-Authenticate header."""
    res = parse_www_auth('Bearer realm="https://auth.docker.io/token",service="registry.docker.io"')
    assert len(res) == 2
    assert res[0] == 'https://auth.docker.io/token'
    assert res[1] == 'registry.docker.io'
    # Google Container Registry variant of the same header.
    res = parse_www_auth('Bearer realm="https://gcr.io/v2/token",service="gcr.io"')
    assert len(res) == 2
    assert res[0] == 'https://gcr.io/v2/token'
    assert res[1] == 'gcr.io'
def test_min_layer():
    """docker_min_layer picks the layer with the smallest 'size' from a manifest layer list."""
    layers_text = """
[
   {
      "mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
      "size": 2207038,
      "digest": "sha256:169185f82c45a6eb72e0ca4ee66152626e7ace92a0cbc53624fb46d0a553f0bd"
   },
   {
      "mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
      "size": 309123,
      "digest": "sha256:ef00a8db125d3a25e193b96e6786193f744e24b01db96dab132e687e53848f9a"
   },
   {
      "mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
      "size": 24623747,
      "digest": "sha256:b5c6e736c1549dc0f0b4e41465ad17defc8d2af10f7c28e0a3bfc530298a8a42"
   },
   {
      "mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
      "size": 233,
      "digest": "sha256:ae23d06361f0ec0edf69341d705ab828a0b28c162a47e7733217ca7e4003606c"
   }
]
    """
    layers = json.loads(layers_text)
    min_layer = docker_min_layer(layers)
    assert min_layer['size'] == 233
def test_valid_docker_image(mocker):
    """main() should report 'ok' for existing images on both docker.io and gcr.io."""
    demisto_image = 'demisto/python:2.7.15.155' # disable-secrets-detection
    args = {'input': demisto_image, 'trust_any_certificate': 'yes'}
    mocker.patch.object(demisto, 'args', return_value=args)
    mocker.patch.object(demisto, 'results')
    # validate our mocks are good
    assert demisto.args()['input'] == demisto_image
    main()
    assert demisto.results.call_count == 1
    # call_args is tuple (args list, kwargs). we only need the first one
    results = demisto.results.call_args[0]
    assert len(results) == 1
    assert results[0] == 'ok'
    # second round: same mutable args dict is reused with a gcr.io image
    demisto.results.reset_mock()
    gcr_image = 'gcr.io/google-containers/alpine-with-bash:1.0' # disable-secrets-detection
    args['input'] = gcr_image
    assert demisto.args()['input'] == gcr_image
    main()
    results = demisto.results.call_args[0]
    assert len(results) == 1
    assert results[0] == 'ok'
def test_invalid_docker_image(mocker):
    """A non-existent tag should route through return_error exactly once."""
    bad_image = 'demisto/python:bad_tag'
    mocker.patch.object(demisto, 'args',
                        return_value={'input': bad_image, 'trust_any_certificate': 'yes'})
    error_mock = mocker.patch(RETURN_ERROR_TARGET)
    # sanity-check the args mock before exercising main()
    assert demisto.args()['input'] == bad_image
    main()
    assert error_mock.call_count == 1
    # first positional argument of the last call is the error message
    message = error_mock.call_args[0][0]
    assert message is not None
| mit | 533f199e4c3249dbf0c0def8791477c0 | 35.32967 | 108 | 0.668179 | 3.038603 | false | true | false | false |
demisto/content | Packs/Malwr/Integrations/Malwr/Malwr_test.py | 2 | 4058 | from Malwr import MalwrAPI
from requests_mock import Mocker
import tempfile
import pytest
# Minimal HTML fixture imitating the malwr.com submission form; the mocked GET
# of /submission/ returns it so MalwrAPI can scrape the captcha/CSRF inputs.
TEST_HTML = """
<form action="/action_page.php">
<label for="math_captcha_question">Math Captcha Question:</label>
<input type="text" id="math_captcha_question" name="math_captcha_question" value="what is the answer?"><br><br>
<label for="csrfmiddlewaretoken">CSRF Middleware Token:</label>
<input type="text" id="csrfmiddlewaretoken" name="csrfmiddlewaretoken" value="some_token"><br><br>
<p id="somemath">
10 + 14 = 24
2 * 7 = 14
2 - 7 = -5
</p>
</form>
"""
def test_find_submission_links_links_in_response(mocker):
    """Test MalwrAPI static method find_submission_links
    Given:
        - MalwrAPI client
        - Response from submitting a sample
    When:
        - The response contains submissions links
    Then:
        - Ensure submission links are returned by the method
    """
    malwr = MalwrAPI(url='https://fake.com', username='y', password='z')
    malwr.logged = True
    resp_text = 'blah/analysis/abcd1234efgh/\nxabvafg/analysis/11234557788943/blah/blah'
    with Mocker(session=malwr.session) as session_mock:
        session_mock.get(url='https://fake.com/submission/', request_headers=malwr.HEADERS, text=TEST_HTML)
        resp = session_mock.post(
            url='https://fake.com/submission/', request_headers=malwr.HEADERS,
            text=resp_text
        )
        # Wrap the real static method in a MagicMock so both the returned links
        # and the argument it was called with can be inspected afterwards.
        find_submission_links_og = MalwrAPI.find_submission_links
        find_submission_links_mock = mocker.MagicMock(name='find_submission_links')
        def side_effect(req):
            return find_submission_links_og(req)
        find_submission_links_mock.side_effect = side_effect
        MalwrAPI.find_submission_links = find_submission_links_mock
        with tempfile.NamedTemporaryFile() as temp:
            temp.write(b'blah blah blah')
            temp.flush()
            result, _ = malwr.submit_sample(temp.name)
        # the first /analysis/.../ path from resp_text becomes the analysis link
        assert result.get('analysis_link', '') == '/analysis/abcd1234efgh/'
        find_submission_links_mock.assert_called_once()
        # verify the mocked method received the exact POST response text
        # (reaches into requests_mock internals to fetch the canned response)
        mocked_call_arg_text = find_submission_links_mock.call_args.args[0].text
        mocked_session_post_response_text = resp._responses[0].get_response(session_mock.last_request).text
        assert mocked_call_arg_text == mocked_session_post_response_text
# (POST response text, expected submit_sample result) pairs for the
# parametrized "no submission links in response" test below.
testdata = [
    ('file like this waiting for processing, submission aborted.', 'File already submitted, check its status.'),
    ('blah blah blah blah blah blah blah blah blah blah blah bl.', 'Error with the file.'),
]
@pytest.mark.parametrize("post_resp_text,expected_result", testdata)
def test_find_submission_links_links_not_in_response(mocker, post_resp_text, expected_result):
    """Test MalwrAPI static method find_submission_links
    Given:
        - MalwrAPI client
        - Response from submitting a sample
    When:
        - The response does not contain submissions links
    Then:
        - Ensure submission links are not returned by the method
    """
    malwr = MalwrAPI(url='https://fake.com', username='y', password='z')
    malwr.logged = True
    with Mocker(session=malwr.session) as session_mock:
        session_mock.get(url='https://fake.com/submission/', request_headers=malwr.HEADERS, text=TEST_HTML)
        session_mock.post(
            url='https://fake.com/submission/', request_headers=malwr.HEADERS,
            text=post_resp_text
        )
        # wrap the real static method so the call can be asserted on afterwards
        find_submission_links_og = MalwrAPI.find_submission_links
        find_submission_links_mock = mocker.MagicMock(name='find_submission_links')
        def side_effect(req):
            return find_submission_links_og(req)
        find_submission_links_mock.side_effect = side_effect
        MalwrAPI.find_submission_links = find_submission_links_mock
        with tempfile.NamedTemporaryFile() as temp:
            temp.write(b'blah blah blah')
            temp.flush()
            result, _ = malwr.submit_sample(temp.name)
        find_submission_links_mock.assert_called_once()
        # with no /analysis/ links in the response, an error string is returned
        assert result == expected_result
| mit | f8e22e4cf9fa13fcdb746e76f46bdf53 | 38.019231 | 113 | 0.669049 | 3.750462 | false | true | false | false |
demisto/content | Packs/CommonScripts/Scripts/GetDomainDNSDetails/GetDomainDNSDetails.py | 2 | 2841 | import demistomock as demisto # noqa # pylint: disable=unused-wildcard-import
from CommonServerPython import * # noqa # pylint: disable=unused-wildcard-import
from CommonServerUserPython import * # noqa # pylint: disable=unused-wildcard-import
from typing import (
Dict, Any, Optional,
Union, List
)
import traceback
import dns.message
import dns.resolver
import dns.rdatatype
import dns.rdataclass
import dns.rdata
# Per-query lifetime in seconds passed to dnspython's resolve().
DNS_QUERY_TTL = 10.0
# Record types queried by default when the caller does not supply qtype.
QTYPES = ["CNAME", "NS", "A", "AAAA"]
''' STANDALONE FUNCTION '''
def make_query(resolver: dns.resolver.Resolver, qname: str, qtype: str, use_tcp: bool) -> Dict[str, Any]:
    """Resolve *qname* for record type *qtype* using *resolver*.

    Returns {qtype: [rdata strings]} on success, or {} when the domain does
    not exist, there is no answer, or the answer carries no rrset.
    """
    rdtype = dns.rdatatype.from_text(qtype)
    rdclass = dns.rdataclass.from_text("IN")
    try:
        answer = resolver.resolve(
            qname,
            rdtype, rdclass,
            tcp=use_tcp,
            lifetime=DNS_QUERY_TTL,
            raise_on_no_answer=True
        )
    except (dns.resolver.NoAnswer, dns.resolver.NXDOMAIN):
        # no records of this type / domain does not exist
        return {}
    if answer.rrset is None:
        return {}
    # keep only records matching the requested type and class
    matching = [
        rr.to_text()
        for rr in answer.rrset
        if rr is not None and rr.rdtype == rdtype and rr.rdclass == rdclass
    ]
    return {qtype: matching}
''' COMMAND FUNCTION '''
def get_domain_dns_details_command(args: Dict[str, Any]) -> CommandResults:
    """Query DNS records for the ``domain`` argument and return CommandResults.

    Args (in ``args``):
        domain: required name to look up.
        server: optional nameserver overriding the system resolver.
        use_tcp: whether to query over TCP (default 'Yes').
        qtype: optional comma-separated record types; defaults to QTYPES.

    Raises:
        ValueError: when ``domain`` is missing.
    """
    qname = args.get('domain')
    if qname is None:
        raise ValueError("domain is required")
    server = args.get('server')
    use_tcp = argToBoolean(args.get('use_tcp', 'Yes'))
    if (requested_types := args.get('qtype')) is not None:
        qtypes = argToList(requested_types)
    else:
        qtypes = QTYPES
    resolver = dns.resolver.Resolver()
    if server is not None:
        # override the system resolver with the user-supplied nameserver
        resolver.nameservers = [server]
    answer: Dict[str, Any] = {
        'domain': qname,
        'server': server if server is not None else 'system'
    }
    # we ask specifically for CNAMEs
    for qtype in qtypes:
        answer.update(make_query(resolver, qname, qtype, use_tcp=use_tcp))
    outputs: Optional[Dict[str, Dict[str, Any]]] = {
        'DomainDNSDetails': answer
    }
    markdown = tableToMarkdown(
        f' Domain DNS Details for {qname}',
        answer,
        headers=["domain", "server"] + qtypes
    )
    return CommandResults(
        readable_output=markdown,
        outputs=outputs,
        outputs_key_field=['domain', 'server']
    )
''' MAIN FUNCTION '''
def main():
    """Entry point: run the command and surface any failure to the war room."""
    try:
        results = get_domain_dns_details_command(demisto.args())
        return_results(results)
    except Exception as ex:
        demisto.error(traceback.format_exc())  # print the traceback
        return_error(f'Failed to execute GetDomainDNSDetails. Error: {str(ex)}')
''' ENTRY POINT '''
# Covers direct execution plus the XSOAR script runners ('__builtin__' on py2,
# 'builtins' on py3).
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | 86d25bbcdeb4ed3bda6ca416932d96b6 | 23.067797 | 105 | 0.617606 | 3.567839 | false | false | false | false |
demisto/content | Packs/Slack/Scripts/SlackAsk/SlackAsk_test.py | 2 | 9406 | import demistomock as demisto
from CommonServerPython import entryTypes
import json
import dateparser
import datetime
import SlackAsk
# Expected Slack Block Kit payload produced by SlackAsk for the two-option
# ('yes'/'no') case; each button's value embeds the entitlement GUID and reply.
BLOCKS = [{
    'type': 'section',
    'text': {
        'type': 'mrkdwn',
        'text': 'wat up'
    }
}, {
    'type': 'actions',
    'elements': [{
        'type': 'button',
        'text': {
            'type': 'plain_text',
            'emoji': True,
            'text': 'yes'
        },
        'value': '{\"entitlement\": \"4404dae8-2d45-46bd-85fa-64779c12abe8@22\", \"reply\": \"Thank you brother.\"}',
        'style': 'danger'
    }, {
        'type': 'button',
        'text': {
            'type': 'plain_text',
            'emoji': True,
            'text': 'no'
        },
        'value': '{\"entitlement\": \"4404dae8-2d45-46bd-85fa-64779c12abe8@22\", \"reply\": \"Thank you brother.\"}',
        'style': 'danger'
    }]}]
# Same payload with a third 'maybe' button from additionalOptions (no style key).
BLOCKS_ADDITIONAL = [{
    'type': 'section',
    'text': {
        'type': 'mrkdwn',
        'text': 'wat up'
    }
}, {
    'type': 'actions',
    'elements': [{
        'type': 'button',
        'text': {
            'type': 'plain_text',
            'emoji': True,
            'text': 'yes'
        },
        'value': '{\"entitlement\": \"4404dae8-2d45-46bd-85fa-64779c12abe8@22\", \"reply\": \"Thank you brother.\"}',
        'style': 'danger'
    }, {
        'type': 'button',
        'text': {
            'type': 'plain_text',
            'emoji': True,
            'text': 'no'
        },
        'value': '{\"entitlement\": \"4404dae8-2d45-46bd-85fa-64779c12abe8@22\", \"reply\": \"Thank you brother.\"}',
        'style': 'danger'
    }, {
        'type': 'button',
        'text': {
            'type': 'plain_text',
            'emoji': True,
            'text': 'maybe'
        },
        'value': '{\"entitlement\": \"4404dae8-2d45-46bd-85fa-64779c12abe8@22\", \"reply\": \"Thank you brother.\"}',
    }]}]
def execute_command(command, args):
    """Stub for demisto.executeCommand: a fixed entitlement GUID entry for
    'addEntitlement', an empty list for anything else."""
    if command != 'addEntitlement':
        return []
    return [{
        'Type': entryTypes['note'],
        'Contents': '4404dae8-2d45-46bd-85fa-64779c12abe8'
    }]
def test_slack_ask_user(mocker):
    """Buttons mode, user target: SlackAsk builds a Block Kit payload (BLOCKS)
    wrapped with the entitlement, reply, expiry and default response."""
    # Set
    mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command)
    mocker.patch.object(demisto, 'investigation', return_value={'id': '22'})
    mocker.patch.object(demisto, 'args', return_value={
        'user': 'alexios', 'message': 'wat up', 'option1': 'yes#red', 'option2': 'no#red',
        'reply': 'Thank you brother.', 'lifetime': '24 hours', 'defaultResponse': 'NoResponse', 'using-brand': 'SlackV2'
    })
    mocker.patch.object(demisto, 'results')
    mocker.patch.object(dateparser, 'parse', return_value=datetime.datetime(2019, 9, 26, 18, 38, 25))
    # Arrange
    SlackAsk.main()
    call_args = demisto.executeCommand.call_args[0]
    # Assert
    assert call_args[1] == {
        'ignoreAddURL': 'true',
        'using-brand': 'SlackV2',
        'blocks': json.dumps({
            'blocks': json.dumps(BLOCKS),
            'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@22',
            'reply': 'Thank you brother.',
            'expiry': '2019-09-26 18:38:25',
            'default_response': 'NoResponse'
        }),
        'message': 'wat up',
        'to': 'alexios'
    }
def test_slack_ask_user_additional(mocker):
    """Buttons mode with additionalOptions: expects the three-button
    BLOCKS_ADDITIONAL payload."""
    # Set
    mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command)
    mocker.patch.object(demisto, 'investigation', return_value={'id': '22'})
    mocker.patch.object(demisto, 'args', return_value={
        'user': 'alexios', 'message': 'wat up', 'option1': 'yes#red', 'option2': 'no#red',
        'additionalOptions': 'maybe', 'reply': 'Thank you brother.', 'lifetime': '24 hours',
        'defaultResponse': 'NoResponse', 'using-brand': 'SlackV2'
    })
    mocker.patch.object(demisto, 'results')
    mocker.patch.object(dateparser, 'parse', return_value=datetime.datetime(2019, 9, 26, 18, 38, 25))
    # Arrange
    SlackAsk.main()
    call_args = demisto.executeCommand.call_args[0]
    # Assert
    assert call_args[1] == {
        'ignoreAddURL': 'true',
        'using-brand': 'SlackV2',
        'blocks': json.dumps({
            'blocks': json.dumps(BLOCKS_ADDITIONAL),
            'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@22',
            'reply': 'Thank you brother.',
            'expiry': '2019-09-26 18:38:25',
            'default_response': 'NoResponse'
        }),
        'message': 'wat up',
        'to': 'alexios'
    }
def test_slack_ask_channel(mocker):
    """Buttons mode, channel target: same BLOCKS payload but addressed via
    'channel' instead of 'to'."""
    # Set
    mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command)
    mocker.patch.object(demisto, 'investigation', return_value={'id': '22'})
    mocker.patch.object(demisto, 'args', return_value={
        'channel': 'general', 'message': 'wat up', 'option1': 'yes#red', 'option2': 'no#red',
        'reply': 'Thank you brother.', 'lifetime': '24 hours', 'defaultResponse': 'NoResponse', 'using-brand': 'SlackV2'
    })
    mocker.patch.object(demisto, 'results')
    mocker.patch.object(dateparser, 'parse', return_value=datetime.datetime(2019, 9, 26, 18, 38, 25))
    # Arrange
    SlackAsk.main()
    call_args = demisto.executeCommand.call_args[0]
    # Assert
    assert call_args[1] == {
        'ignoreAddURL': 'true',
        'using-brand': 'SlackV2',
        'blocks': json.dumps({
            'blocks': json.dumps(BLOCKS),
            'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@22',
            'reply': 'Thank you brother.',
            'expiry': '2019-09-26 18:38:25',
            'default_response': 'NoResponse'
        }),
        'message': 'wat up',
        'channel': 'general'
    }
def test_slack_ask_user_threads(mocker):
    """Thread mode, user target: instead of blocks, a plain message asking to
    reply with `yes` or `no` is sent."""
    # Set
    mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command)
    mocker.patch.object(demisto, 'investigation', return_value={'id': '22'})
    mocker.patch.object(demisto, 'args', return_value={
        'user': 'alexios', 'message': 'wat up', 'responseType': 'thread', 'option1': 'yes#red', 'option2': 'no#red',
        'reply': 'Thank you brother.', 'lifetime': '24 hours', 'defaultResponse': 'NoResponse', 'using-brand': 'SlackV2'
    })
    mocker.patch.object(demisto, 'results')
    mocker.patch.object(dateparser, 'parse', return_value=datetime.datetime(2019, 9, 26, 18, 38, 25))
    # Arrange
    SlackAsk.main()
    call_args = demisto.executeCommand.call_args[0]
    # Assert
    assert call_args[1] == {
        'message': json.dumps({
            'message': 'wat up - Please reply to this thread with `yes` or `no`.',
            'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@22',
            'reply': 'Thank you brother.',
            'expiry': '2019-09-26 18:38:25',
            'default_response': 'NoResponse'
        }),
        'ignoreAddURL': 'true',
        'using-brand': 'SlackV2',
        'to': 'alexios',
    }
def test_slack_ask_user_threads_additional(mocker):
    """Thread mode with additionalOptions: the prompt lists all three options."""
    # Set
    mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command)
    mocker.patch.object(demisto, 'investigation', return_value={'id': '22'})
    mocker.patch.object(demisto, 'args', return_value={
        'user': 'alexios', 'message': 'wat up', 'option1': 'yes#red', 'option2': 'no#red',
        'additionalOptions': 'maybe', 'responseType': 'thread', 'reply': 'Thank you brother.',
        'lifetime': '24 hours', 'defaultResponse': 'NoResponse', 'using-brand': 'SlackV2'
    })
    mocker.patch.object(demisto, 'results')
    mocker.patch.object(dateparser, 'parse', return_value=datetime.datetime(2019, 9, 26, 18, 38, 25))
    # Arrange
    SlackAsk.main()
    call_args = demisto.executeCommand.call_args[0]
    # Assert
    assert call_args[1] == {
        'message': json.dumps({
            'message': 'wat up - Please reply to this thread with `yes` or `no` or `maybe`.',
            'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@22',
            'reply': 'Thank you brother.',
            'expiry': '2019-09-26 18:38:25',
            'default_response': 'NoResponse'
        }),
        'ignoreAddURL': 'true',
        'using-brand': 'SlackV2',
        'to': 'alexios',
    }
def test_slack_ask_channel_threads(mocker):
    """Thread mode, channel target: thread prompt addressed via 'channel'."""
    # Set
    mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command)
    mocker.patch.object(demisto, 'investigation', return_value={'id': '22'})
    mocker.patch.object(demisto, 'args', return_value={
        'channel': 'general', 'message': 'wat up', 'responseType': 'thread', 'option1': 'yes#red', 'option2': 'no#red',
        'reply': 'Thank you brother.', 'lifetime': '24 hours', 'defaultResponse': 'NoResponse', 'using-brand': 'SlackV2'
    })
    mocker.patch.object(demisto, 'results')
    mocker.patch.object(dateparser, 'parse', return_value=datetime.datetime(2019, 9, 26, 18, 38, 25))
    # Arrange
    SlackAsk.main()
    call_args = demisto.executeCommand.call_args[0]
    # Assert
    assert call_args[1] == {
        'message': json.dumps({
            'message': 'wat up - Please reply to this thread with `yes` or `no`.',
            'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@22',
            'reply': 'Thank you brother.',
            'expiry': '2019-09-26 18:38:25',
            'default_response': 'NoResponse'
        }),
        'ignoreAddURL': 'true',
        'using-brand': 'SlackV2',
        'channel': 'general',
    }
| mit | 72a846ecf5454584cbbb5ca54cc0af56 | 34.360902 | 120 | 0.565384 | 3.174485 | false | false | false | false |
demisto/content | Packs/CommonScripts/Scripts/ReadFile/ReadFile_test.py | 2 | 1115 | import pytest
from ReadFile import *
def test_extract_indicators(mocker):
    """
    Given:
        A file containing text.
    When:
        Running script on file
    Then:
        Validate the right output returns.
    """
    # demisto.executeCommand is mocked to return the path of a 9-byte fixture
    mocker.patch.object(demisto, 'executeCommand', return_value=[{'Contents': {'path': './test_data/test_file.txt'}}])
    results = read_file({})
    assert results == {'Type': 1, 'ContentsFormat': 'text', 'Contents': {'FileData': 'abcabcabc'},
                       'HumanReadable': 'Read 9 bytes from file.', 'EntryContext': {'FileData': 'abcabcabc'}}
def test_extract_indicators_empty_file(mocker):
    """
    Given:
        Name of empty file.
    When:
        Running script on file
    Then:
        Validate that read_file raises on an empty file.
    """
    args = {'maxFileSize': 1024 ** 2}
    # demisto.executeCommand is mocked to return the path of an empty fixture
    mocker.patch.object(demisto, 'executeCommand', return_value=[{'Contents': {'path': './test_data/test_file_empty.txt'}}])
    # pytest.raises already fails the test if no exception is raised; the old
    # trailing `if not e: assert False` was dead code (ExceptionInfo is always
    # truthy), so the context manager alone expresses the intent.
    with pytest.raises(Exception):
        read_file(args)
| mit | 38fd53e78dfe0a2e47f1c6ac3f6ed52e | 27.589744 | 124 | 0.567713 | 4.114391 | false | true | false | false |
demisto/content | Packs/FeedThreatConnect/Integrations/FeedThreatConnect/FeedThreatConnect_test.py | 2 | 6906 | import json
import demistomock as demisto
import pytest
from FeedThreatConnect import create_or_query, parse_indicator, set_tql_query, create_types_query, should_send_request, \
build_url_with_query_params, set_fields_query, get_updated_last_run
def load_json_file(path):
    """Read the file at *path* and return its parsed JSON content."""
    with open(path, 'r') as handle:
        return json.load(handle)
@pytest.mark.parametrize(argnames="threatconnect_score, dbot_score",
                         argvalues=[(1000, 3),
                                    (830, 3),
                                    (664, 2),
                                    (498, 2),
                                    (332, 1),
                                    (166, 1),
                                    (0, 0)])
def test_calculate_dbot_score(threatconnect_score, dbot_score):
    """Boundary ThreatConnect scores map to the expected DBot scores (0-3)."""
    from FeedThreatConnect import calculate_dbot_score
    assert calculate_dbot_score(threatconnect_score) == dbot_score
def test_parse_indicator(mocker):
    """parse_indicator on the raw indicators fixture must reproduce the
    expected parsed-indicator fixture exactly."""
    mocker.patch.object(demisto, 'params', return_value={'createRelationships': True, 'tlpcolor': None})
    data_dir = {
        'parsed_indicator.json': './test_data/parsed_indicator.json', # type: ignore # noqa
        'indicators.json': './test_data/indicators.json'} # type: ignore # noqa
    indicator = parse_indicator(load_json_file(data_dir['indicators.json']))
    assert load_json_file(data_dir['parsed_indicator.json']) == indicator
def test_create_or_query():
    """create_or_query expands a CSV of values into OR-joined field clauses."""
    raw_values = '1,2,3,4,5'
    # build the expected clause the same way: one `field="v" OR ` piece per
    # value, then drop the final dangling 'OR ' (a trailing space remains)
    expected = ''.join(f'test="{v}" OR ' for v in raw_values.split(','))[:-3]
    assert create_or_query('test', raw_values) == expected
@pytest.mark.parametrize("params, expected_result, endpoint",
                         [({'indicator_active': False, "indicator_type": ['All'],
                            'createRelationships': False, "confidence": 0, "threat_assess_score": 0}, '', 'indicators'),
                          ({'indicator_active': True, "group_type": ['File'],
                            'createRelationships': False, "confidence": 0, "threat_assess_score": 0},
                           'typeName IN ("File")', 'groups'),
                          ({'indicator_active': False, "group_type": ['Tool'],
                            'createRelationships': False, "confidence": 50, "threat_assess_score": 80},
                           'typeName IN ("Tool")', 'groups')])
def test_set_tql_query(params, expected_result, endpoint):
    """
    Given:
        - an empty from_date value and demisto params
        Case 1: expecting no tql query
        Case 2: expecting a specific group type, and only active indicators
    When:
        - running set_tql_query command
    Then:
        - validate the tql output
    """
    from_date = ''
    output = set_tql_query(from_date, params, endpoint)
    assert output == expected_result
@pytest.mark.parametrize("params, expected_result, endpoint",
                         [({"group_type": ['All'], "indicator_type": []}, 'typeName IN ("Attack Pattern","Campaign",'
                           '"Course of Action","Intrusion Set","Malware","Report","Tool","Vulnerability")', 'groups'),
                          ({"group_type": ['File'], "indicator_type": []}, 'typeName IN ("File")', 'groups'),
                          ({"group_type": ['File'], "indicator_type": ['All']}, 'typeName IN ("File")', 'groups')])
def test_create_types_query(params, expected_result, endpoint):
    """
    Given:
        - demisto params and an endpoint
    When:
        - running create_types_query command
    Then:
        - validate the output
    """
    # 'All' for group_type expands to the full list of group type names
    output = create_types_query(params, endpoint)
    assert output == expected_result
@pytest.mark.parametrize("params, expected_result, endpoint",
                         [({"group_type": ['All'], "indicator_type": []}, False, 'indicators'),
                          ({"group_type": [], "indicator_type": ['All']}, True, 'indicators')])
def test_should_send_request(params, expected_result, endpoint):
    """
    Given:
        - demisto params and an endpoint
    When:
        - running should_send_request command
    Then:
        - validate the result
    """
    # the 'indicators' endpoint is queried only when indicator types are selected
    output = should_send_request(params, endpoint)
    assert output == expected_result
@pytest.mark.parametrize("params, expected_result, endpoint",
                         [({"indicator_type": ['All'], 'indicator_query': '', 'createRelationships': False},
                           '/api/v3/indicators?tql=indicatorActive%20EQ%20True&fields=tags&fields=threatAssess&resultStart'
                           '=0&resultLimit=100&sorting=dateAdded%20ASC', 'indicators'),
                          ({"group_type": ['All'], 'indicator_query': 'indicatorActive EQ False', 'createRelationships': True},
                           '/api/v3/groups?tql=indicatorActive%20EQ%20False&fields=tags&fields=associatedGroups'
                           '&fields=associatedIndicators&resultStart=0&resultLimit=100&sorting=dateAdded%20ASC', 'groups')])
def test_build_url_with_query_params(mocker, params, expected_result, endpoint):
    """
    Given:
        - demisto params and an endpoint
    When:
        - running build_url_with_query_params command
    Then:
        - validate the result
    """
    # NOTE(review): case 2 expects the explicit 'indicator_query' TQL (… EQ False)
    # rather than the mocked set_tql_query value below — presumably
    # build_url_with_query_params prefers a user-supplied query; confirm against
    # the implementation.
    mocker.patch('FeedThreatConnect.set_tql_query', return_value='indicatorActive EQ True')
    output = build_url_with_query_params(params, endpoint, {})
    assert output == expected_result
@pytest.mark.parametrize("params, expected_result, endpoint",
                         [({'createRelationships': False}, '&fields=tags&fields=threatAssess', 'indicators'),
                          ({'createRelationships': True}, '&fields=tags&fields=associatedGroups&fields=associatedIndicators',
                           'groups')])
def test_set_fields_query(params, expected_result, endpoint):
    """
    Given:
        - demisto params and an endpoint
    When:
        - running set_fields_query command
    Then:
        - validate the result
    """
    # relationship fields are requested only when createRelationships is on
    output = set_fields_query(params, endpoint)
    assert output == expected_result
@pytest.mark.parametrize("indicators, groups, previous_run, expected_result",
                         [([{'dateAdded': 'dateAdded'}], [{'dateAdded': 'dateAdded'}], {},
                           {'indicators': {'from_date': 'dateAdded'}, 'groups': {'from_date': 'dateAdded'}}),
                          (([{'dateAdded': 'dateAdded'}], [], {'groups': {'from_date': 'from_date'}},
                            {'indicators': {'from_date': 'dateAdded'}, 'groups': {'from_date': 'from_date'}}))])
def test_get_updated_last_run(indicators, groups, previous_run, expected_result):
    """
    Given:
        - list of indicators, list of groups, and a previouse run
    When:
        - running get_updated_last_run command
    Then:
        - validate the result
    """
    # when a fetched list is empty, the previous run's from_date is kept
    output = get_updated_last_run(indicators, groups, previous_run)
    assert output == expected_result
| mit | 68b4dacc9469837ba74f1360e3d854c6 | 42.433962 | 127 | 0.5766 | 4.045694 | false | true | false | false |
demisto/content | Packs/CTIX/Integrations/CTIXv3/CTIXv3.py | 2 | 54051 | # Uncomment while development
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
"""IMPORTS"""
import base64
import hashlib
import hmac
import json
import time
import urllib.parse
from typing import Any, Dict
import requests
import urllib3
# Disable insecure warnings
urllib3.disable_warnings()
"""GLOBALS"""
domain_regex = (
"([a-z¡-\uffff0-9](?:[a-z¡-\uffff0-9-]{0,61}"
"[a-z¡-\uffff0-9])?(?:\\.(?!-)[a-z¡-\uffff0-9-]{1,63}(?<!-))*"
"\\.(?!-)(?!(jpg|jpeg|exif|tiff|tif|png|gif|otf|ttf|fnt|dtd|xhtml|css"
"|html)$)(?:[a-z¡-\uffff-]{2,63}|xn--[a-z0-9]{1,59})(?<!-)\\.?$"
"|localhost)"
)
tag_colors = {
"blue": "#0068FA",
"purple": "#5236E2",
"orange": "#EB9C00",
"red": "#FF5330",
"green": "#27865F",
"yellow": "#C4C81D",
"turquoise": "#00A2C2",
"pink": "#C341E7",
"light-red": "#AD6B76",
"grey": "#95A1B1"
}
CTIX_DBOT_MAP = {
"ipv4-addr": "ip",
"ipv6-addr": "ip",
"MD5": "file",
"SHA-1": "file",
"SHA-224": "file",
"SHA-256": "file",
"SHA-384": "file",
"SHA-512": "file",
"SSDEEP": "file",
"domain-name": "domain",
"domain": "domain",
"email-addr": "email",
"email-message": "email",
"artifact": "custom",
"network-traffic": "custom",
"user-agent": "custom",
"windows-registry-key": "custom",
"directory": "custom",
"process": "custom",
"software": "custom",
"user-account": "custom",
"mac-addr": "custom",
"mutex": "custom",
"autonomous-system": "custom",
"cidr": "custom",
"certificate": "x509-certificate",
"url": "url"
}
REGEX_MAP = {
"url": re.compile(urlRegex, regexFlags),
"domain": re.compile(domain_regex, regexFlags),
"hash": re.compile(hashRegex, regexFlags),
}
""" CLIENT CLASS """
class Client(BaseClient):
"""
Client to use in the CTIX integration. Overrides BaseClient
"""
    def __init__(
        self,
        base_url: str,
        access_id: str,
        secret_key: str,
        verify: bool,
        proxies: dict,
    ) -> None:
        # base_url is the CTIX API root; access_id/secret_key feed the
        # HMAC request-signing scheme (see signature()).
        self.base_url = base_url
        self.access_id = access_id
        self.secret_key = secret_key
        self.verify = verify      # whether to verify TLS certificates
        self.proxies = proxies    # requests-style proxy mapping
def signature(self, expires: int):
'''
Signature Generation
:param int expires: Epoch time in which time when signature will expire
:return str signature : signature queryset
'''
to_sign = "%s\n%i" % (self.access_id, expires)
return base64.b64encode(
hmac.new(
self.secret_key.encode("utf-8"), to_sign.encode("utf-8"), hashlib.sha1
).digest()
).decode("utf-8")
def add_common_params(self, params: dict):
'''
Add Common Params
:param dict params: Paramters to be added in request
:return dict: Params dictionary with AccessID, Expires and Signature
'''
expires = int(time.time() + 5)
params["AccessID"] = self.access_id
params["Expires"] = expires
params["Signature"] = self.signature(expires)
return params
def get_http_request(self, full_url: str, payload: dict = None, **kwargs):
'''
GET HTTP Request
:param str full_url: URL to be called
:param dict payload: Request body, defaults to None
:raises DemistoException: If Any error is found will be raised on XSOAR
:return dict: Response object
'''
kwargs = self.add_common_params(kwargs)
full_url = full_url + "?" + urllib.parse.urlencode(kwargs)
headers = {"content-type": "application/json"}
resp = requests.get(
full_url,
verify=self.verify,
proxies=self.proxies,
timeout=5,
headers=headers,
json=payload,
)
status_code = resp.status_code
try:
resp.raise_for_status() # Raising an exception for non-200 status code
response = {"data": resp.json(), "status": status_code}
return response
except requests.exceptions.HTTPError:
return_error(f'Error: status-> {status_code!r}; Reason-> {resp.reason!r}]')
def post_http_request(self, full_url: str, payload: dict, params):
'''
POST HTTP Request
:param str full_url: URL to be called
:param dict payload: Request body, defaults to None
:raises DemistoException: If Any error is found will be raised on XSOAR
:return dict: Response object
'''
headers = {"content-type": "application/json"}
params = self.add_common_params(params)
full_url = full_url + "?" + urllib.parse.urlencode(params)
resp = requests.post(
full_url,
verify=self.verify,
proxies=self.proxies,
json=payload,
headers=headers,
timeout=5,
)
status_code = resp.status_code
try:
resp.raise_for_status() # Raising an exception for non-200 status code
response = {"data": resp.json(), "status": status_code}
return response
except requests.exceptions.HTTPError:
return_error(f'Error: status-> {status_code!r}; Reason-> {resp.reason!r}]')
def test_auth(self):
'''
Test authentication
:return dict: Returns result for ping
'''
client_url = self.base_url + "ping/"
return self.get_http_request(client_url)
def create_tag(self, name: str, color_code: str):
"""Creates a tag in ctix platform
:type name: ``str``
:param name: Name of the tag
:type color_code: ``str``
:param color_code: Hex color code of the tag e.g #111111
:return: dict containing the details of newly created tag
:rtype: ``Dict[str, Any]``
"""
url_suffix = "ingestion/tags/"
client_url = self.base_url + url_suffix
payload = {"name": name, "color_code": color_code}
return self.post_http_request(full_url=client_url, payload=payload, params={})
    def get_tags(self, page: int, page_size: int, q: str):
        """Paginated list of tags from ctix platform using page_number and page_size
        :type page: int
        :param page: page number for the pagination for list api
        :type page_size: int
        :param page_size: page size for the pagination for list api
        :type q: str
        :param q: search query string for the list api
        """
        url_suffix = "ingestion/tags/"
        client_url = self.base_url + url_suffix
        params = {"page": page, "page_size": page_size}
        if q:
            params["q"] = q  # type: ignore
        # NOTE(review): `params` is passed positionally as the `payload` (JSON
        # body) of get_http_request, not as query-string kwargs -- unlike
        # get_whitelist_iocs, which sends them via **params. Confirm the API
        # accepts pagination in a GET body; otherwise this should likely be
        # `self.get_http_request(client_url, {}, **params)`.
        return self.get_http_request(client_url, params)
def delete_tag(self, tag_id: str):
"""Deletes a tag from the ctix instance
:type tag_id: ``str``
:param name: id of the tag to be deleted
"""
url_suffix = "ingestion/tags/bulk-actions/"
client_url = self.base_url + url_suffix
return self.post_http_request(
client_url, {"ids": tag_id, "action": "delete"}, {}
)
    def whitelist_iocs(self, ioc_type, values, reason):
        """Add indicators to the CTIX whitelist.

        :param ioc_type: type of the indicators being whitelisted
        :param values: indicator values to whitelist
        :param reason: justification recorded with the whitelist entry
        """
        url_suffix = "conversion/whitelist/"
        client_url = self.base_url + url_suffix
        payload = {"type": ioc_type, "values": values, "reason": reason}
        return self.post_http_request(client_url, payload, {})
def get_whitelist_iocs(self, page: int, page_size: int, q: str):
"""Paginated list of tags from ctix platform using page_number and page_size
:type page: int
:param page: page number for the pagination for list api
:type page_size: int
:param page_size: page size for the pagination for list api
:type q: str
:param q: search query string for the list api
"""
url_suffix = "conversion/whitelist/"
client_url = self.base_url + url_suffix
params = {"page": page, "page_size": page_size}
if q:
params["q"] = q # type: ignore
return self.get_http_request(client_url, {}, **params)
def remove_whitelisted_ioc(self, whitelist_id: str):
"""Removes whitelisted ioc with given `whitelist_id`
:type whitelist_id: str
:param whitelist_id: id of the whitelisted ioc to be removed
"""
url_suffix = "conversion/whitelist/bulk-actions/"
client_url = self.base_url + url_suffix
return self.post_http_request(
client_url, {"ids": whitelist_id, "action": "delete"}, {}
)
def get_threat_data(self, page: int, page_size: int, query: str):
'''
Get Threat Data
:param int page: Paginated number from where data will be polled
:param int page_size: Size of the result
:param str query: CQL query for polling specific result
:return dict: Returns response for query
'''
url_suffix = "ingestion/threat-data/list/"
client_url = self.base_url + url_suffix
params = {"page": page, "page_size": page_size}
payload = {"query": query}
return self.post_http_request(client_url, payload=payload, params=params)
def get_saved_searches(self, page: int, page_size: int):
'''
Get Saved Searches
:param int page: Paginated number from where data will be polled
:param int page_size: Size of the result
:return dict: Returns response for query
'''
url_suffix = "ingestion/saved-searches/"
client_url = self.base_url + url_suffix
params = {"page": page, "page_size": page_size}
return self.get_http_request(client_url, {}, **params)
def get_server_collections(self, page: int, page_size: int):
'''
Get Server Collections
:param int page: Paginated number from where data will be polled
:param int page_size: Size of the result
:return dict: Returns response for query
'''
url_suffix = "publishing/collection/"
client_url = self.base_url + url_suffix
params = {"page": page, "page_size": page_size}
return self.get_http_request(client_url, {}, **params)
def get_actions(self, page: int, page_size: int, params: Dict[str, Any]):
'''
Get Actions
:param int page: Paginated number from where data will be polled
:param int page_size: Size of the result
:param Dict[str, Any] params: Params to be send with request
:return dict: Returns response for query
'''
url_suffix = "ingestion/actions/"
client_url = self.base_url + url_suffix
params["page"] = page
params["page_size"] = page_size
return self.get_http_request(client_url, **params)
def add_indicator_as_false_positive(self, object_ids: list[str], object_type: str):
'''
Add Indicator as False Positive
:param list[str] object_ids: Object IDs of the IOCs
:param str object_type: Object type of the IOCs
:return dict: Returns response for query
'''
url_suffix = "ingestion/threat-data/bulk-action/false_positive/"
client_url = self.base_url + url_suffix
payload = {"object_ids": object_ids, "object_type": object_type, "data": {}}
return self.post_http_request(client_url, payload, {})
def add_ioc_to_manual_review(self, object_ids: list[str], object_type: str):
    '''
    Send the given IOCs to manual review (bulk action).

    :param list[str] object_ids: Object IDs of the IOCs
    :param str object_type: Object type of the IOCs (e.g. "indicator")
    :return dict: Response for the query
    '''
    url_suffix = "ingestion/threat-data/bulk-action/manual_review/"
    client_url = self.base_url + url_suffix
    # "data" is required by the bulk-action endpoint but carries no payload here.
    payload = {"object_ids": object_ids, "object_type": object_type, "data": {}}
    # Trailing empty dict is the (unused) query-params slot of the wrapper.
    return self.post_http_request(client_url, payload, {})
def deprecate_ioc(self, object_ids: list, object_type: str):
    '''
    Deprecate the given IOCs (bulk action).

    :param list object_ids: Object IDs of the IOCs (the command layer passes a
        list built via argToList; the original ``str`` annotation was wrong)
    :param str object_type: Object type of the IOCs
    :return dict: Response for the query
    '''
    url_suffix = "ingestion/threat-data/bulk-action/deprecate/"
    client_url = self.base_url + url_suffix
    # "data" is required by the bulk-action endpoint but carries no payload here.
    payload = {"object_ids": object_ids, "object_type": object_type, "data": {}}
    # Trailing empty dict is the (unused) query-params slot of the wrapper.
    return self.post_http_request(client_url, payload, {})
def add_analyst_tlp(self, object_id: str, object_type: str, data: dict):
    '''
    Add Analyst TLP to an indicator.

    :param str object_id: Object ID of the IOC
    :param str object_type: Object type of the IOC
    :param dict data: Request body (expected to carry the ``analyst_tlp`` value)
    :return dict: Response for the query
    '''
    url_suffix = "ingestion/threat-data/action/analyst_tlp/"
    client_url = self.base_url + url_suffix
    payload = {"object_id": object_id, "object_type": object_type, "data": data}
    # Trailing empty dict is the (unused) query-params slot of the wrapper.
    return self.post_http_request(client_url, payload, {})
def add_analyst_score(self, object_id: str, object_type: str, data: dict):
    '''
    Add Analyst Score to an indicator.

    :param str object_id: Object ID of the IOC
    :param str object_type: Object type of the IOC
    :param dict data: Request body (expected to carry the ``analyst_score`` value)
    :return dict: Response for the query
    '''
    url_suffix = "ingestion/threat-data/action/analyst_score/"
    client_url = self.base_url + url_suffix
    payload = {"object_id": object_id, "object_type": object_type, "data": data}
    # Trailing empty dict is the (unused) query-params slot of the wrapper.
    return self.post_http_request(client_url, payload, {})
def saved_result_set(self, page: int, page_size: int, label_name: str, query: str):
    '''
    Fetch a Saved Result Set via the threat-data list endpoint.

    :param int page: Page number from where data will be polled
    :param int page_size: Number of records per page
    :param str label_name: Label name used to get the data from the rule
    :param str query: CQL query to get specific data (None falls back to all indicators)
    :return dict: Response for the query
    '''
    url_suffix = "ingestion/threat-data/list/"
    client_url = self.base_url + url_suffix
    params = {}
    params.update({"page": page})
    params.update({"page_size": page_size})
    # Fall back to listing all indicators when no query was supplied.
    if query is None:
        query = "type=indicator"
    payload = {"label_name": label_name, "query": query}
    return self.post_http_request(client_url, payload, params)
def tag_indicator_updation(
    self,
    q: str,
    page: int,
    page_size: int,
    object_id: str,
    object_type: str,
    tag_id: str,
    operation: str,
):
    '''
    Add or remove tags on an indicator.

    The current tag list is fetched first, the comma-separated ``tag_id``
    values are merged in (or removed), and the full de-duplicated list is
    posted back to the ``add_tag`` action endpoint — presumably the server
    replaces the tag set wholesale (TODO confirm).

    :param str q: Query string forwarded as a request param
    :param int page: Page number used when fetching the current tags
    :param int page_size: Page size used when fetching the current tags
    :param str object_id: Object ID of the IOC
    :param str object_type: Object type of the IOC
    :param str tag_id: Comma-separated tag IDs to add or remove
    :param str operation: "add_tag_indicator" or "remove_tag_from_indicator"
    :return dict: Response for the query
    '''
    # Tag ids currently attached to the indicator.
    tags_data = self.get_indicator_tags(
        object_type, object_id, {"page": page, "page_size": page_size}
    )["data"]
    tags = [_["id"] for _ in tags_data["tags"]]
    if operation == "add_tag_indicator":
        tags.extend([_.strip() for _ in tag_id.split(",")])
    elif operation == "remove_tag_from_indicator":
        removable_tags = [_.strip() for _ in tag_id.split(",")]
        for r_tag in removable_tags:
            if r_tag in tags:
                tags.remove(r_tag)
    # De-duplicate before posting (order is not preserved by set()).
    final_tags = list(set(tags))
    url_suffix = "ingestion/threat-data/action/add_tag/"
    client_url = self.base_url + url_suffix
    params = {"page": page, "page_size": page_size, "q": q}
    payload = {
        "object_id": object_id,
        "object_type": object_type,
        "data": {"tag_id": final_tags},
    }
    return self.post_http_request(client_url, payload, params)
def search_for_tag(self, params: dict):
    '''
    Search for a tag.

    :param dict params: Query params to send with the request
    :return dict: Response for the query
    '''
    url_suffix = "ingestion/tags/"
    client_url = self.base_url + url_suffix
    return self.get_http_request(client_url, **params)
def get_indicator_details(self, object_type: str, object_id: str, params: dict):
    '''
    Get basic details of an indicator.

    :param str object_type: Object type of the IOC (interpolated into the URL)
    :param str object_id: Object ID of the IOC (interpolated into the URL)
    :param dict params: Query params to send with the request
    :return dict: Response for the query
    '''
    url_suffix = f"ingestion/threat-data/{object_type}/{object_id}/basic/"
    client_url = self.base_url + url_suffix
    return self.get_http_request(client_url, **params)
def get_indicator_tags(self, object_type: str, object_id: str, params: dict):
    '''
    Get the tags attached to an indicator (quick-actions view).

    :param str object_type: Object type of the IOC (interpolated into the URL)
    :param str object_id: Object ID of the IOC (interpolated into the URL)
    :param dict params: Query params to send with the request
    :return dict: Response for the query
    '''
    url_suffix = f"ingestion/threat-data/{object_type}/{object_id}/quick-actions/"
    client_url = self.base_url + url_suffix
    return self.get_http_request(client_url, **params)
def get_indicator_relations(self, object_type: str, object_id: str, params: dict):
    '''
    Get the relations of an indicator.

    :param str object_type: Object type of the IOC (interpolated into the URL)
    :param str object_id: Object ID of the IOC (interpolated into the URL)
    :param dict params: Query params to send with the request
    :return dict: Response for the query
    '''
    url_suffix = f"ingestion/threat-data/{object_type}/{object_id}/relations/"
    client_url = self.base_url + url_suffix
    return self.get_http_request(client_url, **params)
def get_indicator_observations(self, params: dict):
    '''
    Get observations (source references) for an indicator.

    :param dict params: Query params to send with the request (the command
        layer includes object_id/object_type here)
    :return dict: Response for the query
    '''
    url_suffix = "ingestion/threat-data/source-references/"
    client_url = self.base_url + url_suffix
    return self.get_http_request(client_url, **params)
def get_conversion_feed_source(self, params: dict):
    '''
    Get conversion feed sources.

    :param dict params: Query params to send with the request
    :return dict: Response for the query
    '''
    url_suffix = "conversion/feed-sources/"
    client_url = self.base_url + url_suffix
    return self.get_http_request(client_url, **params)
def get_lookup_threat_data(
    self, object_type: str, object_names: list, params: dict
):
    '''
    Look up threat data for the given indicator values.

    :param str object_type: Object type of the IOCs
    :param list object_names: Indicator/IOC values to look up
    :param dict params: Query params to send with the request
    :return dict: Response for the query
    '''
    url_suffix = "ingestion/threat-data/list/"
    # Single values are special-cased: a one-element tuple repr would leave a
    # trailing comma inside the CQL IN clause.
    if len(object_names) == 1:
        query = f"type={object_type} AND value IN ('{object_names[0]}')"
    else:
        # Relies on Python's tuple repr to quote each value in the IN clause.
        query = f"type={object_type} AND value IN {tuple(object_names)}"
    payload = {"query": query}
    client_url = self.base_url + url_suffix
    return self.post_http_request(client_url, payload, params)
""" HELPER FUNCTIONS """
def to_dbot_score(ctix_score: int) -> int:
    """
    Map a CTIX confidence score onto the DBotScore scale.

    0 -> NONE (unknown), 1-30 -> GOOD, 31-70 -> SUSPICIOUS, above 70 -> BAD.
    """
    if ctix_score == 0:
        return Common.DBotScore.NONE  # unknown
    if ctix_score <= 30:
        return Common.DBotScore.GOOD
    if ctix_score <= 70:
        return Common.DBotScore.SUSPICIOUS
    return Common.DBotScore.BAD
def no_result_found(data: Any):
    """
    Return a 'no results' CommandResults when *data* is empty, else pass it through.

    Empty means '', ' ', None, [] or {} — callers check the return type with
    isinstance(..., CommandResults) to decide whether to short-circuit.
    """
    empty_values = ('', ' ', None, [], {})
    if data in empty_values:
        return CommandResults(
            readable_output='No results were found',
            outputs=None,
            raw_response=None,
        )
    return data
def check_for_empty_variable(value: str, default: Any):
    """Return *default* when *value* is empty ('', ' ' or None), else *value*."""
    if value in ('', ' ', None):
        return default
    return value
def _result_for_value(value: dict, table_name: str, output_prefix: str,
                      outputs_key_field: str, indicator=None) -> CommandResults:
    """Wrap a single threat-data record (plus optional indicator) in CommandResults."""
    return CommandResults(
        readable_output=tableToMarkdown(table_name, value, removeNull=True),
        outputs_prefix=output_prefix,
        outputs_key_field=outputs_key_field,
        outputs=value,
        indicator=indicator,
        raw_response=value,
    )


def iter_dbot_score(data: list, score_key: str, type_key: str, table_name: str,
                    output_prefix: str, outputs_key_field: str):
    """
    Build one CommandResults per threat-data record, attaching a DBot
    reputation indicator (IP/File/Domain/Email/URL) where the record's type
    maps to a supported indicator class.

    :param list data: Raw threat-data records (dicts)
    :param str score_key: Key holding the CTIX confidence score
    :param str type_key: Key holding the CTIX ioc type (mapped via CTIX_DBOT_MAP)
    :param str table_name: Title for the markdown table
    :param str output_prefix: Context output prefix
    :param str outputs_key_field: Context key field
    :return list: CommandResults, one per record
    """
    final_data = []
    for value in data:
        indicator = None
        if value[type_key] is not None:
            indicator_type = CTIX_DBOT_MAP[value[type_key]]
            score = to_dbot_score(value.get(score_key, 0))
            if indicator_type == 'ip':
                dbot_score = Common.DBotScore(
                    indicator=value.get("id"),
                    indicator_type=DBotScoreType.IP,
                    integration_name='CTIX',
                    score=score
                )
                indicator = Common.IP(
                    ip=value.get("name"),
                    asn=value.get("asn"),
                    dbot_score=dbot_score
                )
            elif indicator_type == 'file':
                dbot_score = Common.DBotScore(
                    indicator=value.get("id"),
                    indicator_type=DBotScoreType.FILE,
                    integration_name='CTIX',
                    score=score
                )
                indicator = Common.File(
                    name=value.get("name"),
                    dbot_score=dbot_score
                )
                file_key = value.get("name")
                hash_type = value.get("attribute_field", "Unknown").lower()
                if hash_type == "md5":
                    indicator.md5 = file_key
                elif hash_type == "sha-1":
                    indicator.sha1 = file_key
                elif hash_type == "sha-256":
                    indicator.sha256 = file_key
                elif hash_type == "sha-512":
                    # Fix: original used '==' (a no-op comparison) instead of
                    # '=', so the sha512 hash was never attached.
                    indicator.sha512 = file_key
            elif indicator_type == 'domain':
                dbot_score = Common.DBotScore(
                    indicator=value.get("id"),
                    indicator_type=DBotScoreType.DOMAIN,
                    integration_name='CTIX',
                    score=score
                )
                indicator = Common.Domain(
                    domain=value.get("name"),
                    dbot_score=dbot_score
                )
            elif indicator_type == 'email':
                dbot_score = Common.DBotScore(
                    indicator=value.get("id"),
                    indicator_type=DBotScoreType.EMAIL,
                    integration_name='CTIX',
                    score=score
                )
                # NOTE(review): kept as Common.Domain to preserve existing
                # behavior, but Common.EMAIL looks like the intended class
                # for an EMAIL dbot score — confirm before changing.
                indicator = Common.Domain(
                    domain=value.get("name"),
                    dbot_score=dbot_score
                )
            elif indicator_type == 'url':
                dbot_score = Common.DBotScore(
                    indicator=value.get("id"),
                    indicator_type=DBotScoreType.URL,
                    integration_name='CTIX',
                    score=score,
                )
                indicator = Common.URL(
                    url=value.get("name"),
                    dbot_score=dbot_score
                )
            # Any other mapped type (e.g. 'custom') gets no indicator object.
        final_data.append(
            _result_for_value(value, table_name, output_prefix,
                              outputs_key_field, indicator)
        )
    return final_data
""" COMMAND FUNCTIONS """
def test_module(client: Client):
    """
    Validate the configured credentials with a simple authenticated request.
    """
    client.test_auth()
    demisto.results("ok")  # reaching this point means auth succeeded
def create_tag_command(client: Client, args: Dict[str, str]) -> CommandResults:
    """
    create_tag command: Creates a new tag in the CTIX platform.
    """
    tag_name = args["tag_name"]
    color_code = tag_colors[args["color"]]  # translate color name to hex code
    data = no_result_found(client.create_tag(tag_name, color_code).get("data"))
    if isinstance(data, CommandResults):
        return data
    return CommandResults(
        readable_output=tableToMarkdown("Tag Data", data, removeNull=True),
        outputs_prefix="CTIX.Tag",
        outputs_key_field="name",
        outputs=data,
        raw_response=data,
    )
def get_tags_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
    """
    get_tags command: Returns a paginated list of tags.

    Fix: ``args`` was declared as ``args=Dict[str, Any]`` — a *default value*
    of the typing object instead of a type annotation.
    """
    page = check_for_empty_variable(args["page"], 1)
    page_size = check_for_empty_variable(args["page_size"], 10)
    query = args.get("q", '')
    response = client.get_tags(page, page_size, query)
    tags_list = no_result_found(response.get("data", {}).get("results", []))
    if isinstance(tags_list, CommandResults):
        return [tags_list]
    return [
        CommandResults(
            readable_output=tableToMarkdown("Tag Data", tag, removeNull=True),
            outputs_prefix="CTIX.Tag",
            outputs_key_field="name",
            outputs=tag,
        )
        for tag in tags_list
    ]
def delete_tag_command(client: Client, args: dict) -> CommandResults:
    """
    delete_tag command: Deletes the tag(s) with the given name(s).

    :raises DemistoException: when a requested tag name matches no existing tag
        (fix: the original crashed with IndexError on ``tags[0]``).
    """
    tag_names = argToList(args.get("tag_name"))
    final_result = []
    for tag in tag_names:
        # Resolve the tag name to its id via a search, then delete by id.
        search_result = client.get_tags(1, 10, tag)
        tags = search_result.get("data", {}).get("results", [])
        if not tags:
            raise DemistoException(f'No tag found with name "{tag}"')
        response = client.delete_tag(tags[0]["id"])
        final_result.append(response.get("data"))
    final_result = no_result_found(final_result)
    if isinstance(final_result, CommandResults):
        return final_result
    return CommandResults(
        readable_output=tableToMarkdown("Tag Response", final_result, removeNull=True),
        outputs_prefix="CTIX.DeleteTag",
        outputs_key_field="result",
        outputs=final_result,
        raw_response=final_result,
    )
def whitelist_iocs_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    '''
    Whitelist IOCs command.

    :Description Whitelist the given IOC values
    :param Dict[str, Any] args: Parameters to send in the request
    :return CommandResults: XSOAR based result
    '''
    response = client.whitelist_iocs(
        args.get("type"), argToList(args.get("values")), args.get("reason")
    )
    details = no_result_found(response.get("data", {}).get("details", {}))
    if isinstance(details, CommandResults):
        return details
    return CommandResults(
        readable_output=tableToMarkdown("Whitelist IOC", details, removeNull=True),
        outputs_prefix="CTIX.AllowedIOC",
        outputs=details,
        raw_response=details,
    )
def get_whitelist_iocs_command(
    client: Client, args: Dict[str, Any]
) -> List[CommandResults]:
    """
    get_allowed_iocs command: Returns a paginated list of whitelisted IOCs.

    Fixes: the docstring was copy-pasted from get_tags, and ``args`` was
    declared as ``args=Dict[str, Any]`` (a default value) instead of an
    annotation.
    """
    page = check_for_empty_variable(args["page"], 1)
    page_size = check_for_empty_variable(args["page_size"], 10)
    query = args.get("q")
    response = client.get_whitelist_iocs(page, page_size, query)
    ioc_list = no_result_found(response.get("data", {}).get("results", []))
    if isinstance(ioc_list, CommandResults):
        return [ioc_list]
    return [
        CommandResults(
            readable_output=tableToMarkdown("Whitelist IOC", ioc, removeNull=True),
            outputs_prefix="CTIX.IOC",
            outputs_key_field="value",
            outputs=ioc,
        )
        for ioc in ioc_list
    ]
def remove_whitelisted_ioc_command(
    client: Client, args: Dict[str, Any]
) -> CommandResults:
    """
    remove_whitelist_ioc command: Deletes whitelisted IOCs with the given ids.

    Fix: ``args`` was declared as ``args=Dict[str, Any]`` (a default value)
    instead of an annotation.
    """
    whitelist_ids = argToList(args.get("ids"))
    data = no_result_found(client.remove_whitelisted_ioc(whitelist_ids).get("data"))
    if isinstance(data, CommandResults):
        return data
    return CommandResults(
        readable_output=tableToMarkdown("Details", data, removeNull=True),
        outputs_prefix="CTIX.RemovedIOC",
        outputs_key_field="detail",
        outputs=data,
        raw_response=data,
    )
def get_threat_data_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
    """
    get_threat_data command: Lists threat data, optionally filtered by a CQL query.

    Fixes: ``args`` was declared as ``args=Dict[str, Any]`` (a default value)
    instead of an annotation; removed a pointless element-by-element list copy.
    """
    page = check_for_empty_variable(args["page"], 1)
    page_size = check_for_empty_variable(args["page_size"], 10)
    query = args.get("query", "type=indicator")
    response = client.get_threat_data(page, page_size, query)
    results = no_result_found(response.get("data", {}).get("results", []))
    if isinstance(results, CommandResults):
        return [results]
    return iter_dbot_score(results, 'confidence_score', 'ioc_type', "Threat Data", "CTIX.ThreatData", "id")
def get_saved_searches_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """
    get_saved_searches command: Lists saved search data.

    Fixes: ``args`` was declared as ``args=Dict[str, Any]`` (a default value)
    instead of an annotation; removed a pointless element-by-element list copy.
    """
    page = check_for_empty_variable(args["page"], 1)
    page_size = check_for_empty_variable(args["page_size"], 10)
    response = client.get_saved_searches(page, page_size)
    results = no_result_found(response.get("data", {}).get("results", []))
    if isinstance(results, CommandResults):
        return results
    return CommandResults(
        readable_output=tableToMarkdown("Saved Search", results, removeNull=True),
        outputs_prefix="CTIX.SavedSearch",
        outputs_key_field="id",
        outputs=results,
        raw_response=results,
    )
def get_server_collections_command(
    client: Client, args: Dict[str, Any]
) -> CommandResults:
    """
    get_server_collections command: Lists server collections.

    Fixes: ``args`` was declared as ``args=Dict[str, Any]`` (a default value)
    instead of an annotation; removed a pointless element-by-element list copy.
    """
    page = check_for_empty_variable(args["page"], 1)
    page_size = check_for_empty_variable(args["page_size"], 10)
    response = client.get_server_collections(page, page_size)
    results = no_result_found(response.get("data", {}).get("results", []))
    if isinstance(results, CommandResults):
        return results
    return CommandResults(
        readable_output=tableToMarkdown("Server Collection", results, removeNull=True),
        outputs_prefix="CTIX.ServerCollection",
        outputs_key_field="id",
        outputs=results,
        raw_response=results,
    )
def get_actions_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """
    get_actions command: Lists actions, optionally filtered by type.

    Fixes: ``args`` was declared as ``args=Dict[str, Any]`` (a default value)
    instead of an annotation; removed a pointless element-by-element list copy.
    """
    page = check_for_empty_variable(args["page"], 1)
    page_size = check_for_empty_variable(args["page_size"], 10)
    # Only forward the optional filters that were actually provided.
    params = {}
    if args.get("actions_type"):
        params["action_type"] = args.get("actions_type")
    if args.get("object_type"):
        params["object_type"] = args.get("object_type")
    response = client.get_actions(page, page_size, params)
    results = no_result_found(response.get("data", {}).get("results", []))
    if isinstance(results, CommandResults):
        return results
    return CommandResults(
        readable_output=tableToMarkdown("Actions", results, removeNull=True),
        outputs_prefix="CTIX.Action",
        outputs_key_field="id",
        outputs=results,
        raw_response=results,
    )
def add_indicator_as_false_positive_command(
    client: Client, args: Dict[str, str]
) -> CommandResults:
    '''
    Add Indicator as False Positive Command.

    :Description Mark the given indicators as false positive
    :param Dict[str, str] args: Parameters to send in the request
    :return CommandResults: XSOAR based result
    '''
    ids = argToList(args.get("object_ids"))
    obj_type = args.get("object_type", "indicator")
    data = no_result_found(
        client.add_indicator_as_false_positive(ids, obj_type).get("data")
    )
    if isinstance(data, CommandResults):
        return data
    return CommandResults(
        readable_output=tableToMarkdown("Indicator False Positive", data, removeNull=True),
        outputs_prefix="CTIX.IndicatorFalsePositive",
        outputs=data,
        raw_response=data,
    )
def add_ioc_manual_review_command(
    client: Client, args: Dict[str, Any]
) -> CommandResults:
    '''
    Add IOC for Manual Review Command.

    :Description Send the given indicators to manual review
    :param Dict[str, Any] args: Parameters to send in the request
    :return CommandResults: XSOAR based result
    '''
    ids = argToList(args.get("object_ids"))
    obj_type = args.get("object_type", "indicator")
    data = no_result_found(
        client.add_ioc_to_manual_review(ids, obj_type).get("data")
    )
    if isinstance(data, CommandResults):
        return data
    return CommandResults(
        readable_output=tableToMarkdown("IOC Manual Review", data, removeNull=True),
        outputs_prefix="CTIX.IOCManualReview",
        outputs=data,
        raw_response=data,
    )
def deprecate_ioc_command(client: Client, args: dict) -> CommandResults:
    """
    deprecate_ioc command: Deprecates indicators via the bulk api.
    """
    ids = argToList(args.get("object_ids"))
    data = no_result_found(
        client.deprecate_ioc(ids, args["object_type"]).get("data")
    )
    if isinstance(data, CommandResults):
        return data
    return CommandResults(
        readable_output=tableToMarkdown("Deprecate IOC", data, removeNull=True),
        outputs_prefix="CTIX.DeprecateIOC",
        outputs=data,
        raw_response=data,
    )
def add_analyst_tlp_command(client: Client, args: dict) -> CommandResults:
    '''
    Add Analyst TLP Command.

    :Description Set the analyst TLP on a given indicator
    :param dict args: Parameters to send in the request
    :return CommandResults: XSOAR based result
    :raises DemistoException: when the request body lacks ``analyst_tlp``
    '''
    body = json.loads(args["data"])
    if not body.get("analyst_tlp"):
        raise DemistoException("analyst_tlp not provided")
    response = client.add_analyst_tlp(args["object_id"], args["object_type"], body)
    data = no_result_found(response.get("data"))
    if isinstance(data, CommandResults):
        return data
    return CommandResults(
        readable_output=tableToMarkdown("Add Analyst TLP", data, removeNull=True),
        outputs_prefix="CTIX.AddAnalystTLP",
        outputs=data,
        raw_response=data,
    )
def add_analyst_score_command(client: Client, args: dict) -> CommandResults:
    '''
    Add Analyst Score Command.

    :Description Set the analyst score on a given indicator
    :param dict args: Parameters to send in the request
    :return CommandResults: XSOAR based result
    :raises DemistoException: when the request body lacks ``analyst_score``
    '''
    body = json.loads(args.get("data", "{}"))
    # (Local was misleadingly named ``analyst_tlp`` in the original.)
    if not body.get("analyst_score"):
        raise DemistoException("analyst_score not provided")
    response = client.add_analyst_score(args["object_id"], args.get("object_type"), body)
    data = no_result_found(response.get("data"))
    if isinstance(data, CommandResults):
        return data
    return CommandResults(
        readable_output=tableToMarkdown("Add Analyst Score", data, removeNull=True),
        outputs_prefix="CTIX.AddAnalystScore",
        outputs=data,
        raw_response=data,
    )
def saved_result_set_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    '''
    Get Saved Result Set data Command.

    :Description Fetch saved result set records and score them via DBot
    :param Dict[str, Any] args: Parameters to send in the request
    :return CommandResults: XSOAR based result
    '''
    page = check_for_empty_variable(args["page"], 1)
    page_size = check_for_empty_variable(args["page_size"], 10)
    label_name = args.get("label_name", "test")
    query = args.get("query", "type=indicator")
    response = client.saved_result_set(page, page_size, label_name, query)
    records = no_result_found(response.get("data", {}).get("results", []))
    if isinstance(records, CommandResults):
        return records
    return iter_dbot_score(records, 'confidence_score', 'ioc_type', "Saved Result Set", "CTIX.SavedResultSet", "id")
def tag_indicator_updation_command(
    client: Client, args: Dict[str, Any], operation: str
) -> CommandResults:
    '''
    Tag Indicator Updation Command.

    :Description Add or remove a tag on a given indicator
    :param Dict[str, Any] args: Parameters to send in the request
    :param str operation: "add_tag_indicator" or "remove_tag_from_indicator"
    :return CommandResults: XSOAR based result
    '''
    response = client.tag_indicator_updation(
        args.get("q", {}),
        args.get("page", 1),
        args.get("page_size", 10),
        args["object_id"],
        args["object_type"],
        args["tag_id"],
        operation,
    )
    data = no_result_found(response.get("data"))
    if isinstance(data, CommandResults):
        return data
    return CommandResults(
        readable_output=tableToMarkdown("Tag Indicator Updation", data, removeNull=True),
        outputs_prefix="CTIX.TagUpdation",
        outputs=data,
        raw_response=data,
    )
def search_for_tag_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    '''
    Search for Tag Command.

    :Description Search for a tag by query
    :param Dict[str, Any] args: Parameters to send in the request
    :return CommandResults: XSOAR based result
    '''
    request_params = {
        "page": args.get("page", 1),
        "page_size": args.get("page_size", 10),
        "q": args.get("q"),
    }
    response = client.search_for_tag(request_params)
    data = no_result_found(response.get("data", {}).get('results', []))
    if isinstance(data, CommandResults):
        return data
    return CommandResults(
        readable_output=tableToMarkdown("Search for Tag", data, removeNull=True),
        outputs_prefix="CTIX.SearchTag",
        outputs=data,
        raw_response=data,
    )
def get_indicator_details_command(
    client: Client, args: Dict[str, Any]
) -> CommandResults:
    '''
    Get Indicator Details Command.

    :Description Fetch basic details for an indicator
    :param Dict[str, Any] args: Parameters to send in the request
    :return CommandResults: XSOAR based result
    '''
    pagination = {
        "page": args.get("page", 1),
        "page_size": args.get("page_size", 10),
    }
    response = client.get_indicator_details(
        args["object_type"], args["object_id"], pagination
    )
    data = no_result_found(response.get("data"))
    if isinstance(data, CommandResults):
        return data
    return CommandResults(
        readable_output=tableToMarkdown("Get Indicator Details", data, removeNull=True),
        outputs_prefix="CTIX.IndicatorDetails",
        outputs=data,
        raw_response=data,
    )
def get_indicator_tags_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    '''
    Get Indicator Tags Command.

    :Description Fetch the tags attached to an indicator
    :param Dict[str, Any] args: Parameters to send in the request
    :return CommandResults: XSOAR based result
    '''
    pagination = {
        "page": args.get("page", 1),
        "page_size": args.get("page_size", 10),
    }
    response = client.get_indicator_tags(
        args["object_type"], args["object_id"], pagination
    )
    data = no_result_found(response.get("data", {}))
    if isinstance(data, CommandResults):
        return data
    return CommandResults(
        readable_output=tableToMarkdown("Get Indicator Tags", data, removeNull=True),
        outputs_prefix="CTIX.IndicatorTags",
        outputs=data,
        raw_response=data,
    )
def get_indicator_relations_command(
    client: Client, args: Dict[str, Any]
) -> CommandResults:
    '''
    Get Indicator Relations Command.

    :Description Fetch the relations of an indicator
    :param Dict[str, Any] args: Parameters to send in the request
    :return CommandResults: XSOAR based result
    '''
    pagination = {
        "page": args.get("page", 1),
        "page_size": args.get("page_size", 10),
    }
    response = client.get_indicator_relations(
        args["object_type"], args["object_id"], pagination
    )
    data = no_result_found(response.get("data", {}).get('results', {}))
    if isinstance(data, CommandResults):
        return data
    return CommandResults(
        readable_output=tableToMarkdown("Get Indicator Relations", data, removeNull=True),
        outputs_prefix="CTIX.IndicatorRelations",
        outputs=data,
        raw_response=data,
    )
def get_indicator_observations_command(
    client: Client, args: Dict[str, Any]
) -> CommandResults:
    '''
    Get Indicator Observations Command.

    :Description Fetch source-reference observations for an indicator
    :param Dict[str, Any] args: Parameters to send in the request
    :return CommandResults: XSOAR based result
    '''
    request_params = {
        "page": args.get("page", 1),
        "page_size": args.get("page_size", 10),
        "object_id": args.get("object_id"),
        "object_type": args.get("object_type"),
    }
    response = client.get_indicator_observations(request_params)
    data = no_result_found(response.get("data", {}).get('results', {}))
    if isinstance(data, CommandResults):
        return data
    return CommandResults(
        readable_output=tableToMarkdown("Get Indicator Observations", data, removeNull=True),
        outputs_prefix="CTIX.IndicatorObservations",
        outputs=data,
        raw_response=data,
    )
def get_conversion_feed_source_command(
    client: Client, args: Dict[str, Any]
) -> CommandResults:
    '''
    Get Conversion Feed Source Command.

    :Description Fetch conversion feed sources
    :param Dict[str, Any] args: Parameters to send in the request
    :return CommandResults: XSOAR based result
    '''
    params = {
        "page": args.get("page", 1),
        "page_size": args.get("page_size", 10),
        "object_id": args.get("object_id"),
        "object_type": args.get("object_type"),
    }
    q = args.get("q")
    if q is not None:
        params["q"] = q
    response = client.get_conversion_feed_source(params)
    # Fix: the default for "data" was a *list* ([]); lists have no .get(), so a
    # missing "data" key raised AttributeError. Use a dict default instead.
    data = no_result_found(response.get("data", {}).get('results', {}))
    if isinstance(data, CommandResults):
        return data
    return CommandResults(
        readable_output=tableToMarkdown("Conversion Feed Source", data, removeNull=True),
        outputs_prefix="CTIX.ConversionFeedSource",
        outputs=data,
        raw_response=data,
    )
def get_lookup_threat_data_command(
    client: Client, args: Dict[str, Any]
) -> List[CommandResults]:
    '''
    Get Lookup Threat Data Command.

    :Description Look up threat data for the given indicator values
    :param Dict[str, Any] args: Parameters to send in the request
    :return List[CommandResults]: XSOAR based results
    '''
    object_type = args.get("object_type", "indicator")
    object_names = argToList(args.get("object_names"))
    params = {"page_size": args.get("page_size", 10)}
    response = client.get_lookup_threat_data(object_type, object_names, params)
    # Fix: the original chained .get("data").get("results") without defaults,
    # which raised AttributeError when "data" was absent or None.
    data_set = (response.get("data") or {}).get("results", [])
    results = no_result_found(data_set)
    if isinstance(results, CommandResults):
        return [results]
    return iter_dbot_score(results, 'confidence_score', 'ioc_type', "Lookup Data", "CTIX.ThreatDataLookup", "id")
def main() -> None:
    """Read integration parameters, build the CTIX client and dispatch the invoked command."""
    params = demisto.params()
    base_url = params.get("base_url")
    access_id = params.get("access_id")
    secret_key = params.get("secret_key")
    verify = not params.get("insecure", False)
    proxies = handle_proxy(proxy_param_name="proxy")
    command = demisto.command()
    demisto.debug(f"Command being called is {command}")
    try:
        client = Client(
            base_url=base_url,
            access_id=access_id,
            secret_key=secret_key,
            verify=verify,
            proxies=proxies,
        )
        # Commands that follow the uniform (client, args) signature.
        command_map = {
            "ctix-create-tag": create_tag_command,
            "ctix-get-tags": get_tags_command,
            "ctix-delete-tag": delete_tag_command,
            "ctix-allowed-iocs": whitelist_iocs_command,
            "ctix-get-allowed-iocs": get_whitelist_iocs_command,
            "ctix-remove-allowed-ioc": remove_whitelisted_ioc_command,
            "ctix-get-threat-data": get_threat_data_command,
            "ctix-get-saved-searches": get_saved_searches_command,
            "ctix-get-server-collections": get_server_collections_command,
            "ctix-get-actions": get_actions_command,
            "ctix-ioc-manual-review": add_ioc_manual_review_command,
            "ctix-deprecate-ioc": deprecate_ioc_command,
            "ctix-add-analyst-tlp": add_analyst_tlp_command,
            "ctix-add-analyst-score": add_analyst_score_command,
            "ctix-saved-result-set": saved_result_set_command,
            "ctix-search-for-tag": search_for_tag_command,
            "ctix-get-indicator-details": get_indicator_details_command,
            "ctix-get-indicator-tags": get_indicator_tags_command,
            "ctix-get-indicator-relations": get_indicator_relations_command,
            "ctix-get-indicator-observations": get_indicator_observations_command,
            "ctix-get-conversion-feed-source": get_conversion_feed_source_command,
            "ctix-get-lookup-threat-data": get_lookup_threat_data_command,
            "ctix-add-indicator-as-false-positive": add_indicator_as_false_positive_command,
        }
        if command == "test-module":
            # test_module raises on failure; its return value is intentionally unused.
            test_module(client)
        elif command == "ctix-add-tag-indicator":
            # Tag add/remove share one handler, differentiated by an extra argument.
            return_results(
                tag_indicator_updation_command(
                    client, demisto.args(), "add_tag_indicator"
                )
            )
        elif command == "ctix-remove-tag-from-indicator":
            return_results(
                tag_indicator_updation_command(
                    client, demisto.args(), "remove_tag_from_indicator"
                )
            )
        elif command in command_map:
            return_results(command_map[command](client, demisto.args()))
    except Exception as e:
        demisto.error(traceback.format_exc())  # print the traceback
        return_error(
            f"Failed to execute {demisto.command()} command.\nError:\n{str(e)} \
            {traceback.format_exc()}"
        )
# XSOAR entry-point guard; "__builtin__" / "builtins" cover the server's
# script-execution contexts on Python 2 and Python 3 respectively.
if __name__ in ("__main__", "__builtin__", "builtins"):
    main()
| mit | 057961a5d5ac5b9c0be9cbda67111adf | 34.556579 | 123 | 0.592995 | 3.837404 | false | false | false | false |
demisto/content | Packs/CiscoUmbrellaReporting/Integrations/CiscoUmbrellaReporting/CiscoUmbrellaReporting.py | 1 | 53522 | import demistomock as demisto
from CommonServerPython import *
from typing import Dict, List, Optional
from datetime import datetime
import requests
import urllib3
# Disable insecure warnings
urllib3.disable_warnings() # pylint: disable=no-member
''' CONSTANTS '''
# Pagination defaults used when the caller supplies no page arguments.
DEFAULT_PAGE_SIZE = 50
# Default reporting window: the last seven days up to now.
DEFAULT_FROM_DATE = "-7days"
DEFAULT_TO_DATE = "now"
DEFAULT_OFFSET = 0
INTEGRATION_CONTEXT_NAME = 'UmbrellaReporting'
# OAuth2 token endpoint used by Client.get_access_token().
TOKEN_ENDPOINT = "https://management.api.umbrella.com/auth/v2/oauth2/token"
# Argument names recognized by check_valid_indicator_value().
IP_PARAM = 'ip'
DOMAIN_PARAM = 'domains'
URL_PARAM = 'urls'
SHA256_PARAM = 'sha256'
INTRUSION_ACTION = 'intrusion_action'
DATE_TIME_FORMAT = '%Y-%m-%dT%H:%M:%SZ'  # ISO8601 format with UTC, default in XSOAR
# User-facing error messages.
PAGE_NUMBER_ERROR_MSG = 'Invalid Input Error: page number should be greater than zero.'
PAGE_SIZE_ERROR_MSG = 'Invalid Input Error: page size should be greater than zero.'
INVALID_ORG_ID_ERROR_MSG = 'Authorization Error: The provided Organization ID is invalid.'
INVALID_CREDENTIALS_ERROR_MSG = 'Authorization Error: The provided credentials for Cisco Umbrella Reporting are' \
                                ' invalid. Please provide a valid Client ID and Client Secret.'
# Query parameters accepted per traffic type by the activity endpoints.
ACTIVITY_TRAFFIC_TYPE_DICT = {
    "dns": ["traffic_type", "limit", "from", "to", "offset", "domains", "ip", "verdict",
            "threats", "threat_types", "identity_types", "page", "page_size"],
    "proxy": ["traffic_type", "limit", "from", "to", "offset", "domains",
              "ip", "verdict", "threats", "threat_types", "urls", "ports",
              "identity_types", "file_name", "amp_disposition", "page", "page_size"],
    "firewall": ["traffic_type", "limit", "from", "to", "offset", "ip", "ports", "verdict",
                 "page", "page_size"],
    "intrusion": ["traffic_type", "limit", "from", "to", "offset", "ip", "ports",
                  "signatures", "intrusion_action", "page", "page_size"],
    "ip": ["traffic_type", "limit", "from", "to", "offset", "ip", "ports", "identity_types",
           "verdict", "page", "page_size"],
    "amp": ["traffic_type", "limit", "from", "to", "offset", "amp_disposition", "sha256",
            "page", "page_size"]
}
# Query parameters accepted per summary type by the summary endpoints.
SUMMARY_TYPE_DICT = {
    "all": ["summary_type", "limit", "from", "to", "offset", "domains", "urls", "ip",
            "identity_types", "verdict", "file_name", "threats",
            "threat_types", "amp_disposition", "page", "page_size", "ports"],
    "category": ["summary_type", "limit", "from", "to", "offset", "domains", "urls", "ip",
                 "identity_types", "verdict", "file_name", "threats",
                 "threat_types", "amp_disposition", "page", "page_size"],
    "destination": ["summary_type", "limit", "from", "to", "offset", "domains", "urls", "ip",
                    "identity_types", "verdict", "file_name", "threats",
                    "threat_types", "amp_disposition", "page", "page_size"],
    "intrusion_rule": ["summary_type", "limit", "from", "to", "offset", "signatures", "ip",
                       "identity_types", "intrusion_action", "ports", "page",
                       "page_size"]
}
''' CLIENT CLASS '''
class Client(BaseClient):
    """
    HTTP client for the Cisco Umbrella Reporting v2 API.

    This Client implements API calls, and does not contain any XSOAR logic.
    Should only do requests and return data.
    It inherits from BaseClient defined in CommonServer Python.
    Most calls use _http_request() that handles proxy, SSL verification, etc.
    For this implementation, no special attributes defined
    """

    def __init__(self, base_url: str, organisation_id: str,
                 secret_key: str, client_key: str,
                 verify=None,
                 proxy=None):
        # BaseClient wires proxy handling and SSL verification into every
        # subsequent _http_request() call.
        super().__init__(
            base_url,
            verify=verify,
            proxy=proxy
        )
        # token_url: fixed OAuth2 endpoint, independent of the reporting region.
        self.token_url = TOKEN_ENDPOINT
        self.secret_key = secret_key
        self.client_key = client_key
        self.organisation_id = organisation_id
        # Umbrella organization IDs are numeric; fail fast on anything else.
        if not self.organisation_id.isdigit():
            raise DemistoException("Invalid Input Error: The Organization ID must be a number.")

    def get_access_token(self):
        """
        Generate Access token

        Returns:
            Returns the access_token
        """
        payload = {
            "grant_type": 'client_credentials'
        }
        # Client key/secret go over HTTP basic auth, as OAuth2
        # client-credentials flow requires.
        token_response = self._http_request(
            method='POST',
            full_url=self.token_url,
            auth=(self.client_key, self.secret_key),
            data=payload,
            error_handler=cisco_umbrella_access_token_error_handler
        )
        return token_response.get('access_token')

    def query_cisco_umbrella_api(self, end_point: str, params: dict) -> Dict:
        """
        Call Cisco Umbrella Reporting API

        Redirection:
        Umbrella stores the reporting data in geolocated data warehouses
        (EU: api.eu.reports.umbrella.com, US: api.us.reports.umbrella.com).
        If an HTTP client request does not originate from the same continent
        as the location of the Umbrella data warehouse, the Umbrella server
        responds with 302 Found. The first request is made with redirects
        disabled; if the status code is in the 300-309 range, a second call is
        made to the URL from the first response's Location header.
        for more info see:
        https://developer.cisco.com/docs/cloud-security/#!api-reference-reports-reporting-overview/http-redirects-and-request-authorization-header

        Args:
            end_point (str): Cisco Umbrella Reporting endpoint
            params (dict): Params

        Returns:
            Return the raw api response from Cisco Umbrella Reporting API.
        """
        result: Dict = {}
        url_path = f'{self._base_url}/v2/organizations' \
                   f'/{self.organisation_id}/{end_point}'
        # A fresh token is requested for every query (no caching).
        access_token = self.get_access_token()
        # allow_redirects=False so the geo-redirect can be followed manually
        # with the Authorization header re-attached (requests drops auth
        # headers on cross-host redirects).
        response = self._http_request(
            method='GET',
            full_url=url_path,
            headers={'Authorization': f'Bearer {access_token}'},
            params=params,
            resp_type='response',
            allow_redirects=False,
            error_handler=cisco_umbrella_error_handler
        )
        if response.status_code in range(300, 310):  # Redirection - explained in the function's docstring
            # Second call: default resp_type, so _http_request returns parsed JSON.
            response = self._http_request(
                method='GET',
                full_url=response.headers['Location'],
                headers={'Authorization': f'Bearer {access_token}'},
                data={}, allow_redirects=True)
            if response:
                result = response
        else:  # Success response (status code == 200)
            result = response.json()
        return result
''' HELPER FUNCTIONS '''
def cisco_umbrella_access_token_error_handler(response: requests.Response):
    """
    Handle error responses from the Cisco Umbrella token endpoint.

    Args:
        response (response): Cisco Umbrella Token url response

    Raise:
        DemistoException
    """
    status = response.status_code
    if status == 401:
        # Bad client id/secret pair.
        raise DemistoException(INVALID_CREDENTIALS_ERROR_MSG)
    if status >= 400:
        raise DemistoException('Error: something went wrong, please try again.')
def cisco_umbrella_error_handler(response: requests.Response):
    """
    Handle non-success responses from the Cisco Umbrella Reporting API.

    Args:
        response (response): Cisco Umbrella response

    Raise:
        DemistoException
    """
    if response.status_code < 400:
        return
    # The API reports its failure reason under data.error.
    error_message = response.json().get('data', {}).get('error')
    if 'invalid organization' in error_message:
        raise DemistoException(INVALID_ORG_ID_ERROR_MSG)
    if 'unauthorized' in error_message:
        raise DemistoException(INVALID_CREDENTIALS_ERROR_MSG)
    raise DemistoException(error_message)
def check_valid_indicator_value(indicator_type: str,
                                indicator_value: str) -> bool:
    """
    Validate indicator values supplied in command arguments.

    Args:
        indicator_type: Indicator type provided in the command
        indicator_value: Indicator value provided in the command

    Returns:
        True if the provided indicator values are valid
    """
    if indicator_type == DOMAIN_PARAM:
        # Comma-separated list of domains; each must match the domain regex.
        for domain in argToList(indicator_value):
            if not re.match(domainRegex, domain):
                raise ValueError(f'Domain {domain} is invalid')
    elif indicator_type == URL_PARAM:
        for url in argToList(indicator_value):
            if not re.match(urlRegex, url):
                raise ValueError(f'URL {url} is invalid')
    elif indicator_type == IP_PARAM:
        # Both IPv4 and IPv6 are accepted.
        if not is_ip_valid(indicator_value, accept_v6_ips=True):
            raise ValueError(f'IP "{indicator_value}" is invalid')
    if indicator_type == SHA256_PARAM:
        if not re.match(sha256Regex, indicator_value):
            raise ValueError(f'SHA256 value {indicator_value} is invalid')
    if indicator_type == INTRUSION_ACTION:
        allowed_actions = ("would_block", "blocked", "detected")
        for intrusion in argToList(indicator_value):
            if intrusion not in allowed_actions:
                raise ValueError("Invalid input Error: supported values for "
                                 "intrusion_action are: 'would_block', 'blocked' and 'detected'.")
    return True
def get_command_title_string(sub_context: str, page: Optional[int],
                             page_size: Optional[int]) -> str:
    """
    Build the title used in a command's readable output.

    Args:
        sub_context: Commands sub_context
        page: page_number
        page_size: page_size

    Returns:
        Returns the title for the readable output
    """
    paginated = bool(page) and bool(page_size) and page > 0 and page_size > 0
    if not paginated:
        return f"{sub_context} List"
    return (f'{sub_context} List\nCurrent page size: {page_size}\n'
            f'Showing page {page} out of others that may exist')
def destination_lookup_to_markdown(results: List[Dict], title: str) -> str:
    """
    Render Cisco Umbrella top-destination records as a markdown table.

    Args:
        results (list): Cisco Umbrella Reporting data
        title (str): Title string

    Returns:
        A string representation of the markdown table
    """
    rows = []
    for record in results:
        counts = record.get('counts', {})
        labels = ", ".join(cat.get('label') for cat in record.get('categories', []))
        rows.append({
            'Destination': record.get('domain', ''),
            'Category': labels,
            'Allowed': counts.get('allowedrequests', ''),
            'Blocked': counts.get('blockedrequests', ''),
            'Requests': counts.get('requests', ''),
        })
    headers = list(rows[0].keys()) if rows else []
    return tableToMarkdown(title, rows, headers=headers, removeNull=True)
def categories_lookup_to_markdown(results: List[Dict], title: str) -> str:
    """
    Render Cisco Umbrella category records as a markdown table.

    Args:
        results (list): Cisco Umbrella Reporting data
        title (str): Title string

    Returns:
        A string representation of the markdown table
    """
    rows = []
    for record in results:
        category = record.get('category', {})
        rows.append({
            'Category': category.get('label', ''),
            'Type': category.get('type', ''),
            'Activity': record.get('count', 0),
        })
    headers = list(rows[0].keys()) if rows else []
    return tableToMarkdown(title, rows, headers=headers, removeNull=True)
def summary_lookup_to_markdown(summary: Dict, title: str) -> str:
    """
    Render a single Cisco Umbrella summary object as a one-row markdown table.

    Args:
        summary (dict): Cisco Umbrella Reporting data
        title (str): Title string

    Returns:
        A string representation of the markdown table
    """
    # Table column -> API field; insertion order fixes the column order.
    field_map = {
        'Application': 'applications',
        'Allowed Application': 'applicationsallowed',
        'Blocked Application': 'applicationsblocked',
        'Category': 'categories',
        'Domain': 'domains',
        'File': 'files',
        'File Type': 'filetypes',
        'Identity': 'identities',
        'Identity Type': 'identitytypes',
        'Policy Category': 'policycategories',
        'Policy Request': 'policyrequests',
        'Request': 'requests',
        'Allowed Request': 'requestsallowed',
        'Blocked Request': 'requestsblocked',
    }
    row = {column: summary.get(key, 0) for column, key in field_map.items()}
    headers = list(row.keys())
    return tableToMarkdown(title, [row], headers=headers, removeNull=True)
def summary_category_lookup_to_markdown(results: List[Dict], title: str) -> str:
    """
    Render per-category Cisco Umbrella summary records as a markdown table.

    Args:
        results (list): Cisco Umbrella Reporting data
        title (str): Title string

    Returns:
        A string representation of the markdown table
    """
    # Table column -> API field for the nested summary object.
    summary_fields = {
        'Application': 'applications',
        'Allowed Application': 'applicationsallowed',
        'Blocked Application': 'applicationsblocked',
        'Category': 'categories',
        'Domain': 'domains',
        'File': 'files',
        'File Type': 'filetypes',
        'Identity': 'identities',
        'Identity Type': 'identitytypes',
        'Policy Category': 'policycategories',
        'Policy Request': 'policyrequests',
        'Request': 'requests',
        'Allowed Request': 'requestsallowed',
        'Blocked Request': 'requestsblocked',
    }
    rows = []
    for record in results:
        category = record.get('category', {})
        summary = record.get('summary', {})
        row = {
            'Category Type': category.get('type', ''),
            'Category Name': category.get('label', ''),
        }
        row.update({column: summary.get(key, 0) for column, key in summary_fields.items()})
        rows.append(row)
    headers = list(rows[0].keys()) if rows else []
    return tableToMarkdown(title, rows, headers=headers, removeNull=True)
def summary_rule_lookup_to_markdown(results: List[Dict], title: str) -> str:
    """
    Render intrusion-rule summary records as a markdown table.

    Args:
        results (list): Cisco Umbrella Reporting data
        title (str): Title string

    Returns:
        A string representation of the markdown table
    """
    summary_rule = []
    for result in results:
        # Each record carries a list of triggered signatures; one row per signature.
        for signature in result.get('signatures', []):
            # Robustness fix: default 'counts' to {} (consistent with the other
            # lookup helpers) instead of crashing when the key is absent.
            counts = signature.get('counts', {})
            summary_rule.append({
                "Blocked": counts.get('blocked'),
                "Detected": counts.get('detected'),
                "Would Block": counts.get('wouldblock'),
                "Last Event": signature.get('lasteventat'),
            })
    headers = ['Blocked', 'Detected', 'Would Block', "Last Event"]
    return tableToMarkdown(title, summary_rule, headers=headers, removeNull=True)
def summary_destination_lookup_to_markdown(results: List[Dict], title: str) -> str:
    """
    Render per-destination Cisco Umbrella summary records as a markdown table.

    Args:
        results (list): Cisco Umbrella Reporting data
        title (str): Title string

    Returns:
        A string representation of the markdown table
    """
    summary_dest = []
    for destination in results:
        summary = destination.get('summary', {})
        new = {
            'Destination': destination.get('domain', ''),
            'Application': summary.get('applications', 0),
            'Allowed Application': summary.get('applicationsallowed', 0),
            'Blocked Application': summary.get('applicationsblocked', 0),
            'Category': summary.get('categories', 0),
            'Domain': summary.get('domains', 0),
            'File': summary.get('files', 0),
            'File Type': summary.get('filetypes', 0),
            'Identity': summary.get('identities', 0),
            'Identity Type': summary.get('identitytypes', 0),
            'Policy Category': summary.get('policycategories', 0),
            'Policy Request': summary.get('policyrequests', 0),
            'Request': summary.get('requests', 0),
            # Bug fix: the default here was the `int` type object, not the integer 0.
            'Allowed Request': summary.get('requestsallowed', 0),
            'Blocked Request': summary.get('requestsblocked', 0)
        }
        summary_dest.append(new)
    headers = list(summary_dest[0].keys()) if summary_dest else []
    return tableToMarkdown(title, summary_dest, headers=headers, removeNull=True)
def identities_lookup_to_markdown(results: List[Dict], title: str) -> str:
    """
    Render Cisco Umbrella identity records as a markdown table.

    Args:
        results (list): Cisco Umbrella Reporting data
        title (str): Title string

    Returns:
        A string representation of the markdown table
    """
    rows = [
        {
            'Identity': record.get('identity', {}).get('label', ''),
            'Requests': record.get('requests', 0),
        }
        for record in results
    ]
    headers = list(rows[0].keys()) if rows else []
    return tableToMarkdown(title, rows, headers=headers, removeNull=True)
def file_type_lookup_to_markdown(results: List[Dict], title: str) -> str:
    """
    Render Cisco Umbrella file records as a markdown table.

    Args:
        results (list): Cisco Umbrella Reporting data
        title (str): Title string

    Returns:
        A string representation of the markdown table
    """
    rows = []
    for record in results:
        categories = record.get('categories', [])
        labels = [entry.get('label', '') for entry in categories]
        types = [entry.get('type', '') for entry in categories]
        rows.append({
            'Requests': record.get('requests', ''),
            'Identity Count': record.get('identitycount', ''),
            'SHA256': record.get('sha256', ''),
            'Category': ", ".join(labels),
            'Category Type': ", ".join(types),
            'File Name': ", ".join(record.get('filenames', [])),
            'File Types': ", ".join(record.get('filetypes', [])),
        })
    headers = list(rows[0].keys()) if rows else []
    return tableToMarkdown(title, rows, headers=headers, removeNull=True)
def event_types_lookup_to_markdown(results: List[Dict], title: str) -> str:
    """
    Render Cisco Umbrella event-type records as a markdown table.

    Args:
        results (list): Cisco Umbrella Reporting data
        title (str): Title string

    Returns:
        A string representation of the markdown table
    """
    rows = [
        {
            'Event Type': record.get('eventtype', ''),
            'Count': record.get('count', 0),
        }
        for record in results
    ]
    headers = list(rows[0].keys()) if rows else []
    return tableToMarkdown(title, rows, headers=headers, removeNull=True)
def threat_lookup_to_markdown(results: List[Dict], title: str) -> str:
    """
    Render Cisco Umbrella threat records as a markdown table.

    Args:
        results (list): Cisco Umbrella Reporting data
        title (str): Title string

    Returns:
        A string representation of the markdown table
    """
    rows = [
        {
            'Threat': record.get('threat', ''),
            'Threat Type': record.get('threattype', ''),
            'Count': record.get('count', 0),
        }
        for record in results
    ]
    headers = list(rows[0].keys()) if rows else []
    return tableToMarkdown(title, rows, headers=headers, removeNull=True)
def activity_build_data(activity: Dict) -> dict:
    """
    Flatten the nested fields of a single activity record for table rendering.

    Args:
        activity (dict): Single object from cisco data

    Returns:
        Return activity data
    """
    signature = activity.get("signature")
    applications = activity.get("allapplications", [])
    # Timestamps arrive as milliseconds since the epoch.
    epoch_ms = activity.get("timestamp", 0)
    timestamp_string = datetime.utcfromtimestamp(
        epoch_ms / 1000.0).strftime(DATE_TIME_FORMAT)
    return {
        "category": [entry.get("label") for entry in activity.get("categories", [])],
        "identity": [entry.get("label") for entry in activity.get("identities", [])],
        "all_application": [app.get("label") for app in applications],
        "application_category": [app.get("category").get("label") for app in applications],
        "timestamp_string": timestamp_string,
        "signature_cve": signature.get("cves") if signature else [],
        # NOTE: the "lebel" spelling is intentional here — the activity table
        # builders read this exact key.
        "signature_lebel": signature.get("label") if signature else "",
    }
def activity_lookup_to_markdown(results: List[Dict], title: str) -> str:
    """
    Render generic Cisco Umbrella activity records as a markdown table.

    Args:
        results (list): Cisco Umbrella Reporting data
        title (str): Title string

    Returns:
        A string representation of the markdown table
    """
    rows = []
    for activity in results:
        flat = activity_build_data(activity)
        rows.append({
            "Request": activity.get("type", ''),
            "Identity": ", ".join(flat.get('identity', [])),
            "Policy or Ruleset Identity": ", ".join(flat.get('identity', [])),
            "Destination": activity.get("domain", ''),
            "Internal IP": activity.get("internalip", ''),
            "External IP": activity.get("externalip", ''),
            "DNS Type": activity.get("querytype", ''),
            "Action": activity.get("verdict", ''),
            "Categories": ", ".join(flat.get('category', [])),
            "Public Application": ", ".join(flat.get('all_application', [])),
            "Application Category": ", ".join(flat.get('application_category', [])),
            "Date & Time": flat.get('timestamp_string'),
        })
    headers = list(rows[0].keys()) if rows else []
    return tableToMarkdown(title, rows, headers=headers, removeNull=True)
def activity_dns_lookup_to_markdown(results: List[Dict], title: str) -> str:
    """
    Render DNS activity records as a markdown table.

    Args:
        results (list): Cisco Umbrella Reporting data
        title (str): Title string

    Returns:
        A string representation of the markdown table
    """
    rows = []
    for activity in results:
        flat = activity_build_data(activity)
        rows.append({
            "Identity": ", ".join(flat.get('identity', [])),
            "Policy or Ruleset Identity": ", ".join(flat.get('identity', [])),
            "Destination": activity.get("domain", ''),
            "Internal IP": activity.get("internalip", ''),
            "External IP": activity.get("externalip", ''),
            "DNS Type": activity.get("querytype", ''),
            "Action": activity.get("verdict", ''),
            "Categories": ", ".join(flat.get('category', [])),
            "Public Application": ", ".join(flat.get('all_application', [])),
            "Application Category": ", ".join(flat.get('application_category', [])),
            "Date & Time": flat.get('timestamp_string'),
        })
    headers = list(rows[0].keys()) if rows else []
    return tableToMarkdown(title, rows, headers=headers, removeNull=True)
def activity_proxy_lookup_to_markdown(results: List[Dict], title: str) -> str:
    """
    Render proxy activity records as a markdown table.

    Args:
        results (list): Cisco Umbrella Reporting data
        title (str): Title string

    Returns:
        A string representation of the markdown table
    """
    rows = []
    for activity in results:
        flat = activity_build_data(activity)
        rows.append({
            "Identity": ", ".join(flat.get('identity', [])),
            "Policy or Ruleset Identity": ", ".join(flat.get('identity', [])),
            "Internal IP": activity.get("internalip", ''),
            "External IP": activity.get("externalip", ''),
            "Action": activity.get("verdict", ''),
            "Categories": ", ".join(flat.get('category', [])),
            "Public Application": ", ".join(flat.get('all_application', [])),
            "Application Category": ", ".join(flat.get('application_category', [])),
            "Date & Time": flat.get('timestamp_string'),
        })
    headers = list(rows[0].keys()) if rows else []
    return tableToMarkdown(title, rows, headers=headers, removeNull=True)
def activity_firewall_lookup_to_markdown(results: List[Dict], title: str) -> str:
    """
    Render firewall activity records as a markdown table.

    Args:
        results (list): Cisco Umbrella Reporting data
        title (str): Title string

    Returns:
        A string representation of the markdown table
    """
    rows = []
    for activity in results:
        flat = activity_build_data(activity)
        protocol = activity.get("protocol")
        rule = activity.get("rule")
        rows.append({
            "Identity": ", ".join(flat.get('identity', [])),
            "Policy or Ruleset Identity": ", ".join(flat.get('identity', [])),
            "Destination IP": activity.get("destinationip", ''),
            "Source IP": activity.get("sourceip", ''),
            "Source Port": activity.get("sourceport", ''),
            "Destination Port": activity.get("destinationport", ''),
            "Protocol": protocol.get("label") if protocol else '',
            "Rule": rule.get("label") if rule else '',
            "Type": activity.get("type", ''),
            "Action": activity.get("verdict", ''),
            "Public Application": ", ".join(flat.get('all_application', [])),
            "Direction": activity.get("direction", ''),
            "Date & Time": flat.get('timestamp_string'),
        })
    headers = list(rows[0].keys()) if rows else []
    return tableToMarkdown(title, rows, headers=headers, removeNull=True)
def activity_intrusion_lookup_to_markdown(results: List[Dict], title: str) -> str:
    """
    Render intrusion activity records as a markdown table.

    Args:
        results (list): Cisco Umbrella Reporting data
        title (str): Title string

    Returns:
        A string representation of the markdown table
    """
    rows = []
    for activity in results:
        flat = activity_build_data(activity)
        protocol = activity.get("protocol")
        rows.append({
            "Identity": ", ".join(flat.get('identity', [])),
            "Classification": activity.get("classification", ''),
            "Destination IP": activity.get("destinationip", ''),
            "Source IP": activity.get("sourceip", ''),
            "Source Port": activity.get("sourceport", ''),
            "Destination Port": activity.get("destinationport", ''),
            "Protocol": protocol.get("label") if protocol else '',
            "Severity": activity.get("severity", ''),
            "CVE": ", ".join(flat.get('signature_cve', [])),
            "Signature": flat.get('signature_lebel'),
            "Type": activity.get("type", ''),
            "Action": activity.get("verdict", ''),
            "Date & Time": flat.get('timestamp_string'),
        })
    headers = list(rows[0].keys()) if rows else []
    return tableToMarkdown(title, rows, headers=headers, removeNull=True)
def activity_ip_lookup_to_markdown(results: List[Dict], title: str) -> str:
    """
    Render IP activity records as a markdown table.

    Args:
        results (list): Cisco Umbrella Reporting data
        title (str): Title string

    Returns:
        A string representation of the markdown table
    """
    rows = []
    for activity in results:
        flat = activity_build_data(activity)
        rows.append({
            "Identity": ", ".join(flat.get('identity', [])),
            "Destination IP": activity.get("destinationip", ''),
            "Source IP": activity.get("sourceip", ''),
            "Source Port": activity.get("sourceport", ''),
            "Destination Port": activity.get("destinationport", ''),
            "Categories": ", ".join(flat.get('category', [])),
            "Type": activity.get("type", ''),
            "Action": activity.get("verdict", ''),
            "Date & Time": flat.get('timestamp_string'),
        })
    headers = list(rows[0].keys()) if rows else []
    return tableToMarkdown(title, rows, headers=headers, removeNull=True)
def activity_amp_lookup_to_markdown(results: List[Dict], title: str) -> str:
    """
    Render AMP (file disposition) activity records as a markdown table.

    Args:
        results (list): Cisco Umbrella Reporting data
        title (str): Title string

    Returns:
        A string representation of the markdown table
    """
    rows = []
    for activity in results:
        # Timestamps arrive as milliseconds since the epoch.
        epoch_ms = activity.get("timestamp", 0)
        when = datetime.utcfromtimestamp(
            epoch_ms / 1000.0).strftime(DATE_TIME_FORMAT)
        rows.append({
            "First Seen": activity.get("firstseenat", ''),
            "Disposition": activity.get("disposition", ''),
            "Score": activity.get("score", ''),
            "Host Name": activity.get("hostname", ''),
            "Malware": activity.get("malwarename", ''),
            "SHA256": activity.get("sha256", ''),
            "Date & Time": when,
        })
    headers = list(rows[0].keys()) if rows else []
    return tableToMarkdown(title, rows, headers=headers, removeNull=True)
def pagination(page: Optional[int], page_size: Optional[int]):
    """
    Translate page/page_size arguments into API limit/offset values.

    Args:
        page: The page number.
        page_size: The number of requested results per page.

    Returns:
        limit (int): Records per page.
        offset (int): The number of records to be skipped.
    """
    if page is None:
        # First page (zero-based index).
        page_index = DEFAULT_OFFSET
    elif page <= 0:
        raise DemistoException(PAGE_NUMBER_ERROR_MSG)
    else:
        page_index = page - 1
    if page_size is None:
        page_size = DEFAULT_PAGE_SIZE
    elif page_size <= 0:
        raise DemistoException(PAGE_SIZE_ERROR_MSG)
    return page_size, page_index * page_size
def create_cisco_umbrella_args(limit: Optional[int], offset: Optional[int], args: Dict) -> Dict:
    """
    Build the query-parameter dictionary for the Cisco Umbrella API from demisto.args().

    Args:
        limit: Records per page.
        offset: The number of records to be skipped.
        args: demisto.args()

    Returns:
        Return arguments dict.
    """
    # Validate every indicator-style argument that was actually supplied;
    # validation order matters for which error surfaces first.
    indicator_values = {}
    for name in ('sha256', 'ip', 'domains', 'urls', 'intrusion_action'):
        value = args.get(name)
        indicator_values[name] = value
        if value:
            check_valid_indicator_value(name, value)
    max_limit = arg_to_number(args.get('limit', DEFAULT_PAGE_SIZE), arg_name='limit')
    return {
        # An explicit 'limit' argument only wins when pagination left the default.
        'limit': limit if limit != DEFAULT_PAGE_SIZE else max_limit,
        'offset': offset,
        'from': args.get('from', DEFAULT_FROM_DATE),
        'to': args.get('to', DEFAULT_TO_DATE),
        'threattypes': args.get('threat_types'),
        'identitytypes': args.get('identity_types'),
        'ampdisposition': args.get('amp_disposition'),
        'filename': args.get('file_name'),
        'intrusionaction': indicator_values['intrusion_action'],
        'domains': indicator_values['domains'],
        'urls': indicator_values['urls'],
        'ip': indicator_values['ip'],
        'ports': args.get('ports'),
        'verdict': args.get('verdict'),
        'threats': args.get('threats'),
        'signatures': args.get('signatures'),
        'sha256': indicator_values['sha256'],
    }
''' COMMAND FUNCTIONS '''
def test_module(client: Client) -> str:
    """Verify API connectivity and authentication with a minimal activity query.

    Args:
        client(Client): Client class object
    Returns:
        'ok' when the connection to the service succeeds.
    """
    client.query_cisco_umbrella_api(
        'activity',
        {'limit': 1, 'from': '-1days', 'to': 'now', 'offset': 0},
    )
    return 'ok'
def get_destinations_list_command(client: Client, args: Dict[str, Any]):
    """List destinations ordered by the number of requests made, in descending order.

    Args:
        client: Cisco Umbrella Reporting client to use.
        args: all command arguments, usually passed from ``demisto.args()``.
    Returns:
        CommandResults: object passed to ``return_results``.
    """
    traffic_type = args.get('traffic_type')
    endpoint = f'top-destinations/{traffic_type}' if traffic_type else 'top-destinations'
    page = arg_to_number(args.get('page'), arg_name='page')
    page_size = arg_to_number(args.get('page_size', DEFAULT_PAGE_SIZE), arg_name='page_size')
    limit, offset = pagination(page, page_size)
    api_args = create_cisco_umbrella_args(limit, offset, args)
    response = client.query_cisco_umbrella_api(endpoint, api_args)
    records = response.get('data', [])
    title = get_command_title_string('Destination', page, page_size)
    readable_output = destination_lookup_to_markdown(records, title) if records else 'No destinations to present.\n'
    return CommandResults(
        readable_output=readable_output,
        outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.Destination',
        outputs_key_field='domain',
        outputs=records,
    )
def get_categories_list_command(client: Client, args: Dict[str, Any]):
    """List categories ordered by the number of matching requests, in descending order.

    * Due to a bug in the API - the limit and page_size arguments are not supported in the get_categories_list_command.

    Args:
        client: Cisco Umbrella Reporting client to use.
        args: all command arguments, usually passed from ``demisto.args()``.
    Returns:
        CommandResults: object passed to ``return_results``.
    """
    traffic_type = args.get('traffic_type')
    endpoint = f'top-categories/{traffic_type}' if traffic_type else 'top-categories'
    page = arg_to_number(args.get('page'), arg_name='page')
    page_size = arg_to_number(args.get('page_size', DEFAULT_PAGE_SIZE), arg_name='page_size')
    limit, offset = pagination(page, page_size)
    api_args = create_cisco_umbrella_args(limit, offset, args)
    response = client.query_cisco_umbrella_api(endpoint, api_args)
    records = response.get('data', [])
    title = get_command_title_string('Category', page, page_size)
    readable_output = categories_lookup_to_markdown(records, title) if records else 'No categories to present.\n'
    return CommandResults(
        readable_output=readable_output,
        outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.Category',
        outputs_key_field='category.id',
        outputs=records,
    )
def get_identities_list_command(client: Client, args: Dict[str, Any]):
    """List identities ordered by the number of requests they made, in descending order.

    Args:
        client: Cisco Umbrella Reporting client to use.
        args: all command arguments, usually passed from ``demisto.args()``.
    Returns:
        CommandResults: object passed to ``return_results``.
    """
    traffic_type = args.get('traffic_type')
    endpoint = f'top-identities/{traffic_type}' if traffic_type else 'top-identities'
    page = arg_to_number(args.get('page'), arg_name='page')
    page_size = arg_to_number(args.get('page_size', DEFAULT_PAGE_SIZE), arg_name='page_size')
    limit, offset = pagination(page, page_size)
    api_args = create_cisco_umbrella_args(limit, offset, args)
    response = client.query_cisco_umbrella_api(endpoint, api_args)
    records = response.get('data', [])
    title = get_command_title_string('Identities', page, page_size)
    readable_output = identities_lookup_to_markdown(records, title) if records else 'No identities to present.\n'
    return CommandResults(
        readable_output=readable_output,
        outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.Identity',
        outputs_key_field='identity.id',
        outputs=records,
    )
def get_file_list_command(client: Client, args: Dict[str, Any]):
    """List files seen within a timeframe. Only returns proxy data.

    Args:
        client: Cisco Umbrella Reporting client to use.
        args: all command arguments, usually passed from ``demisto.args()``.
    Returns:
        CommandResults: object passed to ``return_results``.
    """
    page = arg_to_number(args.get('page'), arg_name='page')
    page_size = arg_to_number(args.get('page_size', DEFAULT_PAGE_SIZE), arg_name='page_size')
    limit, offset = pagination(page, page_size)
    api_args = create_cisco_umbrella_args(limit, offset, args)
    response = client.query_cisco_umbrella_api('top-files', api_args)
    records = response.get('data', [])
    title = get_command_title_string('File', page, page_size)
    readable_output = file_type_lookup_to_markdown(records, title) if records else 'No files to present.\n'
    return CommandResults(
        readable_output=readable_output,
        outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.File',
        outputs_key_field='sha256',
        outputs=records,
    )
def get_threat_list_command(client: Client, args: Dict[str, Any]):
    """List threats seen within a timeframe. Returns both DNS and Proxy data.

    Args:
        client: Cisco Umbrella Reporting client to use.
        args: all command arguments, usually passed from ``demisto.args()``.
    Returns:
        CommandResults: object passed to ``return_results``.
    """
    traffic_type = args.get('traffic_type')
    endpoint = f'top-threats/{traffic_type}' if traffic_type else 'top-threats'
    page = arg_to_number(args.get('page'), arg_name='page')
    page_size = arg_to_number(args.get('page_size', DEFAULT_PAGE_SIZE), arg_name='page_size')
    limit, offset = pagination(page, page_size)
    api_args = create_cisco_umbrella_args(limit, offset, args)
    response = client.query_cisco_umbrella_api(endpoint, api_args)
    records = response.get('data', [])
    title = get_command_title_string('Threat', page, page_size)
    readable_output = threat_lookup_to_markdown(records, title) if records else 'No threats to present.\n'
    return CommandResults(
        readable_output=readable_output,
        outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.Threat',
        outputs_key_field='threat',
        outputs=records,
    )
def get_event_types_list_command(client: Client, args: Dict[str, Any]):
    """List event types ordered by the number of requests made for each type, descending.

    Args:
        client: Cisco Umbrella Reporting client to use.
        args: all command arguments, usually passed from ``demisto.args()``.
    Returns:
        CommandResults: object passed to ``return_results``.
    """
    page = arg_to_number(args.get('page'), arg_name='page')
    page_size = arg_to_number(args.get('page_size', DEFAULT_PAGE_SIZE), arg_name='page_size')
    limit, offset = pagination(page, page_size)
    api_args = create_cisco_umbrella_args(limit, offset, args)
    response = client.query_cisco_umbrella_api('top-eventtypes', api_args)
    records = response.get('data', [])
    title = get_command_title_string('Event Type', page, page_size)
    readable_output = event_types_lookup_to_markdown(records, title) if records else 'No event types to present.\n'
    return CommandResults(
        readable_output=readable_output,
        outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.EventType',
        outputs_key_field='eventtype',
        outputs=records,
    )
def get_activity_list_command(client: Client, args: Dict[str, Any]):
    """List all activity entries (dns/proxy/firewall/ip/intrusion/amp) within a timeframe.

    Args:
        client: Cisco Umbrella Reporting client to use.
        args: all command arguments, usually passed from ``demisto.args()``.
    Returns:
        CommandResults: object passed to ``return_results``.
    """
    page = arg_to_number(args.get('page'), arg_name='page')
    page_size = arg_to_number(args.get('page_size', DEFAULT_PAGE_SIZE), arg_name='page_size')
    limit, offset = pagination(page, page_size)
    api_args = create_cisco_umbrella_args(limit, offset, args)
    response = client.query_cisco_umbrella_api('activity', api_args)
    records = response.get('data', [])
    title = get_command_title_string('Activity', page, page_size)
    readable_output = activity_lookup_to_markdown(records, title) if records else 'No activities to present.\n'
    return CommandResults(
        readable_output=readable_output,
        outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.Activity',
        outputs_key_field='domain',
        outputs=records,
    )
def get_activity_by_traffic_type_command(client: Client, args: Dict[str, Any]):
    """List activity entries of a single traffic type within a timeframe.

    Valid activity types are dns, proxy, firewall, intrusion, ip, amp.

    Args:
        client: Cisco Umbrella Reporting client to use.
        args: all command arguments, usually passed from ``demisto.args()``.
    Returns:
        CommandResults: object passed to ``return_results``.
    """
    traffic_type = args.get('traffic_type')
    if not traffic_type:
        raise DemistoException("Please select a traffic type.")
    # 'amp' is exposed by the API under a differently-named endpoint.
    endpoint = 'activity/amp-retrospective' if traffic_type == 'amp' else f'activity/{traffic_type}'
    markdown_function = {
        'dns': activity_dns_lookup_to_markdown,
        'proxy': activity_proxy_lookup_to_markdown,
        'firewall': activity_firewall_lookup_to_markdown,
        'ip': activity_ip_lookup_to_markdown,
        'intrusion': activity_intrusion_lookup_to_markdown,
        'amp': activity_amp_lookup_to_markdown,
    }
    context_output_name = {
        'dns': 'ActivityDns',
        'proxy': 'ActivityProxy',
        'firewall': 'ActivityFirewall',
        'intrusion': 'ActivityIntrusion',
        'ip': 'ActivityIP',
        'amp': 'ActivityAMPRetro',
    }
    allowed_params = ACTIVITY_TRAFFIC_TYPE_DICT[traffic_type]
    # Reject any argument that is not valid for the chosen traffic type.
    if set(args.keys()) - set(allowed_params):
        raise DemistoException(
            f"Invalid optional parameter is selected for traffic type {traffic_type}.\n"
            f"Supported optional parameters for {traffic_type} traffic type are:"
            f" {', '.join(allowed_params)}.")
    page = arg_to_number(args.get('page'), arg_name='page')
    page_size = arg_to_number(args.get('page_size', DEFAULT_PAGE_SIZE), arg_name='page_size')
    limit, offset = pagination(page, page_size)
    api_args = create_cisco_umbrella_args(limit, offset, args)
    response = client.query_cisco_umbrella_api(endpoint, api_args)
    records = response.get('data', [])
    title = get_command_title_string(f'{traffic_type.capitalize()} Activity', page, page_size)
    readable_output = (markdown_function[traffic_type](records, title)
                       if records else f'No {traffic_type} activities to present.\n')
    return CommandResults(
        readable_output=readable_output,
        outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.{context_output_name[traffic_type]}',
        outputs_key_field='',
        outputs=records,
    )
def get_summary_list_command(client: Client, args: Dict[str, Any]):
    """Get the summary, optionally broken down by category/destination/intrusion rule.

    Args:
        client: Cisco Umbrella Reporting client to use.
        args: all command arguments, usually passed from ``demisto.args()``.
    Returns:
        CommandResults: object passed to ``return_results``.
    """
    summary_outputs_key_field = {
        'category': 'category.id',
        'destination': 'domain',
        'intrusion_rule': 'signaturelist.id',
    }
    summary_endpoint_dict = {
        'category': 'summaries-by-category',
        'destination': 'summaries-by-destination',
        'intrusion_rule': 'summaries-by-rule/intrusion',
    }
    summary_markdown_dict = {
        'category': summary_category_lookup_to_markdown,
        'destination': summary_destination_lookup_to_markdown,
        'intrusion_rule': summary_rule_lookup_to_markdown,
    }
    context_output_name = {
        'category': 'SummaryWithCategory',
        'destination': 'SummaryWithDestination',
        'intrusion_rule': 'SignatureListSummary',
    }
    summary_type = args.get('summary_type', '')
    endpoint = summary_endpoint_dict.get(summary_type, 'summary')
    allowed_params = SUMMARY_TYPE_DICT.get(summary_type, SUMMARY_TYPE_DICT['all'])
    # Reject any argument that is not valid for the chosen summary type.
    if set(args.keys()) - set(allowed_params):
        raise DemistoException(
            f"Invalid optional parameter is selected for summary type {summary_type}.\n"
            f"Supported optional parameters for {summary_type} summary type are:"
            f" {', '.join(allowed_params)}.")
    page = arg_to_number(args.get('page'), arg_name='page')
    page_size = arg_to_number(args.get('page_size', DEFAULT_PAGE_SIZE), arg_name='page_size')
    limit, offset = pagination(page, page_size)
    response = client.query_cisco_umbrella_api(
        endpoint, create_cisco_umbrella_args(limit, offset, args))
    if not summary_type:
        # Plain summary: the API returns a single object rather than a list.
        data = response.get('data', {})
        title = get_command_title_string("Summary", page, page_size)
        readable_output = summary_lookup_to_markdown(data, title) if data else 'No summary to present.\n'
        return CommandResults(
            readable_output=readable_output,
            outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.Summary',
            outputs_key_field='',
            outputs=data,
        )
    data = response.get('data', [])
    title = get_command_title_string(
        f"Summary with {summary_type.split('_')[0].capitalize()}", page, page_size)
    readable_output = (summary_markdown_dict[summary_type](data, title)
                       if data else f'No {summary_type} summary to present.\n')
    return CommandResults(
        readable_output=readable_output,
        outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.{context_output_name[summary_type]}',
        outputs_key_field=f'{summary_outputs_key_field[summary_type]}',
        outputs=data,
    )
def main():
    """
    Parse and validate integration params, then dispatch the requested command.
    """
    args = demisto.args()
    command = demisto.command()
    params = demisto.params()
    credentials = params.get('credentials', {})
    secret_key = credentials.get('password')
    client_key = credentials.get('identifier')
    organisation_id = params.get('organization_id')
    base_url = params.get("api_url")  # the service API url
    proxy = params.get('proxy', False)
    handle_proxy()
    verify_certificate = not params.get('insecure', False)
    demisto.debug(f'Command being called is {command}')
    # Command-name -> handler dispatch table; every handler takes (client, args).
    commands = {
        "umbrella-reporting-destination-list": get_destinations_list_command,
        "umbrella-reporting-category-list": get_categories_list_command,
        "umbrella-reporting-identity-list": get_identities_list_command,
        "umbrella-reporting-event-type-list": get_event_types_list_command,
        "umbrella-reporting-file-list": get_file_list_command,
        "umbrella-reporting-threat-list": get_threat_list_command,
        "umbrella-reporting-activity-list": get_activity_list_command,
        "umbrella-reporting-activity-get": get_activity_by_traffic_type_command,
        "umbrella-reporting-summary-list": get_summary_list_command,
    }
    try:
        client = Client(
            base_url=base_url,
            organisation_id=organisation_id,
            secret_key=secret_key,
            client_key=client_key,
            proxy=proxy,
            verify=verify_certificate,
        )
        if command == "test-module":
            return_results(test_module(client))
        elif command in commands:
            return_results(commands[command](client, args))
        else:
            raise NotImplementedError
    # Log exceptions
    except Exception as e:
        return_error(
            f'Failed to execute {command} command. Error: {str(e)}')
# XSOAR executes integrations by importing them, so __name__ is '__builtin__'
# (Python 2) or 'builtins' (Python 3) rather than '__main__'.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | 651af63fcacffeab31726edfd4790f57 | 38.763744 | 147 | 0.61556 | 3.93602 | false | false | false | false |
demisto/content | Packs/CounterCraft/Integrations/CounterCraft/CounterCraft.py | 2 | 24930 | import demistomock as demisto
from CommonServerPython import *
""" IMPORTS """
import json
import urllib.parse
import requests
import hmac
from datetime import datetime
import time
import binascii
import uuid
from typing import Dict, List
from hashlib import sha1
# Disable insecure warnings
requests.packages.urllib3.disable_warnings()
""" GLOBALS"""
X_API_KEY_AUTHENTICATION_HEADER_PREFIX = "X-API-Key-Auth-"
SERVER = demisto.params().get("server").rstrip("/") + "/api"
API_KEY = demisto.params().get("api_key")
SECRET_KEY = demisto.params().get("secret_key")
VERIFY_CERTIFICATE = not demisto.params().get("insecure", False)
PROXY = demisto.params().get('proxy', False)
FETCH_DELTA = "24 hours"
DEF_HEADERS = {"Accept": "application/json", "Content-Type": "application/json"}
ERR_DICT = {
400: 'Bad request. Please check your arguments and Deception Director API manual',
401: 'User does not have the right permission',
404: 'Entity not found. Please make sure the entity does exist',
500: 'Bad request. Please check your arguments and Deception Director API manual',
}
CAMPAIGN_FIELDS = {
"id": "ID",
"name": "Name",
"description": "Description",
"status_code": "StatusCode",
}
DSN_FIELDS = {
"id": "ID",
"name": "Name",
"description": "Description",
"hostname": "Hostname",
"port": "Port",
}
HOST_FIELDS = {
"id": "ID",
"name": "Name",
"description": "Description",
"type_code": "TypeCode",
"status_code": "StatusCode",
}
SERVICE_FIELDS = {
"id": "ID",
"name": "Name",
"description": "Description",
"type_code": "TypeCode",
"status_code": "StatusCode",
}
BREADCRUMB_FIELDS = {
"id": "ID",
"name": "Name",
"description": "Description",
"type_code": "TypeCode",
"status_code": "StatusCode",
}
PROVIDER_FIELDS = {
"id": "ID",
"name": "Name",
"description": "Description",
"type_code": "TypeCode",
"status_code": "StatusCode",
}
INCIDENT_FIELDS = {
"id": "ID",
"name": "Name",
"description": "Description",
"status_code": "StatusCode",
"tlp_code": "TLPCode",
}
OBJECT_FIELDS = {
"id": "ID",
"value": "Value",
"hits": "Hits",
"score": "Score",
"type_code": "TypeCode",
"first_seen": "FirstSeen",
"last_seen": "LastSeen",
"events_count": "EventsCount",
"tags": "Tags",
}
EVENT_FIELDS = {
"id": "ID",
"campaign_name": "CampaignName",
"category_code": "CategoryCode",
"host_name": "HostName",
"service_name": "ServiceName",
"event_date": "EventDate",
"score": "Score",
"type_code": "TypeCode",
"data": "Data",
"tags": "Tags",
}
""" HELPERS """
def sign(secret_key, data):
    """
    @param $secret_key the secret key to use for the HMAC-SHA digesting
    @param $data the string to sign
    @return string base64 encoding of the HMAC-SHA1 hash of the data parameter using {@code secret_key} as cipher key.
    """
    digest = hmac.new(secret_key.encode(), data.encode(), sha1).digest()
    encoded = binascii.b2a_base64(digest)
    return encoded[:-1]  # drop the trailing '\n' that b2a_base64 appends
def get_signature(request_method, request_headers, path, query_string, private_key):
    """
    Calculate the authentication signature to be sent with a request to the API
    @param $request_method the HTTP method (GET, POST, etc.)
    @param $request_headers the HTTP headers of the request
    @param $path the urlencoded string including the path (from the first forward slash)
    @param $query_string the urlencoded query string as bytes
    @param $private_key the API secret key used to sign
    @return string the base64 signature for the Authorization header
    """
    prefix = X_API_KEY_AUTHENTICATION_HEADER_PREFIX.lower()
    # Only the CounterCraft-specific X-API-Key-* headers participate in the signature.
    x_headers = {
        name: value
        for name, value in request_headers.items()
        if name.lower().startswith(prefix)
    }
    query = query_string.strip()
    if query == b"":
        request_line = path
    else:
        request_line = "%s?%s" % (path, query.decode("utf-8"))
    string_to_sign = "\n".join([
        request_method.upper().strip(),
        get_serialized_headers(x_headers),
        request_line,
    ])
    return sign(private_key, string_to_sign).decode("utf-8")
def get_serialized_headers(x_headers):
    """
    Generate a string ready to be signed based on HTTP headers received.

    Header names are normalized (stripped + lowercased), values are stripped,
    and duplicate names (after normalization) are comma-joined; the result is
    one 'name:value' pair per line, sorted by name.

    @param $x_headers a non neccesarily ordered map (array without duplicates) of the HTTP headers to be ordered.
    @return string The serialized headers, an empty string if no headers are passed
    """
    collected = {}  # type: Dict[str, List]
    for name, value in x_headers.items():
        # Normalize the key exactly once. The original code stored under
        # k.lower() but appended under k.strip().lower(), which raised a
        # KeyError for header names with surrounding whitespace.
        key = name.strip().lower()
        collected.setdefault(key, []).append(value.strip())
    return "\n".join(
        "%s:%s" % (k, v)
        for k, v in sorted({k: ",".join(v) for k, v in collected.items()}.items())
    )
def http_request(request_method, path, data=None, params=""):
    """
    Send a signed HTTP request to the CounterCraft API.

    @param $request_method the request method GET, POST, etc.
    @param $path the HTTP path appended to SERVER
    @param $data the JSON body included in POST/PATCH requests
    @param $params the URL params to be included
    @return dict the response parsed as JSON; on any error the command is
            aborted via return_error()
    """
    if data is None:  # avoid the shared-mutable-default pitfall
        data = {}
    headers = {
        X_API_KEY_AUTHENTICATION_HEADER_PREFIX
        + "Date": time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime())
    }
    headers.update(DEF_HEADERS)
    # The signature covers the method, the X-API-Key-* headers and path?query,
    # so it must be computed before the Authorization header is added.
    signature = get_signature(
        request_method,
        headers,
        path,
        urllib.parse.urlencode(params).encode("utf-8"),
        SECRET_KEY,
    )
    headers["Authorization"] = "APIKey %s:%s" % (API_KEY, signature)
    url = SERVER + path
    proxies = None
    if PROXY:
        proxies = handle_proxy()
    res = requests.request(
        request_method,
        url,
        data=json.dumps(data),
        params=params,
        headers=headers,
        verify=VERIFY_CERTIFICATE,
        proxies=proxies
    )
    if res.status_code not in [200, 201, 204]:
        demisto.debug("Error doing the HTTP query. We got a %s: %s" % (res.status_code, res.text))
        # Fall back to a generic message for status codes missing from ERR_DICT;
        # the previous ERR_DICT[...] lookup raised KeyError on e.g. 403.
        return_error(ERR_DICT.get(
            res.status_code,
            "Error doing the HTTP query. We got a %s: %s" % (res.status_code, res.text)))
    try:
        res_json = res.json()
        return res_json
    except Exception as ex:
        # NOTE(review): a 204 response has no body and will land here — the
        # JSON decode error is then surfaced via return_error; confirm intended.
        demisto.debug(str(ex))
        return_error(str(ex))
def return_host_standard_context(host):
    """Map a CounterCraft host record onto the XSOAR Host standard context.

    Only hosts whose type_code is MACHINE are mapped; any other type yields
    an empty dict. The optional 'ansible_facts' block enriches the context.
    """
    context = {}
    if host['type_code'] != 'MACHINE':
        return context
    context["ID"] = host['uuid']
    context["IP"] = host['data']['ip_address']
    if 'ansible_facts' in host['data']:
        facts = host['data']['ansible_facts']
        context["Domain"] = facts['ansible_domain']
        context["Hostname"] = facts['ansible_hostname']
        context["BIOSVersion"] = facts['ansible_bios_version']
        context["Memory"] = facts['ansible_memtotal_mb']
        context["Model"] = facts['ansible_product_name']
        context["OS"] = facts['ansible_os_family']
        context["OSVersion"] = facts['ansible_distribution_version']
        context["Processor"] = ' '.join(facts['ansible_processor'])
        context["Processors"] = len(facts['ansible_processor'])
    return context
def return_entry_results(title, content, human_readable, context, headers):
    """
    Generic function that receives a result json, and turns it into an entryObject
    @param $title the table title
    @param $content the object contents in JSON format
    @param $human_readable human readable data for the table
    @param $context the entry context
    @param $headers table headers
    @return dict the entryObject
    """
    # Empty results still write the context but with a placeholder message.
    if not content:
        return_outputs(
            readable_output="There is no output result",
            outputs=context,
            raw_response=content,
        )
        return
    if headers:
        if isinstance(headers, str):
            headers = headers.split(",")
        elif isinstance(content, dict):
            # Keep only headers that actually exist as keys in the content.
            headers = list(set(headers).intersection(set(content.keys())))
    readable_output = tableToMarkdown(
        title,
        human_readable,
        headers,
        lambda h: h.title().replace("_", " ").replace(".", ":"),
    )
    return_outputs(
        readable_output=readable_output,
        outputs=context,
        raw_response=content,
    )
""" COMMANDS """
def test_module_command():
    """Validate connectivity/authentication by listing campaigns; reports 'ok'."""
    http_request("GET", "/campaigns")
    demisto.results("ok")
def list_dsns_command():
    """
    Retrieve all deception support nodes
    """
    res = http_request("GET", "/deception_support_nodes")
    rows = [
        {new: item[old] if old in item else None for old, new in DSN_FIELDS.items()}
        for item in res["data"]
    ]
    return_entry_results(
        "Deception Support Node",
        res["data"],
        rows,
        {"CounterCraft.DSN(val.ID && val.ID === obj.ID)": createContext(rows, removeNull=True)},
        headers=["ID", "Name", "Description", "Hostname", "Port"],
    )
def list_providers_command():
    """
    Retrieve all providers
    """
    res = http_request("GET", "/providers")
    rows = [
        {new: item[old] if old in item else None for old, new in PROVIDER_FIELDS.items()}
        for item in res["data"]
    ]
    return_entry_results(
        "Providers",
        res["data"],
        rows,
        {"CounterCraft.Provider(val.ID && val.ID === obj.ID)": createContext(rows, removeNull=True)},
        headers=["ID", "Name", "Description", "TypeCode", "StatusCode"],
    )
def list_campaigns_command():
    """
    Retrieve all campaigns, optionally filtered by exact name
    """
    name = demisto.args().get("name")
    criteria = {} if name is None else {"criteria": f"name:\"{name}\""}
    res = http_request("GET", "/campaigns", params=criteria)
    rows = [
        {new: item[old] if old in item else None for old, new in CAMPAIGN_FIELDS.items()}
        for item in res["data"]
    ]
    return_entry_results(
        "Campaigns",
        res["data"],
        rows,
        {"CounterCraft.Campaign(val.ID && val.ID === obj.ID)": createContext(rows, removeNull=True)},
        headers=["ID", "Name", "Description", "StatusCode"],
    )
def list_hosts_command():
    """
    Retrieve all hosts, optionally filtered by campaign id
    """
    campaign_id = demisto.args().get("campaign_id")
    criteria = {} if campaign_id is None else {"criteria": "campaigns.id:" + campaign_id}
    res = http_request("GET", "/hosts", params=criteria)
    rows = [
        {new: item[old] if old in item else None for old, new in HOST_FIELDS.items()}
        for item in res["data"]
    ]
    # MACHINE hosts are also published to the generic XSOAR Host context.
    host_standard = [return_host_standard_context(item) for item in res["data"]]
    return_entry_results(
        "Hosts",
        res["data"],
        rows,
        {
            "CounterCraft.Host(val.ID && val.ID === obj.ID)": createContext(rows, removeNull=True),
            "Host(val.IP && val.IP === obj.IP)": createContext(host_standard, removeNull=True),
        },
        headers=["ID", "Name", "Description", "StatusCode", "TypeCode"],
    )
def list_services_command():
    """
    Retrieve all services, optionally filtered by host id
    """
    host_id = demisto.args().get("host_id")
    criteria = {} if host_id is None else {"criteria": "hosts.id:" + host_id}
    res = http_request("GET", "/services", params=criteria)
    rows = [
        {new: item[old] if old in item else None for old, new in SERVICE_FIELDS.items()}
        for item in res["data"]
    ]
    return_entry_results(
        "Services",
        res["data"],
        rows,
        {"CounterCraft.Service(val.ID && val.ID === obj.ID)": createContext(rows, removeNull=True)},
        headers=["ID", "Name", "Description", "StatusCode", "TypeCode"],
    )
def list_breadcrumbs_command():
    """
    Retrieve all breadcrumbs, optionally filtered by campaign id
    """
    campaign_id = demisto.args().get("campaign_id")
    criteria = {} if campaign_id is None else {"criteria": "campaigns.id:" + campaign_id}
    res = http_request("GET", "/breadcrumbs", params=criteria)
    rows = [
        {new: item[old] if old in item else None for old, new in BREADCRUMB_FIELDS.items()}
        for item in res["data"]
    ]
    return_entry_results(
        "Breadcrumbs",
        res["data"],
        rows,
        {"CounterCraft.Breadcrumb(val.ID && val.ID === obj.ID)": createContext(rows, removeNull=True)},
        headers=["ID", "Name", "Description", "StatusCode", "TypeCode"],
    )
def list_incidents_command():
    """
    Retrieve all incidents, optionally filtered by campaign id
    """
    campaign_id = demisto.args().get("campaign_id")
    criteria = {} if campaign_id is None else {"criteria": "campaigns.id:" + campaign_id}
    res = http_request("GET", "/incidents", params=criteria)
    rows = [
        {new: item[old] if old in item else None for old, new in INCIDENT_FIELDS.items()}
        for item in res["data"]
    ]
    return_entry_results(
        "Incidents",
        res["data"],
        rows,
        {"CounterCraft.Incident(val.ID && val.ID === obj.ID)": createContext(rows, removeNull=True)},
        headers=["ID", "Name", "Description", "StatusCode", "TLPCode", "Tags"],
    )
def get_object_command():
    """
    Retrieve objects, optionally filtered by object value
    """
    value = demisto.args().get("value")
    criteria = {} if value is None else {"criteria": "objects.value:" + value}
    res = http_request("GET", "/objects", params=criteria)
    # Render the epoch timestamps as human-readable dates, in place.
    for entry in res["data"]:
        entry["first_seen"] = formatEpochDate(entry["first_seen"])
        entry["last_seen"] = formatEpochDate(entry["last_seen"])
    rows = [
        {new: item[old] if old in item else None for old, new in OBJECT_FIELDS.items()}
        for item in res["data"]
    ]
    return_entry_results(
        "Objects",
        res["data"],
        rows,
        {"CounterCraft.Object(val.ID && val.ID === obj.ID)": createContext(rows, removeNull=True)},
        headers=[
            "ID",
            "Value",
            "Hits",
            "EventsCount",
            "TypeCode",
            "Score",
            "FirstSeen",
            "LastSeen",
            "Tags",
        ],
    )
def get_events_command():
    """
    Retrieve events matching the given criteria, newest first
    """
    criteria = {
        "criteria": demisto.args().get("criteria"),
        "order": "-event_date",
        "page": 1,
        "per_page": demisto.args().get("max_results"),
    }
    res = http_request("GET", "/events", params=criteria)
    # Render the epoch timestamps as human-readable dates, in place.
    for entry in res["data"]:
        entry["event_date"] = formatEpochDate(entry["event_date"])
    rows = [
        {new: item[old] if old in item else None for old, new in EVENT_FIELDS.items()}
        for item in res["data"]
    ]
    return_entry_results(
        "Events",
        res["data"],
        rows,
        {"CounterCraft.Event(val.ID && val.ID === obj.ID)": createContext(rows, removeNull=True)},
        headers=[
            "ID",
            "CampaignName",
            "CategoryCode",
            "HostName",
            "ServiceName",
            "EventDate",
            "Score",
            "TypeCode",
            "Data",
            "Tags",
        ],
    )
def list_notifications(last_fetched):
    """
    Retrieve all CONSOLE-plugin notifications created since *last_fetched*,
    newest first, with stats included.
    """
    criteria = {
        "criteria": 'plugin_code:CONSOLE AND notifications.ctime:["%s" TO *]'
        % last_fetched,
        "order": "-ctime",
        "with_stats": True,
    }
    return http_request("GET", "/notifications", params=criteria)["data"]
def create_campaign_command():
    """
    Create a campaign from the 'name' and 'description' arguments
    """
    args = demisto.args()
    res = http_request(
        "POST",
        "/campaigns",
        data={"name": args.get("name"), "description": args.get("description")},
    )
    campaign = {
        new: res[old] if old in res else None
        for old, new in CAMPAIGN_FIELDS.items()
    }
    return_entry_results(
        "Campaign",
        res,
        campaign,
        {"CounterCraft.Campaign(val.ID && val.ID === obj.ID)": createContext(campaign, removeNull=True)},
        headers=["ID", "Name", "Description", "StatusCode"],
    )
def manage_campaign_command():
    """
    Run an operation (e.g. activate/deactivate) on a campaign
    """
    campaign_id = demisto.args().get("campaign_id")
    operation = demisto.args().get("operation")
    res = http_request("PATCH", "/campaigns/" + campaign_id, data={"action": operation})
    message = [{"ID": campaign_id, "Message": res["message"]}]
    return_entry_results(
        "Campaign Management",
        res,
        message,
        {"CounterCraft.Campaign(val.ID && val.ID === obj.ID)": createContext(message, removeNull=True)},
        headers=["ID", "Message"],
    )
def create_host_machine_command():
    """
    Create a host of type MACHINE
    """
    args = demisto.args()
    # Connection details for the deception machine being registered.
    host_data = {
        "ip_address": args.get("ip_address"),
        "port": args.get("port"),
        "username": args.get("username"),
        "password": args.get("password"),
        "os_family_usr": "linux",
    }
    data = {
        "name": args.get("name"),
        "description": args.get("description"),
        "provider_id": args.get("provider_id"),
        "deception_support_node_id": args.get("deception_support_node_id"),
        "campaign_id": args.get("campaign_id"),
        "type_code": "MACHINE",
        "uuid": str(uuid.uuid4()),  # client-generated identifier for the new host
        "data": host_data,
    }
    res = http_request("POST", "/hosts", data=data)
    host = {
        new: res[old] if old in res else None
        for old, new in HOST_FIELDS.items()
    }
    return_entry_results(
        "Hosts",
        res,
        host,
        {"CounterCraft.Host(val.ID && val.ID === obj.ID)": createContext(host, removeNull=True)},
        headers=["ID", "Name", "Description", "StatusCode", "TypeCode"],
    )
def manage_host_command():
    """Run an operation on an existing host and return the API acknowledgement."""
    args = demisto.args()
    host_id = args.get("host_id")
    res = http_request("PATCH", "/hosts/" + host_id, data={"action": args.get("operation")})
    message = {"ID": host_id, "Message": res["message"]}
    return_entry_results(
        "Host Management",
        res,
        message,
        {"CounterCraft.Host(val.ID && val.ID === obj.ID)": createContext(message, removeNull=True)},
        headers=["ID", "Message"],
    )
def manage_service_command():
    """Run an operation on an existing service and return the API acknowledgement."""
    service_id = demisto.args().get("service_id")
    operation = demisto.args().get("operation")
    res = http_request("PATCH", "/services/" + service_id, data={"action": operation})
    entry = {"ID": service_id, "Message": res["message"]}
    entry_context = createContext(entry, removeNull=True)
    return_entry_results(
        "Service Management",
        res,
        entry,
        {"CounterCraft.Service(val.ID && val.ID === obj.ID)": entry_context},
        headers=["ID", "Message"],
    )
def manage_breadcrumb_command():
    """Run an operation on an existing breadcrumb and return the API acknowledgement."""
    breadcrumb_id = demisto.args().get("breadcrumb_id")
    payload = {"action": demisto.args().get("operation")}
    res = http_request("PATCH", "/breadcrumbs/" + breadcrumb_id, data=payload)
    entry = {"ID": breadcrumb_id, "Message": res["message"]}
    return_entry_results(
        "Breadcrumb Management",
        res,
        entry,
        {"CounterCraft.Breadcrumb(val.ID && val.ID === obj.ID)": createContext(entry, removeNull=True)},
        headers=["ID", "Message"],
    )
def fetch_incidents_command():
    """
    Fetch incidents (user notifications) created since the last run, hand them
    to the platform, then persist the newest notification time for next time.
    """
    last_run = demisto.getLastRun()
    if not last_run:
        last_run = {}
    if "time" not in last_run:
        # First run: derive the fetch window start from FETCH_DELTA.
        # get timestamp in seconds
        timestamp, _ = parse_date_range(FETCH_DELTA, to_timestamp=True)
        # parse_date_range returns milliseconds; true division yields a float
        # number of seconds.
        timestamp /= 1000
    else:
        timestamp = last_run["time"]
    max_timestamp = timestamp
    # All alerts retrieved from get_alerts are newer than last_fetch and are in a chronological order
    notifications = list_notifications(timestamp)
    incidents = []
    for notification in notifications:
        # Strictly-greater comparison avoids re-ingesting the notification
        # whose ctime equals the stored last-run time.
        if int(notification["ctime"]) > timestamp:
            incidents.append(
                {
                    "name": notification["data"]["subject"],
                    "occurred": datetime.utcfromtimestamp(
                        int(notification["ctime"])
                    ).strftime("%Y-%m-%dT%H:%M:%SZ"),
                    "details": notification["data"]["html"],
                    "rawJSON": json.dumps(notification),
                }
            )
            # Track the newest ctime seen to advance the fetch cursor.
            if int(notification["ctime"]) > max_timestamp:
                max_timestamp = int(notification["ctime"])
    demisto.incidents(incidents)
    demisto.setLastRun({"time": max_timestamp})
def main():
    """Dispatch the invoked XSOAR command to its handler; report failures via return_error."""
    handlers = {
        "test-module": test_module_command,
        "countercraft-list-providers": list_providers_command,
        "countercraft-list-dsns": list_dsns_command,
        "countercraft-list-campaigns": list_campaigns_command,
        "countercraft-list-hosts": list_hosts_command,
        "countercraft-list-services": list_services_command,
        "countercraft-list-breadcrumbs": list_breadcrumbs_command,
        "countercraft-list-incidents": list_incidents_command,
        "countercraft-get-object": get_object_command,
        "countercraft-get-events": get_events_command,
        "countercraft-create-campaign": create_campaign_command,
        "countercraft-create-host-machine": create_host_machine_command,
        "countercraft-manage-campaign": manage_campaign_command,
        "countercraft-manage-host": manage_host_command,
        "countercraft-manage-service": manage_service_command,
        "countercraft-manage-breadcrumb": manage_breadcrumb_command,
        "fetch-incidents": fetch_incidents_command,
    }
    try:
        handler = handlers.get(demisto.command())
        if handler is not None:
            handler()  # unknown commands fall through silently, as before
    except Exception as e:
        return_error(
            "Unable to perform command : {}, Reason: {}".format(demisto.command(), e)
        )
# "builtin" (no underscores) is never a real module name; the XSOAR content
# convention is "__builtin__" (Python 2) and "builtins" (Python 3).
if __name__ in ("__main__", "__builtin__", "builtins"):
    main()
| mit | 625fb6d20cbd952816fffc6a0cb27a2d | 26.425743 | 119 | 0.572282 | 3.741558 | false | false | false | false |
demisto/content | Packs/DB2/Integrations/DB2/DB2.py | 2 | 12942 | import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
"""Integration for DB2"""
import re
import traceback
from typing import Any, Callable, Dict, List, Tuple
from urllib.parse import parse_qsl
import ibm_db
import urllib3
# Disable insecure warnings
urllib3.disable_warnings()
""" CONSTANTS """
PROTOCOL = "TCPIP"  # network protocol placed in the ODBC connection string
DRIVER_NAME = "{IBM DB2 ODBC DRIVER}"
# Matches square-bracketed segments (e.g. "[IBM][CLI Driver]") so driver
# prefixes can be stripped from error messages.
CLEAN = re.compile(r"\[.*?\]")
# Matches :name bind markers not preceded by ':', a word char or '\', and not
# followed by another ':' (avoids '::' casts and already-quoted text).
COLON_REGEX = re.compile(r"(?<![:\w\x5c]):(\w+)(?!:)", re.UNICODE)
# ============== DB2 Connection Options ===============
# Maps option names (as supplied in the instance's connect parameters) to the
# corresponding ibm_db option constants.
OPTIONS = {
    "ATTR_CASE": ibm_db.ATTR_CASE,
    "SQL_ATTR_AUTOCOMMIT": ibm_db.SQL_ATTR_AUTOCOMMIT,
    "SQL_ATTR_INFO_USERID": ibm_db.SQL_ATTR_INFO_USERID,
    "SQL_ATTR_CURSOR_TYPE": ibm_db.SQL_ATTR_CURSOR_TYPE,
    "SQL_ATTR_INFO_ACCTSTR": ibm_db.SQL_ATTR_INFO_ACCTSTR,
    "SQL_ATTR_INFO_APPLNAME": ibm_db.SQL_ATTR_INFO_APPLNAME,
    "SQL_ATTR_CURRENT_SCHEMA": ibm_db.SQL_ATTR_CURRENT_SCHEMA,
    "SQL_ATTR_INFO_WRKSTNNAME": ibm_db.SQL_ATTR_INFO_WRKSTNNAME,
    "SQL_ATTR_INFO_PROGRAMNAME": ibm_db.SQL_ATTR_INFO_PROGRAMNAME,
    "SQL_ATTR_USE_TRUSTED_CONTEXT": ibm_db.SQL_ATTR_USE_TRUSTED_CONTEXT,
    "SQL_ATTR_TRUSTED_CONTEXT_USERID": ibm_db.SQL_ATTR_TRUSTED_CONTEXT_USERID,
    "SQL_ATTR_TRUSTED_CONTEXT_PASSWORD": ibm_db.SQL_ATTR_TRUSTED_CONTEXT_PASSWORD,
}
# For options with a closed value set, maps the allowed string values to the
# ibm_db constants they translate to.
VALUES = {
    "SQL_ATTR_AUTOCOMMIT": {
        "SQL_AUTOCOMMIT_ON": ibm_db.SQL_AUTOCOMMIT_ON,
        "SQL_AUTOCOMMIT_OFF": ibm_db.SQL_AUTOCOMMIT_OFF,
    },
    "ATTR_CASE": {
        "CASE_NATURAL": ibm_db.CASE_NATURAL,
        "CASE_LOWER": ibm_db.CASE_LOWER,
        "CASE_UPPER": ibm_db.CASE_UPPER,
    },
    "SQL_ATTR_CURSOR_TYPE": {
        "SQL_CURSOR_FORWARD_ONLY": ibm_db.SQL_CURSOR_FORWARD_ONLY,
        "SQL_CURSOR_KEYSET_DRIVEN": ibm_db.SQL_CURSOR_KEYSET_DRIVEN,
        "SQL_CURSOR_DYNAMIC": ibm_db.SQL_CURSOR_DYNAMIC,
        "SQL_CURSOR_STATIC": ibm_db.SQL_CURSOR_STATIC,
    },
}
""" CLIENT CLASS """
class Client:
    """Client for the DB2 databases integration.

    Builds the ODBC connection string, applies connection options and holds
    the `ibm_db` connection used to execute queries.
    (Note: this is a standalone class - it does not subclass BaseClient.)
    """

    def __init__(
        self,
        host: str,
        username: str,
        password: str,
        port: str,
        database: str,
        ssl_connect: bool,
        connect_parameters: str = "",
        use_persistent=False,
    ):
        self.host = host
        self.username = username
        self.password = password
        self.port = port
        self.dbname = database
        self.connect_parameters = self._parse_connect_parameters(connect_parameters)
        self.ssl_connect = ssl_connect
        self.use_persistent = use_persistent
        # Connect eagerly so a bad configuration fails at construction time.
        self.connection = self._connect()

    @staticmethod
    def _parse_connect_parameters(connect_parameters: str) -> Dict:
        """
        Parses a string of the form key1=value1&key2=value2 etc.
        into a dict with matching keys and values.
        Args:
            connect_parameters: The string with query parameters
        Returns:
            A dict with the keys and values.
        """
        return dict(parse_qsl(connect_parameters, keep_blank_values=True))

    @property
    def create_url(self) -> str:
        """
        Create the ODBC connection string for the configured host.
        Returns:
            string containing all of the required parameters
        """
        conn_string = ("DRIVER={0};DATABASE={1};HOSTNAME={2};PORT={3};PROTOCOL={4};UID={5};PWD={6};").format(
            DRIVER_NAME,
            self.dbname,
            self.host,
            self.port,
            PROTOCOL,
            self.username,
            self.password,
        )
        if self.ssl_connect:
            conn_string += "SECURITY=SSL;"
        return conn_string

    def _options(self) -> Dict:
        """
        Map connection parameters onto ibm_db connection options.
        """
        options: Dict = {}
        for key, val in self.connect_parameters.items():
            option: str = key.upper()
            value: str = val.upper()
            if option in OPTIONS and option in VALUES and value in VALUES[option]:
                # Closed value set - translate to the ibm_db constant.
                options[option] = VALUES[option][value]
            elif option in OPTIONS and option not in VALUES:
                # Free-form option - pass the raw (original-case) value through.
                options[option] = val
            # Unrecognized options are silently skipped.
        return options

    def _connect(self) -> ibm_db.IBM_DBConnection:
        """
        Connect to the host and return the `Connection` object used for queries.
        Raises:
            DemistoException: when the driver refuses the connection.
        """
        conn: ibm_db.IBM_DBConnection = None
        try:
            if self.use_persistent:
                demisto.info("Initializing Persistent connection")
                conn = ibm_db.pconnect(self.create_url, "", "", conn_options=self._options())
            else:
                demisto.info("Initializing Non-Persistent connection")
                conn = ibm_db.connect(self.create_url, "", "", conn_options=self._options())
        except Exception:
            # conn_error is a function: without the parentheses the original
            # code logged the function object instead of the SQLSTATE.
            demisto.error(f"Connection State:\n{ibm_db.conn_error()}")
            demisto.error(f"Connection Error:\n{ibm_db.conn_errormsg()}")
            raise DemistoException(f"DB2 Connection Failed:\n{ibm_db.conn_errormsg()}")
        return conn

    def _prepare_statement(self, query: str, bindvars: Any) -> Any:
        """
        Populate values from bindvars into the query and `ibm_db.prepare` it.
        Args:
            query(str): db2 query string
            bindvars(Any): list/dictionary with values to populate query
        Returns:
            ibm_db.prepare statement handle
        """
        # Validate bindvars against `?` (positional) or `:name` (named) markers
        if len(bindvars):
            if isinstance(bindvars, list) and query.count("?") < len(bindvars):
                raise DemistoException("Insufficient bind values found")
            elif isinstance(bindvars, dict) and query.count(":") < len(bindvars.keys()):
                raise DemistoException("Insufficient bind names & values found")
        demisto.info("Preparing Statement ...")
        if isinstance(bindvars, dict):
            try:
                # Substitute each :name marker with its quoted value.
                def repl(x, bindvars=bindvars):
                    return f"'{bindvars[x.group(0).strip(':')]}'"
                query = re.sub(COLON_REGEX, repl=repl, string=query)
            except KeyError as err:
                demisto.error(f"{err.args[0]} key not found in bind names")
                raise DemistoException(f"{err.args[0]} key not found in bind names")
        stmt = ibm_db.prepare(self.connection, query)
        if isinstance(bindvars, list):
            # Positional markers are bound through the driver (1-based index).
            for index, var in enumerate(bindvars, 1):
                ibm_db.bind_param(stmt, index, var)
        return stmt

    def execute_query(self, query: str, bind_vars: Any) -> Tuple[List, List]:
        """
        Execute a query against the DB2 database via the held connection.
        Args:
            query(str): db2 query string
            bind_vars(Any): in case there are names and values - a bind_vars dict,
            in case there are only values - list
        Returns:
            Tuple[results(List), headers(List)]
        """
        results: List = []
        headers: List = []
        status = False
        stmt = self._prepare_statement(query, bind_vars)
        try:
            demisto.info("Executing ...")
            status = ibm_db.execute(stmt)
            demisto.info("Done !!!")
        except Exception:
            demisto.error(clear(ibm_db.stmt_error()))
            demisto.error(clear(ibm_db.stmt_errormsg()))
            raise DemistoException(clear(ibm_db.stmt_errormsg()))
        demisto.info("Collecting results")
        if status:
            row = ibm_db.fetch_assoc(stmt)
            while row:
                results.append(row)
                row = ibm_db.fetch_assoc(stmt)
        if results:
            # Column names are taken from the keys of the first fetched row.
            headers = [*results[0]]
        return results, headers

    def close(self) -> bool:
        """Close the underlying ibm_db connection."""
        demisto.info("Closing Connection")
        return ibm_db.close(self.connection)
""" HELPER FUNCTIONS """
def clear(message: str):
    """Strip square-bracketed segments (e.g. driver prefixes) from *message*.

    Args:
        message(str): Any message string
    Returns:
        The message with all bracketed segments removed, stripped of
        surrounding whitespace.
    """
    return re.sub(CLEAN, "", message).strip()
def bind_variables(names: str, values: str) -> Any:
    """
    Pair bind-variable names with their values, or return a bare value list.
    Args:
        names(str): comma-separated column names; may be empty when the query
            uses positional ``?`` markers.
        values(str): comma-separated values; must match *names* in length when
            names are supplied.
    Returns:
        Any: a dict of name->value pairs, or a list of values when no names
        were given.
    Raises:
        Exception: when both lists are supplied but their lengths differ.
    """
    names_list = argToList(names)
    values_list = argToList(values)
    # assuming the order of values is correct (positional binding)
    if values_list and not names_list:
        return list(values_list)
    elif len(names_list) == len(values_list):
        return dict(zip(names_list, values_list))
    else:
        # fixed grammar of the original message ("are not is the same length")
        raise Exception("The bind variables lists are not the same length")
""" COMMAND FUNCTIONS """
def query_command(client: Client, args: Dict, *_) -> CommandResults:
    """
    Executes the db2 query with the connection that was configured in the Client
    Args:
        client(Client): the client object with db connection
        args(demisto.args): arguments for the query-command
    Returns:
        CommandResults with the queried rows (paged client-side by limit/offset).
    """
    sql_query = str(args.get("query"))
    limit = int(args.get("limit", 50))
    skip = int(args.get("offset", 0))
    bind_variable_name = args.get("bind_variables_name", "")
    bind_variable_values = args.get("bind_variables_values", "")
    try:
        variables = bind_variables(bind_variable_name, bind_variable_values)
        result, headers = client.execute_query(sql_query, variables)
        # Stringify every cell so table/context rendering is uniform.
        converted_table = [dict(row) for row in result]
        table = [{str(key): str(value) for key, value in dictionary.items()} for dictionary in converted_table]
        # Paging is applied after the full result set has been fetched.
        table = table[skip: skip + limit]
        human_readable = tableToMarkdown(name="Query result:", t=table, headers=headers, removeNull=True)
        context = {"Result": table, "Query": sql_query, "DbName": f"{client.dbname}"}
        client.close()
        return CommandResults(
            outputs_prefix="DB2",
            outputs_key_field="Query",
            outputs=context,
            raw_response=result,
            readable_output=human_readable,
        )
    except Exception as err:
        # The connection is closed on both the success and failure paths.
        client.close()
        demisto.error(f"error:\n {err}")
        # Statements without a result set (e.g. INSERT/UPDATE/DDL) surface as
        # this driver message; treat them as successful execution.
        if str(err).lower() == "column information cannot be retrieved: ":
            human_readable = f"{sql_query} Command Executed Successfully"
            return CommandResults(readable_output=human_readable)
        raise DemistoException(err)
def test_module(client: Client, *_) -> str:
    """
    Connectivity check. The connection itself is established in
    Client.__init__ (which raises on failure), so reaching this function
    means the configuration is valid and 'ok' can be returned as-is.
    """
    return "ok"
def main():  # pragma: no cover
    """main function, parses params and runs command functions"""
    params = demisto.params()
    # Fetch required parameters
    host = params.get("host")
    uid = params.get("credentials").get("identifier")
    password = params.get("credentials").get("password")
    database = params.get("dbname")
    port = params.get("port", 50000)
    ssl_connect = params.get("ssl_connect")
    connect_params = params.get("connect_parameters")
    use_persistent = params.get("use_persistent")
    command = demisto.command()
    demisto.debug(f"command being called is {command}")
    try:
        # Client.__init__ connects immediately, so a bad configuration is
        # reported here for every command (including test-module).
        client = Client(
            host=host,
            username=uid,
            password=password,
            port=port,
            database=database,
            ssl_connect=ssl_connect,
            connect_parameters=connect_params,
            use_persistent=use_persistent,
        )
        commands: Dict[str, Callable] = {
            "test-module": test_module,
            "db2-query": query_command,
        }
        if command in commands:
            # NOTE(review): the handlers return a plain str (test_module) or a
            # CommandResults (query_command); unpacking either with * looks
            # suspect - confirm return_results accepts this calling style.
            return_results(*commands[command](client, demisto.args(), command))
        else:
            raise NotImplementedError(f"{command} is not an existing DB2 command")
    except Exception as e:
        demisto.error(traceback.format_exc())  # print the traceback
        return_error(f"failed to execute {command} command.\nerror:\n{str(e)}")
# XSOAR entry point (covers Python 2 "__builtin__" and Python 3 "builtins").
if __name__ in ("__main__", "__builtin__", "builtins"):
    main()
| mit | e4e2f183a8ad1f73dbaa8ef4254a024e | 31.114144 | 111 | 0.599212 | 3.886486 | false | false | false | false |
def create_client():
    """Build a CarbonBlackEndpointStandard Client with dummy credentials for the tests.

    (The original first line carried fused dataset/extraction residue before
    the `def`, which made it invalid Python; only the code is kept here.)
    """
    from CarbonBlackEndpointStandard import Client

    client = Client(base_url='example.com',
                    verify=False,
                    proxies=1234,
                    api_secret_key="api_secret_key",
                    api_key="api_key",
                    policy_api_key="policy_api_key",
                    policy_api_secret_key="policy_api_secret_key",
                    organization_key="organization_key")
    return client
def test_get_alert_details_command(mocker):
    """
    Given:
        An alert ID to search for
    When:
        get_alert_details_command runs (client.get_alert_by_id is mocked)
    Then:
        Assert that the entry context matches the expected output
    """
    mocker_result = {'id': '1234', 'severity': 7, 'category': 'THREAT', 'device_username': 'demo'}
    expected_result = {
        'CarbonBlackDefense.Alert(val.id && val.id == obj.id)': {
            'id': '1234', 'severity': 7, 'category': 'THREAT', 'device_username': 'demo'
        }
    }
    client = create_client()
    mocker.patch.object(client, 'get_alert_by_id', return_value=mocker_result)
    from CarbonBlackEndpointStandard import get_alert_details_command
    command_results = get_alert_details_command(client, {'alertId': 1234})
    output = command_results.to_context().get('EntryContext', {})
    assert output == expected_result
def test_device_search_command(mocker):
    """
    Given:
        Devices to be searched
    When:
        device_search_command runs (client.get_devices is mocked)
    Then:
        Assert that the entry context matches the expected output
    """
    mocker_result = {
        "results": [
            {'id': 1234, 'name': 'carbon-black-integration-endpoint', 'os': 'MAC'}
        ]
    }
    expected_result = {
        'CarbonBlackDefense.Device(val.id && val.id == obj.id)': [
            {'id': 1234, 'name': 'carbon-black-integration-endpoint', 'os': 'MAC'}
        ]
    }
    client = create_client()
    mocker.patch.object(client, 'get_devices', return_value=mocker_result)
    from CarbonBlackEndpointStandard import device_search_command
    command_results = device_search_command(client, {'device_id': '1234', 'os': 'MAC', 'status': 'sleep'})
    output = command_results.to_context().get('EntryContext', {})
    assert output == expected_result
def test_find_events_command(mocker):
    """
    Given:
        Events to be searched
    When:
        find_events_command runs (client.get_events is mocked)
    Then:
        Assert that the entry context matches the expected output
    """
    mocker_result = {'job_id': '123456'}
    expected_result = {
        'CarbonBlackDefense.Events.Search(val.job_id && val.job_id == obj.job_id)': {'job_id': '123456'}
    }
    client = create_client()
    mocker.patch.object(client, 'get_events', return_value=mocker_result)
    from CarbonBlackEndpointStandard import find_events_command
    command_results = find_events_command(client, {})
    output = command_results.to_context().get('EntryContext', {})
    assert output == expected_result
def test_find_processes_command(mocker):
    """
    Given:
        Processes to be searched
    When:
        find_processes_command runs (client.get_processes is mocked)
    Then:
        Assert that the entry context matches the expected output
    """
    mocker_result = {'job_id': '123456'}
    expected_result = {
        'CarbonBlackDefense.Process.Search(val.job_id && val.job_id == obj.job_id)': {'job_id': '123456'}
    }
    client = create_client()
    mocker.patch.object(client, 'get_processes', return_value=mocker_result)
    from CarbonBlackEndpointStandard import find_processes_command
    command_results = find_processes_command(client, {})
    output = command_results.to_context().get('EntryContext', {})
    assert output == expected_result
| mit | c977be7699a2d719149bf83654ef3b57 | 32.045045 | 106 | 0.613413 | 3.832811 | false | false | false | false |
demisto/content | Packs/ShiftManagement/Scripts/ManageOOOusers/ManageOOOusers.py | 2 | 3253 | from CommonServerPython import * # noqa: F401
def _get_current_user():
    """Return the username of the currently logged-in XSOAR user, or None on failure."""
    current_username = demisto.executeCommand("getUsers", {"current": True})
    if isError(current_username):
        demisto.debug(f"failed to get current username - {get_error(current_username)}")
        return None
    return current_username[0]["Contents"][0]['username']
def main():
    """Add or remove a user from an out-of-office XSOAR list.

    Arguments: listname, username (optional, defaults to current user),
    option ("add" or anything else for remove), daysoff (int).
    """
    # get current time
    now = datetime.now()
    # args
    list_name = demisto.getArg("listname")
    username = demisto.getArg("username")
    option = demisto.getArg("option")
    # Last day of the vacation, `daysoff` days from now, stored as YYYY-MM-DD.
    days_off = now + timedelta(days=int(demisto.getArg("daysoff")))
    off_until = days_off.strftime("%Y-%m-%d")
    # update list name to start with 'OOO', so we can't overwrite other lists with this
    if not list_name.startswith("OOO"):
        list_name = f"OOO {list_name}"
    current_user = _get_current_user()
    if not current_user and not username:
        return_error('Failed to get current user. Please set the username argument in the script.')
    if not username:
        # Current user was found, running script on it.
        username = current_user
    else:
        # check if provided username is a valid xsoar user
        users = demisto.executeCommand("getUsers", {})
        if isError(users):
            return_error(f'Failed to get users: {str(get_error(users))}')
        users = users[0]['Contents']
        users = [x['username'] for x in users]
        if username not in users:
            return_error(message=f"{username} is not a valid user")
    # get the out of office list, check if the list exists, if not create it:
    ooo_list = demisto.executeCommand("getList", {"listName": list_name})[0]["Contents"]
    if isError(ooo_list):
        return_error(f'Failed to get users out of office: {str(get_error(ooo_list))}')
    if "Item not found" in ooo_list:
        # NOTE(review): this surfaces the raw createList result as an entry -
        # confirm that is intended rather than a debug leftover.
        demisto.results(demisto.executeCommand("createList", {"listName": list_name, "listData": []}))
        ooo_list = demisto.executeCommand("getList", {"listName": list_name})[0]["Contents"]
    # check status of the list, and add/remove the user from it.
    # The list contents are a JSON-encoded array of {user, offuntil, addedby}.
    if not ooo_list:
        list_data = []
    else:
        list_data = json.loads(ooo_list)
    if option == "add":
        # check if user is already in the list, and remove, to allow updating
        list_data = [i for i in list_data if not (i['user'] == username)]
        list_data.append({"user": username,
                          "offuntil": off_until,
                          "addedby": current_user if current_user else 'DBot'})
    else:
        # remove the user from the list.
        list_data = [i for i in list_data if not (i['user'] == username)]
    set_list_res = demisto.executeCommand("setList", {"listName": list_name, "listData": json.dumps(list_data)})
    if isError(set_list_res):
        return_error(f'Failed to update the list {list_name}: {str(get_error(set_list_res))}')
    # welcome back, or see ya later!
    if option == "add":
        demisto.results(f"Vacation mode engaged until {off_until}, enjoy the time off {username}")
    else:
        demisto.results(f"Welcome back {username}, it's like you never left!")
if __name__ in ('__builtin__', 'builtins', '__main__'):
main()
| mit | 957d74874542f3beda4ccd79f4784656 | 38.192771 | 112 | 0.62158 | 3.622494 | false | false | false | false |
demisto/content | Packs/Telegram/Integrations/Telegram/Telegram.py | 2 | 3885 | from CommonServerPython import *
''' IMPORTS '''
import json
import requests
# Disable insecure warnings
requests.packages.urllib3.disable_warnings()
''' GLOBALS/PARAMS '''
TOKEN = demisto.params().get('token')
BASE_URL = 'https://api.telegram.org/bot{}/'.format(TOKEN)
''' HELPER FUNCTIONS '''
def http_request(method, url_suffix, params=None, data=None):
    """Issue an HTTP request to the Telegram bot API and return the parsed JSON body."""
    result = requests.request(
        method,
        BASE_URL + url_suffix,
        verify=False,
        params=params,
        data=data
    )
    if result.status_code != 200:
        return_error('Error in API call to Telegram Integration [%d] - %s' % (result.status_code, result.reason))
    return result.json()
def get_updates():
    """Fetch the bot's recent updates (incoming messages) from the Telegram API."""
    return http_request('GET', 'getUpdates')


def get_bot():
    """Fetch details about the configured bot (used as a connectivity check)."""
    return http_request('GET', 'getMe')
def item_to_incident(item):
    """Convert a raw API item dict into an XSOAR incident dict."""
    return {
        'name': 'Example Incident: ' + item.get('name'),
        'occurred': item.get('createdDate'),
        'rawJSON': json.dumps(item),
    }
''' COMMANDS + REQUESTS FUNCTIONS '''
def test_module():
    """
    Connectivity check: calls the bot's getMe endpoint and reports "ok" on
    success, or the Telegram error code and description on failure.
    """
    contents = get_bot()
    if contents['ok']:
        demisto.results("ok")
    else:
        error_code = contents['error_code']
        description = contents['description']
        demisto.results(f'{error_code} {description}')
def telegram_send_message():
    """Send a message to a Telegram user, resolving a username to an ID when needed."""
    user_id = demisto.args().get('userID')
    if user_id is None:
        username = demisto.args().get('username')
        if username is not None:
            # Check for a missing user BEFORE str(): the original did
            # str(get_user_id(...)) first, so the `is None` check could never
            # fire and the literal string "None" was sent as the chat id.
            found_id = get_user_id(username)
            if found_id is None:
                return_error(f'username {username} does not exists, please use list_user command')
            user_id = str(found_id)
    message = demisto.args().get('message')
    # Pass chat_id/text as query params so `requests` URL-encodes them; the
    # previous manual concatenation broke on messages containing '&', '#',
    # '+' etc. (and used a stray '&&').
    contents = http_request('GET', 'sendMessage', params={'chat_id': user_id, 'text': message})
    demisto.results({
        'Type': entryTypes['note'],
        'ContentsFormat': formats['json'],
        'Contents': contents,
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('Message sent', contents, 'result', removeNull=True),
        'EntryContext': contents
    })
def get_users():
    """Build a mapping of Telegram user name -> user ID from the bot's recent updates.

    Only users who appear in the current getUpdates window are included.
    """
    users = {}
    contents = get_updates()
    for result in contents['result']:
        user_data = result['message']
        if 'username' in user_data['from']:
            users[user_data['from']['username']] = user_data['from']['id']
        # not all users have a username, so no choice but to save by their first_name (data can be overwritten)
        else:
            users[user_data['from']['first_name']] = user_data['from']['id']
    return users
def telegram_list_users():
    """List the users the bot has seen in recent updates, with their IDs."""
    users = get_users()
    entry = {
        'Type': entryTypes['note'],
        'ContentsFormat': formats['json'],
        'Contents': users,
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('Users', users, removeNull=True),
        'EntryContext': users
    }
    demisto.results(entry)
def get_user_id(username):
    """Return the Telegram ID for *username*, or None when the user is unknown."""
    return get_users().get(username)
''' COMMANDS MANAGER / SWITCH PANEL '''
def main():
    """Route the invoked command to its handler; unknown commands are ignored."""
    LOG(f'command is {demisto.command()}')
    try:
        # Remove proxy if not set to true in params
        handle_proxy()
        if demisto.command() == 'test-module':
            test_module()
        # Both the prefixed and the legacy (unprefixed) command names are accepted.
        elif demisto.command() == 'telegram-send-message' or demisto.command() == 'send-message':
            telegram_send_message()
        elif demisto.command() == 'telegram-list-users' or demisto.command() == 'list-users':
            telegram_list_users()
    except Exception as ex:
        return_error(str(ex))
# Adds the "__main__" case (missing in the original) so the script also runs
# when executed directly, matching the standard XSOAR content pattern.
if __name__ in ("__main__", "__builtin__", "builtins"):
    main()
| mit | cf842c21f408adbc974c33ccb93814ed | 24.9 | 113 | 0.603604 | 3.77551 | false | false | false | false |
demisto/content | Packs/Kenna/Integrations/KennaV2/KennaV2.py | 2 | 24424 | from typing import Tuple, Callable
import urllib3
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
# Disable insecure warnings
urllib3.disable_warnings()
def parse_response(raw_data: List[Dict[str, Any]], wanted_keys: List[Any], actual_keys: List[Any]) -> \
        List[Dict[str, Any]]:
    """Rename selected keys of each raw API entry to Demisto-style keys.

    Args:
        raw_data: raw response from the api.
        wanted_keys: the output key names; a list element of the form
            [OuterKey, InnerKey, ...] maps a nested list of dicts.
        actual_keys: the matching key names as they appear in the raw response.
    Returns:
        One dict per raw entry containing only the requested keys.
    """
    parsed: List[Dict[str, Any]] = []
    for entry in raw_data:
        mapped: Dict[str, Any] = {}
        for wanted, actual in zip(wanted_keys, actual_keys):
            if not isinstance(wanted, list):
                # Flat key: copy straight across (missing keys become None).
                mapped[wanted] = entry.get(actual)
                continue
            # Nested key: the first element names the list, the rest map each item.
            nested = entry.get(actual[0])
            if nested:
                mapped[wanted[0]] = [
                    {inner_wanted: item.get(inner_actual)
                     for inner_wanted, inner_actual in zip(wanted[1:], actual[1:])}
                    for item in nested
                ]
        parsed.append(mapped)
    return parsed
class Client(BaseClient):
    """Kenna API client: injects the X-Risk-Token auth header and performs
    its own request/response/error handling in `http_request`."""

    def __init__(self, base_url: str, api_key: str, verify: bool, proxy: bool):
        header = {
            'X-Risk-Token': api_key,
            'Content-Type': 'application/json',
            'Accept': 'application/json'
        }
        super().__init__(base_url=base_url, verify=verify, proxy=proxy, headers=header)

    def http_request(self, message: str, suffix: str, params: Optional[dict] = None,
                     data: Optional[dict] = None):
        """Connects to api and Returns response.
        Args:
            message: The HTTP message, for example: GET, POST, and so on
            suffix :The API endpoint.
            params: URL parameters to specify the query.
            data:The data to send in a specific request.
        Returns:
            response from the api.
        """
        url = f'{self._base_url}{suffix}'
        try:
            response = requests.request(
                message,
                url,
                headers=self._headers,
                params=params,
                json=data,
                verify=self._verify,
            )
        except requests.exceptions.SSLError as err:
            raise DemistoException(f'Connection error in the API call to Kenna.\n'
                                   f'Check your not secure parameter.\n\n{err}')
        except requests.ConnectionError as err:
            raise DemistoException(f'Connection error in the API call to Kenna.\n'
                                   f'Check your Server URL parameter.\n\n{err}')
        try:
            # Some responses have an empty body (e.g. 204) - treat that as {}.
            response_list = response.json() if response.text else {}
            if not response.ok:
                # "unauthorized" indicates a bad API key; anything else is a
                # generic API failure reported with its status code.
                if response_list.get('error') == "unauthorized":
                    raise DemistoException(f'Connection error in the API call to Kenna.\n'
                                           f'Check your Api Key parameter.\n\n{demisto.get(response_list, "error.message")}')
                else:
                    raise DemistoException(f'API call to Kenna failed ,Error code [{response.status_code}]'
                                           f' - {demisto.get(response_list, "error.message")}')
            elif response.status_code == 204:
                # No content on success - return an explicit success marker.
                return {'status': 'success'}
            return response_list
        except TypeError:
            raise Exception(f'Error in API call to Kenna, could not parse result [{response.status_code}]')
def test_module(client: Client, *_):
    """
    Performs basic get requests to Kenna v2: verifies that both the
    vulnerabilities and the assets endpoints respond with lists.
    """
    res_vulnerabilities = client.http_request('GET', '/vulnerabilities')
    res_assets = client.http_request('GET', '/assets')
    if isinstance(res_vulnerabilities.get('vulnerabilities'), list) and isinstance(res_assets.get('assets'), list):
        return 'ok', None, None
    raise Exception('Error occurred while trying to query the api.')
def search_vulnerabilities(client: Client, args: dict) -> Tuple[str, Dict[str, Any], List[Dict[str, Any]]]:
    """Search vulnerability command.
    Args:
        client: Client which connects to api
        args: arguments for the request
    Returns:
        Human Readable
        Entry Context
        Raw Data
    """
    url_suffix = '/vulnerabilities/search'
    limit: int = int(args.get('limit', 500))
    to_context = args.get('to_context')
    human_readable = []
    context: Dict[str, Any] = {}
    # Kenna expects array-style query parameters (name[]=value).
    params = {
        'id[]': argToList(args.get('id')),
        'top_priority[]': argToList(args.get('top-priority')),
        'min_risk_meter_score': args.get('min-score'),
        'status[]': argToList(args.get('status')),
    }
    response = client.http_request(message='GET', suffix=url_suffix,
                                   params=params).get('vulnerabilities')
    if response:
        # The limit is applied client-side, after the response was fetched.
        vulnerability_list = response[:limit]
        wanted_keys = ['AssetID', ['Connectors', 'DefinitionName', 'ID', 'Name', 'Vendor'], 'CveID', 'FixID',
                       'ID', 'Patch',
                       'Score', ['ScannerVulnerabilities', 'ExternalID', 'Open', 'Port'],
                       'Severity',
                       'Status', 'Threat', 'TopPriority',
                       ['ServiceTicket', 'DueDate', 'ExternalIdentifier', 'Status', 'TicketType']]
        actual_keys = ['asset_id', ['connectors', 'connector_definition_name', 'id', 'name', 'vendor'], 'cve_id',
                       'fix_id',
                       'id', 'patch', 'risk_meter_score',
                       ['scanner_vulnerabilities', 'external_unique_id', 'open', 'port'],
                       'severity', 'status', 'threat', 'top_priority',
                       ['service_ticket', 'due_date', 'external_identifier', 'status', 'ticket_type']]
        context_list = parse_response(vulnerability_list, wanted_keys, actual_keys)
        for lst in vulnerability_list:
            human_readable.append({
                'id': lst.get('id'),
                'Name': lst.get('cve_id'),
                'Score': lst.get('risk_meter_score')
            })
        context = {
            'Kenna.Vulnerabilities(val.ID === obj.ID)': context_list
        }
        human_readable_markdown = tableToMarkdown('Kenna Vulnerabilities', human_readable, removeNull=True)
    else:
        human_readable_markdown = "no vulnerabilities found."
    # NOTE(review): to_context arrives as a string; only the exact value
    # "False" suppresses the context output - consider argToBoolean.
    if to_context == "False":
        return human_readable_markdown, {}, response
    return human_readable_markdown, context, response
def get_connectors(client: Client, *_) -> Tuple[str, Dict[str, Any], List[Dict[str, Any]]]:
    """Get Connectors command.
    Args:
        client: Client which connects to api
    Returns:
        Human Readable
        Entry Context
        Raw Data
    """
    url_suffix = '/connectors'
    human_readable = []
    context: Dict[str, Any] = {}
    connectors = client.http_request(message='GET', suffix=url_suffix).get('connectors')
    if connectors:
        # Same fields go both into context (renamed) and the readable table.
        wanted_keys = ['Host', 'Name', 'Running', 'ID']
        actual_keys = ['host', 'name', 'running', 'id']
        context_list = parse_response(connectors, wanted_keys, actual_keys)
        for connector in connectors:
            curr_dict = {
                'Host': connector.get('host'),
                'Name': connector.get('name'),
                'Running': connector.get('running'),
                'ID': connector.get('id')
            }
            human_readable.append(curr_dict)
        context = {
            'Kenna.ConnectorsList(val.ID === obj.ID)': context_list
        }
        human_readable_markdown = tableToMarkdown('Kenna Connectors', human_readable, removeNull=True)
    else:
        human_readable_markdown = "no connectors in get response."
    return human_readable_markdown, context, connectors
def get_connector_runs(client: Client, args: dict) -> Tuple[str, Dict[str, Any], List[Dict[str, Any]]]:
    """Get Connector Runs command.

    Lists the run history of a single Kenna connector.

    Args:
        client: Client which connects to api
        args: arguments for the request. Must contain 'connector_id'.
    Returns:
        Human Readable
        Entry Context
        Raw Data
    """
    connector_id = str(args.get("connector_id"))
    url_suffix = f'/connectors/{connector_id}/connector_runs'
    human_readable = []
    context: Dict[str, Any] = {}
    connectors: List[Dict[str, Any]] = client.http_request(message='GET', suffix=url_suffix)
    if connectors:
        # API response field names, paired positionally with the context keys below.
        # NOTE(review): 'processed_palyoad_count' looks misspelled but may match the
        # actual API field name — confirm against the Kenna API schema before changing.
        actual_keys = [
            "id", "start_time",
            "end_time", "success",
            "total_payload_count",
            "processed_palyoad_count",
            "failed_payload_count",
            "processed_assets_count",
            "assets_with_tags_reset_count",
            "processed_scanner_vuln_count",
            "updated_scanner_vuln_count",
            "created_scanner_vuln_count",
            "closed_scanner_vuln_count",
            "autoclosed_scanner_vuln_count",
            "reopened_scanner_vuln_count",
            "closed_vuln_count",
            "autoclosed_vuln_count",
            "reopened_vuln_count"
        ]
        wanted_keys = [
            "ID", "StartTime",
            "EndTime", "Success",
            "TotalPayload",
            "ProcessedPayload",
            "FailedPayload",
            "ProcessedAssets",
            "AssetsWithTagsReset",
            "ProcessedScannerVulnerabilities",
            "UpdatedScannerVulnerabilities",
            "CreatedScannerVulnerabilities",
            "ClosedScannerVulnerabilities",
            "AutoclosedScannerVulnerabilities",
            "ReopenedScannerVulnerabilities",
            "ClosedVulnerabilities",
            "AutoclosedVulnerabilities",
            "ReopenedVulnerabilities"
        ]
        context_list = parse_response(connectors, wanted_keys, actual_keys)
        for connector in connectors:
            # Build each human-readable row from the same wanted/actual pairing used
            # for the context, so the two views cannot drift apart. (A previous,
            # hand-written dict mapped 'ProcessedPayload' to total_payload_count and
            # 'AutoclosedVulnerabilities' to closed_vuln_count — copy/paste bugs.)
            human_readable.append({wanted: connector.get(actual)
                                   for wanted, actual in zip(wanted_keys, actual_keys)})
        context = {
            'Kenna.ConnectorRunsList(val.ID === obj.ID)': context_list
        }
        human_readable_markdown = tableToMarkdown('Kenna Connector Runs', human_readable, removeNull=True)
    else:
        human_readable_markdown = "no connectors in get response."
    return human_readable_markdown, context, connectors
def run_connector(client: Client, args: dict) -> Tuple[str, Dict[str, Any], List[Dict[str, Any]]]:
    """Run Connector command.

    Triggers a run of the given Kenna connector.

    Args:
        client: Client which connects to api
        args: arguments for the request. Must contain the connector 'id'.
    Returns:
        Success/ Failure message, according to the response (no context entries).
    """
    args_id: str = str(args.get('id'))
    url_suffix = f'/connectors/{args_id}/run'
    run_response = client.http_request(message='GET', suffix=url_suffix)
    # NOTE(review): 'success' is compared against the string 'true' — confirm the
    # API does not return a JSON boolean here.
    if run_response and run_response.get('success') == 'true':
        return f'Connector {args_id} ran successfully.', {}, []
    # Fixed grammar in the failure message ("did not ran" -> "did not run").
    return f'Connector {args_id} did not run successfully.', {}, []
def search_fixes(client: Client, args: dict) -> Tuple[str, Dict[str, Any], List[Dict[str, Any]]]:
    """Search Fixes command.

    Queries Kenna for fixes matching the given filters and renders each match
    as a markdown section (title, ID, affected-vulnerability count, diagnosis).

    Args:
        client: Client which connects to api
        args: arguments for the request ('id', 'top-priority', 'min-score',
            'status', 'limit', 'to_context')
    Returns:
        Human Readable
        Entry Context
        Raw Data
    """
    human_readable_markdown = ''
    url_suffix = '/fixes/search'
    # 'limit' is applied server-side via the 'per_page' query parameter.
    limit: int = int(args.get('limit', 500))
    to_context = args.get('to_context')
    context: Dict[str, Any] = {}
    params = {
        'id[]': argToList(args.get('id')),
        'top_priority[]': argToList(args.get('top-priority')),
        'min_risk_meter_score': args.get('min-score'),
        'status[]': argToList(args.get('status')),
        'per_page': limit
    }
    response = client.http_request(message='GET', suffix=url_suffix, params=params).get('fixes')
    if response:
        # Nested lists describe sub-objects: [context_key, inner_key, ...],
        # paired positionally with actual_keys for parse_response.
        wanted_keys = ['ID', 'Title', ['Assets', 'ID', 'Locator', 'PrimaryLocator', 'DisplayLocator'],
                       ['Vulnerabilities', 'ID', 'ServiceTicketStatus', 'ScannerIDs'], 'CveID', 'LastUpdatedAt',
                       'Category', 'VulnerabilityCount', 'MaxScore']
        actual_keys = ['id', 'title', ['assets', 'id', 'locator', 'primary_locator', 'display_locator'],
                       ['vulnerabilities', 'id', 'service_ticket_status', 'scanner_ids'], 'cves', 'updated_at',
                       'category',
                       'vuln_count', 'max_vuln_score']
        context_list = parse_response(response, wanted_keys, actual_keys)
        # Used to strip HTML tags from the diagnosis text before display.
        remove_html = re.compile(r'<[^>]+>')
        for fix in response:
            if fix:
                human_readable_markdown += str(fix.get('title')) + '\n'
                human_readable_markdown += '#### ID: ' + str(fix.get('id')) + '\n'
                human_readable_markdown += str(fix.get('vuln_count')) + ' vulnerabilities affected\n'
                human_readable_markdown += '#### Diagnosis:\n'
                human_readable_markdown += remove_html.sub(' ', str(fix.get('diagnosis'))) + '\n' + ' ' + '\n'
                # NOTE(review): loop-invariant assignment — could be hoisted out of
                # the loop; left in place to preserve the original control flow.
                context = {
                    'Kenna.Fixes(val.ID === obj.ID)': context_list
                }
    else:
        human_readable_markdown = "no fixes in response."
    # 'to_context' arrives as the string "False", not a boolean.
    if to_context == "False":
        return human_readable_markdown, {}, response
    return human_readable_markdown, context, response
def update_asset(client: Client, args: dict) -> Tuple[str, Dict[str, Any], List[Dict[str, Any]]]:
    """Update Asset command.

    Updates the notes of a single Kenna asset.

    Args:
        client: Client which connects to api
        args: arguments for the request. Must contain the asset 'id'; may
            contain 'notes'.
    Returns:
        Success/ Failure message, according to the response (no context entries).
    """
    args_id = str(args.get('id'))
    url_suffix = f'/assets/{args_id}'
    asset = {
        'asset': {
            'notes': args.get('notes')
        }
    }
    result = client.http_request(message='PUT', suffix=url_suffix, data=asset)
    # NOTE(review): the request itself runs outside the try block, so a
    # DemistoException raised by http_request is not caught here — confirm intended.
    try:
        if result.get('status') != "success":
            return 'Could not update asset.', {}, []
        return f'Asset {args_id} was updated', {}, []
    except DemistoException as err:
        # Fixed "preforming" typo in the error message.
        return f'Error occurred while performing update-asset command {err}', {}, []
def update_vulnerability(client: Client, args: dict) -> Tuple[str, Dict[str, Any], List[Dict[str, Any]]]:
    """Update Vulnerabilities command.

    Updates the status and/or notes of a single Kenna vulnerability.

    Args:
        client: Client which connects to api
        args: arguments for the request. Must contain the vulnerability 'id';
            may contain 'status' and 'notes'.
    Returns:
        Success/ Failure message, according to the response (no context entries).
    """
    params_to_update: dict = {
        'vulnerability': {}
    }
    args_id = str(args.get('id'))
    # Only send the fields the user actually supplied. (Previously absent values
    # were stringified up front, turning a missing arg into the literal "None"
    # which is truthy and was therefore always sent to the API.)
    status = args.get('status')
    notes = args.get('notes')
    if notes:
        params_to_update['vulnerability'].update({'notes': str(notes)})
    if status:
        params_to_update['vulnerability'].update({'status': str(status)})
    url_suffix = f'/vulnerabilities/{args_id}'
    result = client.http_request(message='PUT', suffix=url_suffix, data=params_to_update)
    try:
        # Messages fixed: this command updates a vulnerability, not an asset.
        if result.get('status') != "success":
            return 'Could not update vulnerability.', {}, []
        return f'Vulnerability {args_id} was updated', {}, []
    except DemistoException as err:
        # Fixed "preforming"/"update-vulenrability" typos in the error message.
        return f'Error occurred while performing update-vulnerability command {err}', {}, []
def search_assets(client: Client, args: dict) -> Tuple[str, Dict[str, Any], List[Dict[str, Any]]]:
    """Search Asset command.

    Queries Kenna for assets matching the given filters.

    Args:
        client: Client which connects to api
        args: arguments for the request ('id', 'hostname', 'tags', 'min-score',
            'limit', 'to_context')
    Returns:
        Human Readable
        Entry Context
        Raw Data
    """
    url_suffix = '/assets/search'
    human_readable = []
    # Unlike search_fixes, the limit is applied client-side by slicing below.
    limit: int = int(args.get('limit', 500))
    to_context = args.get('to_context')
    context: Dict[str, Any] = {}
    hostnames = args.get('hostname')
    if args.get('tags'):
        tags = argToList(args.get('tags'))
    else:
        tags = args.get('tags')
    # Lucene-style query matching any of the comma separated hostnames.
    hostnames_query = f'hostname:({hostnames.replace(",", " ")})' if hostnames else hostnames
    params = {
        'id[]': argToList(args.get('id')),
        'q': hostnames_query,
        'min_risk_meter_score': args.get('min-score'),
        'tags[]': tags
    }
    response = client.http_request(message='GET', suffix=url_suffix, params=params).get(
        'assets')
    if response:
        assets_list = response[:limit]
        wanted_keys = ['ID', 'Hostname', 'Score', 'IpAddress', 'VulnerabilitiesCount', 'OperatingSystem', 'Tags',
                       'Fqdn', 'Status', 'Owner', 'Priority', 'Notes', 'OperatingSystem']
        actual_keys = ['id', 'hostname', 'risk_meter_score', 'ip_address', 'vulnerabilities_count',
                       'operating_system',
                       'tags', 'fqdn', 'status', 'owner', 'priority', 'notes', 'operating_system']
        context_list: List[Dict[str, Any]] = parse_response(assets_list, wanted_keys, actual_keys)
        for lst in assets_list:
            human_readable.append({
                'id': lst.get('id'),
                'Hostname': lst.get('hostname'),
                'IP-address': lst.get('ip_address'),
                # Bug fix: previously read from `args` instead of the asset
                # record, so this column was always empty.
                'Vulnerabilities Count': lst.get('vulnerabilities_count'),
                'Operating System': lst.get('operating_system'),
                'Score': lst.get('risk_meter_score')
            })
        context = {
            'Kenna.Assets(val.ID === obj.ID)': context_list
        }
        human_readable_markdown = tableToMarkdown('Kenna Assets', human_readable, removeNull=True)
    else:
        human_readable_markdown = "no assets in response"
    if to_context == "False":
        return human_readable_markdown, {}, response
    return human_readable_markdown, context, response
def get_asset_vulnerabilities(client: Client, args: dict) -> Tuple[str, Dict[str, Any], List[Dict[str, Any]]]:
    """Get Asset by Vulnerability command.

    Fetches the vulnerabilities attached to a single Kenna asset.

    Args:
        client: Client which connects to api
        args: arguments for the request ('id', 'limit', 'to_context')
    Returns:
        Human Readable
        Entry Context
        Raw Data
    """
    asset_id = str(args.get('id'))
    max_results: int = int(args.get('limit', 500))
    to_context = args.get('to_context')
    context: Dict[str, Any] = {}
    response = client.http_request(
        message='GET', suffix=f'/assets/{asset_id}/vulnerabilities').get('vulnerabilities')
    if response:
        trimmed = response[:max_results]
        wanted_keys: List[Any] = ['AssetID', 'CveID', 'ID', 'Patch', 'Status', 'TopPriority', 'Score']
        actual_keys: List[Any] = ['asset_id', 'cve_id', 'id', 'patch', 'status', 'top_priority', 'risk_meter_score']
        context = {
            'Kenna.VulnerabilitiesOfAsset(val.ID === obj.ID)': parse_response(trimmed, wanted_keys, actual_keys)
        }
        rows = [
            {'id': vuln.get('id'), 'Name': vuln.get('cve_id'), 'Score': vuln.get('risk_meter_score')}
            for vuln in trimmed
        ]
        human_readable_markdown = tableToMarkdown('Kenna Vulnerabilities', rows, removeNull=True)
    else:
        human_readable_markdown = "no vulnerabilities in response"
    if to_context == "False":
        return human_readable_markdown, {}, response
    return human_readable_markdown, context, response
def add_tags(client: Client, args: dict) -> Tuple[str, Dict[str, Any], List[Dict[str, Any]]]:
    """Add tags command.

    Attaches the given tag(s) to a Kenna asset.

    Args:
        client: Client which connects to api
        args: arguments for the request. Must contain the asset 'id' and 'tag'.
    Returns:
        Success/ Failure message, according to the response (no context entries).
    """
    args_id = str(args.get('id'))
    tags = str(args.get('tag'))
    url_suffix = f'/assets/{args_id}/tags'
    asset = {
        'asset': {
            'tags': tags
        }
    }
    result = client.http_request(message='PUT', suffix=url_suffix, data=asset)
    try:
        if result.get('status') != "success":
            return f'Tag {tags} was not added to asset {args_id}', {}, []
        return f'Tag {tags} was added to asset {args_id}', {}, []
    except DemistoException as err:
        # Fixed "preforming" typo in the error message.
        return f'Error occurred while performing add-tags command {err}', {}, []
def delete_tags(client: Client, args: dict) -> Tuple[str, Dict[str, Any], List[Dict[str, Any]]]:
    """Delete tags command.

    Removes the given tag(s) from a Kenna asset.

    Args:
        client: Client which connects to api
        args: arguments for the request. Must contain the asset 'id' and 'tag'.
    Returns:
        Success/ Failure message, according to the response (no context entries).
    """
    args_id = str(args.get('id'))
    tags = str(args.get('tag'))
    url_suffix = f'/assets/{args_id}/tags'
    asset = {
        'asset': {
            'tags': tags
        }
    }
    result = client.http_request(message='DELETE', suffix=url_suffix, data=asset)
    try:
        # Fixed grammar: "deleted to asset" -> "deleted from asset".
        if result.get('status') != "success":
            return f'Tag {tags} was not deleted from asset {args_id}', {}, []
        return f'Tag {tags} was deleted from asset {args_id}', {}, []
    except DemistoException as err:
        # Fixed "preforming" typo in the error message.
        return f'Error occurred while performing delete-tags command {err}', {}, []
def main():
    """Parse integration parameters, build the client and dispatch the command."""
    params = demisto.params()
    api_key = params.get('key')
    base_url = params.get('url')  # Service base URL
    verify_ssl = not params.get('insecure', False)  # Should we use SSL
    proxy = params.get('proxy')  # Should we use system proxy settings
    client = Client(base_url=base_url, api_key=api_key, verify=verify_ssl, proxy=proxy)
    command = demisto.command()
    LOG(f'Command being called is {command}')
    # Maps each command name onto its implementation.
    commands: Dict[str, Callable[[Client, Dict[str, str]], Tuple[str, Dict[Any, Any], List[Any]]]] = {
        'test-module': test_module,
        'kenna-search-vulnerabilities': search_vulnerabilities,
        'kenna-get-connectors': get_connectors,
        'kenna-run-connector': run_connector,
        'kenna-search-fixes': search_fixes,
        'kenna-update-asset': update_asset,
        'kenna-update-vulnerability': update_vulnerability,
        'kenna-search-assets': search_assets,
        'kenna-get-asset-vulnerabilities': get_asset_vulnerabilities,
        'kenna-add-tag': add_tags,
        'kenna-delete-tag': delete_tags,
        'kenna-get-connector-runs': get_connector_runs
    }
    try:
        command_func = commands.get(command)
        if command_func is None:
            # Raised inside the try so it is reported through return_error below.
            raise NotImplementedError(f'{command} is not an existing Kenna v2 command')
        return_outputs(*command_func(client, demisto.args()))
    except Exception as err:
        return_error(f'Error from Kenna v2 Integration \n\n {err} \n', err)
# XSOAR may execute the integration via exec(), in which case __name__ is
# 'builtin' (Python 2) or 'builtins' (Python 3) rather than '__main__'.
if __name__ in ['__main__', 'builtin', 'builtins']:
    main()
| mit | 2126037d89295a7e73103a2c2a37dc6d | 39.706667 | 125 | 0.578079 | 3.884842 | false | false | false | false |
demisto/content | Packs/SOCRadar/Integrations/SOCRadarThreatFusion/SOCRadarThreatFusion.py | 2 | 26163 | from json.decoder import JSONDecodeError
import demistomock as demisto
from CommonServerPython import * # noqa # pylint: disable=unused-wildcard-import
from CommonServerUserPython import * # noqa
import requests
import traceback
from typing import Dict, Any
# Disable insecure warnings
requests.packages.urllib3.disable_warnings()  # pylint: disable=no-member
''' CONSTANTS '''
# Base URL for every SOCRadar ThreatFusion API call made by this integration.
SOCRADAR_API_ENDPOINT = 'https://platform.socradar.com/api'
# Canned user-facing error messages keyed by failure category; consumed by
# Client.handle_error_response when mapping HTTP status codes.
# NOTE(review): "your your" duplication in the rate-limit message — consider fixing.
MESSAGES: Dict[str, str] = {
    'BAD_REQUEST_ERROR': 'An error occurred while fetching the data.',
    'AUTHORIZATION_ERROR': 'Authorization Error: make sure API Key is correctly set.',
    'RATE_LIMIT_EXCEED_ERROR': 'Rate limit has been exceeded. Please make sure your your API key\'s rate limit is adequate.',
}
# Name reported as the DBot score vendor for the reputation commands.
INTEGRATION_NAME = 'SOCRadar ThreatFusion'
''' CLIENT CLASS '''
class Client(BaseClient):
    """
    HTTP client wrapping the SOCRadar ThreatFusion REST API.
    """

    def __init__(self, base_url, api_key, verify, proxy):
        super().__init__(base_url, verify=verify, proxy=proxy)
        # API key is passed as a query parameter on every request.
        self.api_key = api_key

    def get_entity_score(self, entity):
        """Fetch the threat analysis for a single entity (IP, domain or hash)."""
        return self._http_request(
            method='GET',
            url_suffix='/threat/analysis',
            params={'key': self.api_key, 'entity': entity},
            timeout=60,
            error_handler=self.handle_error_response,
        )

    def check_auth(self):
        """Validate the configured API key against the auth-check endpoint."""
        return self._http_request(
            method='GET',
            url_suffix='/threat/analysis/check/auth',
            params={'key': self.api_key},
            error_handler=self.handle_error_response,
        )

    @staticmethod
    def handle_error_response(response) -> None:
        """Handles API response to display descriptive error messages based on status code

        :param response: SOCRadar API response.
        :return: DemistoException for particular error code.
        """
        reason = ''
        try:
            body = response.json()
            reason = body.get('error') or body.get('message')
        except JSONDecodeError:
            # Non-JSON body: fall back to an empty reason.
            pass
        messages_by_status = {
            400: f"{MESSAGES['BAD_REQUEST_ERROR']} Reason: {reason}",
            401: MESSAGES['AUTHORIZATION_ERROR'],
            404: f"{MESSAGES['BAD_REQUEST_ERROR']} Reason: {reason}",
            429: MESSAGES['RATE_LIMIT_EXCEED_ERROR']
        }
        message = messages_by_status.get(response.status_code)
        if message is not None:
            demisto.debug(f'Response Code: {response.status_code}, Reason: {message}')
            raise DemistoException(message)
        raise DemistoException(response.raise_for_status())
''' HELPER FUNCTIONS '''
def calculate_dbot_score(score: int) -> int:
    """Transforms cyber risk score (reputation) from SOCRadar API to DBot Score and using threshold.

    Args:
        score: Cyber risk score (reputation) from SOCRadar API

    Returns:
        Score representation in DBot
    """
    # Thresholds: >800 malicious (3), >400 suspicious (2), >0 good (1),
    # anything else unknown (0).
    if score > 800:
        return 3
    if score > 400:
        return 2
    if score > 0:
        return 1
    return 0
class Validator:
    """Static validation helpers for the entity types accepted by the commands."""

    @staticmethod
    def validate_domain(domain_to_validate):
        """Return True when the value is a syntactically valid domain name.

        Checks the RFC 1035 shape: at most 255 characters overall, each label
        1-63 characters of letters/digits/hyphens with no leading or trailing
        hyphen. A single trailing dot (FQDN form) is tolerated.
        """
        if not isinstance(domain_to_validate, str) or len(domain_to_validate) > 255:
            return False
        candidate = domain_to_validate[:-1] if domain_to_validate.endswith(".") else domain_to_validate
        label_pattern = re.compile(r"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
        return all(label_pattern.match(label) for label in candidate.split("."))

    @staticmethod
    def validate_ipv4(ip_to_validate):
        """Return True when the value is a valid IPv4 address."""
        return is_ip_valid(ip_to_validate)

    @staticmethod
    def validate_ipv6(ip_to_validate):
        """Return True when the value is a valid IPv6 address."""
        return is_ipv6_valid(ip_to_validate)

    @staticmethod
    def validate_hash(hash_to_validate):
        """Return True when the value is a recognized hash (per get_hash_type)."""
        return get_hash_type(hash_to_validate) != 'Unknown'

    @staticmethod
    def raise_if_ip_not_valid(ip: str):
        """Raise ValueError unless *ip* is a valid IPv4 or IPv6 address.

        Examples:
            >>> Validator.raise_if_ip_not_valid('1.1.1.1')
        """
        if not (Validator.validate_ipv4(ip) or Validator.validate_ipv6(ip)):
            raise ValueError(f'IP "{ip}" is not a type of IPv4 or IPv6')

    @staticmethod
    def raise_if_domain_not_valid(domain: str):
        """Raise ValueError unless *domain* is a syntactically valid domain.

        Examples:
            >>> Validator.raise_if_domain_not_valid('example.com')
        """
        if not Validator.validate_domain(domain):
            raise ValueError(f'Domain "{domain}" is not a valid domain address')

    @staticmethod
    def raise_if_hash_not_valid(file_hash: str):
        """Raise ValueError unless *file_hash* is an MD5 or SHA-1 hash.

        Examples:
            >>> Validator.raise_if_hash_not_valid('7e641f6b9706d860baf09fe418b6cc87')
        """
        if not Validator.validate_hash(file_hash):
            raise ValueError(f'Hash "{file_hash}" is not of type SHA-1 or MD5')
def verify_entity_type(entity_to_control_list: list, entity_type: str):
    """Verify intended entity type. Raise exception if the provided entity type is not expected.

    :type entity_to_control_list: ``list``
    :param entity_to_control_list: Intended entity list to be verified.

    :type entity_type: ``str``
    :param entity_type: Intended entity type to be verified ('ip', 'domain' or 'hash').
    """
    validators = {
        'ip': Validator.raise_if_ip_not_valid,
        'domain': Validator.raise_if_domain_not_valid,
        'hash': Validator.raise_if_hash_not_valid,
    }
    # Lookup stays inside the loop so an unknown entity_type only raises
    # KeyError when the list is non-empty (matches the original behavior).
    for entity in entity_to_control_list:
        validators[entity_type](entity)
def map_indicator_type(socradar_indicator_type: str) -> Optional[str]:
    """Map SOCRadar indicator type to XSOAR indicator type

    :type socradar_indicator_type: ``str``
    :param socradar_indicator_type: The SOCRadar indicator type

    :return: XSOAR indicator type, or None for an unrecognized input
    :rtype: ``Optional[str]``
    """
    return {
        'ipv4': FeedIndicatorType.IP,
        'ipv6': FeedIndicatorType.IPv6,
        'hash': FeedIndicatorType.File,
        'hostname': FeedIndicatorType.Domain,
    }.get(socradar_indicator_type)
def build_entry_context(results: Union[Dict, List], indicator_type: str):
    """Formatting results from SOCRadar API to Demisto Context

    :type results: ``Union[Dict, List]``
    :param results: Raw results obtained from SOCRadar API.

    :type indicator_type: ``str``
    :param indicator_type: Type of indicator ('ip', 'domain' or 'hash') to be
        used in context creation.
    """
    if isinstance(results, list):
        return [build_entry_context(entry, indicator_type) for entry in results]  # pragma: no cover
    result_data = results.get('data', {})
    return_context = {
        'Risk Score (Out of 1000)': result_data.get('score'),
        'Score Details': result_data.get('score_details'),
        'Total Encounters': len(result_data.get('findings', [])),
        map_indicator_type(result_data.get('classification')): result_data.get('value'),
    }
    if indicator_type == 'domain':
        return_context['Subdomains'] = result_data.get('subdomains', [])
    if indicator_type != 'hash':
        # IP and domain entities carry whois/DNS information; hashes do not.
        return_context['Whois Details'] = {}
        for key, value in result_data.get('whois', {}).items():
            # Exclude raw whois
            if key == 'raw':
                continue
            # Whois servers may return several dates/registrars; keep the first.
            # isinstance() replaces the non-idiomatic `type(value) == list` check.
            if value and isinstance(value, list) and key in ('creation_date', 'expiration_date', 'updated_date',
                                                             'registrar'):
                value = value[0]
            return_context['Whois Details'][key] = value
        return_context['DNS Details'] = result_data.get('dns_info', {})
    if indicator_type == 'ip':
        geo_location_dict = {}
        if result_data.get('geo_location', []):
            geo_location = result_data['geo_location'][0]
            # Drop the redundant 'IP' field (any casing) from the geo data.
            geo_location_dict = {key: value for key, value in geo_location.items() if key.lower() != 'ip'}
        # Prefer the whois ASN fields; fall back to the geo-location provider's.
        asn_code = result_data.get('whois', {}).get('asn', '')
        if not asn_code:
            asn_code = geo_location_dict.get('AsnCode', '')
        asn_description = result_data.get('whois', {}).get('asn_description', '')
        if not asn_description:
            asn_description = geo_location_dict.get('AsnName', '')
        geo_location_dict['ASN'] = f"[{asn_code}] {asn_description}"
        return_context['Geo Location'] = geo_location_dict
    return return_context
''' COMMAND FUNCTIONS '''
def test_module(client: Client) -> str:
    """Tests API connectivity and authentication.

    Raises an exception (surfaced by the framework) when the auth check fails;
    returning 'ok' indicates the configured API key works.

    :type client: ``Client``
    :param client: client to use

    :return: 'ok' if test passed, anything else will fail the test.
    :rtype: ``str``
    """
    client.check_auth()
    return "ok"
def ip_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
    """Returns SOCRadar reputation details for the given IP entities.

    :type client: ``Client``
    :param client: client to use

    :type args: Dict[str, Any]
    :param args: contains all arguments for ip_command ('ip': one address or a
        comma separated list)

    :return:
        List of ``CommandResults`` objects that is then passed to ``return_results``
    :rtype: ``List[CommandResults]``
    """
    ip_addresses = args.get('ip', '')
    ip_list: list = argToList(ip_addresses)
    # Raises ValueError early when any entry is not IPv4/IPv6.
    verify_entity_type(ip_list, 'ip')
    command_results_list: List[CommandResults] = []
    for ip_to_score in ip_list:
        raw_response = client.get_entity_score(ip_to_score)
        if raw_response.get('is_success'):
            # Whitelisted entities are forced to Good (1); otherwise the numeric
            # SOCRadar score is mapped onto the DBot scale.
            if raw_response.get('data', {}).get('is_whitelisted'):
                score = 1
            elif (score := raw_response.get('data', {}).get('score', 0)) is not None:
                score = calculate_dbot_score(score)
            # NOTE(review): an explicit null score for a non-whitelisted entity
            # leaves `score` as None here — confirm this is intended downstream.
            title = f'SOCRadar - Analysis results for IP: {ip_to_score}'
            context_entry = build_entry_context(raw_response, 'ip')
            human_readable = tableToMarkdown(title, context_entry)
            dbot_score = Common.DBotScore(indicator=ip_to_score,
                                          indicator_type=DBotScoreType.IP,
                                          integration_name=INTEGRATION_NAME,
                                          score=score,
                                          reliability=demisto.params().get('integrationReliability'))
            # Geo fields come from the 'Geo Location' dict built by
            # build_entry_context (whois ASN preferred, geo provider fallback).
            ip_object = Common.IP(ip=ip_to_score,
                                  dbot_score=dbot_score,
                                  asn=context_entry['Geo Location'].get('ASN'),
                                  geo_country=context_entry['Geo Location'].get('CountryCode'),
                                  geo_latitude=context_entry['Geo Location'].get('Latitude'),
                                  geo_longitude=context_entry['Geo Location'].get('Longitude'),
                                  region=context_entry['Geo Location'].get('RegionName'))
            command_results_list.append(CommandResults(
                outputs_prefix="SOCRadarThreatFusion.Reputation.IP",
                outputs_key_field="IP",
                readable_output=human_readable,
                raw_response=raw_response,
                outputs=context_entry,
                indicator=ip_object
            ))
        else:
            # Per-entity API failure: report it but keep processing the rest.
            message = f"Error at scoring IP {ip_to_score} while getting API response. " \
                      f"SOCRadar ThreatFusion API Response: {raw_response.get('message', '')}"
            command_results_list.append(CommandResults(readable_output=message))
    if not command_results_list:
        command_results_list = [
            CommandResults('SOCRadar ThreatFusion could not find any results for the given IP address(es).')
        ]
    return command_results_list
def domain_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
    """Returns SOCRadar reputation details for the given domain entities.

    :type client: ``Client``
    :param client: client to use

    :type args: Dict[str, Any]
    :param args: contains all arguments for domain_command ('domain': one
        domain or a comma separated list)

    :return:
        List of ``CommandResults`` objects that is then passed to ``return_results``
    :rtype: ``List[CommandResults]``
    """
    domains = args.get('domain', '')
    domains_list: list = argToList(domains)
    # Raises ValueError early when any entry is not a valid domain.
    verify_entity_type(domains_list, 'domain')
    command_results_list: List[CommandResults] = []
    for domain_to_score in domains_list:
        raw_response = client.get_entity_score(domain_to_score)
        if raw_response.get('is_success'):
            # Whitelisted entities are forced to Good (1); otherwise the numeric
            # SOCRadar score is mapped onto the DBot scale.
            if raw_response.get('data', {}).get('is_whitelisted'):
                score = 1
            elif (score := raw_response.get('data', {}).get('score', 0)) is not None:
                score = calculate_dbot_score(score)
            # NOTE(review): an explicit null score for a non-whitelisted entity
            # leaves `score` as None here — confirm this is intended downstream.
            title = f'SOCRadar - Analysis results for domain: {domain_to_score}'
            context_entry = build_entry_context(raw_response, 'domain')
            human_readable = tableToMarkdown(title, context_entry)
            dbot_score = Common.DBotScore(indicator=domain_to_score,
                                          indicator_type=DBotScoreType.DOMAIN,
                                          integration_name=INTEGRATION_NAME,
                                          score=score,
                                          reliability=demisto.params().get('integrationReliability'))
            # Whois/DNS fields come from the dicts built by build_entry_context;
            # several have a secondary fallback key via `or`.
            domain_object = Common.Domain(domain=domain_to_score,
                                          dbot_score=dbot_score,
                                          dns=', '.join(context_entry['DNS Details'].get('A', [])),
                                          creation_date=context_entry['Whois Details'].get('creation_date'),
                                          expiration_date=context_entry['Whois Details'].get('expiration_date'),
                                          updated_date=context_entry['Whois Details'].get('updated_date'),
                                          registrant_country=context_entry['Whois Details'].get('registrant_country'),
                                          registrant_name=context_entry['Whois Details'].get('registrant_name')
                                          or context_entry['Whois Details'].get('name'),
                                          registrar_name=context_entry['Whois Details'].get('registrar'),
                                          organization=context_entry['Whois Details'].get('org'),
                                          geo_country=context_entry['Whois Details'].get('country'),
                                          sub_domains=context_entry['Subdomains'],
                                          name_servers=context_entry['DNS Details'].get('NS')
                                          or context_entry['Whois Details'].get('name_servers'))
            command_results_list.append(CommandResults(
                outputs_prefix="SOCRadarThreatFusion.Reputation.Domain",
                outputs_key_field="Domain",
                readable_output=human_readable,
                raw_response=raw_response,
                outputs=context_entry,
                indicator=domain_object
            ))
        else:
            # Per-entity API failure: report it but keep processing the rest.
            message = f"Error at scoring domain {domain_to_score} while getting API response. " \
                      f"SOCRadar ThreatFusion API Response: {raw_response.get('message', '')}"
            command_results_list.append(CommandResults(readable_output=message))
    if not command_results_list:
        command_results_list = [
            CommandResults('SOCRadar ThreatFusion could not find any results for the given domain(s).')
        ]
    return command_results_list
def file_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
    """Returns SOCRadar reputation details for the given hash entities.

    :type client: ``Client``
    :param client: client to use

    :type args: Dict[str, Any]
    :param args: contains all arguments for file_command ('file': one hash or
        a comma separated list)

    :return:
        List of ``CommandResults`` objects that is then passed to ``return_results``
    :rtype: ``List[CommandResults]``
    """
    file_hashes = args.get('file', '')
    file_hash_list: list = argToList(file_hashes)
    # Raises ValueError early when any entry is not a recognized hash.
    verify_entity_type(file_hash_list, 'hash')
    command_results_list: List[CommandResults] = []
    for hash_to_score in file_hash_list:
        hash_type = get_hash_type(hash_to_score)
        raw_response = client.get_entity_score(hash_to_score)
        if raw_response.get('is_success'):
            # Whitelisted entities are forced to Good (1); otherwise the numeric
            # SOCRadar score is mapped onto the DBot scale.
            if raw_response.get('data', {}).get('is_whitelisted'):
                score = 1
            elif (score := raw_response.get('data', {}).get('score', 0)) is not None:
                score = calculate_dbot_score(score)
            title = f'SOCRadar - Analysis results for hash: {hash_to_score}'
            context_entry = build_entry_context(raw_response, 'hash')
            human_readable = tableToMarkdown(title, context_entry)
            dbot_score = Common.DBotScore(indicator=hash_to_score,
                                          indicator_type=DBotScoreType.FILE,
                                          integration_name=INTEGRATION_NAME,
                                          score=score,
                                          reliability=demisto.params().get('integrationReliability'))
            file_object = Common.File(dbot_score=dbot_score)
            # hash_type can either be 'sha-1' or 'md5' at this point.
            # NOTE(review): CommonServerPython's get_hash_type usually returns
            # 'sha1' (no hyphen) — confirm this branch is reachable, otherwise
            # SHA-1 hashes would be stored in the md5 field.
            if hash_type == 'sha-1':
                file_object.sha1 = hash_to_score
            else:
                file_object.md5 = hash_to_score
            command_results_list.append(CommandResults(
                outputs_prefix="SOCRadarThreatFusion.Reputation.Hash",
                outputs_key_field="File",
                readable_output=human_readable,
                raw_response=raw_response,
                outputs=context_entry,
                indicator=file_object
            ))
        else:
            # Per-entity API failure: report it but keep processing the rest.
            message = f"Error at scoring file hash {hash_to_score} while getting API response. " \
                      f"SOCRadar ThreatFusion API Response: {raw_response.get('message', '')}"
            command_results_list.append(CommandResults(readable_output=message))
    if not command_results_list:
        command_results_list = [
            CommandResults('SOCRadar ThreatFusion could not find any results for the given file hash(es).')
        ]
    return command_results_list
def score_ip_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Returns SOCRadar reputation details for the given IP entity.

    :type client: ``Client``
    :param client: client to use

    :type args: Dict[str, Any]
    :param args: contains all arguments for score-ip command ('ip')

    :return:
        A ``CommandResults`` object that is then passed to ``return_results``
    :rtype: ``CommandResults``
    """
    ip_to_score = args.get('ip', '')
    verify_entity_type([ip_to_score], 'ip')
    raw_response = client.get_entity_score(ip_to_score)
    if not raw_response.get('is_success'):
        message = f"Error while getting API response. SOCRadar API Response: {raw_response.get('message', '')}"
        raise DemistoException(message=message)
    data = raw_response.get('data', {})
    # Whitelisted -> Good (1); otherwise map the numeric SOCRadar score.
    if data.get('is_whitelisted'):
        score = 1
    elif (score := data.get('score', 0)) is not None:
        score = calculate_dbot_score(score)
    context_entry = build_entry_context(raw_response, 'ip')
    # Markdown is rendered BEFORE merging the DBot entry so it shows only the
    # analysis fields.
    human_readable = tableToMarkdown(f'SOCRadar - Analysis results for IP: {ip_to_score}', context_entry)
    context_entry.update(build_dbot_entry(ip_to_score, DBotScoreType.IP, 'SOCRadar ThreatFusion', score))
    return CommandResults(
        outputs_prefix="SOCRadarThreatFusion.Reputation.IP",
        outputs_key_field="IP",
        readable_output=human_readable,
        raw_response=raw_response,
        outputs=context_entry
    )
def score_domain_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Returns SOCRadar reputation details for the given domain entity.

    :type client: ``Client``
    :param client: client to use

    :type args: Dict[str, Any]
    :param args: contains all arguments for score-domain command ('domain')

    :return:
        A ``CommandResults`` object that is then passed to ``return_results``
    :rtype: ``CommandResults``
    """
    domain_to_score = args.get('domain', '')
    verify_entity_type([domain_to_score], 'domain')
    raw_response = client.get_entity_score(domain_to_score)
    if not raw_response.get('is_success'):
        message = f"Error while getting API response. SOCRadar API Response: {raw_response.get('message', '')}"
        raise DemistoException(message=message)
    data = raw_response.get('data', {})
    # Whitelisted -> Good (1); otherwise map the numeric SOCRadar score.
    if data.get('is_whitelisted'):
        score = 1
    elif (score := data.get('score', 0)) is not None:
        score = calculate_dbot_score(score)
    context_entry = build_entry_context(raw_response, 'domain')
    # Markdown is rendered BEFORE merging the DBot entry so it shows only the
    # analysis fields.
    human_readable = tableToMarkdown(f'SOCRadar - Analysis results for domain: {domain_to_score}', context_entry)
    context_entry.update(build_dbot_entry(domain_to_score, DBotScoreType.DOMAIN, 'SOCRadar ThreatFusion', score))
    return CommandResults(
        outputs_prefix="SOCRadarThreatFusion.Reputation.Domain",
        outputs_key_field="Domain",
        readable_output=human_readable,
        raw_response=raw_response,
        outputs=context_entry
    )
def score_hash_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Returns SOCRadar reputation details for the given hash entity.

    :type client: ``Client``
    :param client: client to use

    :type args: Dict[str, Any]
    :param args: contains all arguments for the socradar-score-hash command
        ('hash' holds the file hash to score)

    :return:
        A ``CommandResults`` object that is then passed to ``return_results``
    :rtype: ``CommandResults``
    """
    hash_to_score = args.get('hash', '')
    verify_entity_type([hash_to_score], 'hash')
    # Detect MD5/SHA1/SHA256/etc. so the DBot entry carries the right indicator type.
    hash_type = get_hash_type(hash_to_score)
    raw_response = client.get_entity_score(hash_to_score)
    if raw_response.get('is_success'):
        # Whitelisted entities are always treated as Good (DBot score 1).
        if raw_response.get('data', {}).get('is_whitelisted'):
            score = 1
        elif (score := raw_response.get('data', {}).get('score', 0)) is not None:
            score = calculate_dbot_score(score)
        # NOTE(review): if the API returns an explicit null score, `score` stays
        # None here and is passed to build_dbot_entry as-is — confirm that is intended.
        title = f'SOCRadar - Analysis results for hash: {hash_to_score}'
        context_entry = build_entry_context(raw_response, 'hash')
        dbot_entry = build_dbot_entry(hash_to_score, hash_type, 'SOCRadar ThreatFusion', score)
        human_readable = tableToMarkdown(title, context_entry)
        # Merge the DBot score entry into the context returned to the war room.
        context_entry.update(dbot_entry)
    else:
        message = f"Error while getting API response. SOCRadar API Response: {raw_response.get('message', '')}"
        raise DemistoException(message=message)
    return CommandResults(
        outputs_prefix="SOCRadarThreatFusion.Reputation.Hash",
        outputs_key_field="File",
        readable_output=human_readable,
        raw_response=raw_response,
        outputs=context_entry
    )
''' MAIN FUNCTION '''
def main() -> None:
    """Parse the integration parameters and dispatch the invoked command.

    :return:
    :rtype:
    """
    params = demisto.params()
    api_key = params.get('apikey')
    base_url = SOCRADAR_API_ENDPOINT
    verify_certificate = not params.get('insecure', False)
    proxy = params.get('proxy', False)

    demisto.debug(f'Command being called is {demisto.command()}')
    # Map each supported command name to its handler; every handler
    # takes (client, args) and returns a CommandResults object.
    command_map = {
        'ip': ip_command,
        'domain': domain_command,
        'file': file_command,
        'socradar-score-ip': score_ip_command,
        'socradar-score-domain': score_domain_command,
        'socradar-score-hash': score_hash_command,
    }
    try:
        client = Client(
            base_url=base_url,
            api_key=api_key,
            verify=verify_certificate,
            proxy=proxy)
        command = demisto.command()
        if command == 'test-module':
            # Called when the integration's Test button is pressed.
            return_results(test_module(client))
        elif command in command_map:
            return_results(command_map[command](client, demisto.args()))
    except Exception as e:
        demisto.error(traceback.format_exc())
        return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
''' ENTRY POINT '''
# Run main() when executed directly or when imported by the XSOAR server,
# which may expose the module as __builtin__ (py2) or builtins (py3).
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | c3362bc90a19bc25ff1a5e8e40823d3b | 37.588496 | 125 | 0.596797 | 3.953309 | false | false | false | false |
demisto/content | Packs/GoogleSafeBrowsing/Integrations/GoogleSafeBrowsingV2/GoogleSafeBrowsingV2.py | 2 | 8902 | import demistomock as demisto
from CommonServerPython import * # noqa # pylint: disable=unused-wildcard-import
from CommonServerUserPython import * # noqa
import urllib3
from typing import Dict, Any
# Disable insecure warnings
urllib3.disable_warnings()
TYPES = {
'threatTypes': ["MALWARE", "SOCIAL_ENGINEERING", "POTENTIALLY_HARMFUL_APPLICATION", "UNWANTED_SOFTWARE"],
'platformTypes': ["ANY_PLATFORM", "WINDOWS", "LINUX", "ALL_PLATFORMS", "OSX", "CHROME", "IOS", "ANDROID"]
}
INTEGRATION_NAME = 'GoogleSafeBrowsing'
URL_OUTPUT_PREFIX = 'GoogleSafeBrowsing.URL'
class Client(BaseClient):
    """HTTP client for the Google Safe Browsing v4 threatMatches endpoint."""

    def __init__(self, proxy: bool, verify: bool, reliability: str, base_url: str, params: dict):
        request_headers = {
            'Content-Type': 'application/json',
            'Accept': 'application/json'
        }
        super().__init__(proxy=proxy, verify=verify, base_url=base_url, headers=request_headers)
        self.base_url = base_url
        # Client identification sent with every lookup request.
        self.client_body = {
            'clientId': params.get('client_id'),
            'clientVersion': params.get('client_version'),
        }
        if not DBotScoreReliability.is_valid_type(reliability):
            raise Exception("Google Safe Browsing v2 error: "
                            "Please provide a valid value for the Source Reliability parameter.")
        self.reliability = DBotScoreReliability.get_dbot_score_reliability_from_str(reliability)

    def build_request_body(self, client_body: Dict, list_url: List) -> Dict:
        """ build the request body according to the client body and the urls.

        Args:
            client_body: client body to add it in the request body
            list_url: The urls list

        Returns:
            (dict) The request body, in the right format.
        """
        threat_entries = [{"url": single_url} for single_url in list_url]
        return {
            "client": client_body,
            "threatInfo": {
                "threatTypes": TYPES.get('threatTypes'),
                "platformTypes": TYPES.get('platformTypes'),
                "threatEntryTypes": ["URL"],
                "threatEntries": threat_entries
            }
        }

    def url_request(self, client_body, list_url) -> Dict:
        """ send the url request.

        Args:
            client_body: client body to add it in the request body
            list_url: The urls list

        Returns:
            (dict) The response from the request.
        """
        payload = self.build_request_body(client_body, list_url)
        return self._http_request(
            method='POST',
            json_data=payload,
            full_url=self.base_url)
def test_module(client: Client) -> str:
    """Check connectivity and authorization against the Safe Browsing API.

    Queries a well-known malicious test URL; a healthy, authorized instance
    must return at least one match for it.

    Args:
        client: client to use.

    Returns:
        'ok' on success, otherwise a human-readable error message.
    """
    try:
        # testing a known malicious URL to check if we get matches
        test_url = "http://testsafebrowsing.appspot.com/apiv4/ANY_PLATFORM/MALWARE/URL/"
        res = client.url_request(client.client_body, [test_url])
        if res.get('matches'):  # matches - There is a match for the URL we were looking for
            message = 'ok'
        else:
            # Fixed typo in the error message: 'respons' -> 'response'.
            message = 'Error querying Google Safe Browsing. Expected matching response, but received none'
    except DemistoException as e:
        if 'Forbidden' in str(e) or 'Authorization' in str(e):
            message = 'Authorization Error: please make sure the API Key is set correctly.'
        else:
            raise e
    return message
def handle_errors(result: Dict) -> None:
    """Raise an Exception describing the failure encoded in an API response.

    Args:
        result: raw API response; may contain 'StatusCode', 'Body' and 'error' keys.

    Raises:
        Exception: when the response indicates rate limiting, an HTTP failure,
            an empty body, or an API-level error object.
    """
    status_code = result.get('StatusCode', 0)
    result_body = result.get('Body')
    # An empty 204 response is how the API signals rate limiting.
    if result_body == '' and status_code == 204:
        raise Exception('No content received. Possible API rate limit reached.')
    # NOTE(review): as written this raises for 2xx *success* codes (201-298)
    # and never for 4xx/5xx — the comparison looks inverted; confirm the
    # intended range check before changing it.
    if 200 < status_code < 299:
        raise Exception(f'Failed to perform request, request status code: {status_code}.')
    if result_body == '':
        raise Exception('No content received. Maybe you tried a private API?.')
    # API-level error object: propagate its message and code.
    if result.get('error'):
        error_massage = result.get('error', {}).get('message')
        error_code = result.get('error', {}).get('code')
        raise Exception(f'Failed accessing Google Safe Browsing APIs. Error: {error_massage}. Error code: {error_code}')
def arrange_results_to_urls(results: List, url_list: List) -> Dict:
    """Group raw API match results by the URL they refer to.

    Args:
        results: the 'matches' list from the API response.
        url_list: the URLs that were queried.

    Returns:
        (dict) A mapping of each queried URL to its (possibly empty) list of
        matching threat entries. Matches whose threat URL is not one of the
        queried URLs (e.g. a canonicalized variant returned by the API) are
        kept under their own key instead of raising a KeyError.
    """
    # Start every queried URL with an empty result list.
    urls_results: Dict[str, list] = {url: [] for url in url_list}
    for result in results:
        url = result.get('threat', {}).get('url')
        # setdefault guards against URLs the API reports that we did not query.
        urls_results.setdefault(url, []).append(result)
    return urls_results
def url_command(client: Client, args: Dict[str, Any]) -> Union[List[CommandResults], CommandResults]:
    """
    url command: Returns URL details for a list of URL
    """
    url = argToList(args.get('url'))
    result = client.url_request(client.client_body, url)
    if not result:
        # Empty response: the API found no data for any of the queried URLs.
        # NOTE(review): 'url' is a *list* here, yet it is passed below as a
        # single indicator value — confirm whether a per-URL loop was intended.
        dbot_score = Common.DBotScore(
            indicator=url,
            indicator_type=DBotScoreType.URL,
            integration_name=INTEGRATION_NAME,
            score=0,
            reliability=client.reliability
        )
        url_standard_context = Common.URL(
            url=url,
            dbot_score=dbot_score
        )
        return CommandResults(
            readable_output=f'No information was found for url {url}',
            outputs_prefix=URL_OUTPUT_PREFIX,
            outputs_key_field='IndicatorValue',
            outputs=result,
            indicator=url_standard_context
        )
    # A 'StatusCode' key means the response carries an error payload.
    if result.get('StatusCode'):
        handle_errors(result)
    # Group the raw matches per queried URL so each gets its own entry.
    urls_data = arrange_results_to_urls(result.get('matches'), url)  # type: ignore
    url_data_list = []
    for url_key, url_data in urls_data.items():
        if url_data:
            # At least one threat match: mark the URL as malicious (score 3).
            dbot_score = Common.DBotScore(
                indicator=url_key,
                indicator_type=DBotScoreType.URL,
                integration_name=INTEGRATION_NAME,
                score=3,
                reliability=client.reliability
            )
            url_standard_context = Common.URL(
                url=url_key,
                dbot_score=dbot_score
            )
            url_data_list.append(CommandResults(
                readable_output=tableToMarkdown(f'Google Safe Browsing APIs - URL Query: {url_key}', url_data),
                outputs_prefix=URL_OUTPUT_PREFIX,
                outputs_key_field='IndicatorValue',
                outputs=url_data,
                indicator=url_standard_context
            ))
        else:
            # No matches for this URL: unknown reputation (score 0).
            dbot_score = Common.DBotScore(
                indicator=url_key,
                indicator_type=DBotScoreType.URL,
                integration_name=INTEGRATION_NAME,
                score=0,
                reliability=client.reliability
            )
            url_standard_context = Common.URL(
                url=url_key,
                dbot_score=dbot_score
            )
            url_data_list.append(CommandResults(
                readable_output=f'No matches for URL {url_key}',
                outputs_prefix=URL_OUTPUT_PREFIX,
                outputs_key_field='IndicatorValue',
                outputs=result,
                indicator=url_standard_context
            ))
    return url_data_list
def build_base_url(params: Dict) -> str:
    """Compose the API endpoint URL with the key appended as a query parameter."""
    api_key = params.get('api_key')
    url = params.get('url', '')
    # Normalize to a trailing slash before appending the query string.
    separator = '' if url.endswith('/') else '/'
    return f"{url}{separator}?key={api_key}"
def main() -> None:
    """Parse the integration parameters and route the invoked command."""
    params = demisto.params()
    verify_certificate = not params.get('insecure', False)
    proxy = params.get('proxy', False)
    base_url = build_base_url(params)
    # Default to reliability B when the parameter is unset or empty.
    reliability = params.get('integrationReliability') or DBotScoreReliability.B

    demisto.debug(f'Command being called is {demisto.command()}')
    try:
        client = Client(
            params=params,
            base_url=base_url,
            verify=verify_certificate,
            proxy=proxy,
            reliability=reliability)
        command = demisto.command()
        if command == 'test-module':
            return_results(test_module(client))
        elif command == 'url':
            return_results(url_command(client, demisto.args()))
    # Log exceptions and return errors
    except Exception as e:
        return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
''' ENTRY POINT '''
# Run main() when executed directly or when imported by the XSOAR server,
# which may expose the module as __builtin__ (py2) or builtins (py3).
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | 8fcfea9b53047a26087ed2766c122527 | 32.466165 | 120 | 0.588182 | 3.995512 | false | false | false | false |
demisto/content | Packs/EWS/Scripts/GetEWSFolder/GetWESFolder_test.py | 2 | 1729 | import json
from GetEWSFolder import main, convert_mail_to_json
import demistomock as demisto
def create_mail(subject, body):
    """Build a minimal mail dict with plain-text and HTML body variants."""
    return {
        'subject': subject,
        'textBody': body,
        'body': '<body>{}<\\body>'.format(body),
    }


# Two fixture folders, two mails each, used by the tests below.
mails_folder_1 = [create_mail('subject 1', 'body 1'), create_mail('subject 2', 'body 2')]
mails_folder_2 = [create_mail('subject 3', 'body 3'), create_mail('subject 4', 'body 4')]
def identical_mail(mail1, mail2):
    """Return True when both mails are the same size and agree on
    subject, body and textBody."""
    if len(mail1) != len(mail2):
        return False
    for field in ('subject', 'body', 'textBody'):
        if mail1[field] != mail2[field]:
            return False
    return True
def test_main(mocker):
    """End-to-end test of GetEWSFolder.main: mocks demisto and verifies the
    JSON file it produces contains exactly one entry per fixture mail."""

    def executeCommand(name, args):
        # Fake ews-get-items-as-json: serve the fixture mails per folder path.
        if args['folder-path'] == 'folder1':
            return [{'Contents': mails_folder_1, 'Type': 'Content'}]
        if args['folder-path'] == 'folder2':
            return [{'Contents': mails_folder_2, 'Type': 'Content'}]
        else:
            raise ValueError('Unexist directory')

    mocker.patch.object(demisto, 'args', return_value={
        "foldersPaths": 'folder1,folder2'
    })
    mocker.patch.object(demisto, 'executeCommand', side_effect=executeCommand)
    mocker.patch.object(demisto, 'results')
    entry = main()
    # The script writes the mails as a JSON file entry named '1_<FileID>'.
    file_name = '1_{}'.format(entry['FileID'])
    with open(file_name) as json_file:
        mails_from_file = json.load(json_file)
    assert len(mails_from_file) == len(mails_folder_1) + len(mails_folder_2)
    # Every fixture mail must appear exactly once in its converted form.
    for mails_folder, folder in zip([mails_folder_1, mails_folder_2], ['folder1', 'folder2']):
        for mail in mails_folder:
            formatted_mail = convert_mail_to_json(mail, folder)
            assert sum(identical_mail(m, formatted_mail) for m in mails_from_file) == 1
| mit | c8b121ba1096cdd317fd6f8964fcbc5b | 36.586957 | 115 | 0.639676 | 3.312261 | false | false | false | false |
demisto/content | Packs/PaloAltoNetworks_Threat_Vault/Integrations/Threat_Vault/Threat_Vault.py | 2 | 19996 | from CommonServerPython import *
# Disable insecure warnings
requests.packages.urllib3.disable_warnings()
class Client(BaseClient):
    """
    Client to use in the Threat Vault integration. Overrides BaseClient.

    All requests carry the API key as a query parameter (self._params).
    """

    def __init__(self, api_key: str, verify: bool, proxy: bool, reliability: str):
        super().__init__(base_url='https://autofocus.paloaltonetworks.com/api/intel/v1', verify=verify, proxy=proxy,
                         headers={'Content-Type': 'application/json'})
        self._params = {'api_key': api_key}
        self.name = 'ThreatVault'
        self.reliability = reliability

    @logger
    def antivirus_signature_get_request(self, sha256: str = '', signature_id: str = '') -> dict:
        """Get antivirus signature by sending a GET request.

        Exactly one of sha256 / signature_id must be provided.

        Args:
            sha256: antivirus sha256.
            signature_id: signature ID.
        Returns:
            Response from API.
        """
        if (sha256 and signature_id) or (not sha256 and not signature_id):
            raise Exception('Please submit a sha256 or a signature_id.')
        if signature_id:
            suffix = f'/threatvault/panav/signature/{signature_id}'
        else:
            suffix = f'/file/{sha256}/signature'
        return self._http_request(method='GET', url_suffix=suffix, params=self._params)

    @logger
    def dns_signature_get_request(self, dns_signature_id: str) -> dict:
        """Get DNS signature by sending a GET request.

        Args:
            dns_signature_id: DNS signature ID.
        Returns:
            Response from API.
        """
        return self._http_request(method='GET', url_suffix=f'/threatvault/dns/signature/{dns_signature_id}',
                                  params=self._params)

    @logger
    def antispyware_get_by_id_request(self, signature_id: str) -> dict:
        """Get anti-spyware (IPS) signature by sending a GET request.

        Args:
            signature_id: signature ID.
        Returns:
            Response from API.
        """
        return self._http_request(method='GET', url_suffix=f'/threatvault/ips/signature/{signature_id}',
                                  params=self._params)

    @logger
    def ip_geo_get_request(self, ip_: str) -> dict:
        """Get IP geolocation by sending a GET request.

        Args:
            ip_: ip address.
        Returns:
            Response from API.
        """
        return self._http_request(method='GET', url_suffix=f'/ip/{ip_}/geolocation', params=self._params)

    @logger
    def search_request(self, path: str, from_: int, to_: int, signature_name: str = '', domain_name: str = '',
                       vendor: str = '', cve: str = '') -> dict:
        """Initiate a search by sending a POST request.

        Args:
            path: API endpoint path to search ('panav', 'dns' or 'ips').
            from_: from which signature to return results.
            to_: to which signature to return results.
            signature_name: signature name.
            domain_name: domain name.
            vendor: vendor ID.
            cve: cve ID.
        Returns:
            Response from API.
        """
        # Per search type, the filter arguments are mutually exclusive.
        if path == 'dns':  # DNS search
            if signature_name and domain_name:
                raise Exception('Please provide either a signature_name or a domain_name.')
        elif path == 'ips':  # Anti spyware search
            # Reject any combination of two or more of cve / vendor / signature_name.
            if (cve and (vendor or signature_name)) or (vendor and (cve or signature_name)) \
                    or (signature_name and (cve or vendor)):
                raise Exception('Please provide either a signature_name or a cve or a vendor.')
        # 'size' is the page length derived from the requested range.
        data: Dict[str, Any] = {
            'from': from_,
            'size': to_ - from_
        }
        # Exactly one field/value pair is sent; priority order below.
        if signature_name:
            data['field'] = 'signatureName'
            data['value'] = signature_name
        elif domain_name:
            data['field'] = 'domainName'
            data['value'] = domain_name
        elif cve:
            data['field'] = 'cve'
            data['value'] = cve
        else:  # vendor name
            data['field'] = 'vendor'
            data['value'] = vendor
        return self._http_request(method='POST', url_suffix=f'/threatvault/{path}/search', params=self._params,
                                  json_data=data)

    @logger
    def signature_search_results_request(self, search_type: str, search_request_id: str) -> dict:
        """Get signature search results by sending a GET request.

        Args:
            search_type: search type ('panav', 'dns' or 'ips').
            search_request_id: the ID returned when the search was initiated.
        Returns:
            Response from API.
        """
        return self._http_request(method='GET',
                                  url_suffix=f'/threatvault/{search_type}/search/result/{search_request_id}',
                                  params=self._params)
def antivirus_signature_get(client: Client, args: dict) -> CommandResults:
    """Fetch an antivirus signature by sha256 or signature ID.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        CommandResults.
    """
    sha256 = str(args.get('sha256', ''))
    signature_id = str(args.get('signature_id', ''))
    try:
        response = client.antivirus_signature_get_request(sha256, signature_id)
        readable_output = tableToMarkdown(name="Antivirus:", t=response, removeNull=True)
    except Exception as exc:
        if 'Error in API call [404] - Not Found' not in str(exc):
            raise Exception(exc)
        # A 404 simply means the signature does not exist.
        response = {}
        readable_output = 'Antivirus signature was not found. Please try with a different sha256 or signature_id.'
    return CommandResults(
        outputs_prefix=f'{client.name}.Antivirus',
        outputs_key_field='signatureId',
        outputs=response,
        readable_output=readable_output,
        raw_response=response
    )
def file_command(client: Client, args: Dict) -> List[CommandResults]:
    """Get the reputation of a sha256 representing an antivirus

    Args:
        client: Client object with request.
        args: Usually demisto.args(); 'file' may hold a comma-separated list.

    Returns:
        list of CommandResults, one per sha256.
    """
    sha256_list = argToList(args.get('file'))
    command_results_list: List[CommandResults] = []
    dbot_reliability = DBotScoreReliability.get_dbot_score_reliability_from_str(client.reliability)
    for sha256 in sha256_list:
        try:
            response = client.antivirus_signature_get_request(sha256)
            # A known antivirus signature means the file is malicious (BAD).
            dbot_score = Common.DBotScore(
                indicator=sha256,
                indicator_type=DBotScoreType.FILE,
                integration_name=client.name,
                score=Common.DBotScore.BAD,
                reliability=dbot_reliability
            )
            file = Common.File(
                sha256=sha256,
                dbot_score=dbot_score
            )
            readable_output = tableToMarkdown(name=f"SHA256 {sha256} Antivirus reputation:", t=response,
                                              removeNull=True)
        except Exception as err:
            if 'Error in API call [404] - Not Found' in str(err):
                # 404: the hash is unknown to Threat Vault (score NONE).
                response = {}
                dbot_score = Common.DBotScore(
                    indicator=sha256,
                    indicator_type=DBotScoreType.FILE,
                    integration_name=client.name,
                    reliability=dbot_reliability,
                    score=Common.DBotScore.NONE
                )
                file = Common.File(
                    sha256=sha256,
                    dbot_score=dbot_score
                )
                readable_output = f"SHA256 {sha256} Antivirus reputation is unknown to Threat Vault."
            else:
                raise Exception(err)
        command_results = CommandResults(
            outputs_prefix=f'{client.name}.Antivirus',
            outputs_key_field='signatureId',
            outputs=response,
            readable_output=readable_output,
            raw_response=response,
            indicator=file
        )
        command_results_list.append(command_results)
    return command_results_list
def dns_get_by_id(client: Client, args: dict) -> CommandResults:
    """Fetch a DNS signature by its ID.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        CommandResults.
    """
    dns_signature_id = str(args.get('dns_signature_id', ''))
    try:
        response = client.dns_signature_get_request(dns_signature_id)
        table_headers = ['signatureId', 'signatureName', 'domainName', 'createTime', 'category']
        readable_output = tableToMarkdown(name="DNS Signature:", t=response, headers=table_headers, removeNull=True)
    except Exception as exc:
        if 'Error in API call [404] - Not Found' not in str(exc):
            raise Exception(exc)
        # A 404 simply means the signature does not exist.
        response = {}
        readable_output = 'DNS signature was not found. Please try with a different dns_signature_id.'
    return CommandResults(
        outputs_prefix=f'{client.name}.DNS',
        outputs_key_field='signatureId',
        outputs=response,
        readable_output=readable_output,
        raw_response=response
    )
def antispyware_get_by_id(client: Client, args: dict) -> CommandResults:
    """Fetch an anti-spyware (IPS) signature by its ID.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        CommandResults.
    """
    signature_id = str(args.get('signature_id', ''))
    try:
        response = client.antispyware_get_by_id_request(signature_id)
        table_headers = ['signatureId', 'signatureName', 'signatureType', 'status', 'firstReleaseTime',
                         'latestReleaseTime']
        readable_output = tableToMarkdown(name="Anti Spyware Signature:", t=response, headers=table_headers,
                                          removeNull=True)
    except Exception as exc:
        if 'Error in API call [404] - Not Found' not in str(exc):
            raise Exception(exc)
        # A 404 simply means the signature does not exist.
        response = {}
        readable_output = 'Anti spyware signature was not found. Please try with a different signature_id.'
    return CommandResults(
        outputs_prefix=f'{client.name}.AntiSpyware',
        outputs_key_field='signatureId',
        outputs=response,
        readable_output=readable_output,
        raw_response=response
    )
def ip_geo_get(client: Client, args: dict) -> CommandResults:
    """Fetch the geolocation of an IP address.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        CommandResults.
    """
    ip_ = str(args.get('ip', ''))
    try:
        response = client.ip_geo_get_request(ip_)
        readable_output = tableToMarkdown(name="IP location:", t=response, removeNull=True)
    except Exception as exc:
        if 'Error in API call [404] - Not Found' not in str(exc):
            raise Exception(exc)
        # A 404 simply means no location data exists for this IP.
        response = {}
        readable_output = 'IP location was not found. Please try with a different IP.'
    return CommandResults(
        outputs_prefix=f'{client.name}.IP',
        outputs_key_field='ipAddress',
        outputs=response,
        readable_output=readable_output,
        raw_response=response
    )
def ip_command(client: Client, args: dict) -> List[CommandResults]:
    """Get IP geo location for the generic `ip` reputation command.

    Args:
        client: Client object with request.
        args: Usually demisto.args(); 'ip' may hold a comma-separated list.

    Returns:
        list of CommandResults, one per IP.
    """
    ip_list = argToList(args.get('ip', ''))
    command_results_list: List[CommandResults] = []
    dbot_reliability = DBotScoreReliability.get_dbot_score_reliability_from_str(client.reliability)
    for ip_ in ip_list:
        try:
            response = client.ip_geo_get_request(ip_)
            # Geolocation carries no verdict, so the DBot score is always NONE;
            # the country is attached as indicator enrichment.
            dbot_score = Common.DBotScore(
                indicator=ip_,
                indicator_type=DBotScoreType.IP,
                integration_name=client.name,
                reliability=dbot_reliability,
                score=Common.DBotScore.NONE
            )
            ip_obj = Common.IP(
                ip=ip_,
                dbot_score=dbot_score,
                geo_country=response.get('countryName'),
            )
            readable_output = tableToMarkdown(name="IP location:", t=response, removeNull=True)
        except Exception as err:
            if 'Error in API call [404] - Not Found' in str(err):
                # 404: no geolocation data for this IP.
                response = {}
                dbot_score = Common.DBotScore(
                    indicator=ip_,
                    indicator_type=DBotScoreType.IP,
                    integration_name=client.name,
                    reliability=dbot_reliability,
                    score=Common.DBotScore.NONE
                )
                ip_obj = Common.IP(
                    ip=ip_,
                    dbot_score=dbot_score
                )
                readable_output = 'IP location was not found. Please try with a different IP.'
            else:
                raise Exception(err)
        command_results = CommandResults(
            outputs_prefix=f'{client.name}.IP',
            outputs_key_field='ipAddress',
            outputs=response,
            readable_output=readable_output,
            raw_response=response,
            indicator=ip_obj
        )
        command_results_list.append(command_results)
    return command_results_list
def antivirus_signature_search(client: Client, args: dict) -> CommandResults:
    """Initiate antivirus signature search.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        CommandResults.
    """
    signature_name = str(args.get('signature_name', ''))
    from_ = int(args.get('from', 0))
    to_ = from_ + int(args.get('to', 10))
    response = client.search_request('panav', from_, to_, signature_name)
    # Copy before annotating so raw_response stays the untouched API payload
    # (previously the same dict was mutated and returned as raw_response).
    outputs = dict(response)
    outputs.update({'search_type': 'panav', 'from': from_, 'to': to_})
    readable_output = tableToMarkdown(name="Antivirus Signature Search:", t=outputs, removeNull=True)
    return CommandResults(
        outputs_prefix=f'{client.name}.Search',
        outputs_key_field='search_request_id',
        outputs=outputs,
        readable_output=readable_output,
        raw_response=response
    )
def dns_signature_search(client: Client, args: dict) -> CommandResults:
    """Initiate DNS signature search.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        CommandResults.
    """
    signature_name = str(args.get('signature_name', ''))
    from_ = int(args.get('from', 0))
    to_ = from_ + int(args.get('to', 10))
    domain_name = str(args.get('domain_name', ''))
    response = client.search_request('dns', from_, to_, signature_name, domain_name=domain_name)
    # Copy before annotating so raw_response stays the untouched API payload
    # (previously the same dict was mutated and returned as raw_response).
    outputs = dict(response)
    outputs.update({'search_type': 'dns', 'from': from_, 'to': to_})
    readable_output = tableToMarkdown(name="DNS Signature Search:", t=outputs, removeNull=True)
    return CommandResults(
        outputs_prefix=f'{client.name}.Search',
        outputs_key_field='search_request_id',
        outputs=outputs,
        readable_output=readable_output,
        raw_response=response
    )
def antispyware_signature_search(client: Client, args: dict) -> CommandResults:
    """Initiate anti spyware signature search.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        CommandResults.
    """
    signature_name = str(args.get('signature_name', ''))
    from_ = int(args.get('from', 0))
    to_ = from_ + int(args.get('to', 10))
    vendor = str(args.get('vendor', ''))
    cve = str(args.get('cve', ''))
    response = client.search_request('ips', from_, to_, signature_name, vendor=vendor, cve=cve)
    # Copy before annotating so raw_response stays the untouched API payload
    # (previously the same dict was mutated and returned as raw_response).
    outputs = dict(response)
    outputs.update({'search_type': 'ips', 'from': from_, 'to': to_})
    readable_output = tableToMarkdown(name="Anti Spyware Signature Search:", t=outputs, removeNull=True)
    return CommandResults(
        outputs_prefix=f'{client.name}.Search',
        outputs_key_field='search_request_id',
        outputs=outputs,
        readable_output=readable_output,
        raw_response=response
    )
def signature_search_results(client: Client, args: dict):
    """Retrieve signature search results.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        CommandResults, or None when the search request ID was not found
        (a warning entry is returned to the user in that case).
    """
    search_request_id = str(args.get('search_request_id', ''))
    search_type = str(args.get('search_type', ''))
    try:
        response = client.signature_search_results_request(search_type, search_request_id)
        # NOTE(review): 'outputs' aliases 'response', so the raw_response below
        # also carries the keys added here — confirm that is intended.
        outputs = response
        outputs.update({'search_request_id': search_request_id})
        if response.get('status') == 'submitted':  # search was not completed
            readable_output = f'Search {search_request_id} is still in progress.'
        else:
            headers = ['signatureId', 'signatureName', 'domainName', 'cve', 'signatureType', 'status', 'category',
                       'firstReleaseTime', 'latestReleaseTime']
            # Any non-'submitted' status is reported as completed.
            outputs.update({'status': 'completed'})
            title = f'Signature search are showing {outputs.get("page_count")} of {outputs.get("total_count")} results:'
            readable_output = tableToMarkdown(name=title, t=outputs.get('signatures'),
                                              headers=headers, removeNull=True)
        return CommandResults(
            outputs_prefix=f'{client.name}.Search',
            outputs_key_field='search_request_id',
            outputs=outputs,
            readable_output=readable_output,
            raw_response=response
        )
    except Exception as err:
        if 'Not Found' in str(err):
            # Implicitly returns None after surfacing the warning entry.
            return_warning(f'Search request ID {search_request_id} was not found.')
        else:
            raise
def test_module(client: Client, *_) -> str:
    """Validate connectivity by requesting geolocation for a known IP.

    Args:
        client: Client object with request.

    Returns:
        'ok' when the request succeeds (errors propagate as exceptions).
    """
    # Any successful response is enough; the payload itself is ignored.
    client.ip_geo_get_request(ip_='1.1.1.1')
    return 'ok'
def main():
    """
    Parse and validate the integration parameters, then dispatch the command.
    """
    params = demisto.params()
    auto_focus_key_retriever = AutoFocusKeyRetriever(params.get('api_key'))
    reliability = params.get('integrationReliability', 'D - Not usually reliable')
    if not DBotScoreReliability.is_valid_type(reliability):
        raise Exception("Please provide a valid value for the Source Reliability parameter.")
    verify = not params.get('insecure', False)
    proxy = params.get('proxy')

    # Command name -> handler; every handler takes (client, args).
    command_map = {
        'threatvault-antivirus-signature-get': antivirus_signature_get,
        'file': file_command,
        'threatvault-dns-signature-get-by-id': dns_get_by_id,
        'threatvault-antispyware-signature-get-by-id': antispyware_get_by_id,
        'threatvault-ip-geo-get': ip_geo_get,
        'ip': ip_command,
        'threatvault-antivirus-signature-search': antivirus_signature_search,
        'threatvault-dns-signature-search': dns_signature_search,
        'threatvault-antispyware-signature-search': antispyware_signature_search,
        'threatvault-signature-search-results': signature_search_results,
    }
    try:
        command = demisto.command()
        LOG(f'Command being called is {demisto.command()}')
        client = Client(api_key=auto_focus_key_retriever.key, verify=verify, proxy=proxy, reliability=reliability)
        if command == 'test-module':
            # This is the call made when pressing the integration Test button.
            return_results(test_module(client))
        elif command in command_map:
            return_results(command_map[command](client, demisto.args()))
        else:
            raise NotImplementedError(f'Command "{command}" was not implemented.')
    except Exception as err:
        return_error(str(err), err)
# Run main() when executed directly or when imported by the XSOAR server,
# which may expose the module as __builtin__ (py2) or builtins (py3).
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| mit | f4a8295089b7ef6f8ed1bc20ada8b2e4 | 34.39115 | 120 | 0.589418 | 4.08749 | false | false | false | false |
demisto/content | Packs/rasterize/Integrations/rasterize/rasterize.py | 2 | 29696 | import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
import base64
import os
import re
import subprocess
import tempfile
import time
import traceback
from enum import Enum
from io import BytesIO
from pathlib import Path
from typing import Callable, Tuple
import numpy as np
from pdf2image import convert_from_path
from PIL import Image
from PyPDF2 import PdfReader
from selenium import webdriver
from pyvirtualdisplay import Display
from selenium.common.exceptions import (InvalidArgumentException,
NoSuchElementException,
TimeoutException)
# Chrome respects proxy env params
handle_proxy()
# Make sure our python code doesn't go through a proxy when communicating with chrome webdriver
os.environ['no_proxy'] = 'localhost,127.0.0.1'
# Needed for cases that rasterize is running with non-root user (docker hardening)
os.environ['HOME'] = tempfile.gettempdir()
WITH_ERRORS = demisto.params().get('with_error', True)
DEFAULT_WAIT_TIME = max(int(demisto.params().get('wait_time', 0)), 0)
DEFAULT_PAGE_LOAD_TIME = int(demisto.params().get('max_page_load_time', 180))
URL_ERROR_MSG = "Can't access the URL. It might be malicious, or unreachable for one of several reasons. " \
"You can choose to receive this message as error/warning in the instance settings\n"
EMPTY_RESPONSE_ERROR_MSG = "There is nothing to render. This can occur when there is a refused connection." \
" Please check your URL."
DEFAULT_W, DEFAULT_H = '600', '800'
DEFAULT_W_WIDE = '1024'
CHROME_USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.64 Safari/537.36' # noqa
MAX_FULLSCREEN_W = 8000
MAX_FULLSCREEN_H = 8000
DRIVER_LOG = f'{tempfile.gettempdir()}/chromedriver.log'
DEFAULT_CHROME_OPTIONS = [
'--no-sandbox',
'--disable-gpu',
'--hide-scrollbars',
'--disable_infobars',
'--start-maximized',
'--start-fullscreen',
'--ignore-certificate-errors',
'--disable-dev-shm-usage',
f'--user-agent={CHROME_USER_AGENT}'
]
USER_CHROME_OPTIONS = demisto.params().get('chrome_options', "")
PAGES_LIMITATION = 20
CHROME_EXE = os.getenv('CHROME_EXE', '/opt/google/chrome/google-chrome')
class RasterizeMode(Enum):
    # How rasterization is performed: via the Selenium WebDriver, via the
    # headless Chrome CLI, or one of them preferred with fallback to the other.
    WEBDRIVER_PREFERED = 'WebDriver - Preferred'
    WEBDRIVER_ONLY = 'WebDriver - Only'
    HEADLESS_CLI_PREFERED = 'Headless CLI - Preferred'
    HEADLESS_CLI_ONLY = 'Headless CLI - Only'


# Mode selected in the instance configuration (defaults to WebDriver preferred).
DEFAULT_MODE = RasterizeMode(demisto.params().get('rasterize_mode', RasterizeMode.WEBDRIVER_PREFERED))


class RasterizeType(Enum):
    # Output format of a rasterization request.
    PNG = 'png'
    PDF = 'pdf'
    JSON = 'json'
def check_width_and_height(width: int, height: int) -> Tuple[int, int]:
    """
    Clamp the requested snapshot dimensions to the fullscreen safeguard limits.

    Args:
        width: The given width.
        height: The given height.

    Returns: The checked width and height values - [width, height]
    """
    return min(width, MAX_FULLSCREEN_W), min(height, MAX_FULLSCREEN_H)
def return_err_or_warn(msg):
    """Surface msg as an error or as a warning, per the instance's with_error setting."""
    if WITH_ERRORS:
        return_error(msg)
    else:
        return_warning(msg, exit=True)
def opt_name(opt):
    """Return the option's name: everything before the first '=' (or the whole string)."""
    name, _, _ = opt.partition('=')
    return name
def merge_options(default_options, user_options):
    """merge the defualt options and user options

    Arguments:
        default_options {list} -- list of options to use
        user_options {string} -- user configured options comma seperated (comma value can be escaped with \\)

    Returns:
        list -- merged options
    """
    # Split on unescaped commas only; '\,' keeps a literal comma in a value.
    parsed_user = re.split(r'(?<!\\),', user_options) if user_options else []
    if not parsed_user:  # nothing to do
        return default_options.copy()
    demisto.debug(f'user chrome options: {parsed_user}')
    merged = []
    removed_names = []
    for raw_opt in parsed_user:
        raw_opt = raw_opt.strip()
        # '[opt]' syntax asks to drop that default option entirely.
        if raw_opt.startswith('[') and raw_opt.endswith(']'):
            removed_names.append(raw_opt[1:-1])
        else:
            merged.append(raw_opt.replace(r'\,', ','))
    # Compare by option name only, so values (such as in user-agent) are ignored.
    user_names = [opt_name(x) for x in merged]
    # Keep defaults that were neither removed nor overridden by the user.
    merged.extend(x for x in default_options
                  if opt_name(x) not in removed_names and opt_name(x) not in user_names)
    return merged
def check_response(driver):
    """Warn/error out when the driver rendered a completely empty document."""
    empty_page = '<html><head></head><body></body></html>'
    if driver.page_source == empty_page:
        return_err_or_warn(EMPTY_RESPONSE_ERROR_MSG)
def init_display(width: int, height: int):
"""
Creates virtual display if include_url is set to True
Args:
width: desired snapshot width in pixels
height: desired snapshot height in pixels
Returns:
The display session
"""
try:
demisto.debug(f"Starting display with width: {width}, and height: {height}.")
os.environ['DISPLAY'] = ':0'
display = Display(visible=0, size=(width, height), backend='xvnc')
display.start()
except Exception as ex:
raise DemistoException(f'Unexpected exception: {ex}\nTrace:{traceback.format_exc()}')
demisto.debug('Creating display - COMPLETED')
return display
def init_driver(offline_mode=False, include_url=False):
"""
Creates headless Google Chrome Web Driver
Args:
offline_mode: when set to True, will block any outgoing communication
include_url: when set to True, will include the URL bar in the image result
Returns:
The driver session
"""
demisto.debug(f'Creating chrome driver. Mode: {"OFFLINE" if offline_mode else "ONLINE"}')
try:
chrome_options = webdriver.ChromeOptions()
for opt in merge_options(DEFAULT_CHROME_OPTIONS, USER_CHROME_OPTIONS):
chrome_options.add_argument(opt)
if not include_url:
chrome_options.add_argument('--headless')
driver = webdriver.Chrome(options=chrome_options, service_args=[
f'--log-path={DRIVER_LOG}',
])
if offline_mode:
driver.set_network_conditions(offline=True, latency=5, throughput=500 * 1024)
except Exception as ex:
raise DemistoException(f'Unexpected exception: {ex}\nTrace:{traceback.format_exc()}')
demisto.debug('Creating chrome driver - COMPLETED')
return driver
def find_zombie_processes():
    """find zombie processes
    Returns:
        ([process ids], raw ps output) -- return a tuple of zombie process ids and raw ps output
    """
    ps_out = subprocess.check_output(['ps', '-e', '-o', 'pid,ppid,state,stime,cmd'],
                                     stderr=subprocess.STDOUT, universal_newlines=True)
    lines = ps_out.splitlines()
    pid = str(os.getpid())
    zombies = []
    if len(lines) > 1:
        for line in lines[1:]:  # skip the ps header line
            pinfo = line.split()
            if pinfo[2] == 'Z' and pinfo[1] == pid:  # zombie process whose parent is us
                zombies.append(pinfo[0])
    return zombies, ps_out
def quit_driver_and_display_and_reap_children(driver, display):
    """
    Quits the driver's and display's sessions and reaps all of zombie child processes
    :param driver: The driver session (may be None).
    :param display: The display session (may be None).
    :return: None
    """
    # Each cleanup step is wrapped separately so a failure in one does not
    # prevent the others from running.
    try:
        try:
            if driver:
                demisto.debug(f'Quitting driver session: {driver.session_id}')
                driver.quit()
        except Exception as edr:
            demisto.error(f"Failed to quit driver. Error: {edr}. Trace: {traceback.format_exc()}")
        try:
            if display:
                demisto.debug("Stopping display")
                display.stop()
        except Exception as edr:
            demisto.error(f"Failed to stop display. Error: {edr}. Trace: {traceback.format_exc()}")
        # Reap any zombie children left behind by chrome/driver subprocesses.
        zombies, ps_out = find_zombie_processes()
        if zombies:
            demisto.info(f'Found zombie processes will waitpid: {ps_out}')
            for pid in zombies:
                waitres = os.waitpid(int(pid), os.WNOHANG)[1]
                demisto.info(f'waitpid result: {waitres}')
        else:
            demisto.debug(f'No zombie processes found for ps output: {ps_out}')
    except Exception as e:
        demisto.error(f'Failed checking for zombie processes: {e}. Trace: {traceback.format_exc()}')
def rasterize(path: str, width: int, height: int, r_type: RasterizeType = RasterizeType.PNG, wait_time: int = 0,
              offline_mode: bool = False, max_page_load_time: int = 180, full_screen: bool = False,
              r_mode: RasterizeMode = RasterizeMode.WEBDRIVER_PREFERED, include_url: bool = False):
    """
    Capturing a snapshot of a path (url/file), using Chrome Driver
    :param offline_mode: when set to True, will block any outgoing communication
    :param path: file path, or website url
    :param width: desired snapshot width in pixels
    :param height: desired snapshot height in pixels
    :param r_type: result type: .png/.pdf
    :param wait_time: time in seconds to wait before taking a screenshot
    :param max_page_load_time: amount of time to wait for a page load to complete before throwing an error
    :param full_screen: when set to True, the snapshot will take the whole page
    :param r_mode: rasterizing mode see: RasterizeMode enum.
    :param include_url: when set to True, will include the URL bar in the image result
    """
    demisto.debug(f'Rasterizing using mode: {r_mode}')
    page_load_time = max_page_load_time if max_page_load_time > 0 else DEFAULT_PAGE_LOAD_TIME
    # Build the ordered list of implementations to try, based on the mode.
    rasterize_funcs: Tuple[Callable, ...] = ()
    if r_mode == RasterizeMode.WEBDRIVER_PREFERED:
        rasterize_funcs = (rasterize_webdriver, rasterize_headless_cmd)
    elif r_mode == RasterizeMode.WEBDRIVER_ONLY:
        rasterize_funcs = (rasterize_webdriver,)
    elif r_mode == RasterizeMode.HEADLESS_CLI_PREFERED:
        rasterize_funcs = (rasterize_headless_cmd, rasterize_webdriver)
    elif r_mode == RasterizeMode.HEADLESS_CLI_ONLY:
        rasterize_funcs = (rasterize_headless_cmd,)
    else:  # should never happen as we use an enum
        demisto.error(f'Unknown rasterize mode: {r_mode}')
        raise ValueError(f'Unknown rasterize mode: {r_mode}')
    try:
        for i, r_func in enumerate(rasterize_funcs):  # type: ignore[var-annotated]
            try:
                return r_func(path=path, width=width, height=height, r_type=r_type, wait_time=wait_time,  # type: ignore[misc]
                              offline_mode=offline_mode, max_page_load_time=page_load_time, full_screen=full_screen,
                              include_url=include_url)
            except Exception as ex:
                # Fall through to the next implementation if one remains;
                # otherwise re-raise so the outer handlers classify the error.
                if i < (len(rasterize_funcs) - 1):
                    demisto.info(f'Failed rasterize preferred option trying second option. Exception: {ex}')
                else:
                    demisto.info(f'Failed rasterizing using all available options. Raising last exception: {ex}')
                    raise
    except (InvalidArgumentException, NoSuchElementException) as ex:
        if 'invalid argument' in str(ex):
            err_msg = URL_ERROR_MSG + str(ex)
            return_err_or_warn(err_msg)
        else:
            return_err_or_warn(f'Invalid exception: {ex}\nTrace:{traceback.format_exc()}')
    except (TimeoutException, subprocess.TimeoutExpired) as ex:
        return_err_or_warn(f'Timeout exception with max load time of: {page_load_time} seconds. {ex}')
    except Exception as ex:
        err_str = f'General error: {ex}\nTrace:{traceback.format_exc()}'
        demisto.error(err_str)
        return_err_or_warn(err_str)
def rasterize_webdriver(path: str, width: int, height: int, r_type: RasterizeType = RasterizeType.PNG, wait_time: int = 0,
                        offline_mode: bool = False, max_page_load_time: int = 180, full_screen: bool = False,
                        include_url: bool = False):
    """
    Capturing a snapshot of a path (url/file), using Chrome Driver if include_url is set to False,
    otherwise, it uses a virtual Display to display the screen of the linux machine.
    :param offline_mode: when set to True, will block any outgoing communication
    :param path: file path, or website url
    :param width: desired snapshot width in pixels
    :param height: desired snapshot height in pixels
    :param r_type: result type: .png/.pdf/.json
    :param wait_time: time in seconds to wait before taking a screenshot
    :param max_page_load_time: page load timeout in seconds
    :param full_screen: when set to True, the snapshot will take the whole page
    :param include_url: when set to True, will include the URL bar in the image result
    """
    driver, display = None, None
    try:
        if include_url:
            # non-headless Chrome renders on this virtual display
            display = init_display(width, height)
        driver = init_driver(offline_mode, include_url)
        demisto.debug(f'Navigating to path: {path}. Mode: {"OFFLINE" if offline_mode else "ONLINE"}.'
                      f' page load: {max_page_load_time}')
        driver.set_page_load_timeout(max_page_load_time)
        driver.get(path)
        driver.maximize_window()
        driver.implicitly_wait(5)
        # optional extra delay for dynamic pages before capturing
        if wait_time > 0 or DEFAULT_WAIT_TIME > 0:
            time.sleep(wait_time or DEFAULT_WAIT_TIME)
        check_response(driver)
        demisto.debug('Navigating to path - COMPLETED')
        if r_type == RasterizeType.PDF:
            output = get_pdf(driver, width, height)
        elif r_type == RasterizeType.JSON:
            # JSON mode returns the screenshot plus the page source and final URL
            html = driver.page_source
            url = driver.current_url
            output = {'image_b64': base64.b64encode(get_image(driver, width, height, full_screen, include_url)).decode('utf8'),
                      'html': html, 'current_url': url}
        else:
            output = get_image(driver, width, height, full_screen, include_url)
        return output
    finally:
        # always tear down the driver/display and reap zombie children
        quit_driver_and_display_and_reap_children(driver, display)
def rasterize_headless_cmd(path: str, width: int, height: int, r_type: RasterizeType = RasterizeType.PNG, wait_time: int = 0,
                           offline_mode: bool = False, max_page_load_time: int = 180, full_screen: bool = False,
                           include_url: bool = False):
    """
    Capture a snapshot of a path (url/file) by running the Chrome executable directly
    in headless mode (no Selenium WebDriver involved).

    :param path: file path, or website url
    :param width: desired snapshot width in pixels
    :param height: desired snapshot height in pixels
    :param r_type: result type: png/pdf/json
    :param wait_time: unused in this mode (kept for interface compatibility)
    :param offline_mode: not supported in this mode; raises NotImplementedError when True
    :param max_page_load_time: seconds to wait for the chrome process before timing out
    :param full_screen: ignored in this mode (the explicit width/height are used)
    :param include_url: ignored in this mode (the image never includes the URL bar)
    :return: bytes of the png/pdf result, or a dict (html/current_url) for JSON mode
    """
    if include_url:
        demisto.info('include_url options is ignored in headless cmd mode. Image will not include the url bar.')
    demisto.debug(f'rasterizing headless cmd mode for path: [{path}]')
    if offline_mode:
        # fixed typo in the raised message (was: "offile_mode")
        raise NotImplementedError(f'offline_mode: {offline_mode} is not supported in Headless CLI mode')
    if full_screen:
        demisto.info(f'full_screen param: [{full_screen}] ignored in headless cmd mode.'
                     f' Will use width: : {width} and height: {height}.')
    cmd_options = merge_options(DEFAULT_CHROME_OPTIONS, USER_CHROME_OPTIONS)
    cmd_options.insert(0, CHROME_EXE)
    cmd_options.append('--headless')
    if width > 0 and height > 0:
        cmd_options.append(f'--window-size={width},{height}')
    # not using --timeout as it would return a screenshot even though it is not complete in some cases
    # if max_page_load_time > 0:
    #     cmd_options.append(f'--timeout={max_page_load_time * 1000}')
    output_file = None
    if r_type == RasterizeType.PDF:
        cmd_options.append('--print-to-pdf')
        output_file = Path(tempfile.gettempdir()) / 'output.pdf'
    elif r_type == RasterizeType.JSON:
        cmd_options.append('--dump-dom')
    else:  # screenshot
        cmd_options.append('--screenshot')
        output_file = Path(tempfile.gettempdir()) / 'screenshot.png'
    # run chrome
    try:
        cmd_options.append(path)
        demisto.debug(f'CMD command: {" ".join(cmd_options)}')
        # NOTE(review): subprocess.run(timeout=0) means "expire immediately", not
        # "no timeout" -- callers currently always pass a positive value via
        # rasterize(), so this branch is effectively dead. Confirm intent before changing.
        cmd_timeout = 0 if max_page_load_time <= 0 else max_page_load_time
        res = subprocess.run(cmd_options, cwd=tempfile.gettempdir(), capture_output=True, timeout=cmd_timeout,
                             check=True, text=True)
    except subprocess.TimeoutExpired as te:
        demisto.info(f'chrome headless timeout exception: {te}. Stderr: {te.stderr}')
        raise
    except subprocess.CalledProcessError as ce:
        demisto.error(f'chrome headless called process exception: {ce}. Return code: {ce.returncode}. Stderr: {ce.stderr}')
        raise
    demisto.debug(f'Done rasterizing: [{path}]')
    if is_debug_mode():
        demisto.debug(f'chrome stderr output:{res.stderr}')
    if not output_file:  # json mode
        return {'html': res.stdout, 'current_url': path}
    try:
        with open(output_file, 'rb') as f:
            return f.read()
    finally:
        # best-effort cleanup of the temp output file
        output_file.unlink(missing_ok=True)
def get_image(driver, width: int, height: int, full_screen: bool, include_url=False):
    """
    Uses the Chrome driver to generate an image out of a currently loaded path
    :param driver: the active Chrome driver session
    :param width: desired snapshot width in pixels
    :param height: desired snapshot height in pixels
    :param full_screen: when set to True, the snapshot will take the whole page
        (safeguard limits defined in MAX_FULLSCREEN_W, MAX_FULLSCREEN_H)
    :param include_url: when set to True, will include the URL bar in the image result
    :return: .png file of the loaded path
    """
    demisto.debug('Capturing screenshot')
    # Set windows size to the given width and height:
    driver.set_window_size(width, height)
    if full_screen:
        # Convention: the calculated values are always larger then the given width and height and smaller than the
        # safeguard limits
        # Calculates the width and height using the scrollbar of the window:
        calc_width = driver.execute_script('return document.body.parentNode.scrollWidth')
        calc_height = driver.execute_script('return document.body.parentNode.scrollHeight')
        # Check that the width and height meet the safeguard limit:
        calc_width, calc_height = check_width_and_height(calc_width, calc_height)
        demisto.info(f'Calculated snapshot width is {calc_width}, calculated snapshot height is {calc_height}.')
        # Reset window size
        driver.set_window_size(calc_width, calc_height)
    image = get_image_screenshot(driver=driver, include_url=include_url)
    # NOTE(review): the driver is quit here AND again by the caller's cleanup
    # (quit_driver_and_display_and_reap_children, whose quit is wrapped in its own
    # try/except) -- confirm the double-quit is intentional.
    driver.quit()
    demisto.debug('Capturing screenshot - COMPLETED')
    return image
def get_image_screenshot(driver, include_url):
    """
    Takes a screenshot using linux display if include_url is set to True and using the driver if not.
    Args:
        driver: The driver session.
        include_url: when set to True, will take the screenshot of the linux machine's display using the ImageMagick's import tool
            to include the url bar in the image.
    Returns:
        The raw bytes of the .png image.
    """
    if include_url:
        try:
            # capture the whole X display (DISPLAY=:0, set up by init_display)
            res = subprocess.run('import -window root screenshot.png'.split(' '), text=True, capture_output=True, check=True,
                                 env={'DISPLAY': ':0'})
            demisto.debug(f"Finished taking the screenshot. Stdout: [{res.stdout}] stderr: [{res.stderr}]")
        except subprocess.CalledProcessError as se:
            demisto.error(f'Subprocess exception: {se}. Stderr: [{se.stderr}] stdout: [{se.stdout}]')
            raise
        try:
            with open('screenshot.png', 'rb') as f:
                image = f.read()
        except Exception as e:
            demisto.error(f'Failed to read the screenshot.png image. Exception: {e}')
            raise
        finally:
            # remove the temp screenshot file regardless of read success
            os.remove('screenshot.png')
    else:
        image = driver.get_screenshot_as_png()
    return image
def get_pdf(driver, width: int, height: int):
    """
    Uses the Chrome driver to generate a pdf file out of a currently loaded path
    :param driver: the active Chrome driver session
    :param width: desired snapshot width in pixels
    :param height: desired snapshot height in pixels
    :return: .pdf file of the loaded path
    """
    demisto.debug('Generating PDF')
    driver.set_window_size(width, height)
    # Chrome DevTools Page.printToPDF is reached through Selenium's private
    # command-executor endpoint (relies on internal `_url`/`_request` attributes).
    resource = f'{driver.command_executor._url}/session/{driver.session_id}/chromium/send_command_and_get_result'
    body = json.dumps({'cmd': 'Page.printToPDF', 'params': {'landscape': False}})
    response = driver.command_executor._request('POST', resource, body)
    if response.get('status'):
        # a non-empty status indicates a DevTools-level failure
        demisto.results(response.get('status'))
        return_error(response.get('value'))
    data = base64.b64decode(response.get('value').get('data'))
    demisto.debug('Generating PDF - COMPLETED')
    return data
def convert_pdf_to_jpeg(path: str, max_pages: str, password: str, horizontal: bool = False):
    """
    Converts a PDF file into a jpeg image
    :param path: file's path
    :param max_pages: max pages to render ('*' renders all pages)
    :param password: PDF password
    :param horizontal: if True, will combine the pages horizontally
    :return: A list of streams of combined images
    """
    demisto.debug(f'Loading file at Path: {path}')
    input_pdf = PdfReader(open(path, "rb"), strict=False)
    pages = len(input_pdf.pages) if max_pages == "*" else min(int(max_pages), len(input_pdf.pages))
    with tempfile.TemporaryDirectory() as output_folder:
        demisto.debug('Converting PDF')
        convert_from_path(
            pdf_path=path,
            fmt='jpeg',
            first_page=1,
            last_page=pages,
            output_folder=output_folder,
            userpw=password,
            output_file='converted_pdf_'
        )
        demisto.debug('Converting PDF - COMPLETED')
        demisto.debug('Combining all pages')
        images = []
        for page in sorted(os.listdir(output_folder)):
            if os.path.isfile(os.path.join(output_folder, page)) and 'converted_pdf_' in page:
                images.append(Image.open(os.path.join(output_folder, page)))
        min_shape = min([(np.sum(page_.size), page_.size) for page_ in images])[1]  # get the minimal width
        # Divide the list of images into separate lists with constant length (20),
        # due to the limitation of images in jpeg format (max size ~65,000 pixels).
        # Create a list of lists (length == 20) of images to combine each list (20 images) to one image
        images_matrix = [images[i:i + PAGES_LIMITATION] for i in range(0, len(images), PAGES_LIMITATION)]
        outputs = []
        for images_list in images_matrix:
            # resize every page to the smallest page's size so stacking works
            if horizontal:
                imgs_comb = np.hstack([np.asarray(image.resize(min_shape)) for image in images_list])
            else:
                imgs_comb = np.vstack([np.asarray(image.resize(min_shape)) for image in images_list])
            imgs_comb = Image.fromarray(imgs_comb)
            output = BytesIO()
            imgs_comb.save(output, 'JPEG')  # type: ignore
            demisto.debug('Combining all pages - COMPLETED')
            outputs.append(output.getvalue())
    return outputs
def rasterize_command():
    """Handle the `rasterize` command: snapshot a URL into a PNG/PDF/JSON result."""
    url = demisto.getArg('url')
    w, h, r_mode = get_common_args(demisto.args())
    r_type = RasterizeType(demisto.args().get('type', 'png').lower())
    wait_time = int(demisto.args().get('wait_time', 0))
    page_load = int(demisto.args().get('max_page_load_time', DEFAULT_PAGE_LOAD_TIME))
    file_name = demisto.args().get('file_name', 'url')
    full_screen = argToBoolean(demisto.args().get('full_screen', False))
    include_url = argToBoolean(demisto.args().get('include_url', False))
    w, h = check_width_and_height(w, h)  # Check that the width and height meet the safeguard limit
    if not (url.startswith('http')):
        # default to the http scheme when none was provided
        url = f'http://{url}'
    file_name = f'{file_name}.{"pdf" if r_type == RasterizeType.PDF else "png"}'  # type: ignore
    output = rasterize(path=url, r_type=r_type, width=w, height=h, wait_time=wait_time, max_page_load_time=page_load,
                       full_screen=full_screen, r_mode=r_mode, include_url=include_url)
    if r_type == RasterizeType.JSON:
        return_results(CommandResults(raw_response=output, readable_output="Successfully rasterize url: " + url))
        return
    res = fileResult(filename=file_name, data=output)
    if r_type == RasterizeType.PNG:
        res['Type'] = entryTypes['image']
    demisto.results(res)
def get_common_args(args: dict):
    """
    Get common args shared by the rasterize commands.
    :param args: dict to get args from
    :return: width, height, rasterize mode
    """
    # width/height may come in as e.g. "600px" -- strip the suffix before int()
    w = int(args.get('width', DEFAULT_W).rstrip('px'))
    h = int(args.get('height', DEFAULT_H).rstrip('px'))
    r_mode = RasterizeMode(args.get('mode', DEFAULT_MODE))
    return w, h, r_mode
def rasterize_image_command():
    """Handle `rasterize-image`: render an image file attachment (by entry ID) into a PDF result."""
    args = demisto.args()
    entry_id = args.get('EntryID')
    w, h, r_mode = get_common_args(args)
    file_name = args.get('file_name', entry_id)
    full_screen = argToBoolean(demisto.args().get('full_screen', False))
    w, h = check_width_and_height(w, h)  # Check that the width and height meet the safeguard limit
    file_path = demisto.getFilePath(entry_id).get('path')
    file_name = f'{file_name}.pdf'
    with open(file_path, 'rb') as f:
        output = rasterize(path=f'file://{os.path.realpath(f.name)}', width=w, height=h, r_type=RasterizeType.PDF,
                           full_screen=full_screen, r_mode=r_mode)
        res = fileResult(filename=file_name, data=output, file_type=entryTypes['entryInfoFile'])
        demisto.results(res)
def rasterize_email_command():
    """Handle `rasterize-email`: render a given HTML body into a PNG or PDF result."""
    html_body = demisto.args().get('htmlBody')
    w, h, r_mode = get_common_args(demisto.args())
    offline = demisto.args().get('offline', 'false') == 'true'
    r_type = RasterizeType(demisto.args().get('type', 'png').lower())
    file_name = demisto.args().get('file_name', 'email')
    html_load = int(demisto.args().get('max_page_load_time', DEFAULT_PAGE_LOAD_TIME))
    full_screen = argToBoolean(demisto.args().get('full_screen', False))
    w, h = check_width_and_height(w, h)  # Check that the width and height meet the safeguard limit
    file_name = f'{file_name}.{"pdf" if r_type == RasterizeType.PDF else "png"}'  # type: ignore
    # wrap the body in a white-background html document and rasterize it from disk
    with open('htmlBody.html', 'w', encoding='utf-8-sig') as f:
        f.write(f'<html style="background:white";>{html_body}</html>')
    path = f'file://{os.path.realpath(f.name)}'
    output = rasterize(path=path, r_type=r_type, width=w, height=h, offline_mode=offline,
                       max_page_load_time=html_load, full_screen=full_screen, r_mode=r_mode)
    res = fileResult(filename=file_name, data=output)
    if r_type == RasterizeType.PNG:
        res['Type'] = entryTypes['image']
    demisto.results(res)
def rasterize_pdf_command():
    """Handle `rasterize-pdf`: convert a PDF attachment (by entry ID) into JPEG image results."""
    entry_id = demisto.args().get('EntryID')
    password = demisto.args().get('pdfPassword')
    max_pages = demisto.args().get('maxPages', 30)
    horizontal = demisto.args().get('horizontal', 'false') == 'true'
    file_name = demisto.args().get('file_name', 'image')
    file_path = demisto.getFilePath(entry_id).get('path')
    file_name = f'{file_name}.jpeg'  # type: ignore
    with open(file_path, 'rb') as f:
        images = convert_pdf_to_jpeg(path=os.path.realpath(f.name), max_pages=max_pages, password=password,
                                     horizontal=horizontal)
        results = []
        for image in images:
            res = fileResult(filename=file_name, data=image)
            res['Type'] = entryTypes['image']
            results.append(res)
        demisto.results(results)
def rasterize_html_command():
    """Handle `rasterize-html`: render an HTML file attachment (by entry ID) into a PNG or PDF result.

    Reads the file referenced by the EntryID arg, renames it to file.html and
    rasterizes it from disk, returning a file result entry.
    """
    args = demisto.args()
    entry_id = args.get('EntryID')
    # Parse dimensions to ints (the raw args are strings, possibly "NNNpx") and
    # normalize the type to the RasterizeType enum: the downstream rasterize
    # functions compare against enum members, so passing the raw 'pdf' string
    # would never match RasterizeType.PDF and PDF requests would silently
    # produce PNG data. This matches the sibling commands (e.g. rasterize_command).
    w = int(args.get('width', DEFAULT_W).rstrip('px'))
    h = int(args.get('height', DEFAULT_H).rstrip('px'))
    r_type = RasterizeType(args.get('type', 'png').lower())
    file_name = args.get('file_name', 'email')
    full_screen = argToBoolean(args.get('full_screen', False))
    file_name = f'{file_name}.{"pdf" if r_type == RasterizeType.PDF else "png"}'  # type: ignore
    file_path = demisto.getFilePath(entry_id).get('path')
    os.rename(f'./{file_path}', 'file.html')
    output = rasterize(path=f"file://{os.path.realpath('file.html')}", width=w, height=h, r_type=r_type,
                       full_screen=full_screen)
    res = fileResult(filename=file_name, data=output)
    if r_type == RasterizeType.PNG:
        res['Type'] = entryTypes['image']
    return_results(res)
def module_test():
    """Integration test command: rasterize a small local HTML file end to end."""
    # setting up a mock email file
    with tempfile.NamedTemporaryFile('w+') as test_file:
        test_file.write('<html><head><meta http-equiv=\"Content-Type\" content=\"text/html;charset=utf-8\">'
                        '</head><body><br>---------- TEST FILE ----------<br></body></html>')
        test_file.flush()
        file_path = f'file://{os.path.realpath(test_file.name)}'
        # rasterizing the file
        rasterize(path=file_path, width=250, height=250, r_mode=DEFAULT_MODE)
        demisto.results('ok')
def main():  # pragma: no cover
    """Dispatch the invoked integration command."""
    try:
        with open(DRIVER_LOG, 'w'):
            pass  # truncate the log file
        if demisto.command() == 'test-module':
            module_test()
        elif demisto.command() == 'rasterize-image':
            rasterize_image_command()
        elif demisto.command() == 'rasterize-email':
            rasterize_email_command()
        elif demisto.command() == 'rasterize-pdf':
            rasterize_pdf_command()
        elif demisto.command() == 'rasterize-html':
            rasterize_html_command()
        elif demisto.command() == 'rasterize':
            rasterize_command()
        else:
            return_error('Unrecognized command')
    except Exception as ex:
        return_err_or_warn(f'Unexpected exception: {ex}\nTrace:{traceback.format_exc()}')
    finally:
        # in debug runs, attach the chromedriver log to the debug output
        if is_debug_mode():
            with open(DRIVER_LOG, 'r') as log:
                demisto.debug('Driver log:' + log.read())
if __name__ in ["__builtin__", "builtins", '__main__']:
    main()
| mit | b8a6d924d901ed25767c7509e239279e | 39.12973 | 151 | 0.634261 | 3.597335 | false | false | false | false |
demisto/content | Packs/Remedy_AR/Integrations/RemedyAR/RemedyAR.py | 2 | 3834 | from urllib.parse import quote_plus
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
from CommonServerUserPython import * # noqa
''' IMPORTS '''
import json
import os
import urllib3
import requests
# disable insecure warnings
urllib3.disable_warnings()
# When the integration is configured without a proxy, scrub any proxy settings
# from the container environment so outgoing requests go direct.
# os.environ.pop(..., None) is used instead of `del` so a variable that is not
# defined in the environment does not raise a KeyError at import time.
if not demisto.params()['proxy']:
    for proxy_env_var in ('HTTP_PROXY', 'HTTPS_PROXY', 'http_proxy', 'https_proxy'):
        os.environ.pop(proxy_env_var, None)
''' HELPER FUNCTIONS '''
def http_request(method, url_suffix, data, headers):
    """Issue an HTTP request against the Remedy AR REST API.

    method -- HTTP verb ('GET', 'POST', ...)
    url_suffix -- path appended to BASE_URL
    data -- request body (None is treated as an empty dict)
    headers -- headers dict to send
    Returns the requests.Response; raises on connection failure or on any
    status code other than 200/204.
    """
    data = {} if data is None else data
    LOG.print_log(f'running request with url={BASE_URL}{url_suffix}\tdata={data}\theaders={headers}')
    try:
        res = requests.request(method,
                               BASE_URL + url_suffix,
                               verify=USE_SSL,
                               data=data,
                               headers=headers
                               )
        if res.status_code not in (200, 204):
            raise Exception(f'Your request failed with the following error: {res.reason}')
    except Exception as ex:
        # NOTE(review): re-wrapping as a new Exception drops the original
        # traceback; consider a bare `raise` or `raise ... from ex`.
        raise Exception(ex)
    return res
''' GLOBAL VARS '''
SERVER_URL = demisto.params()['server']
USERNAME = demisto.params()['credentials']['identifier']
PASSWORD = demisto.params()['credentials']['password']
BASE_URL = SERVER_URL + '/api'  # all API endpoints hang off /api
USE_SSL = not demisto.params().get('insecure', False)
DEFAULT_HEADERS = {
    'Content-Type': 'application/json'
}
''' FUNCTIONS '''
def login():
    """Authenticate against the AR JWT endpoint; return the raw response whose body is the token."""
    cmd_url = '/jwt/login'
    data = {
        'username': USERNAME,
        'password': PASSWORD
    }
    login_headers = {
        'Content-Type': 'application/x-www-form-urlencoded'
    }
    result = http_request('POST', cmd_url, data, login_headers)
    return result
def logout():
    """Invalidate the current JWT session."""
    cmd_url = '/jwt/logout'
    http_request('POST', cmd_url, None, DEFAULT_HEADERS)
def get_server_details(qualification, fields):
    """Query AST:ComputerSystem entries matching `qualification` and build a war-room entry.

    qualification -- Remedy AR qualification string (will be URL-encoded)
    fields -- list or comma-separated string of field names to fetch
    Returns a Demisto entry dict with an 'Endpoint' context.
    """
    # Adds fields to filter by
    if isinstance(fields, list):
        fields = ','.join(fields)
    fields = f'fields=values({fields})'
    # URL Encodes qualification
    qualification = quote_plus(qualification)
    cmd_url = f'/arsys/v1/entry/AST:ComputerSystem/?q={qualification}&{fields}'
    result = http_request('GET', cmd_url, None, DEFAULT_HEADERS).json()
    entries = result['entries']
    server_details = []
    for entry in entries:
        # Context Standard: rename Remedy fields to the Endpoint standard keys
        current_entry = entry['values']
        if current_entry['NC_IOPs'] is None:
            current_entry.pop('NC_IOPs', None)
        else:
            current_entry['IPAddress'] = current_entry.pop('NC_IOPs')
        current_entry['Hostname'] = current_entry.pop('Name')
        server_details.append(current_entry)
    context = {
        'Endpoint': server_details
    }
    entry = {
        'Type': entryTypes['note'],
        'Contents': json.dumps(server_details),
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('Remedy AR Server Details', server_details),
        'EntryContext': context
    }
    return entry
''' EXECUTION CODE '''
auth = login()
# Use the decoded text of the response as the JWT token. `auth.content` is
# bytes, and embedding bytes in an f-string produces the repr (e.g. "b'...'"),
# which would corrupt the Authorization header under Python 3.
token = auth.text
DEFAULT_HEADERS['Authorization'] = f'AR-JWT {token}'
LOG('command is %s' % (demisto.command(), ))
try:
    if demisto.command() == 'test-module':
        # Login is made and tests connectivity and credentials
        demisto.results('ok')
    elif demisto.command() == 'remedy-get-server-details':
        qualification = demisto.args().get('qualification', '')
        demisto.results(get_server_details(qualification, demisto.args()['fields']))
except Exception as e:
    LOG(e)
    LOG.print_log()
    raise
finally:
    # always invalidate the JWT session, even on failure
    logout()
| mit | e641892821024593d62229b333f8add8 | 26.985401 | 101 | 0.614502 | 3.826347 | false | false | false | false |
demisto/content | Packs/CommonScripts/Scripts/MaliciousRatioReputation/MaliciousRatioReputation_test.py | 2 | 3878 | import demistomock as demisto
def test_main_malicious_ratio_reputation(mocker):
    """
    Given:
        - The script args.
    When:
        - Running the main with valid indicator.
    Then:
        - Validating after calling the helper functions the results is as expected.
    """
    import MaliciousRatioReputation
    args = {'input': 'value_a', 'threshold': '-2'}
    mocker.patch.object(demisto, 'args', return_value=args)
    # stub the indicator lookup and the result builder so main() only wires them together
    mocker.patch.object(MaliciousRatioReputation, 'get_indicator_from_value',
                        return_value={'value': 'value_a', 'indicator_type': 'IP'})
    res_get_indicator_result = {'Type': 1, 'EntryContext': {'DBotScore': {'Type': 'ip',
                                                                          'Score': 2, 'Vendor': 'DBot-MaliciousRatio',
                                                                          'Indicator': 'value_a'}},
                                'Contents': 2,
                                'ContentsFormat': 'text',
                                'HumanReadable': 'Malicious ratio for value_a is -1.00',
                                'ReadableContentsFormat': 'markdown'}
    mocker.patch.object(MaliciousRatioReputation, 'get_indicator_result',
                        return_value=res_get_indicator_result)
    res_mock = mocker.patch.object(demisto, 'results')
    MaliciousRatioReputation.main()
    # main() should forward the prepared entry to demisto.results exactly once
    assert res_mock.call_count == 1
    assert res_mock.call_args[0][0] == res_get_indicator_result
def test_get_indicator_result(mocker):
    """
    Given:
        - The script args and indicator with mr_score > given threshold.
    When:
        - Running the get_indicator_result function.
    Then:
        - Validating that the function returns entry to the context.
    """
    from MaliciousRatioReputation import get_indicator_result
    args = {'input': '8.8.8.8', 'threshold': '-2'}
    mocker.patch.object(demisto, 'args', return_value=args)
    indicator = {'value': '8.8.8.8', 'indicator_type': 'IP'}
    # maliciousRatio (-1) is above the threshold (-2) -> an entry is produced
    execute_command_res = [{'Contents': [{'maliciousRatio': -1}]}]
    execute_mock = mocker.patch.object(demisto, 'executeCommand', return_value=execute_command_res)
    entry = get_indicator_result(indicator)
    assert execute_mock.call_count == 1
    assert len(entry['EntryContext']) > 0
def test_get_indicator_result_with_smaller_mr_score(mocker):
    """
    Given:
        - The script args and indicator with mr_score < given threshold.
    When:
        - Running the get_indicator_result function.
    Then:
        - Validating that the function doesn't return entry.
    """
    from MaliciousRatioReputation import get_indicator_result
    mocker.patch.object(demisto, 'args', return_value={'input': '8.8.8.8', 'threshold': '0.3'})
    indicator = {'value': '8.8.8.8', 'indicator_type': 'IP'}
    # maliciousRatio (-1) is below the threshold (0.3) -> no entry is produced
    execute_command_res = [{'Contents': [{'maliciousRatio': -1}]}]
    execute_mock = mocker.patch.object(demisto, 'executeCommand', return_value=execute_command_res)
    assert get_indicator_result(indicator) is None
    assert execute_mock.call_count == 1
def test_get_indicator_from_value(mocker):
    """
    Given:
        - The function args.
    When:
        - Running the get_indicator_from_value function.
    Then:
        - Validating that the return value after calling to "findIndicators" command is as expected.
    """
    from MaliciousRatioReputation import get_indicator_from_value
    execute_command_res = [{'Contents': [{'id': 'a', 'investigationIDs': ['1', '2', '10'], 'value': 'value_a',
                                          'indicator_type': 'File'}], 'Type': 'note'}]
    execute_mock = mocker.patch.object(demisto, 'executeCommand', return_value=execute_command_res)
    indicator = get_indicator_from_value('value_a')
    # the first (and only) content item of the findIndicators result is returned
    assert execute_mock.call_count == 1
    assert indicator == execute_command_res[0]['Contents'][0]
| mit | f73ce8c8f1a8606196f66f2aeda4fefd | 43.574713 | 118 | 0.60624 | 3.843409 | false | false | false | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.