repository_name stringclasses 316 values | func_path_in_repository stringlengths 6 223 | func_name stringlengths 1 134 | language stringclasses 1 value | func_code_string stringlengths 57 65.5k | func_documentation_string stringlengths 1 46.3k | split_name stringclasses 1 value | func_code_url stringlengths 91 315 | called_functions listlengths 1 156 ⌀ | enclosing_scope stringlengths 2 1.48M |
|---|---|---|---|---|---|---|---|---|---|
tony-landis/datomic-py | datomic/datomic.py | Query.all | python | def all(self):
" execute query, get all list of lists"
query,inputs = self._toedn()
return self.db.q(query,
inputs = inputs,
limit = self._limit,
offset = self._offset,
history = self._history) | execute query, get all list of lists | train | https://github.com/tony-landis/datomic-py/blob/54f713d29ad85ba86d53d5115c9b312ff14b7846/datomic/datomic.py#L320-L327 | [
"def _toedn(self):\n \"\"\" prepare the query for the rest api\n \"\"\"\n finds = u\"\"\n inputs = u\"\"\n wheres = u\"\"\n args = []\n \": in and args\"\n for a,b in self._input:\n inputs += \" {0}\".format(a)\n args.append(dump_edn_val(b))\n if inputs:\n inputs = u\":in ${0}\".format(inputs)\n \" :where \"\n for where in self._where:\n if isinstance(where, (str,unicode)): \n wheres += u\"[{0}]\".format(where)\n elif isinstance(where, (list)):\n wheres += u\" \".join([u\"[{0}]\".format(w) for w in where])\n \" find: \"\n if self._find == []: #find all\n fs = set()\n for p in wheres.replace('[',' ').replace(']',' ').split(' '):\n if p.startswith('?'):\n fs.add(p)\n self._find = list(fs)\n finds = \" \".join(self._find)\n \" all togethr now...\"\n q = u\"\"\"[ :find {0} {1} :where {2} ]\"\"\".\\\n format( finds, inputs, wheres)\n return q,args\n"
] | class Query(object):
""" chainable query builder"
>>> db.find('?e ?a') # default find
>>> q.where() # with add
>>> q.ins() # in add
"""
def __init__(self, find, db=None, schema=None):
self.db = db
self.schema = schema
self._find = []
self._where = []
self._input = []
self._limit = None
self._offset = None
self._history = False
self.find(find)
def __repr__(self):
return " ".join([str(self._find), str(self._in), str(self._where)])
def find(self, *args, **kwargs):
" :find "
if args[0] is all:
pass # finds all
else:
[(self._find.append(x)) for x in args]
return self
def where(self, *args, **kwargs):
" :where "
[(self._where.append(x)) for x in args]
return self
def fulltext(self, attr, s, q, e, v):
self._where.append("(fulltext $ {0} {1}) [[{2} {3}]]".format(attr, s, e, v))
self._input.append((s, q))
def param(self, *args, **kwargs):
" :in "
for first, second in pairwise(args):
if isinstance(second, list):
if not isinstance(second[0], list):
" add a logical _or_ "
self._input.append((
u"[{0} ...]".format(first), second))
else:
" relations, list of list"
self._input.append((
u"[[{0}]]".format(first), second))
elif isinstance(second, tuple):
" tuple "
self._input.append((
u"[{0}]".format(first), list(second)))
else:
" nothing special "
self._input.append((first,second))
return self
def limit(self, limit):
self._limit = limit
return self
def offset(self, offset):
self._offset = offset
return self
def history(self, history):
self._offset = history
return self
def hashone(self):
"execute query, get back"
rs = self.one()
if not rs:
return {}
else:
finds = " ".join(self._find).split(' ')
return dict(zip((x.replace('?','') for x in finds), rs))
def one(self):
"execute query, get a single list"
self.limit(1)
rs = self.all()
if not rs:
return None
else:
return rs[0]
def _toedn(self):
""" prepare the query for the rest api
"""
finds = u""
inputs = u""
wheres = u""
args = []
": in and args"
for a,b in self._input:
inputs += " {0}".format(a)
args.append(dump_edn_val(b))
if inputs:
inputs = u":in ${0}".format(inputs)
" :where "
for where in self._where:
if isinstance(where, (str,unicode)):
wheres += u"[{0}]".format(where)
elif isinstance(where, (list)):
wheres += u" ".join([u"[{0}]".format(w) for w in where])
" find: "
if self._find == []: #find all
fs = set()
for p in wheres.replace('[',' ').replace(']',' ').split(' '):
if p.startswith('?'):
fs.add(p)
self._find = list(fs)
finds = " ".join(self._find)
" all togethr now..."
q = u"""[ :find {0} {1} :where {2} ]""".\
format( finds, inputs, wheres)
return q,args
|
tony-landis/datomic-py | datomic/datomic.py | Query._toedn | python | def _toedn(self):
finds = u""
inputs = u""
wheres = u""
args = []
": in and args"
for a,b in self._input:
inputs += " {0}".format(a)
args.append(dump_edn_val(b))
if inputs:
inputs = u":in ${0}".format(inputs)
" :where "
for where in self._where:
if isinstance(where, (str,unicode)):
wheres += u"[{0}]".format(where)
elif isinstance(where, (list)):
wheres += u" ".join([u"[{0}]".format(w) for w in where])
" find: "
if self._find == []: #find all
fs = set()
for p in wheres.replace('[',' ').replace(']',' ').split(' '):
if p.startswith('?'):
fs.add(p)
self._find = list(fs)
finds = " ".join(self._find)
" all togethr now..."
q = u"""[ :find {0} {1} :where {2} ]""".\
format( finds, inputs, wheres)
return q,args | prepare the query for the rest api | train | https://github.com/tony-landis/datomic-py/blob/54f713d29ad85ba86d53d5115c9b312ff14b7846/datomic/datomic.py#L329-L359 | [
"def dump_edn_val(v):\n \" edn simple value dump\"\n if isinstance(v, (str, unicode)): \n return json.dumps(v)\n elif isinstance(v, E): \n return unicode(v)\n else: \n return dumps(v)\n"
] | class Query(object):
""" chainable query builder"
>>> db.find('?e ?a') # default find
>>> q.where() # with add
>>> q.ins() # in add
"""
def __init__(self, find, db=None, schema=None):
self.db = db
self.schema = schema
self._find = []
self._where = []
self._input = []
self._limit = None
self._offset = None
self._history = False
self.find(find)
def __repr__(self):
return " ".join([str(self._find), str(self._in), str(self._where)])
def find(self, *args, **kwargs):
" :find "
if args[0] is all:
pass # finds all
else:
[(self._find.append(x)) for x in args]
return self
def where(self, *args, **kwargs):
" :where "
[(self._where.append(x)) for x in args]
return self
def fulltext(self, attr, s, q, e, v):
self._where.append("(fulltext $ {0} {1}) [[{2} {3}]]".format(attr, s, e, v))
self._input.append((s, q))
def param(self, *args, **kwargs):
" :in "
for first, second in pairwise(args):
if isinstance(second, list):
if not isinstance(second[0], list):
" add a logical _or_ "
self._input.append((
u"[{0} ...]".format(first), second))
else:
" relations, list of list"
self._input.append((
u"[[{0}]]".format(first), second))
elif isinstance(second, tuple):
" tuple "
self._input.append((
u"[{0}]".format(first), list(second)))
else:
" nothing special "
self._input.append((first,second))
return self
def limit(self, limit):
self._limit = limit
return self
def offset(self, offset):
self._offset = offset
return self
def history(self, history):
self._offset = history
return self
def hashone(self):
"execute query, get back"
rs = self.one()
if not rs:
return {}
else:
finds = " ".join(self._find).split(' ')
return dict(zip((x.replace('?','') for x in finds), rs))
def one(self):
"execute query, get a single list"
self.limit(1)
rs = self.all()
if not rs:
return None
else:
return rs[0]
def all(self):
" execute query, get all list of lists"
query,inputs = self._toedn()
return self.db.q(query,
inputs = inputs,
limit = self._limit,
offset = self._offset,
history = self._history)
|
tony-landis/datomic-py | datomic/datomic.py | TX.add | python | def add(self, *args, **kwargs):
assert self.resp is None, "Transaction already committed"
entity, av_pairs, args = None, [], list(args)
if len(args):
if isinstance(args[0], (int, long)):
" first arg is an entity or tempid"
entity = E(args[0], tx=self)
elif isinstance(args[0], E):
" dont resuse entity from another tx"
if args[0]._tx is self:
entity = args[0]
else:
if int(args[0]) > 0:
" use the entity id on a new obj"
entity = E(int(args[0]), tx=self)
args[0] = None
" drop the first arg"
if entity is not None or args[0] in (None, False, 0):
v = args.pop(0)
" auto generate a temp id?"
if entity is None:
entity = E(self.ctmpid, tx=self)
self.ctmpid -= 1
" a,v from kwargs"
if len(args) == 0 and kwargs:
for a,v in kwargs.iteritems():
self.addeav(entity, a, v)
" a,v from args "
if len(args):
assert len(args) % 2 == 0, "imbalanced a,v in args: " % args
for first, second in pairwise(args):
if not first.startswith(':'):
first = ':' + first
if not first.endswith('/'):
" longhand used: blah/blah "
if isinstance(second, list):
for v in second:
self.addeav(entity, first, v)
else:
self.addeav(entity, first, second)
continue
elif isinstance(second, dict):
" shorthand used: blah/, dict "
for a,v in second.iteritems():
self.addeav(entity, "%s%s" % (first, a), v)
continue
elif isinstance(second, (list, tuple)):
" shorthand used: blah/, list|tuple "
for a,v in pairwise(second):
self.addeav(entity, "%s%s" % (first, a), v)
continue
else:
raise Exception, "invalid pair: %s : %s" % (first,second)
"pass back the entity so it can be resolved after tx()"
return entity | Accumulate datums for the transaction
Start a transaction on an existing db connection
>>> tx = TX(db)
Get get an entity object with a tempid
>>> ref = add()
>>> ref = add(0)
>>> ref = add(None)
>>> ref = add(False)
Entity id passed as first argument (int|long)
>>> tx.add(1, 'thing/name', 'value')
Shorthand form for multiple attributes sharing a root namespace
>>> tx.add(':thing/', {'name':'value', 'tag':'value'})
Attributes with a value of None are ignored
>>> tx.add(':thing/ignored', None)
Add multiple datums for an attribute with carinality:many
>>> tx.add(':thing/color', ['red','white','blue']) | train | https://github.com/tony-landis/datomic-py/blob/54f713d29ad85ba86d53d5115c9b312ff14b7846/datomic/datomic.py#L515-L594 | [
"def pairwise(iterable):\n \"s -> (s0,s1), (s2,s3), (s4, s5), ...\"\n a = iter(iterable)\n return izip(a, a)\n",
"def addeav(self, e, a, v):\n if v is None: return\n self.adds.append((e, a, v))\n if int(e) < 0 and e not in self.tmpents:\n self.tmpents.append(e)\n elif int(e) > 0 and e not in self.realents:\n self.realents.append(e)\n"
] | class TX(object):
""" Accumulate, execute, and resolve tempids
"""
def __init__(self, db):
self.db = db
self.tmpents, self.adds, self.ctmpid, self.txid = [], [], -1, -1
self.resp = None
self.realents = []
def __repr__(self):
return "<datomic tx, %i pending>" % len(self)
def __len__(self):
return len(self.adds or [])
def __int__(self):
return self.txid
def execute(self, **kwargs):
""" commit the current statements from add()
"""
assert self.resp is None, "Transaction already committed"
try:
self.resp = self.db.tx(list(self.edn_iter), **kwargs)
except Exception:
self.resp = False
raise
else:
self.resolve()
self.adds = None
self.tmpents = None
return self.resp # raw dict response
def resolve(self):
""" Resolve one or more tempids.
Automatically takes place after transaction is executed.
"""
assert isinstance(self.resp, dict), "Transaction in uncommitted or failed state"
rids = [(v) for k,v in self.resp['tempids'].items()]
self.txid = self.resp['tx-data'][0]['tx']
rids.reverse()
for t in self.tmpents:
pos = self.tmpents.index(t)
t._eid, t._txid = rids[pos], self.txid
for t in self.realents:
t._txid = self.txid
def addeav(self, e, a, v):
if v is None: return
self.adds.append((e, a, v))
if int(e) < 0 and e not in self.tmpents:
self.tmpents.append(e)
elif int(e) > 0 and e not in self.realents:
self.realents.append(e)
@property
def edn_iter(self):
""" yields edns
"""
for e,a,v in self.adds:
yield u"{%(a)s %(v)s :db/id #db/id[:db.part/user %(e)s ]}" % \
dict(a=a, v=dump_edn_val(v), e=int(e))
|
tony-landis/datomic-py | datomic/datomic.py | TX.execute | python | def execute(self, **kwargs):
assert self.resp is None, "Transaction already committed"
try:
self.resp = self.db.tx(list(self.edn_iter), **kwargs)
except Exception:
self.resp = False
raise
else:
self.resolve()
self.adds = None
self.tmpents = None
return self.resp | commit the current statements from add() | train | https://github.com/tony-landis/datomic-py/blob/54f713d29ad85ba86d53d5115c9b312ff14b7846/datomic/datomic.py#L596-L609 | [
"def resolve(self):\n \"\"\" Resolve one or more tempids. \n Automatically takes place after transaction is executed.\n \"\"\"\n assert isinstance(self.resp, dict), \"Transaction in uncommitted or failed state\"\n rids = [(v) for k,v in self.resp['tempids'].items()]\n self.txid = self.resp['tx-data'][0]['tx']\n rids.reverse()\n for t in self.tmpents:\n pos = self.tmpents.index(t)\n t._eid, t._txid = rids[pos], self.txid\n for t in self.realents:\n t._txid = self.txid\n"
] | class TX(object):
""" Accumulate, execute, and resolve tempids
"""
def __init__(self, db):
self.db = db
self.tmpents, self.adds, self.ctmpid, self.txid = [], [], -1, -1
self.resp = None
self.realents = []
def __repr__(self):
return "<datomic tx, %i pending>" % len(self)
def __len__(self):
return len(self.adds or [])
def __int__(self):
return self.txid
def add(self, *args, **kwargs):
""" Accumulate datums for the transaction
Start a transaction on an existing db connection
>>> tx = TX(db)
Get get an entity object with a tempid
>>> ref = add()
>>> ref = add(0)
>>> ref = add(None)
>>> ref = add(False)
Entity id passed as first argument (int|long)
>>> tx.add(1, 'thing/name', 'value')
Shorthand form for multiple attributes sharing a root namespace
>>> tx.add(':thing/', {'name':'value', 'tag':'value'})
Attributes with a value of None are ignored
>>> tx.add(':thing/ignored', None)
Add multiple datums for an attribute with carinality:many
>>> tx.add(':thing/color', ['red','white','blue'])
"""
assert self.resp is None, "Transaction already committed"
entity, av_pairs, args = None, [], list(args)
if len(args):
if isinstance(args[0], (int, long)):
" first arg is an entity or tempid"
entity = E(args[0], tx=self)
elif isinstance(args[0], E):
" dont resuse entity from another tx"
if args[0]._tx is self:
entity = args[0]
else:
if int(args[0]) > 0:
" use the entity id on a new obj"
entity = E(int(args[0]), tx=self)
args[0] = None
" drop the first arg"
if entity is not None or args[0] in (None, False, 0):
v = args.pop(0)
" auto generate a temp id?"
if entity is None:
entity = E(self.ctmpid, tx=self)
self.ctmpid -= 1
" a,v from kwargs"
if len(args) == 0 and kwargs:
for a,v in kwargs.iteritems():
self.addeav(entity, a, v)
" a,v from args "
if len(args):
assert len(args) % 2 == 0, "imbalanced a,v in args: " % args
for first, second in pairwise(args):
if not first.startswith(':'):
first = ':' + first
if not first.endswith('/'):
" longhand used: blah/blah "
if isinstance(second, list):
for v in second:
self.addeav(entity, first, v)
else:
self.addeav(entity, first, second)
continue
elif isinstance(second, dict):
" shorthand used: blah/, dict "
for a,v in second.iteritems():
self.addeav(entity, "%s%s" % (first, a), v)
continue
elif isinstance(second, (list, tuple)):
" shorthand used: blah/, list|tuple "
for a,v in pairwise(second):
self.addeav(entity, "%s%s" % (first, a), v)
continue
else:
raise Exception, "invalid pair: %s : %s" % (first,second)
"pass back the entity so it can be resolved after tx()"
return entity
# raw dict response
def resolve(self):
""" Resolve one or more tempids.
Automatically takes place after transaction is executed.
"""
assert isinstance(self.resp, dict), "Transaction in uncommitted or failed state"
rids = [(v) for k,v in self.resp['tempids'].items()]
self.txid = self.resp['tx-data'][0]['tx']
rids.reverse()
for t in self.tmpents:
pos = self.tmpents.index(t)
t._eid, t._txid = rids[pos], self.txid
for t in self.realents:
t._txid = self.txid
def addeav(self, e, a, v):
if v is None: return
self.adds.append((e, a, v))
if int(e) < 0 and e not in self.tmpents:
self.tmpents.append(e)
elif int(e) > 0 and e not in self.realents:
self.realents.append(e)
@property
def edn_iter(self):
""" yields edns
"""
for e,a,v in self.adds:
yield u"{%(a)s %(v)s :db/id #db/id[:db.part/user %(e)s ]}" % \
dict(a=a, v=dump_edn_val(v), e=int(e))
|
tony-landis/datomic-py | datomic/datomic.py | TX.resolve | python | def resolve(self):
assert isinstance(self.resp, dict), "Transaction in uncommitted or failed state"
rids = [(v) for k,v in self.resp['tempids'].items()]
self.txid = self.resp['tx-data'][0]['tx']
rids.reverse()
for t in self.tmpents:
pos = self.tmpents.index(t)
t._eid, t._txid = rids[pos], self.txid
for t in self.realents:
t._txid = self.txid | Resolve one or more tempids.
Automatically takes place after transaction is executed. | train | https://github.com/tony-landis/datomic-py/blob/54f713d29ad85ba86d53d5115c9b312ff14b7846/datomic/datomic.py#L611-L623 | null | class TX(object):
""" Accumulate, execute, and resolve tempids
"""
def __init__(self, db):
self.db = db
self.tmpents, self.adds, self.ctmpid, self.txid = [], [], -1, -1
self.resp = None
self.realents = []
def __repr__(self):
return "<datomic tx, %i pending>" % len(self)
def __len__(self):
return len(self.adds or [])
def __int__(self):
return self.txid
def add(self, *args, **kwargs):
""" Accumulate datums for the transaction
Start a transaction on an existing db connection
>>> tx = TX(db)
Get get an entity object with a tempid
>>> ref = add()
>>> ref = add(0)
>>> ref = add(None)
>>> ref = add(False)
Entity id passed as first argument (int|long)
>>> tx.add(1, 'thing/name', 'value')
Shorthand form for multiple attributes sharing a root namespace
>>> tx.add(':thing/', {'name':'value', 'tag':'value'})
Attributes with a value of None are ignored
>>> tx.add(':thing/ignored', None)
Add multiple datums for an attribute with carinality:many
>>> tx.add(':thing/color', ['red','white','blue'])
"""
assert self.resp is None, "Transaction already committed"
entity, av_pairs, args = None, [], list(args)
if len(args):
if isinstance(args[0], (int, long)):
" first arg is an entity or tempid"
entity = E(args[0], tx=self)
elif isinstance(args[0], E):
" dont resuse entity from another tx"
if args[0]._tx is self:
entity = args[0]
else:
if int(args[0]) > 0:
" use the entity id on a new obj"
entity = E(int(args[0]), tx=self)
args[0] = None
" drop the first arg"
if entity is not None or args[0] in (None, False, 0):
v = args.pop(0)
" auto generate a temp id?"
if entity is None:
entity = E(self.ctmpid, tx=self)
self.ctmpid -= 1
" a,v from kwargs"
if len(args) == 0 and kwargs:
for a,v in kwargs.iteritems():
self.addeav(entity, a, v)
" a,v from args "
if len(args):
assert len(args) % 2 == 0, "imbalanced a,v in args: " % args
for first, second in pairwise(args):
if not first.startswith(':'):
first = ':' + first
if not first.endswith('/'):
" longhand used: blah/blah "
if isinstance(second, list):
for v in second:
self.addeav(entity, first, v)
else:
self.addeav(entity, first, second)
continue
elif isinstance(second, dict):
" shorthand used: blah/, dict "
for a,v in second.iteritems():
self.addeav(entity, "%s%s" % (first, a), v)
continue
elif isinstance(second, (list, tuple)):
" shorthand used: blah/, list|tuple "
for a,v in pairwise(second):
self.addeav(entity, "%s%s" % (first, a), v)
continue
else:
raise Exception, "invalid pair: %s : %s" % (first,second)
"pass back the entity so it can be resolved after tx()"
return entity
def execute(self, **kwargs):
""" commit the current statements from add()
"""
assert self.resp is None, "Transaction already committed"
try:
self.resp = self.db.tx(list(self.edn_iter), **kwargs)
except Exception:
self.resp = False
raise
else:
self.resolve()
self.adds = None
self.tmpents = None
return self.resp # raw dict response
def addeav(self, e, a, v):
if v is None: return
self.adds.append((e, a, v))
if int(e) < 0 and e not in self.tmpents:
self.tmpents.append(e)
elif int(e) > 0 and e not in self.realents:
self.realents.append(e)
@property
def edn_iter(self):
""" yields edns
"""
for e,a,v in self.adds:
yield u"{%(a)s %(v)s :db/id #db/id[:db.part/user %(e)s ]}" % \
dict(a=a, v=dump_edn_val(v), e=int(e))
|
tony-landis/datomic-py | datomic/datomic.py | TX.edn_iter | python | def edn_iter(self):
for e,a,v in self.adds:
yield u"{%(a)s %(v)s :db/id #db/id[:db.part/user %(e)s ]}" % \
dict(a=a, v=dump_edn_val(v), e=int(e)) | yields edns | train | https://github.com/tony-landis/datomic-py/blob/54f713d29ad85ba86d53d5115c9b312ff14b7846/datomic/datomic.py#L634-L639 | [
"def dump_edn_val(v):\n \" edn simple value dump\"\n if isinstance(v, (str, unicode)): \n return json.dumps(v)\n elif isinstance(v, E): \n return unicode(v)\n else: \n return dumps(v)\n"
] | class TX(object):
""" Accumulate, execute, and resolve tempids
"""
def __init__(self, db):
self.db = db
self.tmpents, self.adds, self.ctmpid, self.txid = [], [], -1, -1
self.resp = None
self.realents = []
def __repr__(self):
return "<datomic tx, %i pending>" % len(self)
def __len__(self):
return len(self.adds or [])
def __int__(self):
return self.txid
def add(self, *args, **kwargs):
""" Accumulate datums for the transaction
Start a transaction on an existing db connection
>>> tx = TX(db)
Get get an entity object with a tempid
>>> ref = add()
>>> ref = add(0)
>>> ref = add(None)
>>> ref = add(False)
Entity id passed as first argument (int|long)
>>> tx.add(1, 'thing/name', 'value')
Shorthand form for multiple attributes sharing a root namespace
>>> tx.add(':thing/', {'name':'value', 'tag':'value'})
Attributes with a value of None are ignored
>>> tx.add(':thing/ignored', None)
Add multiple datums for an attribute with carinality:many
>>> tx.add(':thing/color', ['red','white','blue'])
"""
assert self.resp is None, "Transaction already committed"
entity, av_pairs, args = None, [], list(args)
if len(args):
if isinstance(args[0], (int, long)):
" first arg is an entity or tempid"
entity = E(args[0], tx=self)
elif isinstance(args[0], E):
" dont resuse entity from another tx"
if args[0]._tx is self:
entity = args[0]
else:
if int(args[0]) > 0:
" use the entity id on a new obj"
entity = E(int(args[0]), tx=self)
args[0] = None
" drop the first arg"
if entity is not None or args[0] in (None, False, 0):
v = args.pop(0)
" auto generate a temp id?"
if entity is None:
entity = E(self.ctmpid, tx=self)
self.ctmpid -= 1
" a,v from kwargs"
if len(args) == 0 and kwargs:
for a,v in kwargs.iteritems():
self.addeav(entity, a, v)
" a,v from args "
if len(args):
assert len(args) % 2 == 0, "imbalanced a,v in args: " % args
for first, second in pairwise(args):
if not first.startswith(':'):
first = ':' + first
if not first.endswith('/'):
" longhand used: blah/blah "
if isinstance(second, list):
for v in second:
self.addeav(entity, first, v)
else:
self.addeav(entity, first, second)
continue
elif isinstance(second, dict):
" shorthand used: blah/, dict "
for a,v in second.iteritems():
self.addeav(entity, "%s%s" % (first, a), v)
continue
elif isinstance(second, (list, tuple)):
" shorthand used: blah/, list|tuple "
for a,v in pairwise(second):
self.addeav(entity, "%s%s" % (first, a), v)
continue
else:
raise Exception, "invalid pair: %s : %s" % (first,second)
"pass back the entity so it can be resolved after tx()"
return entity
def execute(self, **kwargs):
""" commit the current statements from add()
"""
assert self.resp is None, "Transaction already committed"
try:
self.resp = self.db.tx(list(self.edn_iter), **kwargs)
except Exception:
self.resp = False
raise
else:
self.resolve()
self.adds = None
self.tmpents = None
return self.resp # raw dict response
def resolve(self):
""" Resolve one or more tempids.
Automatically takes place after transaction is executed.
"""
assert isinstance(self.resp, dict), "Transaction in uncommitted or failed state"
rids = [(v) for k,v in self.resp['tempids'].items()]
self.txid = self.resp['tx-data'][0]['tx']
rids.reverse()
for t in self.tmpents:
pos = self.tmpents.index(t)
t._eid, t._txid = rids[pos], self.txid
for t in self.realents:
t._txid = self.txid
def addeav(self, e, a, v):
if v is None: return
self.adds.append((e, a, v))
if int(e) < 0 and e not in self.tmpents:
self.tmpents.append(e)
elif int(e) > 0 and e not in self.realents:
self.realents.append(e)
@property
|
YuriyGuts/pygoose | pygoose/kg/jobs.py | split_into_batches | python | def split_into_batches(input_list, batch_size, batch_storage_dir, checkpoint=False):
if checkpoint and not os.path.exists(batch_storage_dir):
os.mkdir(batch_storage_dir)
batches = [
{
'index': batch_index,
'data': input_list[start_index:start_index + batch_size],
'input_filename': os.path.join(batch_storage_dir, 'batch-{:05d}-input.pickle'.format(batch_index)),
'result_filename': os.path.join(batch_storage_dir, 'batch-{:05d}-output.pickle'.format(batch_index)),
}
for batch_index, start_index in enumerate(range(0, len(input_list), batch_size))
]
if checkpoint:
for batch in batches:
save(batch['data'], batch['input_filename'])
return batches | Break the input data into smaller batches, optionally saving each one to disk.
Args:
input_list: An input object that has a list-like interface (indexing and slicing).
batch_size: The maximum number of input items in each batch.
batch_storage_dir: The directory to save the checkpoints to.
checkpoint: Whether to save each batch to a file.
Returns:
A list of batch objects with the following structure:
{'index', 'data', 'input_filename', 'result_filename'} | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/jobs.py#L16-L48 | [
"def save(obj, filename, protocol=4):\n \"\"\"\n Serialize an object to disk using pickle protocol.\n\n Args:\n obj: The object to serialize.\n filename: Path to the output file.\n protocol: Version of the pickle protocol.\n \"\"\"\n\n with open(filename, 'wb') as f:\n pickle.dump(obj, f, protocol=protocol)\n"
] | import os
import shutil
import sys
import uuid
from joblib import delayed, Parallel
from tqdm import tqdm as progressbar
from .io import save
def _create_job_id():
return str(uuid.uuid4())
def map_embarrassingly_parallel(input_list, mapper, project, n_jobs=-1, batch_size=-1,
checkpoint=False, cleanup=True, **kwargs):
"""
Process items in a list in parallel (optionally, one smaller batch at a time).
Args:
input_list: An input object that has a list-like interface (indexing and slicing).
mapper: A function to apply to each item of the input list.
project: An instance of pygoose project.
n_jobs: The number of parallel processing jobs. -1 will use the number of CPUs on the system.
batch_size: The maximum number of input items in each batch. -1 will store all data as a single batch.
checkpoint: Whether to save each batch and its corresponding output to disk.
cleanup: Whether to remove the batch checkpoints from the disk after all batches are processed.
**kwargs: Additional keyword arguments to joblib.Parallel.
Returns:
A list representing the combined output from the mapper function called on all input items.
"""
if batch_size < 0:
batch_size = len(input_list)
# Partition the data.
job_id = _create_job_id()
print('Creating job ID:', job_id)
batch_storage_dir = os.path.join(project.temp_dir, job_id)
batches = split_into_batches(input_list, batch_size, batch_storage_dir, checkpoint)
# The results will be collected here.
# TODO: collecting lists like this may be memory inefficient. Perhaps we could use another callback function.
combined_results = []
# Process data one batch at a time.
for batch in batches:
description = 'Batch {}/{}'.format(batch['index'] + 1, len(batches))
# Process each item in the batch in parallel.
batch_result = Parallel(n_jobs=n_jobs, **kwargs)(
delayed(mapper)(input_item)
for input_item in progressbar(
batch['data'],
desc=description,
total=len(batch['data']),
file=sys.stdout,
)
)
if checkpoint:
save(batch_result, batch['result_filename'])
combined_results.extend(batch_result)
# Remove the temporary files.
if checkpoint and cleanup:
shutil.rmtree(batch_storage_dir)
return combined_results
def map_batch_parallel(input_list, batch_size, item_mapper=None, batch_mapper=None, flatten=True, n_jobs=-1, **kwargs):
"""
Split the data into batches and process each batch in its own thread.
Args:
input_list: An input object that has a list-like interface (indexing and slicing).
item_mapper: (optional) A function to apply to each item in the batch.
batch_mapper: (optional) A function to apply to each batch. Either item_mapper or batch_mapper must be set.
flatten: Whether to unwrap individual batch results or keep them grouped by batch.
n_jobs: The number of parallel processing jobs. -1 will use the number of CPUs on the system.
batch_size: The maximum number of input items in each batch. -1 will store all data as a single batch.
**kwargs: Additional keyword arguments to joblib.Parallel.
Returns:
A list representing the combined output from the mapper function called on all input items of each batch.
"""
# We must specify either how to process each batch or how to process each item.
if item_mapper is None and batch_mapper is None:
raise ValueError('You should specify either batch_mapper or item_mapper.')
if batch_mapper is None:
batch_mapper = _default_batch_mapper
batches = split_into_batches(input_list, batch_size, batch_storage_dir='')
all_batch_results = Parallel(n_jobs=n_jobs, **kwargs)(
delayed(batch_mapper)(batch['data'], item_mapper)
for batch in progressbar(
batches,
desc='Batches',
total=len(batches),
file=sys.stdout,
)
)
# Unwrap the individual batch results if necessary.
if flatten:
final_result = []
for batch_result in all_batch_results:
final_result.extend(batch_result)
else:
final_result = all_batch_results
return final_result
def _default_batch_mapper(batch, item_mapper):
return [item_mapper(item) for item in batch]
|
YuriyGuts/pygoose | pygoose/kg/jobs.py | map_embarrassingly_parallel | python | def map_embarrassingly_parallel(input_list, mapper, project, n_jobs=-1, batch_size=-1,
checkpoint=False, cleanup=True, **kwargs):
if batch_size < 0:
batch_size = len(input_list)
# Partition the data.
job_id = _create_job_id()
print('Creating job ID:', job_id)
batch_storage_dir = os.path.join(project.temp_dir, job_id)
batches = split_into_batches(input_list, batch_size, batch_storage_dir, checkpoint)
# The results will be collected here.
# TODO: collecting lists like this may be memory inefficient. Perhaps we could use another callback function.
combined_results = []
# Process data one batch at a time.
for batch in batches:
description = 'Batch {}/{}'.format(batch['index'] + 1, len(batches))
# Process each item in the batch in parallel.
batch_result = Parallel(n_jobs=n_jobs, **kwargs)(
delayed(mapper)(input_item)
for input_item in progressbar(
batch['data'],
desc=description,
total=len(batch['data']),
file=sys.stdout,
)
)
if checkpoint:
save(batch_result, batch['result_filename'])
combined_results.extend(batch_result)
# Remove the temporary files.
if checkpoint and cleanup:
shutil.rmtree(batch_storage_dir)
return combined_results | Process items in a list in parallel (optionally, one smaller batch at a time).
Args:
input_list: An input object that has a list-like interface (indexing and slicing).
mapper: A function to apply to each item of the input list.
project: An instance of pygoose project.
n_jobs: The number of parallel processing jobs. -1 will use the number of CPUs on the system.
batch_size: The maximum number of input items in each batch. -1 will store all data as a single batch.
checkpoint: Whether to save each batch and its corresponding output to disk.
cleanup: Whether to remove the batch checkpoints from the disk after all batches are processed.
**kwargs: Additional keyword arguments to joblib.Parallel.
Returns:
A list representing the combined output from the mapper function called on all input items. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/jobs.py#L51-L107 | [
"def save(obj, filename, protocol=4):\n \"\"\"\n Serialize an object to disk using pickle protocol.\n\n Args:\n obj: The object to serialize.\n filename: Path to the output file.\n protocol: Version of the pickle protocol.\n \"\"\"\n\n with open(filename, 'wb') as f:\n pickle.dump(obj, f, protocol=protocol)\n",
"def _create_job_id():\n return str(uuid.uuid4())\n",
"def split_into_batches(input_list, batch_size, batch_storage_dir, checkpoint=False):\n \"\"\"\n Break the input data into smaller batches, optionally saving each one to disk.\n\n Args:\n input_list: An input object that has a list-like interface (indexing and slicing).\n batch_size: The maximum number of input items in each batch.\n batch_storage_dir: The directory to save the checkpoints to.\n checkpoint: Whether to save each batch to a file.\n\n Returns:\n A list of batch objects with the following structure:\n {'index', 'data', 'input_filename', 'result_filename'}\n \"\"\"\n\n if checkpoint and not os.path.exists(batch_storage_dir):\n os.mkdir(batch_storage_dir)\n\n batches = [\n {\n 'index': batch_index,\n 'data': input_list[start_index:start_index + batch_size],\n 'input_filename': os.path.join(batch_storage_dir, 'batch-{:05d}-input.pickle'.format(batch_index)),\n 'result_filename': os.path.join(batch_storage_dir, 'batch-{:05d}-output.pickle'.format(batch_index)),\n }\n for batch_index, start_index in enumerate(range(0, len(input_list), batch_size))\n ]\n\n if checkpoint:\n for batch in batches:\n save(batch['data'], batch['input_filename'])\n\n return batches\n"
] | import os
import shutil
import sys
import uuid
from joblib import delayed, Parallel
from tqdm import tqdm as progressbar
from .io import save
def _create_job_id():
return str(uuid.uuid4())
def split_into_batches(input_list, batch_size, batch_storage_dir, checkpoint=False):
    """
    Break the input data into smaller batches, optionally saving each one to disk.

    Args:
        input_list: An input object that has a list-like interface (indexing and slicing).
        batch_size: The maximum number of input items in each batch.
        batch_storage_dir: The directory to save the checkpoints to.
        checkpoint: Whether to save each batch to a file.

    Returns:
        A list of batch objects with the following structure:
        {'index', 'data', 'input_filename', 'result_filename'}
    """

    if checkpoint and not os.path.exists(batch_storage_dir):
        # Bug fix: os.mkdir fails when parent directories are missing and races
        # with concurrent workers creating the same directory. makedirs with
        # exist_ok=True handles both cases.
        os.makedirs(batch_storage_dir, exist_ok=True)

    batches = [
        {
            'index': batch_index,
            'data': input_list[start_index:start_index + batch_size],
            'input_filename': os.path.join(batch_storage_dir, 'batch-{:05d}-input.pickle'.format(batch_index)),
            'result_filename': os.path.join(batch_storage_dir, 'batch-{:05d}-output.pickle'.format(batch_index)),
        }
        for batch_index, start_index in enumerate(range(0, len(input_list), batch_size))
    ]

    if checkpoint:
        for batch in batches:
            save(batch['data'], batch['input_filename'])

    return batches
def map_batch_parallel(input_list, batch_size, item_mapper=None, batch_mapper=None, flatten=True, n_jobs=-1, **kwargs):
    """
    Split the data into batches and process each batch in its own thread.

    Args:
        input_list: An input object that has a list-like interface (indexing and slicing).
        batch_size: The maximum number of input items in each batch. -1 will store all data as a single batch.
        item_mapper: (optional) A function to apply to each item in the batch.
        batch_mapper: (optional) A function to apply to each batch. Either item_mapper or batch_mapper must be set.
        flatten: Whether to unwrap individual batch results or keep them grouped by batch.
        n_jobs: The number of parallel processing jobs. -1 will use the number of CPUs on the system.
        **kwargs: Additional keyword arguments to joblib.Parallel.

    Returns:
        A list representing the combined output from the mapper function called on all input items of each batch.

    Raises:
        ValueError: If neither item_mapper nor batch_mapper is specified.
    """

    # We must specify either how to process each batch or how to process each item.
    if item_mapper is None and batch_mapper is None:
        raise ValueError('You should specify either batch_mapper or item_mapper.')
    if batch_mapper is None:
        batch_mapper = _default_batch_mapper

    # Bug fix: the docstring promises that batch_size=-1 stores everything in a
    # single batch, but a negative range() step silently produced zero batches
    # (and therefore an empty result). Normalize the value here; max(..., 1)
    # also keeps range() valid for empty input.
    if batch_size < 0:
        batch_size = max(len(input_list), 1)

    batches = split_into_batches(input_list, batch_size, batch_storage_dir='')
    all_batch_results = Parallel(n_jobs=n_jobs, **kwargs)(
        delayed(batch_mapper)(batch['data'], item_mapper)
        for batch in progressbar(
            batches,
            desc='Batches',
            total=len(batches),
            file=sys.stdout,
        )
    )

    # Unwrap the individual batch results if necessary.
    if flatten:
        final_result = []
        for batch_result in all_batch_results:
            final_result.extend(batch_result)
    else:
        final_result = all_batch_results
    return final_result
def _default_batch_mapper(batch, item_mapper):
return [item_mapper(item) for item in batch]
|
YuriyGuts/pygoose | pygoose/kg/jobs.py | map_batch_parallel | python | def map_batch_parallel(input_list, batch_size, item_mapper=None, batch_mapper=None, flatten=True, n_jobs=-1, **kwargs):
# We must specify either how to process each batch or how to process each item.
if item_mapper is None and batch_mapper is None:
raise ValueError('You should specify either batch_mapper or item_mapper.')
if batch_mapper is None:
batch_mapper = _default_batch_mapper
batches = split_into_batches(input_list, batch_size, batch_storage_dir='')
all_batch_results = Parallel(n_jobs=n_jobs, **kwargs)(
delayed(batch_mapper)(batch['data'], item_mapper)
for batch in progressbar(
batches,
desc='Batches',
total=len(batches),
file=sys.stdout,
)
)
# Unwrap the individual batch results if necessary.
if flatten:
final_result = []
for batch_result in all_batch_results:
final_result.extend(batch_result)
else:
final_result = all_batch_results
return final_result | Split the data into batches and process each batch in its own thread.
Args:
input_list: An input object that has a list-like interface (indexing and slicing).
item_mapper: (optional) A function to apply to each item in the batch.
batch_mapper: (optional) A function to apply to each batch. Either item_mapper or batch_mapper must be set.
flatten: Whether to unwrap individual batch results or keep them grouped by batch.
n_jobs: The number of parallel processing jobs. -1 will use the number of CPUs on the system.
batch_size: The maximum number of input items in each batch. -1 will store all data as a single batch.
**kwargs: Additional keyword arguments to joblib.Parallel.
Returns:
A list representing the combined output from the mapper function called on all input items of each batch. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/jobs.py#L110-L153 | [
"def split_into_batches(input_list, batch_size, batch_storage_dir, checkpoint=False):\n \"\"\"\n Break the input data into smaller batches, optionally saving each one to disk.\n\n Args:\n input_list: An input object that has a list-like interface (indexing and slicing).\n batch_size: The maximum number of input items in each batch.\n batch_storage_dir: The directory to save the checkpoints to.\n checkpoint: Whether to save each batch to a file.\n\n Returns:\n A list of batch objects with the following structure:\n {'index', 'data', 'input_filename', 'result_filename'}\n \"\"\"\n\n if checkpoint and not os.path.exists(batch_storage_dir):\n os.mkdir(batch_storage_dir)\n\n batches = [\n {\n 'index': batch_index,\n 'data': input_list[start_index:start_index + batch_size],\n 'input_filename': os.path.join(batch_storage_dir, 'batch-{:05d}-input.pickle'.format(batch_index)),\n 'result_filename': os.path.join(batch_storage_dir, 'batch-{:05d}-output.pickle'.format(batch_index)),\n }\n for batch_index, start_index in enumerate(range(0, len(input_list), batch_size))\n ]\n\n if checkpoint:\n for batch in batches:\n save(batch['data'], batch['input_filename'])\n\n return batches\n"
] | import os
import shutil
import sys
import uuid
from joblib import delayed, Parallel
from tqdm import tqdm as progressbar
from .io import save
def _create_job_id():
return str(uuid.uuid4())
def split_into_batches(input_list, batch_size, batch_storage_dir, checkpoint=False):
    """
    Partition *input_list* into consecutive batches of at most *batch_size* items.

    Args:
        input_list: Any object supporting len() and slicing.
        batch_size: Maximum number of items per batch.
        batch_storage_dir: Directory that will hold checkpoint files.
        checkpoint: When True, pickle every batch to its input file.

    Returns:
        A list of dicts with keys 'index', 'data', 'input_filename', 'result_filename'.
    """

    if checkpoint and not os.path.exists(batch_storage_dir):
        os.mkdir(batch_storage_dir)

    batches = []
    for batch_index, start_index in enumerate(range(0, len(input_list), batch_size)):
        batches.append({
            'index': batch_index,
            'data': input_list[start_index:start_index + batch_size],
            'input_filename': os.path.join(batch_storage_dir, 'batch-{:05d}-input.pickle'.format(batch_index)),
            'result_filename': os.path.join(batch_storage_dir, 'batch-{:05d}-output.pickle'.format(batch_index)),
        })

    if checkpoint:
        for batch in batches:
            save(batch['data'], batch['input_filename'])

    return batches
def map_embarrassingly_parallel(input_list, mapper, project, n_jobs=-1, batch_size=-1,
                                checkpoint=False, cleanup=True, **kwargs):
    """
    Process items in a list in parallel (optionally, one smaller batch at a time).

    Args:
        input_list: An input object that has a list-like interface (indexing and slicing).
        mapper: A function to apply to each item of the input list.
        project: An instance of pygoose project.
        n_jobs: The number of parallel processing jobs. -1 will use the number of CPUs on the system.
        batch_size: The maximum number of input items in each batch. -1 will store all data as a single batch.
        checkpoint: Whether to save each batch and its corresponding output to disk.
        cleanup: Whether to remove the batch checkpoints from the disk after all batches are processed.
        **kwargs: Additional keyword arguments to joblib.Parallel.

    Returns:
        A list representing the combined output from the mapper function called on all input items.
    """

    # A negative batch size means "process everything as one batch".
    if batch_size < 0:
        batch_size = len(input_list)

    # Partition the data. Checkpoints (if any) live in a per-job temp directory
    # named after a freshly generated job id.
    job_id = _create_job_id()
    print('Creating job ID:', job_id)

    batch_storage_dir = os.path.join(project.temp_dir, job_id)
    batches = split_into_batches(input_list, batch_size, batch_storage_dir, checkpoint)

    # The results will be collected here.
    # TODO: collecting lists like this may be memory inefficient. Perhaps we could use another callback function.
    combined_results = []

    # Process data one batch at a time.
    for batch in batches:
        description = 'Batch {}/{}'.format(batch['index'] + 1, len(batches))

        # Process each item in the batch in parallel.
        batch_result = Parallel(n_jobs=n_jobs, **kwargs)(
            delayed(mapper)(input_item)
            for input_item in progressbar(
                batch['data'],
                desc=description,
                total=len(batch['data']),
                file=sys.stdout,
            )
        )

        # Persist this batch's output so a crashed run can be inspected or resumed.
        if checkpoint:
            save(batch_result, batch['result_filename'])

        combined_results.extend(batch_result)

    # Remove the temporary files.
    if checkpoint and cleanup:
        shutil.rmtree(batch_storage_dir)

    return combined_results
def _default_batch_mapper(batch, item_mapper):
return [item_mapper(item) for item in batch]
|
YuriyGuts/pygoose | pygoose/kg/eda.py | plot_real_feature | python | def plot_real_feature(df, feature_name, bins=50, figsize=(15, 15)):
ix_negative_target = df[df.target == 0].index
ix_positive_target = df[df.target == 1].index
plt.figure(figsize=figsize)
ax_overall_dist = plt.subplot2grid((3, 2), (0, 0), colspan=2)
ax_target_conditional_dist = plt.subplot2grid((3, 2), (1, 0), colspan=2)
ax_botplot = plt.subplot2grid((3, 2), (2, 0))
ax_violin_plot = plt.subplot2grid((3, 2), (2, 1))
ax_overall_dist.set_title('Distribution of {}'.format(feature_name), fontsize=16)
sns.distplot(
df[feature_name],
bins=50,
ax=ax_overall_dist
)
sns.distplot(
df.loc[ix_positive_target][feature_name],
bins=bins,
ax=ax_target_conditional_dist,
label='Positive Target'
)
sns.distplot(
df.loc[ix_negative_target][feature_name],
bins=bins,
ax=ax_target_conditional_dist,
label='Negative Target'
)
ax_target_conditional_dist.legend(loc='upper right', prop={'size': 14})
sns.boxplot(
y=feature_name,
x='target',
data=df,
ax=ax_botplot
)
sns.violinplot(
y=feature_name,
x='target',
data=df,
ax=ax_violin_plot
)
plt.show() | Plot the distribution of a real-valued feature conditioned by the target.
Examples:
`plot_real_feature(X, 'emb_mean_euclidean')`
Args:
df: Pandas dataframe containing the target column (named 'target').
feature_name: The name of the feature to plot.
bins: The number of histogram bins for the distribution plot.
figsize: The size of the plotted figure. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/eda.py#L6-L65 | null | import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
def plot_pair(df, feature_name_1, feature_name_2, kind='scatter', alpha=0.01, **kwargs):
    """
    Plot a scatterplot of two features against one another,
    and calculate Pearson correlation coefficient.

    Examples:
        `plot_pair(X, 'emb_mean_euclidean', 'emb_mean_cosine')`

    Args:
        df: Pandas dataframe containing both feature columns.
        feature_name_1: The name of the first feature.
        feature_name_2: The name of the second feature.
        kind: One of the values { 'scatter' | 'reg' | 'resid' | 'kde' | 'hex' }.
        alpha: Alpha channel value.
        **kwargs: Additional argument to 'sns.jointplot'.
    """

    plt.figure()
    # NOTE(review): positional sns.jointplot(x, y, data) relies on the older
    # seaborn signature; newer versions require keyword arguments — confirm the
    # pinned seaborn version before upgrading.
    sns.jointplot(
        feature_name_1,
        feature_name_2,
        df,
        alpha=alpha,
        kind=kind,
        **kwargs
    )
    plt.show()
def plot_feature_correlation_heatmap(df, features, font_size=9, figsize=(15, 15), save_filename=None):
    """
    Plot a correlation heatmap between every feature pair.

    Args:
        df: Pandas dataframe containing the target column (named 'target').
        features: The list of features to include in the correlation plot.
        font_size: Font size for heatmap cells and axis labels.
        figsize: The size of the plot.
        save_filename: (Optional) The path of the file to save a high-res version of the plot to.
    """

    # Work on a copy so the caller's feature list is not mutated.
    features = features[:]
    features += ['target']

    mcorr = df[features].corr()

    # Mask the upper triangle (the correlation matrix is symmetric).
    # Bug fix: np.bool was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin bool is the documented replacement.
    mask = np.zeros_like(mcorr, dtype=bool)
    mask[np.triu_indices_from(mask)] = True

    cmap = sns.diverging_palette(220, 10, as_cmap=True)

    fig = plt.figure(figsize=figsize)
    heatmap = sns.heatmap(
        mcorr,
        mask=mask,
        cmap=cmap,
        square=True,
        annot=True,
        fmt='0.2f',
        annot_kws={'size': font_size},
    )
    heatmap.tick_params(axis='both', which='major', labelsize=font_size)
    heatmap.tick_params(axis='both', which='minor', labelsize=font_size)
    heatmap.set_xticklabels(features, rotation=90)
    heatmap.set_yticklabels(reversed(features))
    plt.show()

    if save_filename is not None:
        fig.savefig(save_filename, dpi=300)
def scatterplot_matrix(df, features, downsample_frac=None, figsize=(15, 15)):
    """
    Plot a scatterplot matrix for a list of features, colored by target value.

    Example: `scatterplot_matrix(X, X.columns.tolist(), downsample_frac=0.01)`

    Args:
        df: Pandas dataframe containing the target column (named 'target').
        features: The list of features to include in the correlation plot.
        downsample_frac: Dataframe downsampling rate (0.1 to include 10% of the dataset).
        figsize: The size of the plot.
    """

    # Pair plots draw O(len(features)^2) panels; downsampling keeps them tractable.
    sample = df.sample(frac=downsample_frac) if downsample_frac else df

    plt.figure(figsize=figsize)
    sns.pairplot(sample[features], hue='target')
    plt.show()
|
YuriyGuts/pygoose | pygoose/kg/eda.py | plot_pair | python | def plot_pair(df, feature_name_1, feature_name_2, kind='scatter', alpha=0.01, **kwargs):
plt.figure()
sns.jointplot(
feature_name_1,
feature_name_2,
df,
alpha=alpha,
kind=kind,
**kwargs
)
plt.show() | Plot a scatterplot of two features against one another,
and calculate Pearson correlation coefficient.
Examples:
`plot_pair(X, 'emb_mean_euclidean', 'emb_mean_cosine')`
Args:
df:
feature_name_1: The name of the first feature.
feature_name_2: The name of the second feature.
kind: One of the values { 'scatter' | 'reg' | 'resid' | 'kde' | 'hex' }.
alpha: Alpha channel value.
**kwargs: Additional argument to 'sns.jointplot'. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/eda.py#L68-L94 | null | import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
def plot_real_feature(df, feature_name, bins=50, figsize=(15, 15)):
    """
    Plot the distribution of a real-valued feature conditioned by the target.

    Examples:
        `plot_real_feature(X, 'emb_mean_euclidean')`

    Args:
        df: Pandas dataframe containing the target column (named 'target').
        feature_name: The name of the feature to plot.
        bins: The number of histogram bins for the distribution plots.
        figsize: The size of the plotted figure.
    """

    ix_negative_target = df[df.target == 0].index
    ix_positive_target = df[df.target == 1].index

    plt.figure(figsize=figsize)
    ax_overall_dist = plt.subplot2grid((3, 2), (0, 0), colspan=2)
    ax_target_conditional_dist = plt.subplot2grid((3, 2), (1, 0), colspan=2)
    ax_boxplot = plt.subplot2grid((3, 2), (2, 0))
    ax_violin_plot = plt.subplot2grid((3, 2), (2, 1))

    ax_overall_dist.set_title('Distribution of {}'.format(feature_name), fontsize=16)

    # Bug fix: the overall distribution previously hard-coded bins=50,
    # silently ignoring the caller-supplied `bins` argument.
    sns.distplot(
        df[feature_name],
        bins=bins,
        ax=ax_overall_dist
    )

    sns.distplot(
        df.loc[ix_positive_target][feature_name],
        bins=bins,
        ax=ax_target_conditional_dist,
        label='Positive Target'
    )
    sns.distplot(
        df.loc[ix_negative_target][feature_name],
        bins=bins,
        ax=ax_target_conditional_dist,
        label='Negative Target'
    )
    ax_target_conditional_dist.legend(loc='upper right', prop={'size': 14})

    sns.boxplot(
        y=feature_name,
        x='target',
        data=df,
        ax=ax_boxplot
    )
    sns.violinplot(
        y=feature_name,
        x='target',
        data=df,
        ax=ax_violin_plot
    )
    plt.show()
def plot_feature_correlation_heatmap(df, features, font_size=9, figsize=(15, 15), save_filename=None):
    """
    Plot a correlation heatmap between every feature pair.

    Args:
        df: Pandas dataframe containing the target column (named 'target').
        features: The list of features to include in the correlation plot.
        font_size: Font size for heatmap cells and axis labels.
        figsize: The size of the plot.
        save_filename: (Optional) The path of the file to save a high-res version of the plot to.
    """

    # Work on a copy so the caller's feature list is not mutated.
    features = features[:]
    features += ['target']

    mcorr = df[features].corr()

    # Mask the upper triangle (the correlation matrix is symmetric).
    # Bug fix: np.bool was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin bool is the documented replacement.
    mask = np.zeros_like(mcorr, dtype=bool)
    mask[np.triu_indices_from(mask)] = True

    cmap = sns.diverging_palette(220, 10, as_cmap=True)

    fig = plt.figure(figsize=figsize)
    heatmap = sns.heatmap(
        mcorr,
        mask=mask,
        cmap=cmap,
        square=True,
        annot=True,
        fmt='0.2f',
        annot_kws={'size': font_size},
    )
    heatmap.tick_params(axis='both', which='major', labelsize=font_size)
    heatmap.tick_params(axis='both', which='minor', labelsize=font_size)
    heatmap.set_xticklabels(features, rotation=90)
    heatmap.set_yticklabels(reversed(features))
    plt.show()

    if save_filename is not None:
        fig.savefig(save_filename, dpi=300)
def scatterplot_matrix(df, features, downsample_frac=None, figsize=(15, 15)):
    """
    Plot a scatterplot matrix for a list of features, colored by target value.

    Example: `scatterplot_matrix(X, X.columns.tolist(), downsample_frac=0.01)`

    Args:
        df: Pandas dataframe containing the target column (named 'target').
        features: The list of features to include in the correlation plot.
        downsample_frac: Dataframe downsampling rate (0.1 to include 10% of the dataset).
        figsize: The size of the plot.
    """

    # Pair plots draw O(len(features)^2) panels; downsampling keeps them tractable.
    if downsample_frac:
        df = df.sample(frac=downsample_frac)

    plt.figure(figsize=figsize)
    # NOTE(review): 'target' must be among the selected columns for hue to work —
    # presumably `features` is expected to include it; confirm against callers.
    sns.pairplot(df[features], hue='target')
    plt.show()
|
YuriyGuts/pygoose | pygoose/kg/eda.py | plot_feature_correlation_heatmap | python | def plot_feature_correlation_heatmap(df, features, font_size=9, figsize=(15, 15), save_filename=None):
features = features[:]
features += ['target']
mcorr = df[features].corr()
mask = np.zeros_like(mcorr, dtype=np.bool)
mask[np.triu_indices_from(mask)] = True
cmap = sns.diverging_palette(220, 10, as_cmap=True)
fig = plt.figure(figsize=figsize)
heatmap = sns.heatmap(
mcorr,
mask=mask,
cmap=cmap,
square=True,
annot=True,
fmt='0.2f',
annot_kws={'size': font_size},
)
heatmap.tick_params(axis='both', which='major', labelsize=font_size)
heatmap.tick_params(axis='both', which='minor', labelsize=font_size)
heatmap.set_xticklabels(features, rotation=90)
heatmap.set_yticklabels(reversed(features))
plt.show()
if save_filename is not None:
fig.savefig(save_filename, dpi=300) | Plot a correlation heatmap between every feature pair.
Args:
df: Pandas dataframe containing the target column (named 'target').
features: The list of features to include in the correlation plot.
font_size: Font size for heatmap cells and axis labels.
figsize: The size of the plot.
save_filename: (Optional) The path of the file to save a high-res version of the plot to. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/eda.py#L97-L138 | null | import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
def plot_real_feature(df, feature_name, bins=50, figsize=(15, 15)):
    """
    Plot the distribution of a real-valued feature conditioned by the target.

    Examples:
        `plot_real_feature(X, 'emb_mean_euclidean')`

    Args:
        df: Pandas dataframe containing the target column (named 'target').
        feature_name: The name of the feature to plot.
        bins: The number of histogram bins for the distribution plots.
        figsize: The size of the plotted figure.
    """

    ix_negative_target = df[df.target == 0].index
    ix_positive_target = df[df.target == 1].index

    plt.figure(figsize=figsize)
    ax_overall_dist = plt.subplot2grid((3, 2), (0, 0), colspan=2)
    ax_target_conditional_dist = plt.subplot2grid((3, 2), (1, 0), colspan=2)
    ax_boxplot = plt.subplot2grid((3, 2), (2, 0))
    ax_violin_plot = plt.subplot2grid((3, 2), (2, 1))

    ax_overall_dist.set_title('Distribution of {}'.format(feature_name), fontsize=16)

    # Bug fix: the overall distribution previously hard-coded bins=50,
    # silently ignoring the caller-supplied `bins` argument.
    sns.distplot(
        df[feature_name],
        bins=bins,
        ax=ax_overall_dist
    )

    sns.distplot(
        df.loc[ix_positive_target][feature_name],
        bins=bins,
        ax=ax_target_conditional_dist,
        label='Positive Target'
    )
    sns.distplot(
        df.loc[ix_negative_target][feature_name],
        bins=bins,
        ax=ax_target_conditional_dist,
        label='Negative Target'
    )
    ax_target_conditional_dist.legend(loc='upper right', prop={'size': 14})

    sns.boxplot(
        y=feature_name,
        x='target',
        data=df,
        ax=ax_boxplot
    )
    sns.violinplot(
        y=feature_name,
        x='target',
        data=df,
        ax=ax_violin_plot
    )
    plt.show()
def plot_pair(df, feature_name_1, feature_name_2, kind='scatter', alpha=0.01, **kwargs):
    """
    Draw a joint plot of two features against each other (scatter by default),
    which also reports the Pearson correlation coefficient.

    Examples:
        `plot_pair(X, 'emb_mean_euclidean', 'emb_mean_cosine')`

    Args:
        df: Pandas dataframe holding both feature columns.
        feature_name_1: The name of the first feature.
        feature_name_2: The name of the second feature.
        kind: One of the values { 'scatter' | 'reg' | 'resid' | 'kde' | 'hex' }.
        alpha: Alpha channel value.
        **kwargs: Additional argument to 'sns.jointplot'.
    """

    plt.figure()
    sns.jointplot(feature_name_1, feature_name_2, df, alpha=alpha, kind=kind, **kwargs)
    plt.show()
def scatterplot_matrix(df, features, downsample_frac=None, figsize=(15, 15)):
    """
    Plot a scatterplot matrix for a list of features, colored by target value.

    Example: `scatterplot_matrix(X, X.columns.tolist(), downsample_frac=0.01)`

    Args:
        df: Pandas dataframe containing the target column (named 'target').
        features: The list of features to include in the correlation plot.
        downsample_frac: Dataframe downsampling rate (0.1 to include 10% of the dataset).
        figsize: The size of the plot.
    """

    # Pair plots draw O(len(features)^2) panels; downsampling keeps them tractable.
    if downsample_frac:
        df = df.sample(frac=downsample_frac)

    plt.figure(figsize=figsize)
    # NOTE(review): 'target' must be among the selected columns for hue to work —
    # presumably `features` is expected to include it; confirm against callers.
    sns.pairplot(df[features], hue='target')
    plt.show()
|
YuriyGuts/pygoose | pygoose/kg/eda.py | scatterplot_matrix | python | def scatterplot_matrix(df, features, downsample_frac=None, figsize=(15, 15)):
if downsample_frac:
df = df.sample(frac=downsample_frac)
plt.figure(figsize=figsize)
sns.pairplot(df[features], hue='target')
plt.show() | Plot a scatterplot matrix for a list of features, colored by target value.
Example: `scatterplot_matrix(X, X.columns.tolist(), downsample_frac=0.01)`
Args:
df: Pandas dataframe containing the target column (named 'target').
features: The list of features to include in the correlation plot.
downsample_frac: Dataframe downsampling rate (0.1 to include 10% of the dataset).
figsize: The size of the plot. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/eda.py#L141-L159 | null | import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
def plot_real_feature(df, feature_name, bins=50, figsize=(15, 15)):
    """
    Plot the distribution of a real-valued feature conditioned by the target.

    Examples:
        `plot_real_feature(X, 'emb_mean_euclidean')`

    Args:
        df: Pandas dataframe containing the target column (named 'target').
        feature_name: The name of the feature to plot.
        bins: The number of histogram bins for the distribution plots.
        figsize: The size of the plotted figure.
    """

    ix_negative_target = df[df.target == 0].index
    ix_positive_target = df[df.target == 1].index

    plt.figure(figsize=figsize)
    ax_overall_dist = plt.subplot2grid((3, 2), (0, 0), colspan=2)
    ax_target_conditional_dist = plt.subplot2grid((3, 2), (1, 0), colspan=2)
    ax_boxplot = plt.subplot2grid((3, 2), (2, 0))
    ax_violin_plot = plt.subplot2grid((3, 2), (2, 1))

    ax_overall_dist.set_title('Distribution of {}'.format(feature_name), fontsize=16)

    # Bug fix: the overall distribution previously hard-coded bins=50,
    # silently ignoring the caller-supplied `bins` argument.
    sns.distplot(
        df[feature_name],
        bins=bins,
        ax=ax_overall_dist
    )

    sns.distplot(
        df.loc[ix_positive_target][feature_name],
        bins=bins,
        ax=ax_target_conditional_dist,
        label='Positive Target'
    )
    sns.distplot(
        df.loc[ix_negative_target][feature_name],
        bins=bins,
        ax=ax_target_conditional_dist,
        label='Negative Target'
    )
    ax_target_conditional_dist.legend(loc='upper right', prop={'size': 14})

    sns.boxplot(
        y=feature_name,
        x='target',
        data=df,
        ax=ax_boxplot
    )
    sns.violinplot(
        y=feature_name,
        x='target',
        data=df,
        ax=ax_violin_plot
    )
    plt.show()
def plot_pair(df, feature_name_1, feature_name_2, kind='scatter', alpha=0.01, **kwargs):
    """
    Plot a scatterplot of two features against one another,
    and calculate Pearson correlation coefficient.

    Examples:
        `plot_pair(X, 'emb_mean_euclidean', 'emb_mean_cosine')`

    Args:
        df: Pandas dataframe containing both feature columns.
        feature_name_1: The name of the first feature.
        feature_name_2: The name of the second feature.
        kind: One of the values { 'scatter' | 'reg' | 'resid' | 'kde' | 'hex' }.
        alpha: Alpha channel value.
        **kwargs: Additional argument to 'sns.jointplot'.
    """

    plt.figure()
    # NOTE(review): positional sns.jointplot(x, y, data) relies on the older
    # seaborn signature; newer versions require keyword arguments — confirm the
    # pinned seaborn version before upgrading.
    sns.jointplot(
        feature_name_1,
        feature_name_2,
        df,
        alpha=alpha,
        kind=kind,
        **kwargs
    )
    plt.show()
def plot_feature_correlation_heatmap(df, features, font_size=9, figsize=(15, 15), save_filename=None):
    """
    Plot a correlation heatmap between every feature pair.

    Args:
        df: Pandas dataframe containing the target column (named 'target').
        features: The list of features to include in the correlation plot.
        font_size: Font size for heatmap cells and axis labels.
        figsize: The size of the plot.
        save_filename: (Optional) The path of the file to save a high-res version of the plot to.
    """

    # Work on a copy so the caller's feature list is not mutated.
    features = features[:]
    features += ['target']

    mcorr = df[features].corr()

    # Mask the upper triangle (the correlation matrix is symmetric).
    # Bug fix: np.bool was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin bool is the documented replacement.
    mask = np.zeros_like(mcorr, dtype=bool)
    mask[np.triu_indices_from(mask)] = True

    cmap = sns.diverging_palette(220, 10, as_cmap=True)

    fig = plt.figure(figsize=figsize)
    heatmap = sns.heatmap(
        mcorr,
        mask=mask,
        cmap=cmap,
        square=True,
        annot=True,
        fmt='0.2f',
        annot_kws={'size': font_size},
    )
    heatmap.tick_params(axis='both', which='major', labelsize=font_size)
    heatmap.tick_params(axis='both', which='minor', labelsize=font_size)
    heatmap.set_xticklabels(features, rotation=90)
    heatmap.set_yticklabels(reversed(features))
    plt.show()

    if save_filename is not None:
        fig.savefig(save_filename, dpi=300)
|
YuriyGuts/pygoose | pygoose/kg/gpu.py | _cuda_get_gpu_spec_string | python | def _cuda_get_gpu_spec_string(gpu_ids=None):
if gpu_ids is None:
return ''
if isinstance(gpu_ids, list):
return ','.join(str(gpu_id) for gpu_id in gpu_ids)
if isinstance(gpu_ids, int):
return str(gpu_ids)
return gpu_ids | Build a GPU id string to be used for CUDA_VISIBLE_DEVICES. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/gpu.py#L4-L18 | null | import os
def cuda_use_gpus(gpu_ids):
    """
    Restrict visible GPU devices only to the specified device IDs.
    The order of the IDs is determined by PCI bus.

    Examples:
        `cuda_use_gpus(0)` - use only the first GPU.
        `cuda_use_gpus(1)` - use only the second GPU.
        `cuda_use_gpus([0, 1, 2])` - use the first three GPUs.

    Args:
        gpu_ids: The list of GPU ids to make visible to the current application.
    """

    # Enumerate devices in PCI-bus order so the numeric ids are stable,
    # then expose only the requested subset to CUDA.
    visible_devices = _cuda_get_gpu_spec_string(gpu_ids)
    os.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'
    os.environ['CUDA_VISIBLE_DEVICES'] = visible_devices
def cuda_disable_gpus():
    """Hide all GPUs from CUDA: no device will be visible to the application."""
    cuda_use_gpus(gpu_ids=None)
|
YuriyGuts/pygoose | pygoose/kg/project.py | Project.load_feature_lists | python | def load_feature_lists(self, feature_lists):
column_names = []
feature_ranges = []
running_feature_count = 0
for list_id in feature_lists:
feature_list_names = load_lines(self.features_dir + 'X_train_{}.names'.format(list_id))
column_names.extend(feature_list_names)
start_index = running_feature_count
end_index = running_feature_count + len(feature_list_names) - 1
running_feature_count += len(feature_list_names)
feature_ranges.append([list_id, start_index, end_index])
X_train = np.hstack([
load(self.features_dir + 'X_train_{}.pickle'.format(list_id))
for list_id in feature_lists
])
X_test = np.hstack([
load(self.features_dir + 'X_test_{}.pickle'.format(list_id))
for list_id in feature_lists
])
df_train = pd.DataFrame(X_train, columns=column_names)
df_test = pd.DataFrame(X_test, columns=column_names)
return df_train, df_test, feature_ranges | Load pickled features for train and test sets, assuming they are saved
in the `features` folder along with their column names.
Args:
feature_lists: A list containing the names of the feature lists to load.
Returns:
A tuple containing 3 items: train dataframe, test dataframe,
and a list describing the index ranges for the feature lists. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/project.py#L89-L126 | [
"def load_lines(filename):\n \"\"\"\n Load a text file as an array of lines.\n\n Args:\n filename: Path to the input file.\n\n Returns:\n An array of strings, each representing an individual line.\n \"\"\"\n\n with open(filename, 'r', encoding='utf-8') as f:\n return [line.rstrip('\\n') for line in f.readlines()]\n"
] | class Project:
def __init__(self, root_dir):
    """Create a project rooted at *root_dir* and derive its standard subdirectories."""
    self._root_dir = root_dir
    self._compute_dependent_paths()
def _compute_dependent_paths(self):
self._data_dir = os.path.join(self._root_dir, 'data')
self._notebooks_dir = os.path.join(self._root_dir, 'notebooks')
self._aux_data_dir = os.path.join(self._data_dir, 'aux')
self._preprocessed_data_dir = os.path.join(self._data_dir, 'preprocessed')
self._features_dir = os.path.join(self._data_dir, 'features')
self._submissions_dir = os.path.join(self._data_dir, 'submissions')
self._trained_model_dir = os.path.join(self._data_dir, 'trained')
self._temp_dir = os.path.join(self._data_dir, 'tmp')
@property
def root_dir(self):
    """Project root directory (trailing separator included, like all paths below)."""
    return self._root_dir + os.path.sep
@property
def data_dir(self):
    """Top-level data directory: <root>/data/."""
    return self._data_dir + os.path.sep
@property
def notebooks_dir(self):
    """Directory for Jupyter notebooks: <root>/notebooks/."""
    return self._notebooks_dir + os.path.sep
@property
def aux_dir(self):
    """Auxiliary data directory: <root>/data/aux/."""
    return self._aux_data_dir + os.path.sep
@property
def preprocessed_data_dir(self):
    """Preprocessed data directory: <root>/data/preprocessed/."""
    return self._preprocessed_data_dir + os.path.sep
@property
def features_dir(self):
    """Extracted-features directory: <root>/data/features/."""
    return self._features_dir + os.path.sep
@property
def submissions_dir(self):
    """Submissions directory: <root>/data/submissions/."""
    return self._submissions_dir + os.path.sep
@property
def trained_model_dir(self):
    """Trained-models directory: <root>/data/trained/."""
    return self._trained_model_dir + os.path.sep
@property
def temp_dir(self):
    """Scratch directory for temporary job files: <root>/data/tmp/."""
    return self._temp_dir + os.path.sep
def save_features(self, train_features, test_features, feature_names, feature_list_id):
"""
Save features for the training and test sets to disk, along with their metadata.
Args:
train_features: A NumPy array of features for the training set.
test_features: A NumPy array of features for the test set.
feature_names: A list containing the names of the feature columns.
feature_list_id: The name for this feature list.
"""
self.save_feature_names(feature_names, feature_list_id)
self.save_feature_list(train_features, 'train', feature_list_id)
self.save_feature_list(test_features, 'test', feature_list_id)
def save_feature_names(self, feature_names, feature_list_id):
    """
    Save the names of the features for the given feature list to a metadata file.
    Example: `save_feature_names(['num_employees', 'stock_price'], 'company')`.

    Args:
        feature_names: A list containing the names of the features, matching the column order.
        feature_list_id: The name for this feature list.
    """

    # Names are stored once, under the train-set filename; the test set shares
    # the same columns (load_feature_lists reads only this file).
    save_lines(feature_names, self.features_dir + 'X_train_{}.names'.format(feature_list_id))
def save_feature_list(self, obj, set_id, feature_list_id):
    """
    Pickle the specified feature list to a file.
    Example: `save_feature_list(X_tfidf_train, 'train', 'tfidf')`.

    Args:
        obj: The object to pickle (e.g., a numpy array or a Pandas dataframe).
        set_id: The id of the subset (e.g., 'train' or 'test').
        feature_list_id: The name for this feature list.
    """

    save(obj, self.features_dir + 'X_{}_{}.pickle'.format(set_id, feature_list_id))
@staticmethod
def discover():
"""
Automatically discover the paths to various data folders in this project
and compose a Project instance.
Returns:
A constructed Project object.
Raises:
ValueError: if the paths could not be figured out automatically.
In this case, you have to create a Project manually using the initializer.
"""
# Try ../data: we're most likely running a Jupyter notebook from the 'notebooks' directory
candidate_path = os.path.abspath(os.path.join(os.curdir, os.pardir, 'data'))
if os.path.exists(candidate_path):
return Project(os.path.abspath(os.path.join(candidate_path, os.pardir)))
# Try ./data
candidate_path = os.path.abspath(os.path.join(os.curdir, 'data'))
if os.path.exists(candidate_path):
return Project(os.path.abspath(os.curdir))
# Try ../../data
candidate_path = os.path.abspath(os.path.join(os.curdir, os.pardir, 'data'))
if os.path.exists(candidate_path):
return Project(os.path.abspath(os.path.join(candidate_path, os.pardir, os.pardir)))
# Out of ideas at this point.
raise ValueError('Cannot discover the structure of the project. Make sure that the data directory exists')
@staticmethod
def init():
    """
    Create the project folder structure, assuming the current directory is
    the project root. Typically used as a command-line entry point called
    by `pygoose init`.
    """
    project = Project(os.path.abspath(os.getcwd()))
    for directory in (
        project.data_dir,
        project.notebooks_dir,
        project.aux_dir,
        project.features_dir,
        project.preprocessed_data_dir,
        project.submissions_dir,
        project.trained_model_dir,
        project.temp_dir,
    ):
        os.makedirs(directory, exist_ok=True)
|
YuriyGuts/pygoose | pygoose/kg/project.py | Project.save_features | python | def save_features(self, train_features, test_features, feature_names, feature_list_id):
self.save_feature_names(feature_names, feature_list_id)
self.save_feature_list(train_features, 'train', feature_list_id)
self.save_feature_list(test_features, 'test', feature_list_id) | Save features for the training and test sets to disk, along with their metadata.
Args:
train_features: A NumPy array of features for the training set.
test_features: A NumPy array of features for the test set.
feature_names: A list containing the names of the feature columns.
feature_list_id: The name for this feature list. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/project.py#L128-L141 | [
"def save_feature_names(self, feature_names, feature_list_id):\n \"\"\"\n Save the names of the features for the given feature list to a metadata file.\n Example: `save_feature_names(['num_employees', 'stock_price'], 'company')`.\n\n Args:\n feature_names: A list containing the names of the features, matching the column order.\n feature_list_id: The name for this feature list.\n \"\"\"\n\n save_lines(feature_names, self.features_dir + 'X_train_{}.names'.format(feature_list_id))\n",
"def save_feature_list(self, obj, set_id, feature_list_id):\n \"\"\"\n Pickle the specified feature list to a file.\n Example: `save_feature_list(project, X_tfidf_train, 'train', 'tfidf')`.\n\n Args:\n obj: The object to pickle (e.g., a numpy array or a Pandas dataframe)\n project: An instance of pygoose project.\n set_id: The id of the subset (e.g., 'train' or 'test')\n feature_list_id: The name for this feature list.\n \"\"\"\n\n save(obj, self.features_dir + 'X_{}_{}.pickle'.format(set_id, feature_list_id))\n"
] | class Project:
def __init__(self, root_dir):
    """Create a project rooted at `root_dir` and derive all dependent paths."""
    self._root_dir = root_dir
    self._compute_dependent_paths()
def _compute_dependent_paths(self):
    """Derive every project directory path from the root directory."""
    root = self._root_dir
    self._data_dir = os.path.join(root, 'data')
    self._notebooks_dir = os.path.join(root, 'notebooks')
    # All remaining folders live under the data directory.
    data = self._data_dir
    self._aux_data_dir = os.path.join(data, 'aux')
    self._preprocessed_data_dir = os.path.join(data, 'preprocessed')
    self._features_dir = os.path.join(data, 'features')
    self._submissions_dir = os.path.join(data, 'submissions')
    self._trained_model_dir = os.path.join(data, 'trained')
    self._temp_dir = os.path.join(data, 'tmp')
@property
def root_dir(self):
return self._root_dir + os.path.sep
@property
def data_dir(self):
return self._data_dir + os.path.sep
@property
def notebooks_dir(self):
return self._notebooks_dir + os.path.sep
@property
def aux_dir(self):
return self._aux_data_dir + os.path.sep
@property
def preprocessed_data_dir(self):
return self._preprocessed_data_dir + os.path.sep
@property
def features_dir(self):
return self._features_dir + os.path.sep
@property
def submissions_dir(self):
return self._submissions_dir + os.path.sep
@property
def trained_model_dir(self):
return self._trained_model_dir + os.path.sep
@property
def temp_dir(self):
return self._temp_dir + os.path.sep
def load_feature_lists(self, feature_lists):
    """
    Load pickled train/test features saved in the `features` folder,
    together with their column names.

    Args:
        feature_lists: A list containing the names of the feature lists to load.

    Returns:
        A tuple of (train dataframe, test dataframe, feature index ranges),
        where each range entry is `[list_id, start_index, end_index]`.
    """
    column_names = []
    feature_ranges = []
    offset = 0

    # Collect the column names of every list and record which column span
    # (inclusive start/end indices) each feature list occupies.
    for list_id in feature_lists:
        names = load_lines(self.features_dir + 'X_train_{}.names'.format(list_id))
        column_names.extend(names)
        feature_ranges.append([list_id, offset, offset + len(names) - 1])
        offset += len(names)

    X_train = np.hstack([
        load(self.features_dir + 'X_train_{}.pickle'.format(list_id))
        for list_id in feature_lists
    ])
    X_test = np.hstack([
        load(self.features_dir + 'X_test_{}.pickle'.format(list_id))
        for list_id in feature_lists
    ])

    return (
        pd.DataFrame(X_train, columns=column_names),
        pd.DataFrame(X_test, columns=column_names),
        feature_ranges,
    )
def save_feature_names(self, feature_names, feature_list_id):
"""
Save the names of the features for the given feature list to a metadata file.
Example: `save_feature_names(['num_employees', 'stock_price'], 'company')`.
Args:
feature_names: A list containing the names of the features, matching the column order.
feature_list_id: The name for this feature list.
"""
save_lines(feature_names, self.features_dir + 'X_train_{}.names'.format(feature_list_id))
def save_feature_list(self, obj, set_id, feature_list_id):
"""
Pickle the specified feature list to a file.
Example: `save_feature_list(project, X_tfidf_train, 'train', 'tfidf')`.
Args:
obj: The object to pickle (e.g., a numpy array or a Pandas dataframe)
project: An instance of pygoose project.
set_id: The id of the subset (e.g., 'train' or 'test')
feature_list_id: The name for this feature list.
"""
save(obj, self.features_dir + 'X_{}_{}.pickle'.format(set_id, feature_list_id))
@staticmethod
def discover():
"""
Automatically discover the paths to various data folders in this project
and compose a Project instance.
Returns:
A constructed Project object.
Raises:
ValueError: if the paths could not be figured out automatically.
In this case, you have to create a Project manually using the initializer.
"""
# Try ../data: we're most likely running a Jupyter notebook from the 'notebooks' directory
candidate_path = os.path.abspath(os.path.join(os.curdir, os.pardir, 'data'))
if os.path.exists(candidate_path):
return Project(os.path.abspath(os.path.join(candidate_path, os.pardir)))
# Try ./data
candidate_path = os.path.abspath(os.path.join(os.curdir, 'data'))
if os.path.exists(candidate_path):
return Project(os.path.abspath(os.curdir))
# Try ../../data
candidate_path = os.path.abspath(os.path.join(os.curdir, os.pardir, 'data'))
if os.path.exists(candidate_path):
return Project(os.path.abspath(os.path.join(candidate_path, os.pardir, os.pardir)))
# Out of ideas at this point.
raise ValueError('Cannot discover the structure of the project. Make sure that the data directory exists')
@staticmethod
def init():
"""
Creates the project infrastructure assuming the current directory is the project root.
Typically used as a command-line entry point called by `pygoose init`.
"""
project = Project(os.path.abspath(os.getcwd()))
paths_to_create = [
project.data_dir,
project.notebooks_dir,
project.aux_dir,
project.features_dir,
project.preprocessed_data_dir,
project.submissions_dir,
project.trained_model_dir,
project.temp_dir,
]
for path in paths_to_create:
os.makedirs(path, exist_ok=True)
|
YuriyGuts/pygoose | pygoose/kg/project.py | Project.save_feature_names | python | def save_feature_names(self, feature_names, feature_list_id):
save_lines(feature_names, self.features_dir + 'X_train_{}.names'.format(feature_list_id)) | Save the names of the features for the given feature list to a metadata file.
Example: `save_feature_names(['num_employees', 'stock_price'], 'company')`.
Args:
feature_names: A list containing the names of the features, matching the column order.
feature_list_id: The name for this feature list. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/project.py#L143-L153 | [
"def save_lines(lines, filename):\n \"\"\"\n Save an array of lines to a file.\n\n Args:\n lines: An array of strings that will be saved as individual lines.\n filename: Path to the output file.\n \"\"\"\n\n with open(filename, 'w', encoding='utf-8') as f:\n f.write('\\n'.join(lines))\n"
] | class Project:
def __init__(self, root_dir):
self._root_dir = root_dir
self._compute_dependent_paths()
def _compute_dependent_paths(self):
self._data_dir = os.path.join(self._root_dir, 'data')
self._notebooks_dir = os.path.join(self._root_dir, 'notebooks')
self._aux_data_dir = os.path.join(self._data_dir, 'aux')
self._preprocessed_data_dir = os.path.join(self._data_dir, 'preprocessed')
self._features_dir = os.path.join(self._data_dir, 'features')
self._submissions_dir = os.path.join(self._data_dir, 'submissions')
self._trained_model_dir = os.path.join(self._data_dir, 'trained')
self._temp_dir = os.path.join(self._data_dir, 'tmp')
@property
def root_dir(self):
return self._root_dir + os.path.sep
@property
def data_dir(self):
return self._data_dir + os.path.sep
@property
def notebooks_dir(self):
return self._notebooks_dir + os.path.sep
@property
def aux_dir(self):
return self._aux_data_dir + os.path.sep
@property
def preprocessed_data_dir(self):
return self._preprocessed_data_dir + os.path.sep
@property
def features_dir(self):
return self._features_dir + os.path.sep
@property
def submissions_dir(self):
return self._submissions_dir + os.path.sep
@property
def trained_model_dir(self):
return self._trained_model_dir + os.path.sep
@property
def temp_dir(self):
return self._temp_dir + os.path.sep
def load_feature_lists(self, feature_lists):
"""
Load pickled features for train and test sets, assuming they are saved
in the `features` folder along with their column names.
Args:
feature_lists: A list containing the names of the feature lists to load.
Returns:
A tuple containing 3 items: train dataframe, test dataframe,
and a list describing the index ranges for the feature lists.
"""
column_names = []
feature_ranges = []
running_feature_count = 0
for list_id in feature_lists:
feature_list_names = load_lines(self.features_dir + 'X_train_{}.names'.format(list_id))
column_names.extend(feature_list_names)
start_index = running_feature_count
end_index = running_feature_count + len(feature_list_names) - 1
running_feature_count += len(feature_list_names)
feature_ranges.append([list_id, start_index, end_index])
X_train = np.hstack([
load(self.features_dir + 'X_train_{}.pickle'.format(list_id))
for list_id in feature_lists
])
X_test = np.hstack([
load(self.features_dir + 'X_test_{}.pickle'.format(list_id))
for list_id in feature_lists
])
df_train = pd.DataFrame(X_train, columns=column_names)
df_test = pd.DataFrame(X_test, columns=column_names)
return df_train, df_test, feature_ranges
def save_features(self, train_features, test_features, feature_names, feature_list_id):
    """
    Persist train/test features and their column names to disk.

    Args:
        train_features: A NumPy array of features for the training set.
        test_features: A NumPy array of features for the test set.
        feature_names: A list containing the names of the feature columns.
        feature_list_id: The name for this feature list.
    """
    self.save_feature_names(feature_names, feature_list_id)
    # Write the train subset first, then the test subset.
    for subset, features in (('train', train_features), ('test', test_features)):
        self.save_feature_list(features, subset, feature_list_id)
def save_feature_list(self, obj, set_id, feature_list_id):
"""
Pickle the specified feature list to a file.
Example: `save_feature_list(project, X_tfidf_train, 'train', 'tfidf')`.
Args:
obj: The object to pickle (e.g., a numpy array or a Pandas dataframe)
project: An instance of pygoose project.
set_id: The id of the subset (e.g., 'train' or 'test')
feature_list_id: The name for this feature list.
"""
save(obj, self.features_dir + 'X_{}_{}.pickle'.format(set_id, feature_list_id))
@staticmethod
def discover():
"""
Automatically discover the paths to various data folders in this project
and compose a Project instance.
Returns:
A constructed Project object.
Raises:
ValueError: if the paths could not be figured out automatically.
In this case, you have to create a Project manually using the initializer.
"""
# Try ../data: we're most likely running a Jupyter notebook from the 'notebooks' directory
candidate_path = os.path.abspath(os.path.join(os.curdir, os.pardir, 'data'))
if os.path.exists(candidate_path):
return Project(os.path.abspath(os.path.join(candidate_path, os.pardir)))
# Try ./data
candidate_path = os.path.abspath(os.path.join(os.curdir, 'data'))
if os.path.exists(candidate_path):
return Project(os.path.abspath(os.curdir))
# Try ../../data
candidate_path = os.path.abspath(os.path.join(os.curdir, os.pardir, 'data'))
if os.path.exists(candidate_path):
return Project(os.path.abspath(os.path.join(candidate_path, os.pardir, os.pardir)))
# Out of ideas at this point.
raise ValueError('Cannot discover the structure of the project. Make sure that the data directory exists')
@staticmethod
def init():
"""
Creates the project infrastructure assuming the current directory is the project root.
Typically used as a command-line entry point called by `pygoose init`.
"""
project = Project(os.path.abspath(os.getcwd()))
paths_to_create = [
project.data_dir,
project.notebooks_dir,
project.aux_dir,
project.features_dir,
project.preprocessed_data_dir,
project.submissions_dir,
project.trained_model_dir,
project.temp_dir,
]
for path in paths_to_create:
os.makedirs(path, exist_ok=True)
|
YuriyGuts/pygoose | pygoose/kg/project.py | Project.save_feature_list | python | def save_feature_list(self, obj, set_id, feature_list_id):
save(obj, self.features_dir + 'X_{}_{}.pickle'.format(set_id, feature_list_id)) | Pickle the specified feature list to a file.
Example: `save_feature_list(project, X_tfidf_train, 'train', 'tfidf')`.
Args:
obj: The object to pickle (e.g., a numpy array or a Pandas dataframe)
project: An instance of pygoose project.
set_id: The id of the subset (e.g., 'train' or 'test')
feature_list_id: The name for this feature list. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/project.py#L155-L167 | [
"def save(obj, filename, protocol=4):\n \"\"\"\n Serialize an object to disk using pickle protocol.\n\n Args:\n obj: The object to serialize.\n filename: Path to the output file.\n protocol: Version of the pickle protocol.\n \"\"\"\n\n with open(filename, 'wb') as f:\n pickle.dump(obj, f, protocol=protocol)\n"
] | class Project:
def __init__(self, root_dir):
self._root_dir = root_dir
self._compute_dependent_paths()
def _compute_dependent_paths(self):
self._data_dir = os.path.join(self._root_dir, 'data')
self._notebooks_dir = os.path.join(self._root_dir, 'notebooks')
self._aux_data_dir = os.path.join(self._data_dir, 'aux')
self._preprocessed_data_dir = os.path.join(self._data_dir, 'preprocessed')
self._features_dir = os.path.join(self._data_dir, 'features')
self._submissions_dir = os.path.join(self._data_dir, 'submissions')
self._trained_model_dir = os.path.join(self._data_dir, 'trained')
self._temp_dir = os.path.join(self._data_dir, 'tmp')
@property
def root_dir(self):
return self._root_dir + os.path.sep
@property
def data_dir(self):
return self._data_dir + os.path.sep
@property
def notebooks_dir(self):
return self._notebooks_dir + os.path.sep
@property
def aux_dir(self):
return self._aux_data_dir + os.path.sep
@property
def preprocessed_data_dir(self):
return self._preprocessed_data_dir + os.path.sep
@property
def features_dir(self):
return self._features_dir + os.path.sep
@property
def submissions_dir(self):
return self._submissions_dir + os.path.sep
@property
def trained_model_dir(self):
return self._trained_model_dir + os.path.sep
@property
def temp_dir(self):
return self._temp_dir + os.path.sep
def load_feature_lists(self, feature_lists):
"""
Load pickled features for train and test sets, assuming they are saved
in the `features` folder along with their column names.
Args:
feature_lists: A list containing the names of the feature lists to load.
Returns:
A tuple containing 3 items: train dataframe, test dataframe,
and a list describing the index ranges for the feature lists.
"""
column_names = []
feature_ranges = []
running_feature_count = 0
for list_id in feature_lists:
feature_list_names = load_lines(self.features_dir + 'X_train_{}.names'.format(list_id))
column_names.extend(feature_list_names)
start_index = running_feature_count
end_index = running_feature_count + len(feature_list_names) - 1
running_feature_count += len(feature_list_names)
feature_ranges.append([list_id, start_index, end_index])
X_train = np.hstack([
load(self.features_dir + 'X_train_{}.pickle'.format(list_id))
for list_id in feature_lists
])
X_test = np.hstack([
load(self.features_dir + 'X_test_{}.pickle'.format(list_id))
for list_id in feature_lists
])
df_train = pd.DataFrame(X_train, columns=column_names)
df_test = pd.DataFrame(X_test, columns=column_names)
return df_train, df_test, feature_ranges
def save_features(self, train_features, test_features, feature_names, feature_list_id):
"""
Save features for the training and test sets to disk, along with their metadata.
Args:
train_features: A NumPy array of features for the training set.
test_features: A NumPy array of features for the test set.
feature_names: A list containing the names of the feature columns.
feature_list_id: The name for this feature list.
"""
self.save_feature_names(feature_names, feature_list_id)
self.save_feature_list(train_features, 'train', feature_list_id)
self.save_feature_list(test_features, 'test', feature_list_id)
def save_feature_names(self, feature_names, feature_list_id):
"""
Save the names of the features for the given feature list to a metadata file.
Example: `save_feature_names(['num_employees', 'stock_price'], 'company')`.
Args:
feature_names: A list containing the names of the features, matching the column order.
feature_list_id: The name for this feature list.
"""
save_lines(feature_names, self.features_dir + 'X_train_{}.names'.format(feature_list_id))
@staticmethod
def discover():
"""
Automatically discover the paths to various data folders in this project
and compose a Project instance.
Returns:
A constructed Project object.
Raises:
ValueError: if the paths could not be figured out automatically.
In this case, you have to create a Project manually using the initializer.
"""
# Try ../data: we're most likely running a Jupyter notebook from the 'notebooks' directory
candidate_path = os.path.abspath(os.path.join(os.curdir, os.pardir, 'data'))
if os.path.exists(candidate_path):
return Project(os.path.abspath(os.path.join(candidate_path, os.pardir)))
# Try ./data
candidate_path = os.path.abspath(os.path.join(os.curdir, 'data'))
if os.path.exists(candidate_path):
return Project(os.path.abspath(os.curdir))
# Try ../../data
candidate_path = os.path.abspath(os.path.join(os.curdir, os.pardir, 'data'))
if os.path.exists(candidate_path):
return Project(os.path.abspath(os.path.join(candidate_path, os.pardir, os.pardir)))
# Out of ideas at this point.
raise ValueError('Cannot discover the structure of the project. Make sure that the data directory exists')
@staticmethod
def init():
"""
Creates the project infrastructure assuming the current directory is the project root.
Typically used as a command-line entry point called by `pygoose init`.
"""
project = Project(os.path.abspath(os.getcwd()))
paths_to_create = [
project.data_dir,
project.notebooks_dir,
project.aux_dir,
project.features_dir,
project.preprocessed_data_dir,
project.submissions_dir,
project.trained_model_dir,
project.temp_dir,
]
for path in paths_to_create:
os.makedirs(path, exist_ok=True)
|
YuriyGuts/pygoose | pygoose/kg/project.py | Project.discover | python | def discover():
# Try ../data: we're most likely running a Jupyter notebook from the 'notebooks' directory
candidate_path = os.path.abspath(os.path.join(os.curdir, os.pardir, 'data'))
if os.path.exists(candidate_path):
return Project(os.path.abspath(os.path.join(candidate_path, os.pardir)))
# Try ./data
candidate_path = os.path.abspath(os.path.join(os.curdir, 'data'))
if os.path.exists(candidate_path):
return Project(os.path.abspath(os.curdir))
# Try ../../data
candidate_path = os.path.abspath(os.path.join(os.curdir, os.pardir, 'data'))
if os.path.exists(candidate_path):
return Project(os.path.abspath(os.path.join(candidate_path, os.pardir, os.pardir)))
# Out of ideas at this point.
raise ValueError('Cannot discover the structure of the project. Make sure that the data directory exists') | Automatically discover the paths to various data folders in this project
and compose a Project instance.
Returns:
A constructed Project object.
Raises:
ValueError: if the paths could not be figured out automatically.
In this case, you have to create a Project manually using the initializer. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/project.py#L170-L199 | null | class Project:
def __init__(self, root_dir):
self._root_dir = root_dir
self._compute_dependent_paths()
def _compute_dependent_paths(self):
self._data_dir = os.path.join(self._root_dir, 'data')
self._notebooks_dir = os.path.join(self._root_dir, 'notebooks')
self._aux_data_dir = os.path.join(self._data_dir, 'aux')
self._preprocessed_data_dir = os.path.join(self._data_dir, 'preprocessed')
self._features_dir = os.path.join(self._data_dir, 'features')
self._submissions_dir = os.path.join(self._data_dir, 'submissions')
self._trained_model_dir = os.path.join(self._data_dir, 'trained')
self._temp_dir = os.path.join(self._data_dir, 'tmp')
@property
def root_dir(self):
return self._root_dir + os.path.sep
@property
def data_dir(self):
return self._data_dir + os.path.sep
@property
def notebooks_dir(self):
return self._notebooks_dir + os.path.sep
@property
def aux_dir(self):
return self._aux_data_dir + os.path.sep
@property
def preprocessed_data_dir(self):
return self._preprocessed_data_dir + os.path.sep
@property
def features_dir(self):
return self._features_dir + os.path.sep
@property
def submissions_dir(self):
return self._submissions_dir + os.path.sep
@property
def trained_model_dir(self):
return self._trained_model_dir + os.path.sep
@property
def temp_dir(self):
return self._temp_dir + os.path.sep
def load_feature_lists(self, feature_lists):
"""
Load pickled features for train and test sets, assuming they are saved
in the `features` folder along with their column names.
Args:
feature_lists: A list containing the names of the feature lists to load.
Returns:
A tuple containing 3 items: train dataframe, test dataframe,
and a list describing the index ranges for the feature lists.
"""
column_names = []
feature_ranges = []
running_feature_count = 0
for list_id in feature_lists:
feature_list_names = load_lines(self.features_dir + 'X_train_{}.names'.format(list_id))
column_names.extend(feature_list_names)
start_index = running_feature_count
end_index = running_feature_count + len(feature_list_names) - 1
running_feature_count += len(feature_list_names)
feature_ranges.append([list_id, start_index, end_index])
X_train = np.hstack([
load(self.features_dir + 'X_train_{}.pickle'.format(list_id))
for list_id in feature_lists
])
X_test = np.hstack([
load(self.features_dir + 'X_test_{}.pickle'.format(list_id))
for list_id in feature_lists
])
df_train = pd.DataFrame(X_train, columns=column_names)
df_test = pd.DataFrame(X_test, columns=column_names)
return df_train, df_test, feature_ranges
def save_features(self, train_features, test_features, feature_names, feature_list_id):
"""
Save features for the training and test sets to disk, along with their metadata.
Args:
train_features: A NumPy array of features for the training set.
test_features: A NumPy array of features for the test set.
feature_names: A list containing the names of the feature columns.
feature_list_id: The name for this feature list.
"""
self.save_feature_names(feature_names, feature_list_id)
self.save_feature_list(train_features, 'train', feature_list_id)
self.save_feature_list(test_features, 'test', feature_list_id)
def save_feature_names(self, feature_names, feature_list_id):
"""
Save the names of the features for the given feature list to a metadata file.
Example: `save_feature_names(['num_employees', 'stock_price'], 'company')`.
Args:
feature_names: A list containing the names of the features, matching the column order.
feature_list_id: The name for this feature list.
"""
save_lines(feature_names, self.features_dir + 'X_train_{}.names'.format(feature_list_id))
def save_feature_list(self, obj, set_id, feature_list_id):
"""
Pickle the specified feature list to a file.
Example: `save_feature_list(project, X_tfidf_train, 'train', 'tfidf')`.
Args:
obj: The object to pickle (e.g., a numpy array or a Pandas dataframe)
project: An instance of pygoose project.
set_id: The id of the subset (e.g., 'train' or 'test')
feature_list_id: The name for this feature list.
"""
save(obj, self.features_dir + 'X_{}_{}.pickle'.format(set_id, feature_list_id))
@staticmethod
@staticmethod
def init():
"""
Creates the project infrastructure assuming the current directory is the project root.
Typically used as a command-line entry point called by `pygoose init`.
"""
project = Project(os.path.abspath(os.getcwd()))
paths_to_create = [
project.data_dir,
project.notebooks_dir,
project.aux_dir,
project.features_dir,
project.preprocessed_data_dir,
project.submissions_dir,
project.trained_model_dir,
project.temp_dir,
]
for path in paths_to_create:
os.makedirs(path, exist_ok=True)
|
YuriyGuts/pygoose | pygoose/kg/project.py | Project.init | python | def init():
project = Project(os.path.abspath(os.getcwd()))
paths_to_create = [
project.data_dir,
project.notebooks_dir,
project.aux_dir,
project.features_dir,
project.preprocessed_data_dir,
project.submissions_dir,
project.trained_model_dir,
project.temp_dir,
]
for path in paths_to_create:
os.makedirs(path, exist_ok=True) | Creates the project infrastructure assuming the current directory is the project root.
Typically used as a command-line entry point called by `pygoose init`. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/project.py#L202-L221 | null | class Project:
def __init__(self, root_dir):
self._root_dir = root_dir
self._compute_dependent_paths()
def _compute_dependent_paths(self):
self._data_dir = os.path.join(self._root_dir, 'data')
self._notebooks_dir = os.path.join(self._root_dir, 'notebooks')
self._aux_data_dir = os.path.join(self._data_dir, 'aux')
self._preprocessed_data_dir = os.path.join(self._data_dir, 'preprocessed')
self._features_dir = os.path.join(self._data_dir, 'features')
self._submissions_dir = os.path.join(self._data_dir, 'submissions')
self._trained_model_dir = os.path.join(self._data_dir, 'trained')
self._temp_dir = os.path.join(self._data_dir, 'tmp')
@property
def root_dir(self):
return self._root_dir + os.path.sep
@property
def data_dir(self):
return self._data_dir + os.path.sep
@property
def notebooks_dir(self):
return self._notebooks_dir + os.path.sep
@property
def aux_dir(self):
return self._aux_data_dir + os.path.sep
@property
def preprocessed_data_dir(self):
return self._preprocessed_data_dir + os.path.sep
@property
def features_dir(self):
return self._features_dir + os.path.sep
@property
def submissions_dir(self):
return self._submissions_dir + os.path.sep
@property
def trained_model_dir(self):
return self._trained_model_dir + os.path.sep
@property
def temp_dir(self):
return self._temp_dir + os.path.sep
def load_feature_lists(self, feature_lists):
"""
Load pickled features for train and test sets, assuming they are saved
in the `features` folder along with their column names.
Args:
feature_lists: A list containing the names of the feature lists to load.
Returns:
A tuple containing 3 items: train dataframe, test dataframe,
and a list describing the index ranges for the feature lists.
"""
column_names = []
feature_ranges = []
running_feature_count = 0
for list_id in feature_lists:
feature_list_names = load_lines(self.features_dir + 'X_train_{}.names'.format(list_id))
column_names.extend(feature_list_names)
start_index = running_feature_count
end_index = running_feature_count + len(feature_list_names) - 1
running_feature_count += len(feature_list_names)
feature_ranges.append([list_id, start_index, end_index])
X_train = np.hstack([
load(self.features_dir + 'X_train_{}.pickle'.format(list_id))
for list_id in feature_lists
])
X_test = np.hstack([
load(self.features_dir + 'X_test_{}.pickle'.format(list_id))
for list_id in feature_lists
])
df_train = pd.DataFrame(X_train, columns=column_names)
df_test = pd.DataFrame(X_test, columns=column_names)
return df_train, df_test, feature_ranges
def save_features(self, train_features, test_features, feature_names, feature_list_id):
"""
Save features for the training and test sets to disk, along with their metadata.
Args:
train_features: A NumPy array of features for the training set.
test_features: A NumPy array of features for the test set.
feature_names: A list containing the names of the feature columns.
feature_list_id: The name for this feature list.
"""
self.save_feature_names(feature_names, feature_list_id)
self.save_feature_list(train_features, 'train', feature_list_id)
self.save_feature_list(test_features, 'test', feature_list_id)
def save_feature_names(self, feature_names, feature_list_id):
"""
Save the names of the features for the given feature list to a metadata file.
Example: `save_feature_names(['num_employees', 'stock_price'], 'company')`.
Args:
feature_names: A list containing the names of the features, matching the column order.
feature_list_id: The name for this feature list.
"""
save_lines(feature_names, self.features_dir + 'X_train_{}.names'.format(feature_list_id))
def save_feature_list(self, obj, set_id, feature_list_id):
"""
Pickle the specified feature list to a file.
Example: `save_feature_list(project, X_tfidf_train, 'train', 'tfidf')`.
Args:
obj: The object to pickle (e.g., a numpy array or a Pandas dataframe)
project: An instance of pygoose project.
set_id: The id of the subset (e.g., 'train' or 'test')
feature_list_id: The name for this feature list.
"""
save(obj, self.features_dir + 'X_{}_{}.pickle'.format(set_id, feature_list_id))
@staticmethod
def discover():
"""
Automatically discover the paths to various data folders in this project
and compose a Project instance.
Returns:
A constructed Project object.
Raises:
ValueError: if the paths could not be figured out automatically.
In this case, you have to create a Project manually using the initializer.
"""
# Try ../data: we're most likely running a Jupyter notebook from the 'notebooks' directory
candidate_path = os.path.abspath(os.path.join(os.curdir, os.pardir, 'data'))
if os.path.exists(candidate_path):
return Project(os.path.abspath(os.path.join(candidate_path, os.pardir)))
# Try ./data
candidate_path = os.path.abspath(os.path.join(os.curdir, 'data'))
if os.path.exists(candidate_path):
return Project(os.path.abspath(os.curdir))
# Try ../../data
candidate_path = os.path.abspath(os.path.join(os.curdir, os.pardir, 'data'))
if os.path.exists(candidate_path):
return Project(os.path.abspath(os.path.join(candidate_path, os.pardir, os.pardir)))
# Out of ideas at this point.
raise ValueError('Cannot discover the structure of the project. Make sure that the data directory exists')
@staticmethod
|
YuriyGuts/pygoose | pygoose/kg/keras.py | get_class_weights | python | def get_class_weights(y, smooth_factor=0):
from collections import Counter
counter = Counter(y)
if smooth_factor > 0:
p = max(counter.values()) * smooth_factor
for k in counter.keys():
counter[k] += p
majority = max(counter.values())
return {cls: float(majority / count) for cls, count in counter.items()} | Returns the weights for each class based on the frequencies of the samples.
Args:
y: A list of true labels (the labels must be hashable).
smooth_factor: A factor that smooths extremely uneven weights.
Returns:
A dictionary with the weight for each class. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/keras.py#L4-L26 | null | import matplotlib.pyplot as plt
def plot_loss_history(history, figsize=(15, 8)):
"""
Plots the learning history for a Keras model,
assuming the validation data was provided to the 'fit' function.
Args:
history: The return value from the 'fit' function.
figsize: The size of the plot.
"""
plt.figure(figsize=figsize)
plt.plot(history.history["loss"])
plt.plot(history.history["val_loss"])
plt.xlabel("# Epochs")
plt.ylabel("Loss")
plt.legend(["Training", "Validation"])
plt.title("Loss over time")
plt.show()
def plot_accuracy_history(history, figsize=(15, 8)):
"""
Plots the learning history for a Keras model,
assuming the validation data was provided to the 'fit' function.
Args:
history: The return value from the 'fit' function.
figsize: The size of the plot.
"""
plt.figure(figsize=figsize)
plt.plot(history.history["acc"])
plt.plot(history.history["val_acc"])
plt.xlabel("# Epochs")
plt.ylabel("Accuracy")
plt.legend(["Training", "Validation"])
plt.title("Accuracy over time")
plt.show()
|
YuriyGuts/pygoose | pygoose/kg/keras.py | plot_loss_history | python | def plot_loss_history(history, figsize=(15, 8)):
plt.figure(figsize=figsize)
plt.plot(history.history["loss"])
plt.plot(history.history["val_loss"])
plt.xlabel("# Epochs")
plt.ylabel("Loss")
plt.legend(["Training", "Validation"])
plt.title("Loss over time")
plt.show() | Plots the learning history for a Keras model,
assuming the validation data was provided to the 'fit' function.
Args:
history: The return value from the 'fit' function.
figsize: The size of the plot. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/keras.py#L29-L49 | null | import matplotlib.pyplot as plt
def get_class_weights(y, smooth_factor=0):
"""
Returns the weights for each class based on the frequencies of the samples.
Args:
y: A list of true labels (the labels must be hashable).
smooth_factor: A factor that smooths extremely uneven weights.
Returns:
A dictionary with the weight for each class.
"""
from collections import Counter
counter = Counter(y)
if smooth_factor > 0:
p = max(counter.values()) * smooth_factor
for k in counter.keys():
counter[k] += p
majority = max(counter.values())
return {cls: float(majority / count) for cls, count in counter.items()}
def plot_accuracy_history(history, figsize=(15, 8)):
"""
Plots the learning history for a Keras model,
assuming the validation data was provided to the 'fit' function.
Args:
history: The return value from the 'fit' function.
figsize: The size of the plot.
"""
plt.figure(figsize=figsize)
plt.plot(history.history["acc"])
plt.plot(history.history["val_acc"])
plt.xlabel("# Epochs")
plt.ylabel("Accuracy")
plt.legend(["Training", "Validation"])
plt.title("Accuracy over time")
plt.show()
|
YuriyGuts/pygoose | pygoose/kg/io.py | save | python | def save(obj, filename, protocol=4):
with open(filename, 'wb') as f:
pickle.dump(obj, f, protocol=protocol) | Serialize an object to disk using pickle protocol.
Args:
obj: The object to serialize.
filename: Path to the output file.
protocol: Version of the pickle protocol. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/io.py#L20-L31 | null | import json
import pickle
def load(filename):
"""
Deserialize a pickled object from disk.
Args:
filename: Path to the input pickle file.
Returns:
The deserialized object.
"""
with open(filename, 'rb') as f:
return pickle.load(f)
def load_json(filename, **kwargs):
"""
Load a JSON object from the specified file.
Args:
filename: Path to the input JSON file.
**kwargs: Additional arguments to `json.load`.
Returns:
The object deserialized from JSON.
"""
with open(filename, 'r', encoding='utf-8') as f:
return json.load(f, **kwargs)
def save_json(obj, filename, **kwargs):
"""
Save an object as a JSON file.
Args:
obj: The object to save. Must be JSON-serializable.
filename: Path to the output file.
**kwargs: Additional arguments to `json.dump`.
"""
with open(filename, 'w', encoding='utf-8') as f:
json.dump(obj, f, **kwargs)
def load_lines(filename):
"""
Load a text file as an array of lines.
Args:
filename: Path to the input file.
Returns:
An array of strings, each representing an individual line.
"""
with open(filename, 'r', encoding='utf-8') as f:
return [line.rstrip('\n') for line in f.readlines()]
def save_lines(lines, filename):
"""
Save an array of lines to a file.
Args:
lines: An array of strings that will be saved as individual lines.
filename: Path to the output file.
"""
with open(filename, 'w', encoding='utf-8') as f:
f.write('\n'.join(lines))
|
YuriyGuts/pygoose | pygoose/kg/io.py | load_json | python | def load_json(filename, **kwargs):
with open(filename, 'r', encoding='utf-8') as f:
return json.load(f, **kwargs) | Load a JSON object from the specified file.
Args:
filename: Path to the input JSON file.
**kwargs: Additional arguments to `json.load`.
Returns:
The object deserialized from JSON. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/io.py#L34-L47 | null | import json
import pickle
def load(filename):
"""
Deserialize a pickled object from disk.
Args:
filename: Path to the input pickle file.
Returns:
The deserialized object.
"""
with open(filename, 'rb') as f:
return pickle.load(f)
def save(obj, filename, protocol=4):
"""
Serialize an object to disk using pickle protocol.
Args:
obj: The object to serialize.
filename: Path to the output file.
protocol: Version of the pickle protocol.
"""
with open(filename, 'wb') as f:
pickle.dump(obj, f, protocol=protocol)
def save_json(obj, filename, **kwargs):
"""
Save an object as a JSON file.
Args:
obj: The object to save. Must be JSON-serializable.
filename: Path to the output file.
**kwargs: Additional arguments to `json.dump`.
"""
with open(filename, 'w', encoding='utf-8') as f:
json.dump(obj, f, **kwargs)
def load_lines(filename):
"""
Load a text file as an array of lines.
Args:
filename: Path to the input file.
Returns:
An array of strings, each representing an individual line.
"""
with open(filename, 'r', encoding='utf-8') as f:
return [line.rstrip('\n') for line in f.readlines()]
def save_lines(lines, filename):
"""
Save an array of lines to a file.
Args:
lines: An array of strings that will be saved as individual lines.
filename: Path to the output file.
"""
with open(filename, 'w', encoding='utf-8') as f:
f.write('\n'.join(lines))
|
YuriyGuts/pygoose | pygoose/kg/io.py | save_json | python | def save_json(obj, filename, **kwargs):
with open(filename, 'w', encoding='utf-8') as f:
json.dump(obj, f, **kwargs) | Save an object as a JSON file.
Args:
obj: The object to save. Must be JSON-serializable.
filename: Path to the output file.
**kwargs: Additional arguments to `json.dump`. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/io.py#L50-L61 | null | import json
import pickle
def load(filename):
"""
Deserialize a pickled object from disk.
Args:
filename: Path to the input pickle file.
Returns:
The deserialized object.
"""
with open(filename, 'rb') as f:
return pickle.load(f)
def save(obj, filename, protocol=4):
"""
Serialize an object to disk using pickle protocol.
Args:
obj: The object to serialize.
filename: Path to the output file.
protocol: Version of the pickle protocol.
"""
with open(filename, 'wb') as f:
pickle.dump(obj, f, protocol=protocol)
def load_json(filename, **kwargs):
"""
Load a JSON object from the specified file.
Args:
filename: Path to the input JSON file.
**kwargs: Additional arguments to `json.load`.
Returns:
The object deserialized from JSON.
"""
with open(filename, 'r', encoding='utf-8') as f:
return json.load(f, **kwargs)
def load_lines(filename):
"""
Load a text file as an array of lines.
Args:
filename: Path to the input file.
Returns:
An array of strings, each representing an individual line.
"""
with open(filename, 'r', encoding='utf-8') as f:
return [line.rstrip('\n') for line in f.readlines()]
def save_lines(lines, filename):
"""
Save an array of lines to a file.
Args:
lines: An array of strings that will be saved as individual lines.
filename: Path to the output file.
"""
with open(filename, 'w', encoding='utf-8') as f:
f.write('\n'.join(lines))
|
YuriyGuts/pygoose | pygoose/kg/io.py | load_lines | python | def load_lines(filename):
with open(filename, 'r', encoding='utf-8') as f:
return [line.rstrip('\n') for line in f.readlines()] | Load a text file as an array of lines.
Args:
filename: Path to the input file.
Returns:
An array of strings, each representing an individual line. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/io.py#L64-L76 | null | import json
import pickle
def load(filename):
"""
Deserialize a pickled object from disk.
Args:
filename: Path to the input pickle file.
Returns:
The deserialized object.
"""
with open(filename, 'rb') as f:
return pickle.load(f)
def save(obj, filename, protocol=4):
"""
Serialize an object to disk using pickle protocol.
Args:
obj: The object to serialize.
filename: Path to the output file.
protocol: Version of the pickle protocol.
"""
with open(filename, 'wb') as f:
pickle.dump(obj, f, protocol=protocol)
def load_json(filename, **kwargs):
"""
Load a JSON object from the specified file.
Args:
filename: Path to the input JSON file.
**kwargs: Additional arguments to `json.load`.
Returns:
The object deserialized from JSON.
"""
with open(filename, 'r', encoding='utf-8') as f:
return json.load(f, **kwargs)
def save_json(obj, filename, **kwargs):
"""
Save an object as a JSON file.
Args:
obj: The object to save. Must be JSON-serializable.
filename: Path to the output file.
**kwargs: Additional arguments to `json.dump`.
"""
with open(filename, 'w', encoding='utf-8') as f:
json.dump(obj, f, **kwargs)
def save_lines(lines, filename):
"""
Save an array of lines to a file.
Args:
lines: An array of strings that will be saved as individual lines.
filename: Path to the output file.
"""
with open(filename, 'w', encoding='utf-8') as f:
f.write('\n'.join(lines))
|
YuriyGuts/pygoose | pygoose/kg/io.py | save_lines | python | def save_lines(lines, filename):
with open(filename, 'w', encoding='utf-8') as f:
f.write('\n'.join(lines)) | Save an array of lines to a file.
Args:
lines: An array of strings that will be saved as individual lines.
filename: Path to the output file. | train | https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/io.py#L79-L89 | null | import json
import pickle
def load(filename):
"""
Deserialize a pickled object from disk.
Args:
filename: Path to the input pickle file.
Returns:
The deserialized object.
"""
with open(filename, 'rb') as f:
return pickle.load(f)
def save(obj, filename, protocol=4):
"""
Serialize an object to disk using pickle protocol.
Args:
obj: The object to serialize.
filename: Path to the output file.
protocol: Version of the pickle protocol.
"""
with open(filename, 'wb') as f:
pickle.dump(obj, f, protocol=protocol)
def load_json(filename, **kwargs):
"""
Load a JSON object from the specified file.
Args:
filename: Path to the input JSON file.
**kwargs: Additional arguments to `json.load`.
Returns:
The object deserialized from JSON.
"""
with open(filename, 'r', encoding='utf-8') as f:
return json.load(f, **kwargs)
def save_json(obj, filename, **kwargs):
"""
Save an object as a JSON file.
Args:
obj: The object to save. Must be JSON-serializable.
filename: Path to the output file.
**kwargs: Additional arguments to `json.dump`.
"""
with open(filename, 'w', encoding='utf-8') as f:
json.dump(obj, f, **kwargs)
def load_lines(filename):
"""
Load a text file as an array of lines.
Args:
filename: Path to the input file.
Returns:
An array of strings, each representing an individual line.
"""
with open(filename, 'r', encoding='utf-8') as f:
return [line.rstrip('\n') for line in f.readlines()]
|
quikmile/trellio | trellio/services.py | publish | python | def publish(func):
@wraps(func)
def wrapper(self, *args, **kwargs): # outgoing
payload = func(self, *args, **kwargs)
payload.pop('self', None)
self._publish(func.__name__, payload)
return None
wrapper.is_publish = True
return wrapper | publish the return value of this function as a message from this endpoint | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/services.py#L26-L40 | null | import asyncio
import json
import logging
import socket
import time
from asyncio import iscoroutine, coroutine, wait_for, TimeoutError, Future, get_event_loop, async
from functools import wraps, partial
import setproctitle
from again.utils import unique_hex
from aiohttp.web import Response
from retrial.retrial.retry import retry
from trellio.packet import ControlPacket
from .exceptions import RequestException, ClientException, TrellioServiceException
from .packet import MessagePacket
from .utils.helpers import Singleton # we need non singleton subclasses
from .utils.helpers import default_preflight_response
from .utils.ordered_class_member import OrderedClassMembers
from .utils.stats import Aggregator, Stats
from .views import HTTPView
API_TIMEOUT = 60 * 10
def subscribe(func):
"""
use to listen for publications from a specific endpoint of a service,
this method receives a publication from a remote service
"""
wrapper = _get_subscribe_decorator(func)
wrapper.is_subscribe = True
return wrapper
def xsubscribe(func=None, strategy='DESIGNATION'):
"""
Used to listen for publications from a specific endpoint of a service. If multiple instances
subscribe to an endpoint, only one of them receives the event. And the publish event is retried till
an acknowledgment is received from the other end.
:param func: the function to decorate with. The name of the function is the event subscribers will subscribe to.
:param strategy: The strategy of delivery. Can be 'RANDOM' or 'LEADER'. If 'RANDOM', then the event will be randomly
passed to any one of the interested parties. If 'LEADER' then it is passed to the first instance alive
which registered for that endpoint.
"""
if func is None:
return partial(xsubscribe, strategy=strategy)
else:
wrapper = _get_subscribe_decorator(func)
wrapper.is_xsubscribe = True
wrapper.strategy = strategy
return wrapper
def _get_subscribe_decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
coroutine_func = func
if not iscoroutine(func):
coroutine_func = coroutine(func)
return (async(coroutine_func(*args, **kwargs)))
return wrapper
def request(func=None, timeout=600):
"""
use to request an api call from a specific endpoint
"""
if func is None:
return partial(request, timeout=timeout)
@wraps(func)
def wrapper(self, *args, **kwargs):
params = func(self, *args, **kwargs)
self = params.pop('self', None)
entity = params.pop('entity', None)
app_name = params.pop('app_name', None)
request_id = unique_hex()
params['request_id'] = request_id
future = self._send_request(app_name, endpoint=func.__name__, entity=entity, params=params, timeout=timeout)
return future
wrapper.is_request = True
return wrapper
def api(func=None, timeout=API_TIMEOUT): # incoming
"""
provide a request/response api
receives any requests here and return value is the response
all functions must have the following signature
- request_id
- entity (partition/routing key)
followed by kwargs
"""
if func is None:
return partial(api, timeout=timeout)
else:
wrapper = _get_api_decorator(func=func, timeout=timeout)
return wrapper
def apideprecated(func=None, replacement_api=None):
if func is None:
return partial(apideprecated, replacement_api=replacement_api)
else:
wrapper = _get_api_decorator(func=func, old_api=func.__name__, replacement_api=replacement_api)
return wrapper
def _get_api_decorator(func=None, old_api=None, replacement_api=None, timeout=API_TIMEOUT):
@coroutine
@wraps(func)
def wrapper(*args, **kwargs):
_logger = logging.getLogger(__name__)
start_time = int(time.time() * 1000)
self = args[0]
rid = kwargs.pop('request_id')
entity = kwargs.pop('entity')
from_id = kwargs.pop('from_id')
wrapped_func = func
result = None
error = None
failed = False
status = 'successful'
success = True
if not iscoroutine(func):
wrapped_func = coroutine(func)
Stats.tcp_stats['total_requests'] += 1
try:
result = yield from wait_for(wrapped_func(self, **kwargs), timeout)
except TimeoutError as e:
Stats.tcp_stats['timedout'] += 1
error = str(e)
status = 'timeout'
success = False
failed = True
_logger.exception("TCP request had a timeout for method %s", func.__name__)
except TrellioServiceException as e:
Stats.tcp_stats['total_responses'] += 1
error = str(e)
status = 'handled_error'
_logger.exception('Handled exception %s for method %s ', e.__class__.__name__, func.__name__)
except Exception as e:
Stats.tcp_stats['total_errors'] += 1
error = str(e)
status = 'unhandled_error'
success = False
failed = True
_logger.exception('Unhandled exception %s for method %s ', e.__class__.__name__, func.__name__)
else:
Stats.tcp_stats['total_responses'] += 1
end_time = int(time.time() * 1000)
hostname = socket.gethostname()
service_name = '_'.join(setproctitle.getproctitle().split('_')[1:-1])
logd = {
'endpoint': func.__name__,
'time_taken': end_time - start_time,
'hostname': hostname, 'service_name': service_name
}
_logger.debug('Time taken for %s is %d milliseconds', func.__name__, end_time - start_time)
# call to update aggregator, designed to replace the stats module.
Aggregator.update_stats(endpoint=func.__name__, status=status, success=success,
server_type='tcp', time_taken=end_time - start_time)
if not old_api:
return self._make_response_packet(request_id=rid, from_id=from_id, entity=entity, result=result,
error=error, failed=failed)
else:
return self._make_response_packet(request_id=rid, from_id=from_id, entity=entity, result=result,
error=error, failed=failed, old_api=old_api,
replacement_api=replacement_api)
wrapper.is_api = True
return wrapper
def make_request(func, self, args, kwargs, method):
params = func(self, *args, **kwargs)
entity = params.pop('entity', None)
app_name = params.pop('app_name', None)
self = params.pop('self')
response = yield from self._send_http_request(app_name, method, entity, params)
return response
def _enable_http_middleware(func): # pre and post http, processing
@wraps(func)
async def f(self, *args, **kwargs):
if hasattr(self, 'middlewares'):
for i in self.middlewares:
if hasattr(i, 'pre_request'):
pre_request = getattr(i, 'pre_request')
if callable(pre_request):
try:
res = await pre_request(self, *args, **kwargs) # passing service as first argument
if res:
return res
except Exception as e:
return Response(status=400, content_type='application/json',
body=json.dumps(
{'error': str(e), 'sector': getattr(i, 'middleware_info')}).encode())
_func = coroutine(func) # func is a generator object
result = await _func(self, *args, **kwargs)
if hasattr(self, 'middlewares'):
for i in self.middlewares:
if hasattr(i, 'post_request'):
post_request = getattr(i, 'post_request')
if callable(post_request):
try:
res = await post_request(self, result, *args, **kwargs)
if res:
return res
except Exception as e:
return Response(status=400, content_type='application/json',
body=json.dumps(
{'error': str(e), 'sector': getattr(i, 'middleware_info')}).encode())
return result
return f
def get_decorated_fun(method, path, required_params, timeout):
def decorator(func):
@wraps(func)
@_enable_http_middleware
def f(self, *args, **kwargs):
if isinstance(self, HTTPServiceClient):
return (yield from make_request(func, self, args, kwargs, method))
elif isinstance(self, HTTPService) or isinstance(self, HTTPView):
Stats.http_stats['total_requests'] += 1
if required_params is not None:
req = args[0]
query_params = req.GET
params = required_params
if not isinstance(required_params, list):
params = [required_params]
missing_params = list(filter(lambda x: x not in query_params, params))
if len(missing_params) > 0:
res_d = {'error': 'Required params {} not found'.format(','.join(missing_params))}
Stats.http_stats['total_responses'] += 1
Aggregator.update_stats(endpoint=func.__name__, status=400, success=False,
server_type='http', time_taken=0)
return Response(status=400, content_type='application/json', body=json.dumps(res_d).encode())
t1 = time.time()
wrapped_func = func
success = True
_logger = logging.getLogger()
if not iscoroutine(func):
wrapped_func = coroutine(func)
try:
result = yield from wait_for(wrapped_func(self, *args, **kwargs), timeout)
except TimeoutError as e:
Stats.http_stats['timedout'] += 1
status = 'timeout'
success = False
_logger.exception("HTTP request had a timeout for method %s", func.__name__)
return Response(status=408, body='Request Timeout'.encode())
except TrellioServiceException as e:
Stats.http_stats['total_responses'] += 1
status = 'handled_exception'
_logger.exception('Handled exception %s for method %s ', e.__class__.__name__, func.__name__)
raise e
except Exception as e:
Stats.http_stats['total_errors'] += 1
status = 'unhandled_exception'
success = False
_logger.exception('Unhandled exception %s for method %s ', e.__class__.__name__, func.__name__)
raise e
else:
t2 = time.time()
hostname = socket.gethostname()
service_name = '_'.join(setproctitle.getproctitle().split('_')[1:-1])
status = result.status
logd = {
'status': result.status,
'time_taken': int((t2 - t1) * 1000),
'type': 'http',
'hostname': hostname, 'service_name': service_name
}
logging.getLogger('stats').debug(logd)
Stats.http_stats['total_responses'] += 1
return result
finally:
t2 = time.time()
Aggregator.update_stats(endpoint=func.__name__, status=status, success=success,
server_type='http', time_taken=int((t2 - t1) * 1000))
f.is_http_method = True
f.method = method
f.paths = path
if not isinstance(path, list):
f.paths = [path]
return f
return decorator
def get(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('get', path, required_params, timeout)
def head(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('head', path, required_params, timeout)
def options(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('options', path, required_params, timeout)
def patch(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('patch', path, required_params, timeout)
def post(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('post', path, required_params, timeout)
def put(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('put', path, required_params, timeout)
def trace(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('put', path, required_params, timeout)
def delete(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('delete', path, required_params, timeout)
class _Service:
_PUB_PKT_STR = 'publish'
_REQ_PKT_STR = 'request'
_RES_PKT_STR = 'response'
def __init__(self, service_name, service_version):
self._service_name = service_name.lower()
self._service_version = str(service_version)
self._tcp_bus = None
self._pubsub_bus = None
self._http_bus = None
@property
def name(self):
return self._service_name
@property
def version(self):
return self._service_version
@property
def properties(self):
return self.name, self.version
@staticmethod
def time_future(future: Future, timeout: int):
def timer_callback(f):
if not f.done() and not f.cancelled():
f.set_exception(TimeoutError())
get_event_loop().call_later(timeout, timer_callback, future)
class TCPServiceClient(Singleton, _Service):
def __init__(self, service_name, service_version, ssl_context=None):
if not self.has_inited(): # to maintain singleton behaviour
super(TCPServiceClient, self).__init__(service_name, service_version)
self._pending_requests = {}
self.tcp_bus = None
self._ssl_context = ssl_context
self.init_done()
@property
def ssl_context(self):
return self._ssl_context
def _send_request(self, app_name, endpoint, entity, params, timeout):
packet = MessagePacket.request(self.name, self.version, app_name, _Service._REQ_PKT_STR, endpoint, params,
entity)
future = Future()
request_id = params['request_id']
self._pending_requests[request_id] = future
try:
self.tcp_bus.send(packet)
except ClientException:
if not future.done() and not future.cancelled():
error = 'Client not found'
exception = ClientException(error)
exception.error = error
future.set_exception(exception)
_Service.time_future(future, timeout)
return future
def receive(self, packet: dict, protocol, transport):
if packet['type'] == 'ping':
pass
else:
self._process_response(packet)
def process_packet(self, packet):
if packet['type'] == _Service._RES_PKT_STR:
self._process_response(packet)
elif packet['type'] == _Service._PUB_PKT_STR:
self._process_publication(packet)
else:
print('Invalid packet', packet)
def _process_response(self, packet):
payload = packet['payload']
request_id = payload['request_id']
has_result = 'result' in payload
has_error = 'error' in payload
if 'old_api' in payload:
warning = 'Deprecated API: ' + payload['old_api']
if 'replacement_api' in payload:
warning += ', New API: ' + payload['replacement_api']
logging.getLogger().warning(warning)
future = self._pending_requests.pop(request_id)
if has_result:
if not future.done() and not future.cancelled():
future.set_result(payload['result'])
elif has_error:
if payload.get('failed', False):
if not future.done() and not future.cancelled():
future.set_exception(Exception(payload['error']))
else:
exception = RequestException()
exception.error = payload['error']
if not future.done() and not future.cancelled():
future.set_exception(exception)
else:
print('Invalid response to request:', packet)
def _process_publication(self, packet):
endpoint = packet['endpoint']
func = getattr(self, endpoint)
func(**packet['payload'])
def _handle_connection_lost(self):
vendor = self.tcp_bus._registry_client._get_full_service_name(self.name, self.version)
for host, port, node_id, service_type in self.tcp_bus._registry_client._available_services[vendor]:
packet = ControlPacket.deregister(self.name, self.version, node_id)
self.tcp_bus._registry_client._handle_deregistration(packet)
class _ServiceHost(_Service):
def __init__(self, service_name, service_version, host_ip, host_port):
super(_ServiceHost, self).__init__(service_name, service_version)
self._node_id = unique_hex()
self._ip = host_ip
self._port = host_port
self._clients = []
def is_for_me(self, service, version):
return service == self.name and version == self.version
@property
def node_id(self):
return self._node_id
@property
def tcp_bus(self):
return self._tcp_bus
@tcp_bus.setter
def tcp_bus(self, bus):
for client in self._clients:
if isinstance(client, TCPServiceClient):
client.tcp_bus = bus
self._tcp_bus = bus
@property
def http_bus(self):
return self._http_bus
@http_bus.setter
def http_bus(self, bus):
for client in self._clients:
if isinstance(client, HTTPServiceClient):
client._http_bus = self._http_bus
self._http_bus = bus
# @property
# def pubsub_bus(self):
# return self._pubsub_bus
#
# @pubsub_bus.setter
# def pubsub_bus(self, bus):
# self._pubsub_bus = bus
@property
def clients(self):
return self._clients
@clients.setter
def clients(self, clients):
self._clients = clients
@property
def socket_address(self):
return self._ip, self._port
@property
def host(self):
return self._ip
@property
def port(self):
return self._port
def initiate(self):
self.tcp_bus.register()
# yield from self.pubsub_bus.create_pubsub_handler()
# async(self.pubsub_bus.register_for_subscription(self.host, self.port, self.node_id, self.clients))
class TCPService(_ServiceHost):
def __init__(self, service_name, service_version, host_ip=None, host_port=None, ssl_context=None):
super(TCPService, self).__init__(service_name, service_version, host_ip, host_port)
self._ssl_context = ssl_context
@property
def ssl_context(self):
return self._ssl_context
# def _publish(self, endpoint, payload):
# self._pubsub_bus.publish(self.name, self.version, endpoint, payload)
#
# def _xpublish(self, endpoint, payload, strategy):
# self._pubsub_bus.xpublish(self.name, self.version, endpoint, payload, strategy)
@staticmethod
def _make_response_packet(request_id: str, from_id: str, entity: str, result: object, error: object,
failed: bool, old_api=None, replacement_api=None):
if failed:
payload = {'request_id': request_id, 'error': error, 'failed': failed}
else:
payload = {'request_id': request_id, 'result': result}
if old_api:
payload['old_api'] = old_api
if replacement_api:
payload['replacement_api'] = replacement_api
packet = {'pid': unique_hex(),
'to': from_id,
'entity': entity,
'type': _Service._RES_PKT_STR,
'payload': payload}
return packet
class HTTPService(_ServiceHost, metaclass=OrderedClassMembers):
def __init__(self, service_name, service_version, host_ip=None, host_port=None, ssl_context=None,
allow_cross_domain=True,
preflight_response=default_preflight_response):
super(HTTPService, self).__init__(service_name, service_version, host_ip, host_port)
self._ssl_context = ssl_context
self._allow_cross_domain = allow_cross_domain
self._preflight_response = preflight_response
@property
def ssl_context(self):
return self._ssl_context
@property
def cross_domain_allowed(self):
return self._allow_cross_domain
@property
def preflight_response(self):
return self._preflight_response
@get('/ping')
def pong(self, _):
return Response()
@get('/_stats')
def stats(self, _):
res_d = Aggregator.dump_stats()
return Response(status=200, content_type='application/json', body=json.dumps(res_d).encode())
class HTTPServiceClient(Singleton, _Service):
def __init__(self, service_name, service_version):
if not self.has_inited():
super(HTTPServiceClient, self).__init__(service_name, service_version)
self.init_done()
def _send_http_request(self, app_name, method, entity, params):
response = yield from self._http_bus.send_http_request(app_name, self.name, self.version, method, entity,
params)
return response
|
quikmile/trellio | trellio/services.py | xsubscribe | python | def xsubscribe(func=None, strategy='DESIGNATION'):
if func is None:
return partial(xsubscribe, strategy=strategy)
else:
wrapper = _get_subscribe_decorator(func)
wrapper.is_xsubscribe = True
wrapper.strategy = strategy
return wrapper | Used to listen for publications from a specific endpoint of a service. If multiple instances
subscribe to an endpoint, only one of them receives the event. And the publish event is retried till
an acknowledgment is received from the other end.
:param func: the function to decorate with. The name of the function is the event subscribers will subscribe to.
:param strategy: The strategy of delivery. Can be 'RANDOM' or 'LEADER'. If 'RANDOM', then the event will be randomly
passed to any one of the interested parties. If 'LEADER' then it is passed to the first instance alive
which registered for that endpoint. | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/services.py#L53-L69 | [
"def _get_subscribe_decorator(func):\n @wraps(func)\n def wrapper(*args, **kwargs):\n coroutine_func = func\n if not iscoroutine(func):\n coroutine_func = coroutine(func)\n return (async(coroutine_func(*args, **kwargs)))\n\n return wrapper\n"
] | import asyncio
import json
import logging
import socket
import time
from asyncio import iscoroutine, coroutine, wait_for, TimeoutError, Future, get_event_loop, async
from functools import wraps, partial
import setproctitle
from again.utils import unique_hex
from aiohttp.web import Response
from retrial.retrial.retry import retry
from trellio.packet import ControlPacket
from .exceptions import RequestException, ClientException, TrellioServiceException
from .packet import MessagePacket
from .utils.helpers import Singleton # we need non singleton subclasses
from .utils.helpers import default_preflight_response
from .utils.ordered_class_member import OrderedClassMembers
from .utils.stats import Aggregator, Stats
from .views import HTTPView
API_TIMEOUT = 60 * 10
def publish(func):
    """Decorator: broadcast the wrapped method's return value as a publication.

    The wrapped method must return a dict payload; any ``'self'`` key is
    stripped before the payload is handed to ``self._publish`` under the
    method's name.  The decorated call always returns ``None`` to the caller.
    """

    @wraps(func)
    def outgoing(instance, *args, **kwargs):
        message = func(instance, *args, **kwargs)
        message.pop('self', None)
        instance._publish(func.__name__, message)

    # Tag so the registry wiring can discover publish endpoints.
    outgoing.is_publish = True
    return outgoing
def subscribe(func):
    """Decorator marking a handler for publications from a remote service.

    The returned callable schedules the handler on the event loop (via
    ``_get_subscribe_decorator``) and is tagged with ``is_subscribe`` so the
    registry can discover it.
    """
    handler = _get_subscribe_decorator(func)
    handler.is_subscribe = True
    return handler
def _get_subscribe_decorator(func):
    # Wrap *func* so each invocation is scheduled on the event loop as a task.
    @wraps(func)
    def wrapper(*args, **kwargs):
        coroutine_func = func
        # Plain functions are promoted to coroutines before scheduling.
        if not iscoroutine(func):
            coroutine_func = coroutine(func)
        # NOTE(review): asyncio.async() is the legacy (pre-3.5) spelling of
        # ensure_future(); 'async' is a reserved keyword from Python 3.7, so
        # this module only runs on older interpreters.
        return (async(coroutine_func(*args, **kwargs)))

    return wrapper
def request(func=None, timeout=600):
    """Decorator for client methods that issue a TCP request to a service.

    The wrapped method returns a params dict; ``'self'``, ``'entity'`` and
    ``'app_name'`` are extracted from it, a fresh request id is attached, and
    the request is dispatched via ``_send_request``.  Returns the
    pending-response future.  Supports both ``@request`` and
    ``@request(timeout=...)`` usage.
    """
    if func is None:
        # Parameterized form: hand back a decorator bound to *timeout*.
        return partial(request, timeout=timeout)

    @wraps(func)
    def dispatch(client, *args, **kwargs):
        params = func(client, *args, **kwargs)
        # The params dict carries the client instance plus routing metadata.
        client = params.pop('self', None)
        target_entity = params.pop('entity', None)
        service = params.pop('app_name', None)
        params['request_id'] = unique_hex()
        return client._send_request(service, endpoint=func.__name__, entity=target_entity,
                                    params=params, timeout=timeout)

    dispatch.is_request = True
    return dispatch
def api(func=None, timeout=API_TIMEOUT):
    """Decorator exposing a method as an incoming request/response TCP endpoint.

    Each endpoint receives ``request_id``, ``entity`` and ``from_id`` keyword
    arguments followed by the caller's own kwargs; its return value becomes
    the response payload.  Usable bare (``@api``) or parameterized
    (``@api(timeout=...)``).
    """
    if func is None:
        # Parameterized form: return a decorator with *timeout* baked in.
        return partial(api, timeout=timeout)
    return _get_api_decorator(func=func, timeout=timeout)
def apideprecated(func=None, replacement_api=None):
    """Decorator for deprecated TCP endpoints.

    Behaves like :func:`api` but stamps responses with the old endpoint name
    and, when given, the ``replacement_api`` clients should migrate to.
    """
    if func is None:
        # Parameterized form: @apideprecated(replacement_api='new_name')
        return partial(apideprecated, replacement_api=replacement_api)
    return _get_api_decorator(func=func, old_api=func.__name__,
                              replacement_api=replacement_api)
def _get_api_decorator(func=None, old_api=None, replacement_api=None, timeout=API_TIMEOUT):
    # Shared implementation behind @api and @apideprecated: runs the endpoint
    # under a timeout, records stats, and packs the result/error into a
    # response packet addressed back to the requester.
    @coroutine
    @wraps(func)
    def wrapper(*args, **kwargs):
        _logger = logging.getLogger(__name__)
        start_time = int(time.time() * 1000)  # wall-clock ms, for latency stats
        self = args[0]
        # Routing metadata injected by the transport layer; stripped before
        # the user endpoint is invoked.
        rid = kwargs.pop('request_id')
        entity = kwargs.pop('entity')
        from_id = kwargs.pop('from_id')
        wrapped_func = func
        result = None
        error = None
        failed = False
        status = 'successful'
        success = True
        if not iscoroutine(func):
            wrapped_func = coroutine(func)
        Stats.tcp_stats['total_requests'] += 1
        try:
            result = yield from wait_for(wrapped_func(self, **kwargs), timeout)
        except TimeoutError as e:
            # Endpoint exceeded *timeout*; reported as a failed response.
            Stats.tcp_stats['timedout'] += 1
            error = str(e)
            status = 'timeout'
            success = False
            failed = True
            _logger.exception("TCP request had a timeout for method %s", func.__name__)
        except TrellioServiceException as e:
            # Domain error raised deliberately by the endpoint: counted as a
            # response, not a failure (failed stays False).
            Stats.tcp_stats['total_responses'] += 1
            error = str(e)
            status = 'handled_error'
            _logger.exception('Handled exception %s for method %s ', e.__class__.__name__, func.__name__)
        except Exception as e:
            Stats.tcp_stats['total_errors'] += 1
            error = str(e)
            status = 'unhandled_error'
            success = False
            failed = True
            _logger.exception('Unhandled exception %s for method %s ', e.__class__.__name__, func.__name__)
        else:
            Stats.tcp_stats['total_responses'] += 1
        end_time = int(time.time() * 1000)
        hostname = socket.gethostname()
        # NOTE(review): service name recovered from the process title set at
        # startup — assumes the title is '<prefix>_<name parts>_<suffix>'.
        service_name = '_'.join(setproctitle.getproctitle().split('_')[1:-1])
        logd = {
            'endpoint': func.__name__,
            'time_taken': end_time - start_time,
            'hostname': hostname, 'service_name': service_name
        }
        _logger.debug('Time taken for %s is %d milliseconds', func.__name__, end_time - start_time)
        # call to update aggregator, designed to replace the stats module.
        Aggregator.update_stats(endpoint=func.__name__, status=status, success=success,
                                server_type='tcp', time_taken=end_time - start_time)
        if not old_api:
            return self._make_response_packet(request_id=rid, from_id=from_id, entity=entity, result=result,
                                              error=error, failed=failed)
        else:
            # Deprecated endpoint: include migration hints in the packet.
            return self._make_response_packet(request_id=rid, from_id=from_id, entity=entity, result=result,
                                              error=error, failed=failed, old_api=old_api,
                                              replacement_api=replacement_api)

    wrapper.is_api = True
    return wrapper
def make_request(func, self, args, kwargs, method):
    # Build the HTTP request described by *func*'s returned params dict and
    # send it through the client's HTTP bus.  Generator-based coroutine:
    # must be driven by the event loop.
    params = func(self, *args, **kwargs)
    entity = params.pop('entity', None)
    app_name = params.pop('app_name', None)
    # The params dict carries the client instance under 'self'.
    self = params.pop('self')
    response = yield from self._send_http_request(app_name, method, entity, params)
    return response
def _enable_http_middleware(func):  # pre and post http, processing
    # Run each registered middleware's pre_request hook before *func* and its
    # post_request hook after.  A truthy hook return short-circuits the
    # handler (used e.g. for auth rejections); a hook exception becomes a
    # 400 JSON response tagged with the middleware's 'middleware_info'.
    @wraps(func)
    async def f(self, *args, **kwargs):
        if hasattr(self, 'middlewares'):
            for i in self.middlewares:
                if hasattr(i, 'pre_request'):
                    pre_request = getattr(i, 'pre_request')
                    if callable(pre_request):
                        try:
                            res = await pre_request(self, *args, **kwargs)  # passing service as first argument
                            if res:
                                return res
                        except Exception as e:
                            return Response(status=400, content_type='application/json',
                                            body=json.dumps(
                                                {'error': str(e), 'sector': getattr(i, 'middleware_info')}).encode())
        # Promote the (possibly generator-based) handler before awaiting it.
        _func = coroutine(func)  # func is a generator object
        result = await _func(self, *args, **kwargs)
        if hasattr(self, 'middlewares'):
            for i in self.middlewares:
                if hasattr(i, 'post_request'):
                    post_request = getattr(i, 'post_request')
                    if callable(post_request):
                        try:
                            # post hooks also see the handler's result.
                            res = await post_request(self, result, *args, **kwargs)
                            if res:
                                return res
                        except Exception as e:
                            return Response(status=400, content_type='application/json',
                                            body=json.dumps(
                                                {'error': str(e), 'sector': getattr(i, 'middleware_info')}).encode())
        return result

    return f
def get_decorated_fun(method, path, required_params, timeout):
    # Factory shared by all HTTP verb decorators (get/post/put/...):
    # *method* is the HTTP verb, *path* one route or a list of routes,
    # *required_params* query parameters that must be present, and *timeout*
    # the per-request handler timeout in seconds.
    def decorator(func):
        @wraps(func)
        @_enable_http_middleware
        def f(self, *args, **kwargs):
            if isinstance(self, HTTPServiceClient):
                # Client side: the decorated method describes an outgoing request.
                return (yield from make_request(func, self, args, kwargs, method))
            elif isinstance(self, HTTPService) or isinstance(self, HTTPView):
                # Server side: stats bookkeeping, validation, timeout handling.
                Stats.http_stats['total_requests'] += 1
                if required_params is not None:
                    req = args[0]
                    query_params = req.GET
                    params = required_params
                    if not isinstance(required_params, list):
                        params = [required_params]
                    missing_params = list(filter(lambda x: x not in query_params, params))
                    if len(missing_params) > 0:
                        # Reject early with 400 when required query params are absent.
                        res_d = {'error': 'Required params {} not found'.format(','.join(missing_params))}
                        Stats.http_stats['total_responses'] += 1
                        Aggregator.update_stats(endpoint=func.__name__, status=400, success=False,
                                                server_type='http', time_taken=0)
                        return Response(status=400, content_type='application/json', body=json.dumps(res_d).encode())
                t1 = time.time()
                wrapped_func = func
                success = True
                _logger = logging.getLogger()
                if not iscoroutine(func):
                    wrapped_func = coroutine(func)
                try:
                    result = yield from wait_for(wrapped_func(self, *args, **kwargs), timeout)
                except TimeoutError as e:
                    Stats.http_stats['timedout'] += 1
                    status = 'timeout'
                    success = False
                    _logger.exception("HTTP request had a timeout for method %s", func.__name__)
                    return Response(status=408, body='Request Timeout'.encode())
                except TrellioServiceException as e:
                    # Deliberate domain error: counted as a response, re-raised
                    # for the framework's error handling.
                    Stats.http_stats['total_responses'] += 1
                    status = 'handled_exception'
                    _logger.exception('Handled exception %s for method %s ', e.__class__.__name__, func.__name__)
                    raise e
                except Exception as e:
                    Stats.http_stats['total_errors'] += 1
                    status = 'unhandled_exception'
                    success = False
                    _logger.exception('Unhandled exception %s for method %s ', e.__class__.__name__, func.__name__)
                    raise e
                else:
                    t2 = time.time()
                    hostname = socket.gethostname()
                    # NOTE(review): service name recovered from the process
                    # title — assumes '<prefix>_<name parts>_<suffix>' shape.
                    service_name = '_'.join(setproctitle.getproctitle().split('_')[1:-1])
                    status = result.status
                    logd = {
                        'status': result.status,
                        'time_taken': int((t2 - t1) * 1000),
                        'type': 'http',
                        'hostname': hostname, 'service_name': service_name
                    }
                    logging.getLogger('stats').debug(logd)
                    Stats.http_stats['total_responses'] += 1
                    return result
                finally:
                    # Runs on every outcome (success, timeout, raise) so the
                    # aggregator always sees the request.
                    t2 = time.time()
                    Aggregator.update_stats(endpoint=func.__name__, status=status, success=success,
                                            server_type='http', time_taken=int((t2 - t1) * 1000))

        # Metadata consumed by the route registration machinery.
        f.is_http_method = True
        f.method = method
        f.paths = path
        if not isinstance(path, list):
            f.paths = [path]
        return f

    return decorator
def get(path=None, required_params=None, timeout=API_TIMEOUT):
    """Route decorator for HTTP GET handlers."""
    return get_decorated_fun('get', path, required_params, timeout)


def head(path=None, required_params=None, timeout=API_TIMEOUT):
    """Route decorator for HTTP HEAD handlers."""
    return get_decorated_fun('head', path, required_params, timeout)


def options(path=None, required_params=None, timeout=API_TIMEOUT):
    """Route decorator for HTTP OPTIONS handlers."""
    return get_decorated_fun('options', path, required_params, timeout)


def patch(path=None, required_params=None, timeout=API_TIMEOUT):
    """Route decorator for HTTP PATCH handlers."""
    return get_decorated_fun('patch', path, required_params, timeout)


def post(path=None, required_params=None, timeout=API_TIMEOUT):
    """Route decorator for HTTP POST handlers."""
    return get_decorated_fun('post', path, required_params, timeout)


def put(path=None, required_params=None, timeout=API_TIMEOUT):
    """Route decorator for HTTP PUT handlers."""
    return get_decorated_fun('put', path, required_params, timeout)
def trace(path=None, required_params=None, timeout=API_TIMEOUT):
    """Route decorator for HTTP TRACE handlers.

    Bug fix: this previously registered handlers under the ``'put'`` method
    (copy-paste from :func:`put`), so TRACE routes were never matched and
    clashed with PUT routes.
    """
    return get_decorated_fun('trace', path, required_params, timeout)
def delete(path=None, required_params=None, timeout=API_TIMEOUT):
    """Route decorator for HTTP DELETE handlers."""
    return get_decorated_fun('delete', path, required_params, timeout)
class _Service:
_PUB_PKT_STR = 'publish'
_REQ_PKT_STR = 'request'
_RES_PKT_STR = 'response'
def __init__(self, service_name, service_version):
self._service_name = service_name.lower()
self._service_version = str(service_version)
self._tcp_bus = None
self._pubsub_bus = None
self._http_bus = None
@property
def name(self):
return self._service_name
@property
def version(self):
return self._service_version
@property
def properties(self):
return self.name, self.version
@staticmethod
def time_future(future: Future, timeout: int):
def timer_callback(f):
if not f.done() and not f.cancelled():
f.set_exception(TimeoutError())
get_event_loop().call_later(timeout, timer_callback, future)
class TCPServiceClient(Singleton, _Service):
    # Client side of a TCP service.  Sends request packets over the TCP bus
    # and resolves the matching futures when responses / publications arrive.

    def __init__(self, service_name, service_version, ssl_context=None):
        if not self.has_inited():  # to maintain singleton behaviour
            super(TCPServiceClient, self).__init__(service_name, service_version)
            # request_id -> Future awaiting the response packet
            self._pending_requests = {}
            self.tcp_bus = None
            self._ssl_context = ssl_context
            self.init_done()

    @property
    def ssl_context(self):
        return self._ssl_context

    def _send_request(self, app_name, endpoint, entity, params, timeout):
        # Build and send a request packet; returns a Future that resolves
        # when the response arrives, or fails on send error / timeout.
        packet = MessagePacket.request(self.name, self.version, app_name, _Service._REQ_PKT_STR, endpoint, params,
                                       entity)
        future = Future()
        request_id = params['request_id']
        self._pending_requests[request_id] = future
        try:
            self.tcp_bus.send(packet)
        except ClientException:
            # No live node for the target service: fail the future immediately.
            if not future.done() and not future.cancelled():
                error = 'Client not found'
                exception = ClientException(error)
                exception.error = error
                future.set_exception(exception)
        # Arm the timeout regardless; a resolved future is left untouched.
        _Service.time_future(future, timeout)
        return future

    def receive(self, packet: dict, protocol, transport):
        # Transport entry point: pings are ignored, everything else is
        # treated as a response.
        if packet['type'] == 'ping':
            pass
        else:
            self._process_response(packet)

    def process_packet(self, packet):
        # Dispatch by packet type: response or publication.
        if packet['type'] == _Service._RES_PKT_STR:
            self._process_response(packet)
        elif packet['type'] == _Service._PUB_PKT_STR:
            self._process_publication(packet)
        else:
            print('Invalid packet', packet)

    def _process_response(self, packet):
        # Resolve the pending future for this request_id with the payload's
        # result, or fail it with the payload's error.
        payload = packet['payload']
        request_id = payload['request_id']
        has_result = 'result' in payload
        has_error = 'error' in payload
        if 'old_api' in payload:
            # Server flagged the endpoint as deprecated: log a migration hint.
            warning = 'Deprecated API: ' + payload['old_api']
            if 'replacement_api' in payload:
                warning += ', New API: ' + payload['replacement_api']
            logging.getLogger().warning(warning)
        future = self._pending_requests.pop(request_id)
        if has_result:
            if not future.done() and not future.cancelled():
                future.set_result(payload['result'])
        elif has_error:
            if payload.get('failed', False):
                # Hard failure (timeout/unhandled error on the server).
                if not future.done() and not future.cancelled():
                    future.set_exception(Exception(payload['error']))
            else:
                # Handled domain error: surfaced as RequestException.
                exception = RequestException()
                exception.error = payload['error']
                if not future.done() and not future.cancelled():
                    future.set_exception(exception)
        else:
            print('Invalid response to request:', packet)

    def _process_publication(self, packet):
        # Route a publication to the subscriber method of the same name.
        endpoint = packet['endpoint']
        func = getattr(self, endpoint)
        func(**packet['payload'])

    def _handle_connection_lost(self):
        # Peer went away: deregister every known node of this service from
        # the local registry view.
        vendor = self.tcp_bus._registry_client._get_full_service_name(self.name, self.version)
        for host, port, node_id, service_type in self.tcp_bus._registry_client._available_services[vendor]:
            packet = ControlPacket.deregister(self.name, self.version, node_id)
            self.tcp_bus._registry_client._handle_deregistration(packet)
class _ServiceHost(_Service):
    """Common base for TCP/HTTP service hosts.

    Adds a unique node id, the bind address, and the list of client
    instances whose buses are wired through this host.
    """

    def __init__(self, service_name, service_version, host_ip, host_port):
        super(_ServiceHost, self).__init__(service_name, service_version)
        self._node_id = unique_hex()  # identifies this host instance in the registry
        self._ip = host_ip
        self._port = host_port
        self._clients = []

    def is_for_me(self, service, version):
        """Return True when an incoming (service, version) pair addresses this host."""
        return service == self.name and version == self.version

    @property
    def node_id(self):
        return self._node_id

    @property
    def tcp_bus(self):
        return self._tcp_bus

    @tcp_bus.setter
    def tcp_bus(self, bus):
        # Hand the new bus to every TCP client, then keep our own reference.
        for client in self._clients:
            if isinstance(client, TCPServiceClient):
                client.tcp_bus = bus
        self._tcp_bus = bus

    @property
    def http_bus(self):
        return self._http_bus

    @http_bus.setter
    def http_bus(self, bus):
        # BUG FIX: this setter previously assigned clients ``self._http_bus``
        # — the *old* value (still None on the first assignment) — instead of
        # the incoming ``bus``.  Clients now receive the new bus, consistent
        # with the tcp_bus setter above.
        for client in self._clients:
            if isinstance(client, HTTPServiceClient):
                client._http_bus = bus
        self._http_bus = bus

    # @property
    # def pubsub_bus(self):
    #     return self._pubsub_bus
    #
    # @pubsub_bus.setter
    # def pubsub_bus(self, bus):
    #     self._pubsub_bus = bus

    @property
    def clients(self):
        return self._clients

    @clients.setter
    def clients(self, clients):
        self._clients = clients

    @property
    def socket_address(self):
        """(ip, port) pair the host binds to."""
        return self._ip, self._port

    @property
    def host(self):
        return self._ip

    @property
    def port(self):
        return self._port

    def initiate(self):
        """Register this host with the service registry over the TCP bus."""
        self.tcp_bus.register()
        # yield from self.pubsub_bus.create_pubsub_handler()
        # async(self.pubsub_bus.register_for_subscription(self.host, self.port, self.node_id, self.clients))
class TCPService(_ServiceHost):
    # Host side of a TCP service: holds the SSL context and builds the
    # response packets returned by the @api decorators.

    def __init__(self, service_name, service_version, host_ip=None, host_port=None, ssl_context=None):
        super(TCPService, self).__init__(service_name, service_version, host_ip, host_port)
        self._ssl_context = ssl_context

    @property
    def ssl_context(self):
        # Optional SSL context used when the TCP server is created.
        return self._ssl_context

    # def _publish(self, endpoint, payload):
    #     self._pubsub_bus.publish(self.name, self.version, endpoint, payload)
    #
    # def _xpublish(self, endpoint, payload, strategy):
    #     self._pubsub_bus.xpublish(self.name, self.version, endpoint, payload, strategy)

    @staticmethod
    def _make_response_packet(request_id: str, from_id: str, entity: str, result: object, error: object,
                              failed: bool, old_api=None, replacement_api=None):
        # Build the wire-format response dict for a completed api call.
        # On failure only the error (plus the failed flag) is sent;
        # otherwise only the result.
        if failed:
            payload = {'request_id': request_id, 'error': error, 'failed': failed}
        else:
            payload = {'request_id': request_id, 'result': result}
        # Deprecation metadata lets clients log a migration warning.
        if old_api:
            payload['old_api'] = old_api
        if replacement_api:
            payload['replacement_api'] = replacement_api
        packet = {'pid': unique_hex(),
                  'to': from_id,
                  'entity': entity,
                  'type': _Service._RES_PKT_STR,
                  'payload': payload}
        return packet
class HTTPService(_ServiceHost, metaclass=OrderedClassMembers):
    # Host side of an HTTP service; ships built-in /ping and /_stats routes.
    # OrderedClassMembers preserves method declaration order — presumably so
    # routes register in source order; confirm against the host wiring.

    def __init__(self, service_name, service_version, host_ip=None, host_port=None, ssl_context=None,
                 allow_cross_domain=True,
                 preflight_response=default_preflight_response):
        super(HTTPService, self).__init__(service_name, service_version, host_ip, host_port)
        self._ssl_context = ssl_context
        # CORS behaviour: whether cross-domain calls are allowed and what the
        # OPTIONS preflight answer looks like.
        self._allow_cross_domain = allow_cross_domain
        self._preflight_response = preflight_response

    @property
    def ssl_context(self):
        return self._ssl_context

    @property
    def cross_domain_allowed(self):
        return self._allow_cross_domain

    @property
    def preflight_response(self):
        return self._preflight_response

    @get('/ping')
    def pong(self, _):
        # Liveness probe: empty 200 response.
        return Response()

    @get('/_stats')
    def stats(self, _):
        # Expose aggregated request statistics as JSON.
        res_d = Aggregator.dump_stats()
        return Response(status=200, content_type='application/json', body=json.dumps(res_d).encode())
class HTTPServiceClient(Singleton, _Service):
    # Client side of an HTTP service; one singleton instance per service.

    def __init__(self, service_name, service_version):
        # Singleton guard: only initialise on first construction.
        if not self.has_inited():
            super(HTTPServiceClient, self).__init__(service_name, service_version)
            self.init_done()

    def _send_http_request(self, app_name, method, entity, params):
        # Forward the request over the HTTP bus.  Generator-based coroutine:
        # must be driven by the event loop.
        response = yield from self._http_bus.send_http_request(app_name, self.name, self.version, method, entity,
                                                               params)
        return response
|
quikmile/trellio | trellio/services.py | request | python | def request(func=None, timeout=600):
if func is None:
return partial(request, timeout=timeout)
@wraps(func)
def wrapper(self, *args, **kwargs):
params = func(self, *args, **kwargs)
self = params.pop('self', None)
entity = params.pop('entity', None)
app_name = params.pop('app_name', None)
request_id = unique_hex()
params['request_id'] = request_id
future = self._send_request(app_name, endpoint=func.__name__, entity=entity, params=params, timeout=timeout)
return future
wrapper.is_request = True
return wrapper | use to request an api call from a specific endpoint | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/services.py#L83-L102 | null | import asyncio
import json
import logging
import socket
import time
from asyncio import iscoroutine, coroutine, wait_for, TimeoutError, Future, get_event_loop, async
from functools import wraps, partial
import setproctitle
from again.utils import unique_hex
from aiohttp.web import Response
from retrial.retrial.retry import retry
from trellio.packet import ControlPacket
from .exceptions import RequestException, ClientException, TrellioServiceException
from .packet import MessagePacket
from .utils.helpers import Singleton # we need non singleton subclasses
from .utils.helpers import default_preflight_response
from .utils.ordered_class_member import OrderedClassMembers
from .utils.stats import Aggregator, Stats
from .views import HTTPView
API_TIMEOUT = 60 * 10
def publish(func):
"""
publish the return value of this function as a message from this endpoint
"""
@wraps(func)
def wrapper(self, *args, **kwargs): # outgoing
payload = func(self, *args, **kwargs)
payload.pop('self', None)
self._publish(func.__name__, payload)
return None
wrapper.is_publish = True
return wrapper
def subscribe(func):
"""
use to listen for publications from a specific endpoint of a service,
this method receives a publication from a remote service
"""
wrapper = _get_subscribe_decorator(func)
wrapper.is_subscribe = True
return wrapper
def xsubscribe(func=None, strategy='DESIGNATION'):
    """
    Used to listen for publications from a specific endpoint of a service. If multiple instances
    subscribe to an endpoint, only one of them receives the event. And the publish event is retried till
    an acknowledgment is received from the other end.
    :param func: the function to decorate with. The name of the function is the event subscribers will subscribe to.
    :param strategy: The strategy of delivery. Can be 'RANDOM' or 'LEADER'. If 'RANDOM', then the event will be randomly
        passed to any one of the interested parties. If 'LEADER' then it is passed to the first instance alive
        which registered for that endpoint.
    """
    if func is None:
        # Parameterized form: @xsubscribe(strategy=...) returns the decorator.
        return partial(xsubscribe, strategy=strategy)
    else:
        wrapper = _get_subscribe_decorator(func)
        # Tags consumed by the registry: exclusive-subscription marker plus
        # the chosen delivery strategy.
        wrapper.is_xsubscribe = True
        wrapper.strategy = strategy
        return wrapper
def _get_subscribe_decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
coroutine_func = func
if not iscoroutine(func):
coroutine_func = coroutine(func)
return (async(coroutine_func(*args, **kwargs)))
return wrapper
def api(func=None, timeout=API_TIMEOUT): # incoming
"""
provide a request/response api
receives any requests here and return value is the response
all functions must have the following signature
- request_id
- entity (partition/routing key)
followed by kwargs
"""
if func is None:
return partial(api, timeout=timeout)
else:
wrapper = _get_api_decorator(func=func, timeout=timeout)
return wrapper
def apideprecated(func=None, replacement_api=None):
if func is None:
return partial(apideprecated, replacement_api=replacement_api)
else:
wrapper = _get_api_decorator(func=func, old_api=func.__name__, replacement_api=replacement_api)
return wrapper
def _get_api_decorator(func=None, old_api=None, replacement_api=None, timeout=API_TIMEOUT):
@coroutine
@wraps(func)
def wrapper(*args, **kwargs):
_logger = logging.getLogger(__name__)
start_time = int(time.time() * 1000)
self = args[0]
rid = kwargs.pop('request_id')
entity = kwargs.pop('entity')
from_id = kwargs.pop('from_id')
wrapped_func = func
result = None
error = None
failed = False
status = 'successful'
success = True
if not iscoroutine(func):
wrapped_func = coroutine(func)
Stats.tcp_stats['total_requests'] += 1
try:
result = yield from wait_for(wrapped_func(self, **kwargs), timeout)
except TimeoutError as e:
Stats.tcp_stats['timedout'] += 1
error = str(e)
status = 'timeout'
success = False
failed = True
_logger.exception("TCP request had a timeout for method %s", func.__name__)
except TrellioServiceException as e:
Stats.tcp_stats['total_responses'] += 1
error = str(e)
status = 'handled_error'
_logger.exception('Handled exception %s for method %s ', e.__class__.__name__, func.__name__)
except Exception as e:
Stats.tcp_stats['total_errors'] += 1
error = str(e)
status = 'unhandled_error'
success = False
failed = True
_logger.exception('Unhandled exception %s for method %s ', e.__class__.__name__, func.__name__)
else:
Stats.tcp_stats['total_responses'] += 1
end_time = int(time.time() * 1000)
hostname = socket.gethostname()
service_name = '_'.join(setproctitle.getproctitle().split('_')[1:-1])
logd = {
'endpoint': func.__name__,
'time_taken': end_time - start_time,
'hostname': hostname, 'service_name': service_name
}
_logger.debug('Time taken for %s is %d milliseconds', func.__name__, end_time - start_time)
# call to update aggregator, designed to replace the stats module.
Aggregator.update_stats(endpoint=func.__name__, status=status, success=success,
server_type='tcp', time_taken=end_time - start_time)
if not old_api:
return self._make_response_packet(request_id=rid, from_id=from_id, entity=entity, result=result,
error=error, failed=failed)
else:
return self._make_response_packet(request_id=rid, from_id=from_id, entity=entity, result=result,
error=error, failed=failed, old_api=old_api,
replacement_api=replacement_api)
wrapper.is_api = True
return wrapper
def make_request(func, self, args, kwargs, method):
params = func(self, *args, **kwargs)
entity = params.pop('entity', None)
app_name = params.pop('app_name', None)
self = params.pop('self')
response = yield from self._send_http_request(app_name, method, entity, params)
return response
def _enable_http_middleware(func): # pre and post http, processing
@wraps(func)
async def f(self, *args, **kwargs):
if hasattr(self, 'middlewares'):
for i in self.middlewares:
if hasattr(i, 'pre_request'):
pre_request = getattr(i, 'pre_request')
if callable(pre_request):
try:
res = await pre_request(self, *args, **kwargs) # passing service as first argument
if res:
return res
except Exception as e:
return Response(status=400, content_type='application/json',
body=json.dumps(
{'error': str(e), 'sector': getattr(i, 'middleware_info')}).encode())
_func = coroutine(func) # func is a generator object
result = await _func(self, *args, **kwargs)
if hasattr(self, 'middlewares'):
for i in self.middlewares:
if hasattr(i, 'post_request'):
post_request = getattr(i, 'post_request')
if callable(post_request):
try:
res = await post_request(self, result, *args, **kwargs)
if res:
return res
except Exception as e:
return Response(status=400, content_type='application/json',
body=json.dumps(
{'error': str(e), 'sector': getattr(i, 'middleware_info')}).encode())
return result
return f
def get_decorated_fun(method, path, required_params, timeout):
def decorator(func):
@wraps(func)
@_enable_http_middleware
def f(self, *args, **kwargs):
if isinstance(self, HTTPServiceClient):
return (yield from make_request(func, self, args, kwargs, method))
elif isinstance(self, HTTPService) or isinstance(self, HTTPView):
Stats.http_stats['total_requests'] += 1
if required_params is not None:
req = args[0]
query_params = req.GET
params = required_params
if not isinstance(required_params, list):
params = [required_params]
missing_params = list(filter(lambda x: x not in query_params, params))
if len(missing_params) > 0:
res_d = {'error': 'Required params {} not found'.format(','.join(missing_params))}
Stats.http_stats['total_responses'] += 1
Aggregator.update_stats(endpoint=func.__name__, status=400, success=False,
server_type='http', time_taken=0)
return Response(status=400, content_type='application/json', body=json.dumps(res_d).encode())
t1 = time.time()
wrapped_func = func
success = True
_logger = logging.getLogger()
if not iscoroutine(func):
wrapped_func = coroutine(func)
try:
result = yield from wait_for(wrapped_func(self, *args, **kwargs), timeout)
except TimeoutError as e:
Stats.http_stats['timedout'] += 1
status = 'timeout'
success = False
_logger.exception("HTTP request had a timeout for method %s", func.__name__)
return Response(status=408, body='Request Timeout'.encode())
except TrellioServiceException as e:
Stats.http_stats['total_responses'] += 1
status = 'handled_exception'
_logger.exception('Handled exception %s for method %s ', e.__class__.__name__, func.__name__)
raise e
except Exception as e:
Stats.http_stats['total_errors'] += 1
status = 'unhandled_exception'
success = False
_logger.exception('Unhandled exception %s for method %s ', e.__class__.__name__, func.__name__)
raise e
else:
t2 = time.time()
hostname = socket.gethostname()
service_name = '_'.join(setproctitle.getproctitle().split('_')[1:-1])
status = result.status
logd = {
'status': result.status,
'time_taken': int((t2 - t1) * 1000),
'type': 'http',
'hostname': hostname, 'service_name': service_name
}
logging.getLogger('stats').debug(logd)
Stats.http_stats['total_responses'] += 1
return result
finally:
t2 = time.time()
Aggregator.update_stats(endpoint=func.__name__, status=status, success=success,
server_type='http', time_taken=int((t2 - t1) * 1000))
f.is_http_method = True
f.method = method
f.paths = path
if not isinstance(path, list):
f.paths = [path]
return f
return decorator
def get(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('get', path, required_params, timeout)
def head(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('head', path, required_params, timeout)
def options(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('options', path, required_params, timeout)
def patch(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('patch', path, required_params, timeout)
def post(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('post', path, required_params, timeout)
def put(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('put', path, required_params, timeout)
def trace(path=None, required_params=None, timeout=API_TIMEOUT):
    """Route decorator for HTTP TRACE handlers.

    Bug fix: this previously registered handlers under the ``'put'`` method
    (copy-paste from :func:`put`), so TRACE routes were never matched and
    clashed with PUT routes.
    """
    return get_decorated_fun('trace', path, required_params, timeout)
def delete(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('delete', path, required_params, timeout)
class _Service:
_PUB_PKT_STR = 'publish'
_REQ_PKT_STR = 'request'
_RES_PKT_STR = 'response'
def __init__(self, service_name, service_version):
self._service_name = service_name.lower()
self._service_version = str(service_version)
self._tcp_bus = None
self._pubsub_bus = None
self._http_bus = None
@property
def name(self):
return self._service_name
@property
def version(self):
return self._service_version
@property
def properties(self):
return self.name, self.version
@staticmethod
def time_future(future: Future, timeout: int):
def timer_callback(f):
if not f.done() and not f.cancelled():
f.set_exception(TimeoutError())
get_event_loop().call_later(timeout, timer_callback, future)
class TCPServiceClient(Singleton, _Service):
def __init__(self, service_name, service_version, ssl_context=None):
if not self.has_inited(): # to maintain singleton behaviour
super(TCPServiceClient, self).__init__(service_name, service_version)
self._pending_requests = {}
self.tcp_bus = None
self._ssl_context = ssl_context
self.init_done()
@property
def ssl_context(self):
return self._ssl_context
def _send_request(self, app_name, endpoint, entity, params, timeout):
packet = MessagePacket.request(self.name, self.version, app_name, _Service._REQ_PKT_STR, endpoint, params,
entity)
future = Future()
request_id = params['request_id']
self._pending_requests[request_id] = future
try:
self.tcp_bus.send(packet)
except ClientException:
if not future.done() and not future.cancelled():
error = 'Client not found'
exception = ClientException(error)
exception.error = error
future.set_exception(exception)
_Service.time_future(future, timeout)
return future
def receive(self, packet: dict, protocol, transport):
if packet['type'] == 'ping':
pass
else:
self._process_response(packet)
def process_packet(self, packet):
if packet['type'] == _Service._RES_PKT_STR:
self._process_response(packet)
elif packet['type'] == _Service._PUB_PKT_STR:
self._process_publication(packet)
else:
print('Invalid packet', packet)
def _process_response(self, packet):
payload = packet['payload']
request_id = payload['request_id']
has_result = 'result' in payload
has_error = 'error' in payload
if 'old_api' in payload:
warning = 'Deprecated API: ' + payload['old_api']
if 'replacement_api' in payload:
warning += ', New API: ' + payload['replacement_api']
logging.getLogger().warning(warning)
future = self._pending_requests.pop(request_id)
if has_result:
if not future.done() and not future.cancelled():
future.set_result(payload['result'])
elif has_error:
if payload.get('failed', False):
if not future.done() and not future.cancelled():
future.set_exception(Exception(payload['error']))
else:
exception = RequestException()
exception.error = payload['error']
if not future.done() and not future.cancelled():
future.set_exception(exception)
else:
print('Invalid response to request:', packet)
def _process_publication(self, packet):
endpoint = packet['endpoint']
func = getattr(self, endpoint)
func(**packet['payload'])
def _handle_connection_lost(self):
vendor = self.tcp_bus._registry_client._get_full_service_name(self.name, self.version)
for host, port, node_id, service_type in self.tcp_bus._registry_client._available_services[vendor]:
packet = ControlPacket.deregister(self.name, self.version, node_id)
self.tcp_bus._registry_client._handle_deregistration(packet)
class _ServiceHost(_Service):
    """Common base for TCP/HTTP service hosts.

    Adds a unique node id, the bind address, and the list of client
    instances whose buses are wired through this host.
    """

    def __init__(self, service_name, service_version, host_ip, host_port):
        super(_ServiceHost, self).__init__(service_name, service_version)
        self._node_id = unique_hex()  # identifies this host instance in the registry
        self._ip = host_ip
        self._port = host_port
        self._clients = []

    def is_for_me(self, service, version):
        """Return True when an incoming (service, version) pair addresses this host."""
        return service == self.name and version == self.version

    @property
    def node_id(self):
        return self._node_id

    @property
    def tcp_bus(self):
        return self._tcp_bus

    @tcp_bus.setter
    def tcp_bus(self, bus):
        # Hand the new bus to every TCP client, then keep our own reference.
        for client in self._clients:
            if isinstance(client, TCPServiceClient):
                client.tcp_bus = bus
        self._tcp_bus = bus

    @property
    def http_bus(self):
        return self._http_bus

    @http_bus.setter
    def http_bus(self, bus):
        # BUG FIX: this setter previously assigned clients ``self._http_bus``
        # — the *old* value (still None on the first assignment) — instead of
        # the incoming ``bus``.  Clients now receive the new bus, consistent
        # with the tcp_bus setter above.
        for client in self._clients:
            if isinstance(client, HTTPServiceClient):
                client._http_bus = bus
        self._http_bus = bus

    # @property
    # def pubsub_bus(self):
    #     return self._pubsub_bus
    #
    # @pubsub_bus.setter
    # def pubsub_bus(self, bus):
    #     self._pubsub_bus = bus

    @property
    def clients(self):
        return self._clients

    @clients.setter
    def clients(self, clients):
        self._clients = clients

    @property
    def socket_address(self):
        """(ip, port) pair the host binds to."""
        return self._ip, self._port

    @property
    def host(self):
        return self._ip

    @property
    def port(self):
        return self._port

    def initiate(self):
        """Register this host with the service registry over the TCP bus."""
        self.tcp_bus.register()
        # yield from self.pubsub_bus.create_pubsub_handler()
        # async(self.pubsub_bus.register_for_subscription(self.host, self.port, self.node_id, self.clients))
class TCPService(_ServiceHost):
    """TCP-exposed service host; builds wire-format response packets."""

    def __init__(self, service_name, service_version, host_ip=None, host_port=None, ssl_context=None):
        super(TCPService, self).__init__(service_name, service_version, host_ip, host_port)
        self._ssl_context = ssl_context

    @property
    def ssl_context(self):
        return self._ssl_context

    # NOTE: pubsub publish helpers are intentionally disabled in this revision.

    @staticmethod
    def _make_response_packet(request_id: str, from_id: str, entity: str, result: object, error: object,
                              failed: bool, old_api=None, replacement_api=None):
        """Assemble the response dict sent back for a completed API call."""
        if failed:
            payload = {'request_id': request_id, 'error': error, 'failed': failed}
        else:
            payload = {'request_id': request_id, 'result': result}
        if old_api:
            payload['old_api'] = old_api
        if replacement_api:
            payload['replacement_api'] = replacement_api
        return {'pid': unique_hex(),
                'to': from_id,
                'entity': entity,
                'type': _Service._RES_PKT_STR,
                'payload': payload}
# HTTP-exposed service host. The OrderedClassMembers metaclass preserves
# endpoint declaration order so routes are registered deterministically.
class HTTPService(_ServiceHost, metaclass=OrderedClassMembers):
def __init__(self, service_name, service_version, host_ip=None, host_port=None, ssl_context=None,
allow_cross_domain=True,
preflight_response=default_preflight_response):
super(HTTPService, self).__init__(service_name, service_version, host_ip, host_port)
self._ssl_context = ssl_context
# when True, an OPTIONS route is added per endpoint for CORS preflight
self._allow_cross_domain = allow_cross_domain
self._preflight_response = preflight_response
@property
def ssl_context(self):
return self._ssl_context
@property
def cross_domain_allowed(self):
return self._allow_cross_domain
@property
def preflight_response(self):
return self._preflight_response
# liveness probe: empty 200 response
@get('/ping')
def pong(self, _):
return Response()
# expose aggregated runtime stats as JSON
@get('/_stats')
def stats(self, _):
res_d = Aggregator.dump_stats()
return Response(status=200, content_type='application/json', body=json.dumps(res_d).encode())
class HTTPServiceClient(Singleton, _Service):
    """Singleton client that calls a remote HTTP service through the HTTP bus."""

    def __init__(self, service_name, service_version):
        # Singleton: perform real initialisation exactly once.
        if not self.has_inited():
            super(HTTPServiceClient, self).__init__(service_name, service_version)
            self.init_done()

    def _send_http_request(self, app_name, method, entity, params):
        """Delegate the HTTP call to the bus and return its response."""
        return (yield from self._http_bus.send_http_request(
            app_name, self.name, self.version, method, entity, params))
|
quikmile/trellio | trellio/services.py | api | python | def api(func=None, timeout=API_TIMEOUT): # incoming
if func is None:
return partial(api, timeout=timeout)
else:
wrapper = _get_api_decorator(func=func, timeout=timeout)
return wrapper | provide a request/response api
Receives any request here; the return value is the response.
All decorated functions must have the following signature:
- request_id
- entity (partition/routing key)
followed by kwargs | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/services.py#L105-L118 | [
"def _get_api_decorator(func=None, old_api=None, replacement_api=None, timeout=API_TIMEOUT):\n @coroutine\n @wraps(func)\n def wrapper(*args, **kwargs):\n _logger = logging.getLogger(__name__)\n start_time = int(time.time() * 1000)\n self = args[0]\n rid = kwargs.pop('request_id')\n entity = kwargs.pop('entity')\n from_id = kwargs.pop('from_id')\n wrapped_func = func\n result = None\n error = None\n failed = False\n\n status = 'successful'\n success = True\n if not iscoroutine(func):\n wrapped_func = coroutine(func)\n\n Stats.tcp_stats['total_requests'] += 1\n\n try:\n result = yield from wait_for(wrapped_func(self, **kwargs), timeout)\n\n except TimeoutError as e:\n Stats.tcp_stats['timedout'] += 1\n error = str(e)\n status = 'timeout'\n success = False\n failed = True\n _logger.exception(\"TCP request had a timeout for method %s\", func.__name__)\n\n except TrellioServiceException as e:\n Stats.tcp_stats['total_responses'] += 1\n error = str(e)\n status = 'handled_error'\n _logger.exception('Handled exception %s for method %s ', e.__class__.__name__, func.__name__)\n\n except Exception as e:\n Stats.tcp_stats['total_errors'] += 1\n error = str(e)\n status = 'unhandled_error'\n success = False\n failed = True\n _logger.exception('Unhandled exception %s for method %s ', e.__class__.__name__, func.__name__)\n else:\n Stats.tcp_stats['total_responses'] += 1\n end_time = int(time.time() * 1000)\n\n hostname = socket.gethostname()\n service_name = '_'.join(setproctitle.getproctitle().split('_')[1:-1])\n\n logd = {\n 'endpoint': func.__name__,\n 'time_taken': end_time - start_time,\n 'hostname': hostname, 'service_name': service_name\n }\n _logger.debug('Time taken for %s is %d milliseconds', func.__name__, end_time - start_time)\n\n # call to update aggregator, designed to replace the stats module.\n Aggregator.update_stats(endpoint=func.__name__, status=status, success=success,\n server_type='tcp', time_taken=end_time - start_time)\n\n if not old_api:\n return 
self._make_response_packet(request_id=rid, from_id=from_id, entity=entity, result=result,\n error=error, failed=failed)\n else:\n return self._make_response_packet(request_id=rid, from_id=from_id, entity=entity, result=result,\n error=error, failed=failed, old_api=old_api,\n replacement_api=replacement_api)\n\n wrapper.is_api = True\n return wrapper\n"
] | import asyncio
import json
import logging
import socket
import time
from asyncio import iscoroutine, coroutine, wait_for, TimeoutError, Future, get_event_loop, async
from functools import wraps, partial
import setproctitle
from again.utils import unique_hex
from aiohttp.web import Response
from retrial.retrial.retry import retry
from trellio.packet import ControlPacket
from .exceptions import RequestException, ClientException, TrellioServiceException
from .packet import MessagePacket
from .utils.helpers import Singleton # we need non singleton subclasses
from .utils.helpers import default_preflight_response
from .utils.ordered_class_member import OrderedClassMembers
from .utils.stats import Aggregator, Stats
from .views import HTTPView
API_TIMEOUT = 60 * 10
def publish(func):
    """Decorator: broadcast the wrapped method's return value as a message
    from an endpoint named after the function. The wrapper returns None.
    """

    @wraps(func)
    def _outgoing(self, *args, **kwargs):
        message = func(self, *args, **kwargs)
        # payload is typically built from locals(); drop the instance ref
        message.pop('self', None)
        self._publish(func.__name__, message)

    _outgoing.is_publish = True
    return _outgoing
def subscribe(func):
    """Decorator marking a handler for publications coming from the
    same-named endpoint of a remote service; delivery schedules the
    handler as an asyncio task.
    """
    decorated = _get_subscribe_decorator(func)
    decorated.is_subscribe = True
    return decorated
def xsubscribe(func=None, strategy='DESIGNATION'):
    """Decorator for exclusive subscriptions: each published event is
    delivered to only one interested instance, chosen per *strategy*
    ('RANDOM' picks an arbitrary instance, 'LEADER' the first live
    registrant), and publication is retried until acknowledged.
    """
    if func is None:
        # parameterized usage: @xsubscribe(strategy=...) — defer until
        # the decorated function arrives
        return partial(xsubscribe, strategy=strategy)
    decorated = _get_subscribe_decorator(func)
    decorated.is_xsubscribe = True
    decorated.strategy = strategy
    return decorated
# Wrap a (possibly plain) handler so that each invocation is scheduled as an
# asyncio task rather than run inline.
def _get_subscribe_decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
coroutine_func = func
# plain functions are promoted to coroutines before scheduling
if not iscoroutine(func):
coroutine_func = coroutine(func)
# NOTE(review): uses the legacy asyncio.async() scheduler (pre-3.4.4
# name for ensure_future); a syntax error on Python 3.7+ — confirm the
# supported interpreter range.
return (async(coroutine_func(*args, **kwargs)))
return wrapper
def request(func=None, timeout=600):
    """Decorator: turn a params-building method into a TCP request.

    The wrapped function returns a dict of call parameters; 'self',
    'entity' and 'app_name' are extracted from it, a fresh request id is
    attached, and the future from the client's _send_request is returned.
    """
    if func is None:
        # parameterized usage: @request(timeout=...)
        return partial(request, timeout=timeout)

    @wraps(func)
    def _requester(self, *args, **kwargs):
        params = func(self, *args, **kwargs)
        client = params.pop('self', None)
        entity = params.pop('entity', None)
        app_name = params.pop('app_name', None)
        params['request_id'] = unique_hex()
        return client._send_request(app_name, endpoint=func.__name__, entity=entity,
                                    params=params, timeout=timeout)

    _requester.is_request = True
    return _requester
def apideprecated(func=None, replacement_api=None):
    """Like @api, but tags every response with deprecation info so callers
    are warned to migrate to *replacement_api*.
    """
    if func is None:
        # parameterized usage: @apideprecated(replacement_api=...)
        return partial(apideprecated, replacement_api=replacement_api)
    return _get_api_decorator(func=func, old_api=func.__name__,
                              replacement_api=replacement_api)
# Core TCP API wrapper: runs the handler under a timeout, records stats per
# outcome (success / timeout / handled / unhandled error), and packages the
# result or error into a response packet for the caller.
def _get_api_decorator(func=None, old_api=None, replacement_api=None, timeout=API_TIMEOUT):
@coroutine
@wraps(func)
def wrapper(*args, **kwargs):
_logger = logging.getLogger(__name__)
start_time = int(time.time() * 1000)
self = args[0]
# routing fields injected by the bus; removed before calling the handler
rid = kwargs.pop('request_id')
entity = kwargs.pop('entity')
from_id = kwargs.pop('from_id')
wrapped_func = func
result = None
error = None
failed = False
status = 'successful'
success = True
# plain functions are promoted to coroutines so wait_for can drive them
if not iscoroutine(func):
wrapped_func = coroutine(func)
Stats.tcp_stats['total_requests'] += 1
try:
result = yield from wait_for(wrapped_func(self, **kwargs), timeout)
except TimeoutError as e:
Stats.tcp_stats['timedout'] += 1
error = str(e)
status = 'timeout'
success = False
failed = True
_logger.exception("TCP request had a timeout for method %s", func.__name__)
except TrellioServiceException as e:
# domain errors still count as responses; failed stays False so the
# client raises a RequestException rather than a generic Exception
Stats.tcp_stats['total_responses'] += 1
error = str(e)
status = 'handled_error'
_logger.exception('Handled exception %s for method %s ', e.__class__.__name__, func.__name__)
except Exception as e:
Stats.tcp_stats['total_errors'] += 1
error = str(e)
status = 'unhandled_error'
success = False
failed = True
_logger.exception('Unhandled exception %s for method %s ', e.__class__.__name__, func.__name__)
else:
Stats.tcp_stats['total_responses'] += 1
end_time = int(time.time() * 1000)
hostname = socket.gethostname()
# service name is recovered from the process title set at startup
service_name = '_'.join(setproctitle.getproctitle().split('_')[1:-1])
logd = {
'endpoint': func.__name__,
'time_taken': end_time - start_time,
'hostname': hostname, 'service_name': service_name
}
_logger.debug('Time taken for %s is %d milliseconds', func.__name__, end_time - start_time)
# call to update aggregator, designed to replace the stats module.
Aggregator.update_stats(endpoint=func.__name__, status=status, success=success,
server_type='tcp', time_taken=end_time - start_time)
if not old_api:
return self._make_response_packet(request_id=rid, from_id=from_id, entity=entity, result=result,
error=error, failed=failed)
else:
# deprecated endpoint: annotate the response so clients log a warning
return self._make_response_packet(request_id=rid, from_id=from_id, entity=entity, result=result,
error=error, failed=failed, old_api=old_api,
replacement_api=replacement_api)
wrapper.is_api = True
return wrapper
def make_request(func, self, args, kwargs, method):
    """Generator helper driving an HTTP client call.

    Calls *func* to build the request parameter dict, extracts the routing
    fields ('entity', 'app_name', 'self') from it, and delegates the call
    to the client's _send_http_request, returning its response.
    """
    params = func(self, *args, **kwargs)
    entity = params.pop('entity', None)
    app_name = params.pop('app_name', None)
    client = params.pop('self')
    return (yield from client._send_http_request(app_name, method, entity, params))
# Wrap an HTTP handler with optional pre/post middleware hooks. A middleware
# object on self.middlewares may define pre_request / post_request; a truthy
# return from either short-circuits the handler and is returned as-is, and
# any exception is converted into a 400 JSON error tagged with the
# middleware's middleware_info.
def _enable_http_middleware(func): # pre and post http, processing
@wraps(func)
async def f(self, *args, **kwargs):
if hasattr(self, 'middlewares'):
for i in self.middlewares:
if hasattr(i, 'pre_request'):
pre_request = getattr(i, 'pre_request')
if callable(pre_request):
try:
res = await pre_request(self, *args, **kwargs) # passing service as first argument
if res:
return res
except Exception as e:
return Response(status=400, content_type='application/json',
body=json.dumps(
{'error': str(e), 'sector': getattr(i, 'middleware_info')}).encode())
_func = coroutine(func) # func is a generator object
result = await _func(self, *args, **kwargs)
if hasattr(self, 'middlewares'):
for i in self.middlewares:
if hasattr(i, 'post_request'):
post_request = getattr(i, 'post_request')
if callable(post_request):
try:
# post hooks also receive the handler's result
res = await post_request(self, result, *args, **kwargs)
if res:
return res
except Exception as e:
return Response(status=400, content_type='application/json',
body=json.dumps(
{'error': str(e), 'sector': getattr(i, 'middleware_info')}).encode())
return result
return f
# Factory behind the @get/@post/... decorators. The produced wrapper behaves
# differently by receiver: on an HTTPServiceClient it *sends* the request;
# on an HTTPService/HTTPView it *serves* it (query-param validation, timeout,
# stats accounting, logging).
def get_decorated_fun(method, path, required_params, timeout):
def decorator(func):
@wraps(func)
@_enable_http_middleware
def f(self, *args, **kwargs):
if isinstance(self, HTTPServiceClient):
# client side: build and dispatch the outgoing HTTP request
return (yield from make_request(func, self, args, kwargs, method))
elif isinstance(self, HTTPService) or isinstance(self, HTTPView):
Stats.http_stats['total_requests'] += 1
if required_params is not None:
req = args[0]
query_params = req.GET
params = required_params
# a single required param may be given as a bare string
if not isinstance(required_params, list):
params = [required_params]
missing_params = list(filter(lambda x: x not in query_params, params))
if len(missing_params) > 0:
res_d = {'error': 'Required params {} not found'.format(','.join(missing_params))}
Stats.http_stats['total_responses'] += 1
Aggregator.update_stats(endpoint=func.__name__, status=400, success=False,
server_type='http', time_taken=0)
return Response(status=400, content_type='application/json', body=json.dumps(res_d).encode())
t1 = time.time()
wrapped_func = func
success = True
_logger = logging.getLogger()
if not iscoroutine(func):
wrapped_func = coroutine(func)
try:
result = yield from wait_for(wrapped_func(self, *args, **kwargs), timeout)
except TimeoutError as e:
Stats.http_stats['timedout'] += 1
status = 'timeout'
success = False
_logger.exception("HTTP request had a timeout for method %s", func.__name__)
return Response(status=408, body='Request Timeout'.encode())
except TrellioServiceException as e:
Stats.http_stats['total_responses'] += 1
status = 'handled_exception'
_logger.exception('Handled exception %s for method %s ', e.__class__.__name__, func.__name__)
raise e
except Exception as e:
Stats.http_stats['total_errors'] += 1
status = 'unhandled_exception'
success = False
_logger.exception('Unhandled exception %s for method %s ', e.__class__.__name__, func.__name__)
raise e
else:
t2 = time.time()
hostname = socket.gethostname()
# service name recovered from the process title set at startup
service_name = '_'.join(setproctitle.getproctitle().split('_')[1:-1])
status = result.status
logd = {
'status': result.status,
'time_taken': int((t2 - t1) * 1000),
'type': 'http',
'hostname': hostname, 'service_name': service_name
}
logging.getLogger('stats').debug(logd)
Stats.http_stats['total_responses'] += 1
return result
finally:
# t2 recomputed here so the aggregator sees total elapsed time,
# including the exception paths above
t2 = time.time()
Aggregator.update_stats(endpoint=func.__name__, status=status, success=success,
server_type='http', time_taken=int((t2 - t1) * 1000))
# metadata consumed by the router when registering endpoints
f.is_http_method = True
f.method = method
f.paths = path
if not isinstance(path, list):
f.paths = [path]
return f
return decorator
# Per-verb convenience decorators; each binds the HTTP method name into
# get_decorated_fun and forwards path / required_params / timeout.
def get(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('get', path, required_params, timeout)
def head(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('head', path, required_params, timeout)
def options(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('options', path, required_params, timeout)
def patch(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('patch', path, required_params, timeout)
def post(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('post', path, required_params, timeout)
def put(path=None, required_params=None, timeout=API_TIMEOUT):
return get_decorated_fun('put', path, required_params, timeout)
def trace(path=None, required_params=None, timeout=API_TIMEOUT):
    # BUG FIX: previously passed 'put', registering TRACE endpoints under
    # the PUT method; bind the correct verb.
    return get_decorated_fun('trace', path, required_params, timeout)


def delete(path=None, required_params=None, timeout=API_TIMEOUT):
    return get_decorated_fun('delete', path, required_params, timeout)
# Common base for all hosted services and service clients: identity
# (name/version) plus shared packet-type constants and a future-timeout helper.
class _Service:
_PUB_PKT_STR = 'publish'
_REQ_PKT_STR = 'request'
_RES_PKT_STR = 'response'
def __init__(self, service_name, service_version):
# name is normalised to lowercase; version is always kept as a string
self._service_name = service_name.lower()
self._service_version = str(service_version)
self._tcp_bus = None
self._pubsub_bus = None
self._http_bus = None
@property
def name(self):
return self._service_name
@property
def version(self):
return self._service_version
@property
def properties(self):
return self.name, self.version
# Fail the given future with TimeoutError after `timeout` seconds unless it
# has completed or been cancelled by then.
@staticmethod
def time_future(future: Future, timeout: int):
def timer_callback(f):
if not f.done() and not f.cancelled():
f.set_exception(TimeoutError())
get_event_loop().call_later(timeout, timer_callback, future)
# Singleton client for calling a remote TCP service. Outgoing requests are
# tracked as futures keyed by request_id; incoming packets resolve them.
class TCPServiceClient(Singleton, _Service):
def __init__(self, service_name, service_version, ssl_context=None):
if not self.has_inited(): # to maintain singleton behaviour
super(TCPServiceClient, self).__init__(service_name, service_version)
# request_id -> Future awaiting the response packet
self._pending_requests = {}
self.tcp_bus = None
self._ssl_context = ssl_context
self.init_done()
@property
def ssl_context(self):
return self._ssl_context
# Build and send a request packet; return a Future that resolves when the
# response arrives or fails on send error / timeout.
def _send_request(self, app_name, endpoint, entity, params, timeout):
packet = MessagePacket.request(self.name, self.version, app_name, _Service._REQ_PKT_STR, endpoint, params,
entity)
future = Future()
request_id = params['request_id']
self._pending_requests[request_id] = future
try:
self.tcp_bus.send(packet)
except ClientException:
# no live connection to the target service: fail the future now
if not future.done() and not future.cancelled():
error = 'Client not found'
exception = ClientException(error)
exception.error = error
future.set_exception(exception)
# arm the timeout regardless of send outcome
_Service.time_future(future, timeout)
return future
# Entry point for raw packets from the transport; pings are ignored.
def receive(self, packet: dict, protocol, transport):
if packet['type'] == 'ping':
pass
else:
self._process_response(packet)
def process_packet(self, packet):
if packet['type'] == _Service._RES_PKT_STR:
self._process_response(packet)
elif packet['type'] == _Service._PUB_PKT_STR:
self._process_publication(packet)
else:
print('Invalid packet', packet)
# Resolve the pending future for a response packet with its result or error.
def _process_response(self, packet):
payload = packet['payload']
request_id = payload['request_id']
has_result = 'result' in payload
has_error = 'error' in payload
if 'old_api' in payload:
# server flagged the endpoint as deprecated; surface it in the logs
warning = 'Deprecated API: ' + payload['old_api']
if 'replacement_api' in payload:
warning += ', New API: ' + payload['replacement_api']
logging.getLogger().warning(warning)
# NOTE(review): pop without a default raises KeyError for an unknown or
# duplicate request_id — confirm whether that can happen in practice.
future = self._pending_requests.pop(request_id)
if has_result:
if not future.done() and not future.cancelled():
future.set_result(payload['result'])
elif has_error:
if payload.get('failed', False):
# unhandled server-side failure: generic exception
if not future.done() and not future.cancelled():
future.set_exception(Exception(payload['error']))
else:
# handled domain error: typed RequestException with .error set
exception = RequestException()
exception.error = payload['error']
if not future.done() and not future.cancelled():
future.set_exception(exception)
else:
print('Invalid response to request:', packet)
# Dispatch a publication packet to the same-named handler method on self.
def _process_publication(self, packet):
endpoint = packet['endpoint']
func = getattr(self, endpoint)
func(**packet['payload'])
# Connection dropped: deregister every known node of this service so the
# registry client stops routing to it.
def _handle_connection_lost(self):
vendor = self.tcp_bus._registry_client._get_full_service_name(self.name, self.version)
for host, port, node_id, service_type in self.tcp_bus._registry_client._available_services[vendor]:
packet = ControlPacket.deregister(self.name, self.version, node_id)
self.tcp_bus._registry_client._handle_deregistration(packet)
class _ServiceHost(_Service):
    """A hosted service bound to an ip/port, registrable with the registry.

    Base class for TCPService and HTTPService. Tracks a unique node id, the
    socket address, and any attached service clients, and propagates bus
    instances down to those clients when the buses are wired in.
    """

    def __init__(self, service_name, service_version, host_ip, host_port):
        super(_ServiceHost, self).__init__(service_name, service_version)
        self._node_id = unique_hex()  # unique id for this running instance
        self._ip = host_ip
        self._port = host_port
        self._clients = []

    def is_for_me(self, service, version):
        """Return True if a packet addressed to (service, version) targets this host."""
        return service == self.name and version == self.version

    @property
    def node_id(self):
        return self._node_id

    @property
    def tcp_bus(self):
        return self._tcp_bus

    @tcp_bus.setter
    def tcp_bus(self, bus):
        # Share the new TCP bus with every attached TCP client before storing it.
        for client in self._clients:
            if isinstance(client, TCPServiceClient):
                client.tcp_bus = bus
        self._tcp_bus = bus

    @property
    def http_bus(self):
        return self._http_bus

    @http_bus.setter
    def http_bus(self, bus):
        # BUG FIX: the original assigned the *old* self._http_bus (possibly
        # still unset) to the clients; propagate the incoming bus instead.
        for client in self._clients:
            if isinstance(client, HTTPServiceClient):
                client._http_bus = bus
        self._http_bus = bus

    # NOTE: pubsub bus plumbing is intentionally disabled in this revision.

    @property
    def clients(self):
        return self._clients

    @clients.setter
    def clients(self, clients):
        self._clients = clients

    @property
    def socket_address(self):
        """(ip, port) tuple this host listens on."""
        return self._ip, self._port

    @property
    def host(self):
        return self._ip

    @property
    def port(self):
        return self._port

    def initiate(self):
        """Register this host with the service registry over the TCP bus."""
        self.tcp_bus.register()
# TCP-exposed service host; builds the wire-format response packets returned
# to calling clients.
class TCPService(_ServiceHost):
def __init__(self, service_name, service_version, host_ip=None, host_port=None, ssl_context=None):
super(TCPService, self).__init__(service_name, service_version, host_ip, host_port)
self._ssl_context = ssl_context
@property
def ssl_context(self):
return self._ssl_context
# def _publish(self, endpoint, payload):
# self._pubsub_bus.publish(self.name, self.version, endpoint, payload)
#
# def _xpublish(self, endpoint, payload, strategy):
# self._pubsub_bus.xpublish(self.name, self.version, endpoint, payload, strategy)
# Assemble the response dict for a completed API call; deprecation fields
# are attached only when provided.
@staticmethod
def _make_response_packet(request_id: str, from_id: str, entity: str, result: object, error: object,
failed: bool, old_api=None, replacement_api=None):
if failed:
payload = {'request_id': request_id, 'error': error, 'failed': failed}
else:
payload = {'request_id': request_id, 'result': result}
if old_api:
payload['old_api'] = old_api
if replacement_api:
payload['replacement_api'] = replacement_api
packet = {'pid': unique_hex(),
'to': from_id,
'entity': entity,
'type': _Service._RES_PKT_STR,
'payload': payload}
return packet
# HTTP-exposed service host. The OrderedClassMembers metaclass preserves
# endpoint declaration order so routes are registered deterministically.
class HTTPService(_ServiceHost, metaclass=OrderedClassMembers):
def __init__(self, service_name, service_version, host_ip=None, host_port=None, ssl_context=None,
allow_cross_domain=True,
preflight_response=default_preflight_response):
super(HTTPService, self).__init__(service_name, service_version, host_ip, host_port)
self._ssl_context = ssl_context
# when True, an OPTIONS route is added per endpoint for CORS preflight
self._allow_cross_domain = allow_cross_domain
self._preflight_response = preflight_response
@property
def ssl_context(self):
return self._ssl_context
@property
def cross_domain_allowed(self):
return self._allow_cross_domain
@property
def preflight_response(self):
return self._preflight_response
# liveness probe: empty 200 response
@get('/ping')
def pong(self, _):
return Response()
# expose aggregated runtime stats as JSON
@get('/_stats')
def stats(self, _):
res_d = Aggregator.dump_stats()
return Response(status=200, content_type='application/json', body=json.dumps(res_d).encode())
# Singleton client that calls a remote HTTP service through the HTTP bus.
class HTTPServiceClient(Singleton, _Service):
def __init__(self, service_name, service_version):
# Singleton: perform the real initialisation exactly once
if not self.has_inited():
super(HTTPServiceClient, self).__init__(service_name, service_version)
self.init_done()
# Delegate the HTTP call to the bus and return its response.
def _send_http_request(self, app_name, method, entity, params):
response = yield from self._http_bus.send_http_request(app_name, self.name, self.version, method, entity,
params)
return response
|
quikmile/trellio | trellio/host.py | Host.configure | python | def configure(cls, host_name: str = '', service_name: str = '', service_version='',
http_host: str = '127.0.0.1', http_port: int = 8000,
tcp_host: str = '127.0.0.1', tcp_port: int = 8001, ssl_context=None,
registry_host: str = "0.0.0.0", registry_port: int = 4500,
pubsub_host: str = "0.0.0.0", pubsub_port: int = 6379, ronin: bool = False):
Host.host_name = host_name
Host.service_name = service_name
Host.service_version = str(service_version)
Host.http_host = http_host
Host.http_port = http_port
Host.tcp_host = tcp_host
Host.tcp_port = tcp_port
Host.registry_host = registry_host
Host.registry_port = registry_port
Host.pubsub_host = pubsub_host
Host.pubsub_port = pubsub_port
Host.ssl_context = ssl_context
Host.ronin = ronin | A convenience method for providing registry and pubsub(redis) endpoints
:param host_name: Used for process name
:param registry_host: IP Address for trellio-registry; default = 0.0.0.0
:param registry_port: Port for trellio-registry; default = 4500
:param pubsub_host: IP Address for pubsub component, usually redis; default = 0.0.0.0
:param pubsub_port: Port for pubsub component; default= 6379
:return: None | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/host.py#L55-L81 | null | class Host:
"""Serves as a static entry point and provides the boilerplate required to host and run a trellio Service.
Example::
Host.configure('SampleService')
Host.attachService(SampleHTTPService())
Host.run()
"""
registry_host = None
registry_port = None
pubsub_host = None
pubsub_port = None
host_name = None
service_name = None
http_host = None
http_port = None
tcp_host = None
tcp_port = None
ssl_context = None
ronin = False # If true, the trellio service runs solo without a registry
_host_id = None
_tcp_service = None
_http_service = None
_publisher = None
_subscribers = []
_tcp_views = []
_http_views = []
_logger = logging.getLogger(__name__)
_smtp_handler = None
@classmethod
def get_http_service(cls):
    """Return the attached HTTPService instance, or None if not attached."""
    # FIX: a stray duplicated @classmethod decorator (left over where the
    # `configure` method body was removed) has been dropped.
    return cls._http_service
@classmethod
def get_tcp_service(cls):
return cls._tcp_service
@classmethod
def get_tcp_clients(cls):
tcp_service = cls.get_tcp_service()
if tcp_service:
return tcp_service.clients
@classmethod
def get_publisher(cls):
return cls._publisher
@classmethod
def get_subscribers(cls):
return cls._subscribers
@classmethod
def get_tcp_views(cls):
return cls._tcp_views
@classmethod
def get_http_views(cls):
return cls._http_views
@classmethod
@deprecated
def attach_service(cls, service):
""" Allows you to attach one TCP and one HTTP service
deprecated:: 2.1.73 use http and tcp specific methods
:param service: A trellio TCP or HTTP service that needs to be hosted
"""
if isinstance(service, HTTPService):
cls._http_service = service
elif isinstance(service, TCPService):
cls._tcp_service = service
else:
cls._logger.error('Invalid argument attached as service')
cls._set_bus(service)
@classmethod
def attach_http_service(cls, http_service: HTTPService):
""" Attaches a service for hosting
:param http_service: A HTTPService instance
"""
if cls._http_service is None:
cls._http_service = http_service
cls._set_bus(http_service)
else:
warnings.warn('HTTP service is already attached')
@classmethod
def attach_tcp_service(cls, tcp_service: TCPService):
""" Attaches a service for hosting
:param tcp_service: A TCPService instance
"""
if cls._tcp_service is None:
cls._tcp_service = tcp_service
cls._set_bus(tcp_service)
else:
warnings.warn('TCP service is already attached')
@classmethod
def attach_http_views(cls, http_views: list):
views_instances = []
for view_class in http_views:
instance = view_class()
instance.host = Host
views_instances.append(instance)
cls._http_views.extend(views_instances)
@classmethod
def attach_tcp_views(cls, tcp_views: list):
views_instances = []
for view_class in tcp_views:
instance = view_class()
instance.host = Host
views_instances.append(instance)
cls._tcp_views.extend(views_instances)
@classmethod
def attach_publisher(cls, publisher: Publisher):
if cls._publisher is None:
cls._publisher = publisher
else:
warnings.warn('Publisher is already attached')
@classmethod
def attach_subscribers(cls, subscribers: list):
if all([isinstance(subscriber, Subscriber) for subscriber in subscribers]):
if not cls._subscribers:
cls._subscribers = subscribers
else:
warnings.warn('Subscribers are already attached')
@classmethod
def run(cls):
""" Fires up the event loop and starts serving attached services
"""
if cls._tcp_service or cls._http_service or cls._http_views or cls._tcp_views:
cls._set_host_id()
cls._setup_logging()
cls._set_process_name()
cls._set_signal_handlers()
cls._start_pubsub()
cls._start_server()
else:
cls._logger.error('No services to host')
@classmethod
def _set_process_name(cls):
from setproctitle import setproctitle
setproctitle('trellio_{}_{}'.format(cls.host_name, cls._host_id))
@classmethod
def _stop(cls, signame: str):
cls._logger.info('\ngot signal {} - exiting'.format(signame))
asyncio.get_event_loop().stop()
@classmethod
def _set_signal_handlers(cls):
asyncio.get_event_loop().add_signal_handler(getattr(signal, 'SIGINT'), partial(cls._stop, 'SIGINT'))
asyncio.get_event_loop().add_signal_handler(getattr(signal, 'SIGTERM'), partial(cls._stop, 'SIGTERM'))
@classmethod
def _create_tcp_server(cls):
if cls._tcp_service:
ssl_context = cls._tcp_service.ssl_context
host_ip, host_port = cls._tcp_service.socket_address
task = asyncio.get_event_loop().create_server(partial(get_trellio_protocol, cls._tcp_service.tcp_bus),
host_ip, host_port, ssl=ssl_context)
result = asyncio.get_event_loop().run_until_complete(task)
return result
@classmethod
def _create_http_server(cls):
if cls._http_service or cls._http_views:
host_ip, host_port = cls.http_host, cls.http_port
ssl_context = cls.ssl_context
handler = cls._make_aiohttp_handler()
task = asyncio.get_event_loop().create_server(handler, host_ip, host_port, ssl=ssl_context)
return asyncio.get_event_loop().run_until_complete(task)
@classmethod
def _make_aiohttp_handler(cls):
app = Application(loop=asyncio.get_event_loop())
if cls._http_service:
for each in cls._http_service.__ordered__:
# iterate all attributes in the service looking for http endpoints and add them
fn = getattr(cls._http_service, each)
if callable(fn) and getattr(fn, 'is_http_method', False):
for path in fn.paths:
app.router.add_route(fn.method, path, fn)
if cls._http_service.cross_domain_allowed:
# add an 'options' for this specific path to make it CORS friendly
app.router.add_route('options', path, cls._http_service.preflight_response)
for view in cls._http_views:
for each in view.__ordered__:
fn = getattr(view, each)
if callable(fn) and getattr(fn, 'is_http_method', False):
for path in fn.paths:
app.router.add_route(fn.method, path, fn)
if view.cross_domain_allowed:
# add an 'options' for this specific path to make it CORS friendly
app.router.add_route('options', path, view.preflight_response)
handler = app.make_handler(access_log=cls._logger)
return handler
@classmethod
def _set_host_id(cls):
from uuid import uuid4
cls._host_id = uuid4()
@classmethod
def _start_server(cls):
tcp_server = cls._create_tcp_server()
http_server = cls._create_http_server()
if not cls.ronin:
if cls._tcp_service:
asyncio.get_event_loop().run_until_complete(cls._tcp_service.tcp_bus.connect())
# if cls._http_service:
# asyncio.get_event_loop().run_until_complete(cls._http_service.tcp_bus.connect())
if tcp_server:
cls._logger.info('Serving TCP on {}'.format(tcp_server.sockets[0].getsockname()))
if http_server:
cls._logger.info('Serving HTTP on {}'.format(http_server.sockets[0].getsockname()))
cls._logger.info("Event loop running forever, press CTRL+C to interrupt.")
cls._logger.info("pid %s: send SIGINT or SIGTERM to exit." % os.getpid())
cls._logger.info("Triggering ServiceReady signal")
asyncio.get_event_loop().run_until_complete(ServiceReady._run())
try:
asyncio.get_event_loop().run_forever()
except Exception as e:
print(e)
finally:
if tcp_server:
tcp_server.close()
asyncio.get_event_loop().run_until_complete(tcp_server.wait_closed())
if http_server:
http_server.close()
asyncio.get_event_loop().run_until_complete(http_server.wait_closed())
asyncio.get_event_loop().close()
@classmethod
def _start_pubsub(cls):
if not cls.ronin:
if cls._publisher:
asyncio.get_event_loop().run_until_complete(cls._publisher.create_pubsub_handler())
for subscriber in cls._subscribers:
asyncio.get_event_loop().run_until_complete(subscriber.create_pubsub_handler())
asyncio.async(subscriber.register_for_subscription())
# Wire a registry client and a TCP bus into the given service: the bus and
# registry client hold mutual references, and the service is attached to the
# bus as its TCP and/or HTTP host before the bus is handed to the service.
@classmethod
def _set_bus(cls, service):
registry_client = RegistryClient(asyncio.get_event_loop(), cls.registry_host, cls.registry_port)
tcp_bus = TCPBus(registry_client)
registry_client.conn_handler = tcp_bus
# pubsub_bus = PubSubBus(cls.pubsub_host, cls.pubsub_port, registry_client) # , cls._tcp_service._ssl_context)
registry_client.bus = tcp_bus
if isinstance(service, TCPService):
tcp_bus.tcp_host = service
if isinstance(service, HTTPService):
tcp_bus.http_host = service
# assigning through the property also propagates the bus to attached clients
service.tcp_bus = tcp_bus
# service.pubsub_bus = pubsub_bus
@classmethod
def _setup_logging(cls):
    """Initialise service logging and kick off periodic stats aggregation."""
    setup_logging('{}'.format(cls.service_name))
    if cls._smtp_handler:
        # Route error e-mails through the root logger's handlers.
        logging.getLogger().addHandler(cls._smtp_handler)
    Stats.service_name = cls.service_name
    Aggregator._service_name = cls.service_name
    Aggregator.periodic_aggregated_stats_logger()
|
quikmile/trellio | trellio/host.py | Host.attach_service | python | def attach_service(cls, service):
if isinstance(service, HTTPService):
cls._http_service = service
elif isinstance(service, TCPService):
cls._tcp_service = service
else:
cls._logger.error('Invalid argument attached as service')
cls._set_bus(service) | Allows you to attach one TCP and one HTTP service
deprecated:: 2.1.73 use http and tcp specific methods
:param service: A trellio TCP or HTTP service that needs to be hosted | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/host.py#L115-L127 | [
"def _set_bus(cls, service):\n registry_client = RegistryClient(asyncio.get_event_loop(), cls.registry_host, cls.registry_port)\n tcp_bus = TCPBus(registry_client)\n registry_client.conn_handler = tcp_bus\n # pubsub_bus = PubSubBus(cls.pubsub_host, cls.pubsub_port, registry_client) # , cls._tcp_service._ssl_context)\n registry_client.bus = tcp_bus\n if isinstance(service, TCPService):\n tcp_bus.tcp_host = service\n if isinstance(service, HTTPService):\n tcp_bus.http_host = service\n service.tcp_bus = tcp_bus\n"
] | class Host:
"""Serves as a static entry point and provides the boilerplate required to host and run a trellio Service.
Example::
Host.configure('SampleService')
Host.attachService(SampleHTTPService())
Host.run()
"""
registry_host = None
registry_port = None
pubsub_host = None
pubsub_port = None
host_name = None
service_name = None
http_host = None
http_port = None
tcp_host = None
tcp_port = None
ssl_context = None
ronin = False # If true, the trellio service runs solo without a registry
_host_id = None
_tcp_service = None
_http_service = None
_publisher = None
_subscribers = []
_tcp_views = []
_http_views = []
_logger = logging.getLogger(__name__)
_smtp_handler = None
@classmethod
def configure(cls, host_name: str = '', service_name: str = '', service_version='',
http_host: str = '127.0.0.1', http_port: int = 8000,
tcp_host: str = '127.0.0.1', tcp_port: int = 8001, ssl_context=None,
registry_host: str = "0.0.0.0", registry_port: int = 4500,
pubsub_host: str = "0.0.0.0", pubsub_port: int = 6379, ronin: bool = False):
""" A convenience method for providing registry and pubsub(redis) endpoints
:param host_name: Used for process name
:param registry_host: IP Address for trellio-registry; default = 0.0.0.0
:param registry_port: Port for trellio-registry; default = 4500
:param pubsub_host: IP Address for pubsub component, usually redis; default = 0.0.0.0
:param pubsub_port: Port for pubsub component; default= 6379
:return: None
"""
Host.host_name = host_name
Host.service_name = service_name
Host.service_version = str(service_version)
Host.http_host = http_host
Host.http_port = http_port
Host.tcp_host = tcp_host
Host.tcp_port = tcp_port
Host.registry_host = registry_host
Host.registry_port = registry_port
Host.pubsub_host = pubsub_host
Host.pubsub_port = pubsub_port
Host.ssl_context = ssl_context
Host.ronin = ronin
@classmethod
def get_http_service(cls):
return cls._http_service
@classmethod
def get_tcp_service(cls):
return cls._tcp_service
@classmethod
def get_tcp_clients(cls):
tcp_service = cls.get_tcp_service()
if tcp_service:
return tcp_service.clients
@classmethod
def get_publisher(cls):
return cls._publisher
@classmethod
def get_subscribers(cls):
return cls._subscribers
@classmethod
def get_tcp_views(cls):
return cls._tcp_views
@classmethod
def get_http_views(cls):
return cls._http_views
@classmethod
@deprecated
@classmethod
def attach_http_service(cls, http_service: HTTPService):
""" Attaches a service for hosting
:param http_service: A HTTPService instance
"""
if cls._http_service is None:
cls._http_service = http_service
cls._set_bus(http_service)
else:
warnings.warn('HTTP service is already attached')
@classmethod
def attach_tcp_service(cls, tcp_service: TCPService):
""" Attaches a service for hosting
:param tcp_service: A TCPService instance
"""
if cls._tcp_service is None:
cls._tcp_service = tcp_service
cls._set_bus(tcp_service)
else:
warnings.warn('TCP service is already attached')
@classmethod
def attach_http_views(cls, http_views: list):
views_instances = []
for view_class in http_views:
instance = view_class()
instance.host = Host
views_instances.append(instance)
cls._http_views.extend(views_instances)
@classmethod
def attach_tcp_views(cls, tcp_views: list):
views_instances = []
for view_class in tcp_views:
instance = view_class()
instance.host = Host
views_instances.append(instance)
cls._tcp_views.extend(views_instances)
@classmethod
def attach_publisher(cls, publisher: Publisher):
if cls._publisher is None:
cls._publisher = publisher
else:
warnings.warn('Publisher is already attached')
@classmethod
def attach_subscribers(cls, subscribers: list):
if all([isinstance(subscriber, Subscriber) for subscriber in subscribers]):
if not cls._subscribers:
cls._subscribers = subscribers
else:
warnings.warn('Subscribers are already attached')
@classmethod
def run(cls):
""" Fires up the event loop and starts serving attached services
"""
if cls._tcp_service or cls._http_service or cls._http_views or cls._tcp_views:
cls._set_host_id()
cls._setup_logging()
cls._set_process_name()
cls._set_signal_handlers()
cls._start_pubsub()
cls._start_server()
else:
cls._logger.error('No services to host')
@classmethod
def _set_process_name(cls):
from setproctitle import setproctitle
setproctitle('trellio_{}_{}'.format(cls.host_name, cls._host_id))
@classmethod
def _stop(cls, signame: str):
cls._logger.info('\ngot signal {} - exiting'.format(signame))
asyncio.get_event_loop().stop()
@classmethod
def _set_signal_handlers(cls):
asyncio.get_event_loop().add_signal_handler(getattr(signal, 'SIGINT'), partial(cls._stop, 'SIGINT'))
asyncio.get_event_loop().add_signal_handler(getattr(signal, 'SIGTERM'), partial(cls._stop, 'SIGTERM'))
@classmethod
def _create_tcp_server(cls):
if cls._tcp_service:
ssl_context = cls._tcp_service.ssl_context
host_ip, host_port = cls._tcp_service.socket_address
task = asyncio.get_event_loop().create_server(partial(get_trellio_protocol, cls._tcp_service.tcp_bus),
host_ip, host_port, ssl=ssl_context)
result = asyncio.get_event_loop().run_until_complete(task)
return result
@classmethod
def _create_http_server(cls):
if cls._http_service or cls._http_views:
host_ip, host_port = cls.http_host, cls.http_port
ssl_context = cls.ssl_context
handler = cls._make_aiohttp_handler()
task = asyncio.get_event_loop().create_server(handler, host_ip, host_port, ssl=ssl_context)
return asyncio.get_event_loop().run_until_complete(task)
@classmethod
def _make_aiohttp_handler(cls):
app = Application(loop=asyncio.get_event_loop())
if cls._http_service:
for each in cls._http_service.__ordered__:
# iterate all attributes in the service looking for http endpoints and add them
fn = getattr(cls._http_service, each)
if callable(fn) and getattr(fn, 'is_http_method', False):
for path in fn.paths:
app.router.add_route(fn.method, path, fn)
if cls._http_service.cross_domain_allowed:
# add an 'options' for this specific path to make it CORS friendly
app.router.add_route('options', path, cls._http_service.preflight_response)
for view in cls._http_views:
for each in view.__ordered__:
fn = getattr(view, each)
if callable(fn) and getattr(fn, 'is_http_method', False):
for path in fn.paths:
app.router.add_route(fn.method, path, fn)
if view.cross_domain_allowed:
# add an 'options' for this specific path to make it CORS friendly
app.router.add_route('options', path, view.preflight_response)
handler = app.make_handler(access_log=cls._logger)
return handler
@classmethod
def _set_host_id(cls):
from uuid import uuid4
cls._host_id = uuid4()
@classmethod
def _start_server(cls):
tcp_server = cls._create_tcp_server()
http_server = cls._create_http_server()
if not cls.ronin:
if cls._tcp_service:
asyncio.get_event_loop().run_until_complete(cls._tcp_service.tcp_bus.connect())
# if cls._http_service:
# asyncio.get_event_loop().run_until_complete(cls._http_service.tcp_bus.connect())
if tcp_server:
cls._logger.info('Serving TCP on {}'.format(tcp_server.sockets[0].getsockname()))
if http_server:
cls._logger.info('Serving HTTP on {}'.format(http_server.sockets[0].getsockname()))
cls._logger.info("Event loop running forever, press CTRL+C to interrupt.")
cls._logger.info("pid %s: send SIGINT or SIGTERM to exit." % os.getpid())
cls._logger.info("Triggering ServiceReady signal")
asyncio.get_event_loop().run_until_complete(ServiceReady._run())
try:
asyncio.get_event_loop().run_forever()
except Exception as e:
print(e)
finally:
if tcp_server:
tcp_server.close()
asyncio.get_event_loop().run_until_complete(tcp_server.wait_closed())
if http_server:
http_server.close()
asyncio.get_event_loop().run_until_complete(http_server.wait_closed())
asyncio.get_event_loop().close()
@classmethod
def _start_pubsub(cls):
if not cls.ronin:
if cls._publisher:
asyncio.get_event_loop().run_until_complete(cls._publisher.create_pubsub_handler())
for subscriber in cls._subscribers:
asyncio.get_event_loop().run_until_complete(subscriber.create_pubsub_handler())
asyncio.async(subscriber.register_for_subscription())
@classmethod
def _set_bus(cls, service):
registry_client = RegistryClient(asyncio.get_event_loop(), cls.registry_host, cls.registry_port)
tcp_bus = TCPBus(registry_client)
registry_client.conn_handler = tcp_bus
# pubsub_bus = PubSubBus(cls.pubsub_host, cls.pubsub_port, registry_client) # , cls._tcp_service._ssl_context)
registry_client.bus = tcp_bus
if isinstance(service, TCPService):
tcp_bus.tcp_host = service
if isinstance(service, HTTPService):
tcp_bus.http_host = service
service.tcp_bus = tcp_bus
# service.pubsub_bus = pubsub_bus
@classmethod
def _setup_logging(cls):
identifier = '{}'.format(cls.service_name)
setup_logging(identifier)
if cls._smtp_handler:
logger = logging.getLogger()
logger.addHandler(cls._smtp_handler)
Stats.service_name = cls.service_name
Aggregator._service_name = cls.service_name
Aggregator.periodic_aggregated_stats_logger()
|
quikmile/trellio | trellio/host.py | Host.attach_http_service | python | def attach_http_service(cls, http_service: HTTPService):
if cls._http_service is None:
cls._http_service = http_service
cls._set_bus(http_service)
else:
warnings.warn('HTTP service is already attached') | Attaches a service for hosting
:param http_service: A HTTPService instance | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/host.py#L130-L138 | [
"def _set_bus(cls, service):\n registry_client = RegistryClient(asyncio.get_event_loop(), cls.registry_host, cls.registry_port)\n tcp_bus = TCPBus(registry_client)\n registry_client.conn_handler = tcp_bus\n # pubsub_bus = PubSubBus(cls.pubsub_host, cls.pubsub_port, registry_client) # , cls._tcp_service._ssl_context)\n registry_client.bus = tcp_bus\n if isinstance(service, TCPService):\n tcp_bus.tcp_host = service\n if isinstance(service, HTTPService):\n tcp_bus.http_host = service\n service.tcp_bus = tcp_bus\n"
] | class Host:
"""Serves as a static entry point and provides the boilerplate required to host and run a trellio Service.
Example::
Host.configure('SampleService')
Host.attachService(SampleHTTPService())
Host.run()
"""
registry_host = None
registry_port = None
pubsub_host = None
pubsub_port = None
host_name = None
service_name = None
http_host = None
http_port = None
tcp_host = None
tcp_port = None
ssl_context = None
ronin = False # If true, the trellio service runs solo without a registry
_host_id = None
_tcp_service = None
_http_service = None
_publisher = None
_subscribers = []
_tcp_views = []
_http_views = []
_logger = logging.getLogger(__name__)
_smtp_handler = None
@classmethod
def configure(cls, host_name: str = '', service_name: str = '', service_version='',
http_host: str = '127.0.0.1', http_port: int = 8000,
tcp_host: str = '127.0.0.1', tcp_port: int = 8001, ssl_context=None,
registry_host: str = "0.0.0.0", registry_port: int = 4500,
pubsub_host: str = "0.0.0.0", pubsub_port: int = 6379, ronin: bool = False):
""" A convenience method for providing registry and pubsub(redis) endpoints
:param host_name: Used for process name
:param registry_host: IP Address for trellio-registry; default = 0.0.0.0
:param registry_port: Port for trellio-registry; default = 4500
:param pubsub_host: IP Address for pubsub component, usually redis; default = 0.0.0.0
:param pubsub_port: Port for pubsub component; default= 6379
:return: None
"""
Host.host_name = host_name
Host.service_name = service_name
Host.service_version = str(service_version)
Host.http_host = http_host
Host.http_port = http_port
Host.tcp_host = tcp_host
Host.tcp_port = tcp_port
Host.registry_host = registry_host
Host.registry_port = registry_port
Host.pubsub_host = pubsub_host
Host.pubsub_port = pubsub_port
Host.ssl_context = ssl_context
Host.ronin = ronin
@classmethod
def get_http_service(cls):
return cls._http_service
@classmethod
def get_tcp_service(cls):
return cls._tcp_service
@classmethod
def get_tcp_clients(cls):
tcp_service = cls.get_tcp_service()
if tcp_service:
return tcp_service.clients
@classmethod
def get_publisher(cls):
return cls._publisher
@classmethod
def get_subscribers(cls):
return cls._subscribers
@classmethod
def get_tcp_views(cls):
return cls._tcp_views
@classmethod
def get_http_views(cls):
return cls._http_views
@classmethod
@deprecated
def attach_service(cls, service):
""" Allows you to attach one TCP and one HTTP service
deprecated:: 2.1.73 use http and tcp specific methods
:param service: A trellio TCP or HTTP service that needs to be hosted
"""
if isinstance(service, HTTPService):
cls._http_service = service
elif isinstance(service, TCPService):
cls._tcp_service = service
else:
cls._logger.error('Invalid argument attached as service')
cls._set_bus(service)
@classmethod
@classmethod
def attach_tcp_service(cls, tcp_service: TCPService):
""" Attaches a service for hosting
:param tcp_service: A TCPService instance
"""
if cls._tcp_service is None:
cls._tcp_service = tcp_service
cls._set_bus(tcp_service)
else:
warnings.warn('TCP service is already attached')
@classmethod
def attach_http_views(cls, http_views: list):
views_instances = []
for view_class in http_views:
instance = view_class()
instance.host = Host
views_instances.append(instance)
cls._http_views.extend(views_instances)
@classmethod
def attach_tcp_views(cls, tcp_views: list):
views_instances = []
for view_class in tcp_views:
instance = view_class()
instance.host = Host
views_instances.append(instance)
cls._tcp_views.extend(views_instances)
@classmethod
def attach_publisher(cls, publisher: Publisher):
if cls._publisher is None:
cls._publisher = publisher
else:
warnings.warn('Publisher is already attached')
@classmethod
def attach_subscribers(cls, subscribers: list):
if all([isinstance(subscriber, Subscriber) for subscriber in subscribers]):
if not cls._subscribers:
cls._subscribers = subscribers
else:
warnings.warn('Subscribers are already attached')
@classmethod
def run(cls):
""" Fires up the event loop and starts serving attached services
"""
if cls._tcp_service or cls._http_service or cls._http_views or cls._tcp_views:
cls._set_host_id()
cls._setup_logging()
cls._set_process_name()
cls._set_signal_handlers()
cls._start_pubsub()
cls._start_server()
else:
cls._logger.error('No services to host')
@classmethod
def _set_process_name(cls):
from setproctitle import setproctitle
setproctitle('trellio_{}_{}'.format(cls.host_name, cls._host_id))
@classmethod
def _stop(cls, signame: str):
cls._logger.info('\ngot signal {} - exiting'.format(signame))
asyncio.get_event_loop().stop()
@classmethod
def _set_signal_handlers(cls):
asyncio.get_event_loop().add_signal_handler(getattr(signal, 'SIGINT'), partial(cls._stop, 'SIGINT'))
asyncio.get_event_loop().add_signal_handler(getattr(signal, 'SIGTERM'), partial(cls._stop, 'SIGTERM'))
@classmethod
def _create_tcp_server(cls):
if cls._tcp_service:
ssl_context = cls._tcp_service.ssl_context
host_ip, host_port = cls._tcp_service.socket_address
task = asyncio.get_event_loop().create_server(partial(get_trellio_protocol, cls._tcp_service.tcp_bus),
host_ip, host_port, ssl=ssl_context)
result = asyncio.get_event_loop().run_until_complete(task)
return result
@classmethod
def _create_http_server(cls):
if cls._http_service or cls._http_views:
host_ip, host_port = cls.http_host, cls.http_port
ssl_context = cls.ssl_context
handler = cls._make_aiohttp_handler()
task = asyncio.get_event_loop().create_server(handler, host_ip, host_port, ssl=ssl_context)
return asyncio.get_event_loop().run_until_complete(task)
@classmethod
def _make_aiohttp_handler(cls):
app = Application(loop=asyncio.get_event_loop())
if cls._http_service:
for each in cls._http_service.__ordered__:
# iterate all attributes in the service looking for http endpoints and add them
fn = getattr(cls._http_service, each)
if callable(fn) and getattr(fn, 'is_http_method', False):
for path in fn.paths:
app.router.add_route(fn.method, path, fn)
if cls._http_service.cross_domain_allowed:
# add an 'options' for this specific path to make it CORS friendly
app.router.add_route('options', path, cls._http_service.preflight_response)
for view in cls._http_views:
for each in view.__ordered__:
fn = getattr(view, each)
if callable(fn) and getattr(fn, 'is_http_method', False):
for path in fn.paths:
app.router.add_route(fn.method, path, fn)
if view.cross_domain_allowed:
# add an 'options' for this specific path to make it CORS friendly
app.router.add_route('options', path, view.preflight_response)
handler = app.make_handler(access_log=cls._logger)
return handler
@classmethod
def _set_host_id(cls):
from uuid import uuid4
cls._host_id = uuid4()
@classmethod
def _start_server(cls):
tcp_server = cls._create_tcp_server()
http_server = cls._create_http_server()
if not cls.ronin:
if cls._tcp_service:
asyncio.get_event_loop().run_until_complete(cls._tcp_service.tcp_bus.connect())
# if cls._http_service:
# asyncio.get_event_loop().run_until_complete(cls._http_service.tcp_bus.connect())
if tcp_server:
cls._logger.info('Serving TCP on {}'.format(tcp_server.sockets[0].getsockname()))
if http_server:
cls._logger.info('Serving HTTP on {}'.format(http_server.sockets[0].getsockname()))
cls._logger.info("Event loop running forever, press CTRL+C to interrupt.")
cls._logger.info("pid %s: send SIGINT or SIGTERM to exit." % os.getpid())
cls._logger.info("Triggering ServiceReady signal")
asyncio.get_event_loop().run_until_complete(ServiceReady._run())
try:
asyncio.get_event_loop().run_forever()
except Exception as e:
print(e)
finally:
if tcp_server:
tcp_server.close()
asyncio.get_event_loop().run_until_complete(tcp_server.wait_closed())
if http_server:
http_server.close()
asyncio.get_event_loop().run_until_complete(http_server.wait_closed())
asyncio.get_event_loop().close()
@classmethod
def _start_pubsub(cls):
if not cls.ronin:
if cls._publisher:
asyncio.get_event_loop().run_until_complete(cls._publisher.create_pubsub_handler())
for subscriber in cls._subscribers:
asyncio.get_event_loop().run_until_complete(subscriber.create_pubsub_handler())
asyncio.async(subscriber.register_for_subscription())
@classmethod
def _set_bus(cls, service):
registry_client = RegistryClient(asyncio.get_event_loop(), cls.registry_host, cls.registry_port)
tcp_bus = TCPBus(registry_client)
registry_client.conn_handler = tcp_bus
# pubsub_bus = PubSubBus(cls.pubsub_host, cls.pubsub_port, registry_client) # , cls._tcp_service._ssl_context)
registry_client.bus = tcp_bus
if isinstance(service, TCPService):
tcp_bus.tcp_host = service
if isinstance(service, HTTPService):
tcp_bus.http_host = service
service.tcp_bus = tcp_bus
# service.pubsub_bus = pubsub_bus
@classmethod
def _setup_logging(cls):
identifier = '{}'.format(cls.service_name)
setup_logging(identifier)
if cls._smtp_handler:
logger = logging.getLogger()
logger.addHandler(cls._smtp_handler)
Stats.service_name = cls.service_name
Aggregator._service_name = cls.service_name
Aggregator.periodic_aggregated_stats_logger()
|
quikmile/trellio | trellio/host.py | Host.attach_tcp_service | python | def attach_tcp_service(cls, tcp_service: TCPService):
if cls._tcp_service is None:
cls._tcp_service = tcp_service
cls._set_bus(tcp_service)
else:
warnings.warn('TCP service is already attached') | Attaches a service for hosting
:param tcp_service: A TCPService instance | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/host.py#L141-L149 | [
"def _set_bus(cls, service):\n registry_client = RegistryClient(asyncio.get_event_loop(), cls.registry_host, cls.registry_port)\n tcp_bus = TCPBus(registry_client)\n registry_client.conn_handler = tcp_bus\n # pubsub_bus = PubSubBus(cls.pubsub_host, cls.pubsub_port, registry_client) # , cls._tcp_service._ssl_context)\n registry_client.bus = tcp_bus\n if isinstance(service, TCPService):\n tcp_bus.tcp_host = service\n if isinstance(service, HTTPService):\n tcp_bus.http_host = service\n service.tcp_bus = tcp_bus\n"
] | class Host:
"""Serves as a static entry point and provides the boilerplate required to host and run a trellio Service.
Example::
Host.configure('SampleService')
Host.attachService(SampleHTTPService())
Host.run()
"""
registry_host = None
registry_port = None
pubsub_host = None
pubsub_port = None
host_name = None
service_name = None
http_host = None
http_port = None
tcp_host = None
tcp_port = None
ssl_context = None
ronin = False # If true, the trellio service runs solo without a registry
_host_id = None
_tcp_service = None
_http_service = None
_publisher = None
_subscribers = []
_tcp_views = []
_http_views = []
_logger = logging.getLogger(__name__)
_smtp_handler = None
@classmethod
def configure(cls, host_name: str = '', service_name: str = '', service_version='',
http_host: str = '127.0.0.1', http_port: int = 8000,
tcp_host: str = '127.0.0.1', tcp_port: int = 8001, ssl_context=None,
registry_host: str = "0.0.0.0", registry_port: int = 4500,
pubsub_host: str = "0.0.0.0", pubsub_port: int = 6379, ronin: bool = False):
""" A convenience method for providing registry and pubsub(redis) endpoints
:param host_name: Used for process name
:param registry_host: IP Address for trellio-registry; default = 0.0.0.0
:param registry_port: Port for trellio-registry; default = 4500
:param pubsub_host: IP Address for pubsub component, usually redis; default = 0.0.0.0
:param pubsub_port: Port for pubsub component; default= 6379
:return: None
"""
Host.host_name = host_name
Host.service_name = service_name
Host.service_version = str(service_version)
Host.http_host = http_host
Host.http_port = http_port
Host.tcp_host = tcp_host
Host.tcp_port = tcp_port
Host.registry_host = registry_host
Host.registry_port = registry_port
Host.pubsub_host = pubsub_host
Host.pubsub_port = pubsub_port
Host.ssl_context = ssl_context
Host.ronin = ronin
@classmethod
def get_http_service(cls):
return cls._http_service
@classmethod
def get_tcp_service(cls):
return cls._tcp_service
@classmethod
def get_tcp_clients(cls):
tcp_service = cls.get_tcp_service()
if tcp_service:
return tcp_service.clients
@classmethod
def get_publisher(cls):
return cls._publisher
@classmethod
def get_subscribers(cls):
return cls._subscribers
@classmethod
def get_tcp_views(cls):
return cls._tcp_views
@classmethod
def get_http_views(cls):
return cls._http_views
@classmethod
@deprecated
def attach_service(cls, service):
""" Allows you to attach one TCP and one HTTP service
deprecated:: 2.1.73 use http and tcp specific methods
:param service: A trellio TCP or HTTP service that needs to be hosted
"""
if isinstance(service, HTTPService):
cls._http_service = service
elif isinstance(service, TCPService):
cls._tcp_service = service
else:
cls._logger.error('Invalid argument attached as service')
cls._set_bus(service)
@classmethod
def attach_http_service(cls, http_service: HTTPService):
""" Attaches a service for hosting
:param http_service: A HTTPService instance
"""
if cls._http_service is None:
cls._http_service = http_service
cls._set_bus(http_service)
else:
warnings.warn('HTTP service is already attached')
@classmethod
@classmethod
def attach_http_views(cls, http_views: list):
views_instances = []
for view_class in http_views:
instance = view_class()
instance.host = Host
views_instances.append(instance)
cls._http_views.extend(views_instances)
@classmethod
def attach_tcp_views(cls, tcp_views: list):
views_instances = []
for view_class in tcp_views:
instance = view_class()
instance.host = Host
views_instances.append(instance)
cls._tcp_views.extend(views_instances)
@classmethod
def attach_publisher(cls, publisher: Publisher):
if cls._publisher is None:
cls._publisher = publisher
else:
warnings.warn('Publisher is already attached')
@classmethod
def attach_subscribers(cls, subscribers: list):
if all([isinstance(subscriber, Subscriber) for subscriber in subscribers]):
if not cls._subscribers:
cls._subscribers = subscribers
else:
warnings.warn('Subscribers are already attached')
@classmethod
def run(cls):
""" Fires up the event loop and starts serving attached services
"""
if cls._tcp_service or cls._http_service or cls._http_views or cls._tcp_views:
cls._set_host_id()
cls._setup_logging()
cls._set_process_name()
cls._set_signal_handlers()
cls._start_pubsub()
cls._start_server()
else:
cls._logger.error('No services to host')
@classmethod
def _set_process_name(cls):
from setproctitle import setproctitle
setproctitle('trellio_{}_{}'.format(cls.host_name, cls._host_id))
@classmethod
def _stop(cls, signame: str):
cls._logger.info('\ngot signal {} - exiting'.format(signame))
asyncio.get_event_loop().stop()
@classmethod
def _set_signal_handlers(cls):
asyncio.get_event_loop().add_signal_handler(getattr(signal, 'SIGINT'), partial(cls._stop, 'SIGINT'))
asyncio.get_event_loop().add_signal_handler(getattr(signal, 'SIGTERM'), partial(cls._stop, 'SIGTERM'))
@classmethod
def _create_tcp_server(cls):
if cls._tcp_service:
ssl_context = cls._tcp_service.ssl_context
host_ip, host_port = cls._tcp_service.socket_address
task = asyncio.get_event_loop().create_server(partial(get_trellio_protocol, cls._tcp_service.tcp_bus),
host_ip, host_port, ssl=ssl_context)
result = asyncio.get_event_loop().run_until_complete(task)
return result
@classmethod
def _create_http_server(cls):
if cls._http_service or cls._http_views:
host_ip, host_port = cls.http_host, cls.http_port
ssl_context = cls.ssl_context
handler = cls._make_aiohttp_handler()
task = asyncio.get_event_loop().create_server(handler, host_ip, host_port, ssl=ssl_context)
return asyncio.get_event_loop().run_until_complete(task)
@classmethod
def _make_aiohttp_handler(cls):
app = Application(loop=asyncio.get_event_loop())
if cls._http_service:
for each in cls._http_service.__ordered__:
# iterate all attributes in the service looking for http endpoints and add them
fn = getattr(cls._http_service, each)
if callable(fn) and getattr(fn, 'is_http_method', False):
for path in fn.paths:
app.router.add_route(fn.method, path, fn)
if cls._http_service.cross_domain_allowed:
# add an 'options' for this specific path to make it CORS friendly
app.router.add_route('options', path, cls._http_service.preflight_response)
for view in cls._http_views:
for each in view.__ordered__:
fn = getattr(view, each)
if callable(fn) and getattr(fn, 'is_http_method', False):
for path in fn.paths:
app.router.add_route(fn.method, path, fn)
if view.cross_domain_allowed:
# add an 'options' for this specific path to make it CORS friendly
app.router.add_route('options', path, view.preflight_response)
handler = app.make_handler(access_log=cls._logger)
return handler
@classmethod
def _set_host_id(cls):
from uuid import uuid4
cls._host_id = uuid4()
@classmethod
def _start_server(cls):
tcp_server = cls._create_tcp_server()
http_server = cls._create_http_server()
if not cls.ronin:
if cls._tcp_service:
asyncio.get_event_loop().run_until_complete(cls._tcp_service.tcp_bus.connect())
# if cls._http_service:
# asyncio.get_event_loop().run_until_complete(cls._http_service.tcp_bus.connect())
if tcp_server:
cls._logger.info('Serving TCP on {}'.format(tcp_server.sockets[0].getsockname()))
if http_server:
cls._logger.info('Serving HTTP on {}'.format(http_server.sockets[0].getsockname()))
cls._logger.info("Event loop running forever, press CTRL+C to interrupt.")
cls._logger.info("pid %s: send SIGINT or SIGTERM to exit." % os.getpid())
cls._logger.info("Triggering ServiceReady signal")
asyncio.get_event_loop().run_until_complete(ServiceReady._run())
try:
asyncio.get_event_loop().run_forever()
except Exception as e:
print(e)
finally:
if tcp_server:
tcp_server.close()
asyncio.get_event_loop().run_until_complete(tcp_server.wait_closed())
if http_server:
http_server.close()
asyncio.get_event_loop().run_until_complete(http_server.wait_closed())
asyncio.get_event_loop().close()
@classmethod
def _start_pubsub(cls):
if not cls.ronin:
if cls._publisher:
asyncio.get_event_loop().run_until_complete(cls._publisher.create_pubsub_handler())
for subscriber in cls._subscribers:
asyncio.get_event_loop().run_until_complete(subscriber.create_pubsub_handler())
asyncio.async(subscriber.register_for_subscription())
@classmethod
def _set_bus(cls, service):
registry_client = RegistryClient(asyncio.get_event_loop(), cls.registry_host, cls.registry_port)
tcp_bus = TCPBus(registry_client)
registry_client.conn_handler = tcp_bus
# pubsub_bus = PubSubBus(cls.pubsub_host, cls.pubsub_port, registry_client) # , cls._tcp_service._ssl_context)
registry_client.bus = tcp_bus
if isinstance(service, TCPService):
tcp_bus.tcp_host = service
if isinstance(service, HTTPService):
tcp_bus.http_host = service
service.tcp_bus = tcp_bus
# service.pubsub_bus = pubsub_bus
@classmethod
def _setup_logging(cls):
    """Initialise host-wide logging (optionally adding the SMTP handler) and
    seed the Stats/Aggregator identifiers with this host's service name."""
    service_id = '{}'.format(cls.service_name)
    setup_logging(service_id)
    if cls._smtp_handler:
        logging.getLogger().addHandler(cls._smtp_handler)
    Stats.service_name = cls.service_name
    Aggregator._service_name = cls.service_name
    Aggregator.periodic_aggregated_stats_logger()
|
quikmile/trellio | trellio/host.py | Host.run | python | def run(cls):
if cls._tcp_service or cls._http_service or cls._http_views or cls._tcp_views:
cls._set_host_id()
cls._setup_logging()
cls._set_process_name()
cls._set_signal_handlers()
cls._start_pubsub()
cls._start_server()
else:
cls._logger.error('No services to host') | Fires up the event loop and starts serving attached services | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/host.py#L185-L196 | [
"def _set_process_name(cls):\n from setproctitle import setproctitle\n setproctitle('trellio_{}_{}'.format(cls.host_name, cls._host_id))\n",
"def _set_signal_handlers(cls):\n asyncio.get_event_loop().add_signal_handler(getattr(signal, 'SIGINT'), partial(cls._stop, 'SIGINT'))\n asyncio.get_event_loop().add_signal_handler(getattr(signal, 'SIGTERM'), partial(cls._stop, 'SIGTERM'))\n",
"def _set_host_id(cls):\n from uuid import uuid4\n cls._host_id = uuid4()\n",
"def _start_server(cls):\n tcp_server = cls._create_tcp_server()\n http_server = cls._create_http_server()\n if not cls.ronin:\n if cls._tcp_service:\n asyncio.get_event_loop().run_until_complete(cls._tcp_service.tcp_bus.connect())\n # if cls._http_service:\n # asyncio.get_event_loop().run_until_complete(cls._http_service.tcp_bus.connect())\n if tcp_server:\n cls._logger.info('Serving TCP on {}'.format(tcp_server.sockets[0].getsockname()))\n if http_server:\n cls._logger.info('Serving HTTP on {}'.format(http_server.sockets[0].getsockname()))\n cls._logger.info(\"Event loop running forever, press CTRL+C to interrupt.\")\n cls._logger.info(\"pid %s: send SIGINT or SIGTERM to exit.\" % os.getpid())\n cls._logger.info(\"Triggering ServiceReady signal\")\n asyncio.get_event_loop().run_until_complete(ServiceReady._run())\n try:\n asyncio.get_event_loop().run_forever()\n except Exception as e:\n print(e)\n finally:\n if tcp_server:\n tcp_server.close()\n asyncio.get_event_loop().run_until_complete(tcp_server.wait_closed())\n\n if http_server:\n http_server.close()\n asyncio.get_event_loop().run_until_complete(http_server.wait_closed())\n\n asyncio.get_event_loop().close()\n",
"def _start_pubsub(cls):\n if not cls.ronin:\n if cls._publisher:\n asyncio.get_event_loop().run_until_complete(cls._publisher.create_pubsub_handler())\n\n for subscriber in cls._subscribers:\n asyncio.get_event_loop().run_until_complete(subscriber.create_pubsub_handler())\n asyncio.async(subscriber.register_for_subscription())\n",
"def _setup_logging(cls):\n identifier = '{}'.format(cls.service_name)\n setup_logging(identifier)\n if cls._smtp_handler:\n logger = logging.getLogger()\n logger.addHandler(cls._smtp_handler)\n Stats.service_name = cls.service_name\n Aggregator._service_name = cls.service_name\n Aggregator.periodic_aggregated_stats_logger()\n"
] | class Host:
"""Serves as a static entry point and provides the boilerplate required to host and run a trellio Service.
Example::
Host.configure('SampleService')
Host.attachService(SampleHTTPService())
Host.run()
"""
# --- endpoints configured via Host.configure() ---
registry_host = None
registry_port = None
pubsub_host = None
pubsub_port = None
host_name = None
service_name = None
http_host = None
http_port = None
tcp_host = None
tcp_port = None
ssl_context = None
ronin = False  # If true, the trellio service runs solo without a registry

# --- internal state populated while attaching services/views ---
_host_id = None
_tcp_service = None
_http_service = None
_publisher = None
_subscribers = []
_tcp_views = []
_http_views = []
_logger = logging.getLogger(__name__)
_smtp_handler = None
@classmethod
def configure(cls, host_name: str = '', service_name: str = '', service_version='',
              http_host: str = '127.0.0.1', http_port: int = 8000,
              tcp_host: str = '127.0.0.1', tcp_port: int = 8001, ssl_context=None,
              registry_host: str = "0.0.0.0", registry_port: int = 4500,
              pubsub_host: str = "0.0.0.0", pubsub_port: int = 6379, ronin: bool = False):
    """ A convenience method for providing registry and pubsub(redis) endpoints

    :param host_name: Used for process name
    :param service_name: Logical service name, used for logging/stats identifiers
    :param service_version: Version identifier; stored on the class as a string
    :param http_host: Bind address for the HTTP server; default = 127.0.0.1
    :param http_port: Port for the HTTP server; default = 8000
    :param tcp_host: Bind address for the TCP server; default = 127.0.0.1
    :param tcp_port: Port for the TCP server; default = 8001
    :param ssl_context: Optional SSLContext used when serving
    :param registry_host: IP Address for trellio-registry; default = 0.0.0.0
    :param registry_port: Port for trellio-registry; default = 4500
    :param pubsub_host: IP Address for pubsub component, usually redis; default = 0.0.0.0
    :param pubsub_port: Port for pubsub component; default= 6379
    :param ronin: When True, the host runs solo without a registry
    :return: None
    """
    # All settings are stored on the Host class itself (Host is a static host).
    Host.host_name = host_name
    Host.service_name = service_name
    Host.service_version = str(service_version)
    Host.http_host = http_host
    Host.http_port = http_port
    Host.tcp_host = tcp_host
    Host.tcp_port = tcp_port
    Host.registry_host = registry_host
    Host.registry_port = registry_port
    Host.pubsub_host = pubsub_host
    Host.pubsub_port = pubsub_port
    Host.ssl_context = ssl_context
    Host.ronin = ronin
@classmethod
def get_http_service(cls):
    """Return the attached HTTPService instance, or None if none attached."""
    return cls._http_service
@classmethod
def get_tcp_service(cls):
    """Return the attached TCPService instance, or None if none attached."""
    return cls._tcp_service
@classmethod
def get_tcp_clients(cls):
    """Return the TCP service's clients, or None when no TCP service is attached."""
    tcp_service = cls.get_tcp_service()
    if tcp_service:
        return tcp_service.clients
@classmethod
def get_publisher(cls):
    """Return the attached Publisher instance, or None if none attached."""
    return cls._publisher
@classmethod
def get_subscribers(cls):
    """Return the list of attached Subscriber instances (possibly empty)."""
    return cls._subscribers
@classmethod
def get_tcp_views(cls):
    """Return the list of attached TCP view instances (possibly empty)."""
    return cls._tcp_views
@classmethod
def get_http_views(cls):
    """Return the list of attached HTTP view instances (possibly empty)."""
    return cls._http_views
@classmethod
@deprecated
def attach_service(cls, service):
    """ Allows you to attach one TCP and one HTTP service

    deprecated:: 2.1.73 use http and tcp specific methods

    :param service: A trellio TCP or HTTP service that needs to be hosted
    """
    if isinstance(service, HTTPService):
        cls._http_service = service
    elif isinstance(service, TCPService):
        cls._tcp_service = service
    else:
        cls._logger.error('Invalid argument attached as service')
    # NOTE(review): _set_bus is called even when the argument was invalid;
    # callers relying on this should use attach_http_service/attach_tcp_service.
    cls._set_bus(service)
@classmethod
def attach_http_service(cls, http_service: HTTPService):
    """Attach the single HTTPService to host; warns when one is already attached.

    :param http_service: A HTTPService instance
    """
    if cls._http_service is not None:
        warnings.warn('HTTP service is already attached')
        return
    cls._http_service = http_service
    cls._set_bus(http_service)
@classmethod
def attach_tcp_service(cls, tcp_service: TCPService):
    """Attach the single TCPService to host; warns when one is already attached.

    :param tcp_service: A TCPService instance
    """
    if cls._tcp_service is not None:
        warnings.warn('TCP service is already attached')
        return
    cls._tcp_service = tcp_service
    cls._set_bus(tcp_service)
@classmethod
def attach_http_views(cls, http_views: list):
    """Instantiate every HTTP view class, bind it to this Host and register it."""
    for view_cls in http_views:
        view = view_cls()
        view.host = Host
        cls._http_views.append(view)
@classmethod
def attach_tcp_views(cls, tcp_views: list):
    """Instantiate every TCP view class, bind it to this Host and register it."""
    for view_cls in tcp_views:
        view = view_cls()
        view.host = Host
        cls._tcp_views.append(view)
@classmethod
def attach_publisher(cls, publisher: Publisher):
    """Attach the single Publisher; warns when one is already attached."""
    if cls._publisher is not None:
        warnings.warn('Publisher is already attached')
        return
    cls._publisher = publisher
@classmethod
def attach_subscribers(cls, subscribers: list):
    """Attach the list of Subscriber instances.

    Previously a list containing any non-Subscriber was silently ignored;
    both failure modes now emit a warning so misconfiguration is visible.

    :param subscribers: list of Subscriber instances
    """
    if not all(isinstance(subscriber, Subscriber) for subscriber in subscribers):
        warnings.warn('All subscribers must be Subscriber instances; none attached')
        return
    if cls._subscribers:
        warnings.warn('Subscribers are already attached')
        return
    cls._subscribers = subscribers
@classmethod
def _set_process_name(cls):
    """Set the OS process title to ``trellio_<host_name>_<host_id>``.

    The decorator was previously applied twice (``@classmethod`` stacked on
    ``@classmethod``); the duplicate has been removed — wrapping a
    classmethod in another classmethod breaks the descriptor protocol.
    """
    # Imported lazily so hosts without setproctitle installed only fail here.
    from setproctitle import setproctitle
    setproctitle('trellio_{}_{}'.format(cls.host_name, cls._host_id))
@classmethod
def _stop(cls, signame: str):
    """Signal handler: log the received signal name and stop the event loop."""
    message = '\ngot signal {} - exiting'.format(signame)
    cls._logger.info(message)
    asyncio.get_event_loop().stop()
@classmethod
def _set_signal_handlers(cls):
    """Route SIGINT and SIGTERM to ``_stop`` so the event loop shuts down cleanly."""
    asyncio.get_event_loop().add_signal_handler(getattr(signal, 'SIGINT'), partial(cls._stop, 'SIGINT'))
    asyncio.get_event_loop().add_signal_handler(getattr(signal, 'SIGTERM'), partial(cls._stop, 'SIGTERM'))
@classmethod
def _create_tcp_server(cls):
    """Create and return the asyncio TCP server for the attached TCPService.

    Returns None when no TCP service is attached. Blocks on the running
    loop until the listening socket is bound.
    """
    if cls._tcp_service:
        ssl_context = cls._tcp_service.ssl_context
        host_ip, host_port = cls._tcp_service.socket_address
        # Each accepted connection gets a protocol wired to the service's TCP bus.
        task = asyncio.get_event_loop().create_server(partial(get_trellio_protocol, cls._tcp_service.tcp_bus),
                                                      host_ip, host_port, ssl=ssl_context)
        result = asyncio.get_event_loop().run_until_complete(task)
        return result
@classmethod
def _create_http_server(cls):
    """Create and return the aiohttp-backed HTTP server.

    Returns None when neither an HTTP service nor HTTP views are attached.
    """
    if cls._http_service or cls._http_views:
        host_ip, host_port = cls.http_host, cls.http_port
        ssl_context = cls.ssl_context
        handler = cls._make_aiohttp_handler()
        task = asyncio.get_event_loop().create_server(handler, host_ip, host_port, ssl=ssl_context)
        return asyncio.get_event_loop().run_until_complete(task)
@classmethod
def _make_aiohttp_handler(cls):
    """Build the aiohttp Application, register all HTTP endpoints from the
    attached HTTP service and views, and return its request handler factory.

    The service loop and the per-view loop were previously duplicated
    verbatim; both now go through ``_add_http_routes``.
    """
    app = Application(loop=asyncio.get_event_loop())
    if cls._http_service:
        cls._add_http_routes(app, cls._http_service)
    for view in cls._http_views:
        cls._add_http_routes(app, view)
    handler = app.make_handler(access_log=cls._logger)
    return handler

@staticmethod
def _add_http_routes(app, endpoint_holder):
    """Register every http-decorated method of *endpoint_holder* on *app*."""
    # iterate all attributes in the holder looking for http endpoints and add them
    for each in endpoint_holder.__ordered__:
        fn = getattr(endpoint_holder, each)
        if callable(fn) and getattr(fn, 'is_http_method', False):
            for path in fn.paths:
                app.router.add_route(fn.method, path, fn)
                if endpoint_holder.cross_domain_allowed:
                    # add an 'options' for this specific path to make it CORS friendly
                    app.router.add_route('options', path, endpoint_holder.preflight_response)
@classmethod
def _set_host_id(cls):
    """Assign a freshly generated UUID4 as this host's identifier."""
    import uuid
    cls._host_id = uuid.uuid4()
@classmethod
def _start_server(cls):
    """Create the TCP/HTTP listeners, connect the TCP bus to the registry,
    fire the ServiceReady signal and run the event loop until interrupted.

    Blocks until the loop is stopped (SIGINT/SIGTERM via ``_stop``), then
    shuts both servers down and closes the loop.
    """
    tcp_server = cls._create_tcp_server()
    http_server = cls._create_http_server()
    if not cls.ronin:
        # "ronin" hosts run solo and never talk to the registry.
        if cls._tcp_service:
            asyncio.get_event_loop().run_until_complete(cls._tcp_service.tcp_bus.connect())
        # if cls._http_service:
        #     asyncio.get_event_loop().run_until_complete(cls._http_service.tcp_bus.connect())
    if tcp_server:
        cls._logger.info('Serving TCP on {}'.format(tcp_server.sockets[0].getsockname()))
    if http_server:
        cls._logger.info('Serving HTTP on {}'.format(http_server.sockets[0].getsockname()))
    cls._logger.info("Event loop running forever, press CTRL+C to interrupt.")
    cls._logger.info("pid %s: send SIGINT or SIGTERM to exit." % os.getpid())
    cls._logger.info("Triggering ServiceReady signal")
    asyncio.get_event_loop().run_until_complete(ServiceReady._run())
    try:
        asyncio.get_event_loop().run_forever()
    except Exception as e:
        print(e)
    finally:
        # Best-effort shutdown: close listeners, wait for them, close the loop.
        if tcp_server:
            tcp_server.close()
            asyncio.get_event_loop().run_until_complete(tcp_server.wait_closed())
        if http_server:
            http_server.close()
            asyncio.get_event_loop().run_until_complete(http_server.wait_closed())
        asyncio.get_event_loop().close()
@classmethod
def _start_pubsub(cls):
    """Connect the attached publisher and subscribers to the pubsub backend
    and schedule each subscriber's subscription registration.

    No-op when running in ronin (registry-less) mode.
    """
    if not cls.ronin:
        if cls._publisher:
            asyncio.get_event_loop().run_until_complete(cls._publisher.create_pubsub_handler())
        for subscriber in cls._subscribers:
            asyncio.get_event_loop().run_until_complete(subscriber.create_pubsub_handler())
            # ``asyncio.async(...)`` is a SyntaxError on Python 3.7+ (``async``
            # became a keyword); ``ensure_future`` is the supported spelling.
            asyncio.ensure_future(subscriber.register_for_subscription())
@classmethod
def _set_bus(cls, service):
    """Create a RegistryClient plus TCPBus pair, cross-wire them, and attach
    the bus to *service* (as tcp_host/http_host depending on service type)."""
    loop = asyncio.get_event_loop()
    client = RegistryClient(loop, cls.registry_host, cls.registry_port)
    bus = TCPBus(client)
    client.conn_handler = bus
    client.bus = bus
    if isinstance(service, TCPService):
        bus.tcp_host = service
    if isinstance(service, HTTPService):
        bus.http_host = service
    service.tcp_bus = bus
@classmethod
def _setup_logging(cls):
    """Initialise host-wide logging (optionally adding the SMTP handler) and
    seed the Stats/Aggregator identifiers with this host's service name."""
    service_id = '{}'.format(cls.service_name)
    setup_logging(service_id)
    if cls._smtp_handler:
        logging.getLogger().addHandler(cls._smtp_handler)
    Stats.service_name = cls.service_name
    Aggregator._service_name = cls.service_name
    Aggregator.periodic_aggregated_stats_logger()
|
quikmile/trellio | trellio/registry.py | registry_dump_handle | python | async def registry_dump_handle(request):
'''
only read
:param request:
:return:
'''
registry = registry_dump_handle.registry
response_dict = {}
repo = registry._repository
response_dict['registered_services'] = repo._registered_services
response_dict['uptimes'] = repo._uptimes
response_dict['service_dependencies'] = repo._service_dependencies
return web.Response(status=400, content_type='application/json', body=json.dumps(response_dict).encode()) | only read
:param request:
:return: | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/registry.py#L398-L410 | null | import asyncio
import json
import logging
import signal
import ssl
import time
from collections import defaultdict, namedtuple
from functools import partial
from again.utils import natural_sort
from aiohttp import web
from .packet import ControlPacket
from .pinger import TCPPinger
from .protocol_factory import get_trellio_protocol
from .utils.log import setup_logging
import os
Service = namedtuple('Service', ['name', 'version', 'dependencies', 'host', 'port', 'node_id', 'type'])
def tree():
    """Return an autovivifying nested dict: missing keys yield another tree."""
    return defaultdict(tree)
def json_file_to_dict(_file: str) -> dict:
    """Parse the JSON file at *_file* and return its contents.

    :param _file: path to a JSON file
    :return: the decoded JSON document (a dict for object-rooted files)
    :raises OSError: if the file cannot be opened
    :raises json.JSONDecodeError: if the file is not valid JSON
    """
    # The old ``config = None`` pre-initialisation was dead code; returning
    # from inside the context manager still closes the file first.  JSON is
    # UTF-8 by spec, so the encoding is pinned instead of locale-dependent.
    with open(_file, encoding='utf-8') as config_file:
        return json.load(config_file)
class Repository:
    """In-memory store of registered services, pending activations, pubsub
    subscriptions and per-node uptimes used by the registry."""

    def __init__(self):
        # name -> version -> [(host, port, node_id, type), ...]
        self._registered_services = defaultdict(lambda: defaultdict(list))
        # "name/version" -> [node_id, ...] still waiting for dependencies
        self._pending_services = defaultdict(list)
        # "name/version" -> list of dependency descriptors
        self._service_dependencies = {}
        # name -> version -> endpoint -> [(name, version, host, port, node_id, strategy), ...]
        self._subscribe_list = defaultdict(lambda: defaultdict(lambda: defaultdict(list)))
        self._uptimes = tree()
        self.logger = logging.getLogger(__name__)

    def register_service(self, service: Service):
        """Record *service* as registered, mark its node pending activation
        and start tracking its uptime."""
        service_name = self._get_full_service_name(service.name, service.version)
        service_entry = (service.host, service.port, service.node_id, service.type)
        self._registered_services[service.name][service.version].append(service_entry)
        # in future there can be multiple nodes for same service, for load balancing purposes
        self._pending_services[service_name].append(service.node_id)

        self._uptimes[service_name][service.host] = {
            'uptime': int(time.time()),
            'node_id': service.node_id
        }

        if service.dependencies:
            # BUG FIX: the guard previously looked up ``service.name``, but the
            # dict is keyed by the full "name/version" string, so it never
            # matched and dependencies were unconditionally overwritten.
            if not self._service_dependencies.get(service_name):
                self._service_dependencies[service_name] = service.dependencies

    def is_pending(self, name, version):
        """True while at least one node of name/version awaits activation."""
        return self._get_full_service_name(name, version) in self._pending_services

    def add_pending_service(self, name, version, node_id):
        self._pending_services[self._get_full_service_name(name, version)].append(node_id)

    def get_pending_services(self):
        """Return pending services as (name, version) tuples."""
        return [self._split_key(k) for k in self._pending_services.keys()]

    def get_pending_instances(self, name, version):
        return self._pending_services.get(self._get_full_service_name(name, version), [])

    def remove_pending_instance(self, name, version, node_id):
        """Drop one pending node; forget the service key once no node is left."""
        self.get_pending_instances(name, version).remove(node_id)
        if not len(self.get_pending_instances(name, version)):
            self._pending_services.pop(self._get_full_service_name(name, version))

    def get_instances(self, name, version):
        return self._registered_services[name][version]

    def get_versioned_instances(self, name, version):
        """Return instances for *version* or the nearest non-breaking one."""
        version = self._get_non_breaking_version(version, list(self._registered_services[name].keys()))
        return self._registered_services[name][version]

    def get_consumers(self, name, service_version):
        """Return the set of (name, version) pairs that depend on name/service_version."""
        consumers = set()
        for _name, dependencies in self._service_dependencies.items():
            for dependency in dependencies:
                if dependency['name'] == name and dependency['version'] == service_version:
                    consumers.add(self._split_key(_name))
        return consumers

    def get_dependencies(self, name, version):
        return self._service_dependencies.get(self._get_full_service_name(name, version), [])

    def get_node(self, node_id):
        """Return the Service for *node_id* (dependencies omitted), or None."""
        for name, versions in self._registered_services.items():
            for version, instances in versions.items():
                for host, port, node, service_type in instances:
                    if node_id == node:
                        return Service(name, version, [], host, port, node, service_type)
        return None

    def remove_node(self, node_id):
        """Remove *node_id* from the registered instances, stamp its downtime
        and log the refreshed uptime report. Always returns None."""
        thehost = None
        for name, versions in self._registered_services.items():
            for version, instances in versions.items():
                for instance in instances:
                    host, port, node, service_type = instance
                    if node_id == node:
                        thehost = host
                        # safe: we break out right after mutating the list
                        instances.remove(instance)
                        break
        for name, nodes in self._uptimes.items():
            for host, uptimes in nodes.items():
                if host == thehost and uptimes['node_id'] == node_id:
                    uptimes['downtime'] = int(time.time())
        self.log_uptimes()
        return None

    def get_uptimes(self):
        return self._uptimes

    def log_uptimes(self):
        """Emit one stats record per tracked (service, host) pair."""
        for name, nodes in self._uptimes.items():
            for host, d in nodes.items():
                now = int(time.time())
                live = d.get('downtime', 0) < d['uptime']
                uptime = now - d['uptime'] if live else 0
                logd = {'service_name': name.split('/')[0], 'hostname': host, 'status': live,
                        'uptime': int(uptime)}
                logging.getLogger('stats').info(logd)

    def xsubscribe(self, name, version, host, port, node_id, endpoints):
        """Register (name/version @ host:port/node_id) as subscriber of *endpoints*."""
        entry = (name, version, host, port, node_id)
        for endpoint in endpoints:
            self._subscribe_list[endpoint['name']][endpoint['version']][endpoint['endpoint']].append(
                entry + (endpoint['strategy'],))

    def get_subscribers(self, name, version, endpoint):
        return self._subscribe_list[name][version][endpoint]

    def _get_non_breaking_version(self, version, versions):
        """Pick *version* if present, else the highest same-major version."""
        if version in versions:
            return version
        versions.sort(key=natural_sort, reverse=True)
        for v in versions:
            if self._is_non_breaking(v, version):
                return v
        return version

    @staticmethod
    def _is_non_breaking(v, version):
        # same major version ⇒ assumed backwards compatible
        return version.split('.')[0] == v.split('.')[0]

    @staticmethod
    def _get_full_service_name(name: str, version):
        return '{}/{}'.format(name, version)

    @staticmethod
    def _split_key(key: str):
        return tuple(key.split('/'))
class Registry:
    """TCP registry server: tracks service registrations, activates services
    once their dependencies are up, pings nodes and notifies consumers."""

    def __init__(self, ip, port, repository: Repository):
        self._ip = ip
        self._port = port
        self._loop = asyncio.get_event_loop()
        self._client_protocols = {}   # node_id -> protocol of the registering client
        self._service_protocols = {}  # node_id -> protocol of our outbound connection
        self._repository = repository
        self._tcp_pingers = {}        # node_id -> TCPPinger
        self._http_pingers = {}       # (host, port) -> pinger (currently unused)
        self.logger = logging.getLogger()
        try:
            config = json_file_to_dict('./config.json')
            self._ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            self._ssl_context.load_cert_chain(config['SSL_CERTIFICATE'], config['SSL_KEY'])
        except Exception:
            # Best-effort TLS: fall back to plaintext when ./config.json is
            # missing or lacks SSL settings.  (Was a bare ``except:``, which
            # also swallowed KeyboardInterrupt/SystemExit.)
            self._ssl_context = None

    def _create_http_app(self):
        """Start the small aiohttp app exposing /registry/ for introspection."""
        app = web.Application()
        registry_dump_handle.registry = self
        app.router.add_get('/registry/', registry_dump_handle)
        handler = app.make_handler(access_log=self.logger)
        task = asyncio.get_event_loop().create_server(handler, self._ip, os.environ.get('TRELLIO_HTTP_PORT', 4501))
        http_server = asyncio.get_event_loop().run_until_complete(task)
        return http_server

    def start(self):
        """Bind the TCP and HTTP listeners and run the loop until stopped."""
        setup_logging("registry")
        self._loop.add_signal_handler(getattr(signal, 'SIGINT'), partial(self._stop, 'SIGINT'))
        self._loop.add_signal_handler(getattr(signal, 'SIGTERM'), partial(self._stop, 'SIGTERM'))
        registry_coroutine = self._loop.create_server(
            partial(get_trellio_protocol, self), self._ip, self._port, ssl=self._ssl_context)
        server = self._loop.run_until_complete(registry_coroutine)
        http_server = self._create_http_app()
        try:
            self._loop.run_forever()
        except Exception as e:
            print(e)
        finally:
            server.close()
            http_server.close()
            self._loop.run_until_complete(server.wait_closed())
            self._loop.close()

    def _stop(self, signame: str):
        print('\ngot signal {} - exiting'.format(signame))
        self._loop.stop()

    def receive(self, packet: dict, protocol, transport):
        """Dispatch an incoming control packet by its ``type`` field."""
        request_type = packet['type']
        if request_type in ['register', 'get_instances', 'xsubscribe', 'get_subscribers']:
            for_log = {}
            params = packet['params']
            for_log["caller_name"] = params['name'] + '/' + params['version']
            for_log["caller_address"] = transport.get_extra_info("peername")[0]
            for_log["request_type"] = request_type
            self.logger.debug(for_log)
        if request_type == 'register':
            # trust the transport-level peer address over any client-supplied host
            packet['params']['host'] = transport.get_extra_info("peername")[0]
            self.register_service(packet, protocol)
        elif request_type == 'get_instances':
            self.get_service_instances(packet, protocol)
        elif request_type == 'xsubscribe':
            self._xsubscribe(packet)
        elif request_type == 'get_subscribers':
            self.get_subscribers(packet, protocol)
        elif request_type == 'pong':
            # note the naming: an incoming 'pong' is handled by _ping()
            self._ping(packet)
        elif request_type == 'ping':
            self._handle_ping(packet, protocol)
        elif request_type == 'uptime_report':
            self._get_uptime_report(packet, protocol)

    def deregister_service(self, host, port, node_id):
        """Forget a node, drop its pingers/protocols and notify consumers."""
        service = self._repository.get_node(node_id)
        self._tcp_pingers.pop(node_id, None)
        self._http_pingers.pop((host, port), None)
        if service:
            for_log = {"caller_name": service.name + '/' + service.version, "caller_address": service.host,
                       "request_type": 'deregister'}
            self.logger.debug(for_log)
        self._repository.remove_node(node_id)
        if service is not None:
            self._service_protocols.pop(node_id, None)
            self._client_protocols.pop(node_id, None)
            self._notify_consumers(service.name, service.version, node_id)
            if not len(self._repository.get_instances(service.name, service.version)):
                # last instance gone: consumers must wait for a new one
                consumers = self._repository.get_consumers(service.name, service.version)
                for consumer_name, consumer_version in consumers:
                    for _, _, node_id, _ in self._repository.get_instances(consumer_name, consumer_version):
                        self._repository.add_pending_service(consumer_name, consumer_version, node_id)

    def register_service(self, packet: dict, registry_protocol):
        """Handle a 'register' packet: store the service, connect back to it,
        re-evaluate pending activations and inform existing consumers."""
        params = packet['params']
        service = Service(params['name'], params['version'], params['dependencies'], params['host'], params['port'],
                          params['node_id'], params['type'])
        self._repository.register_service(service)
        self._client_protocols[params['node_id']] = registry_protocol
        if params['node_id'] not in self._service_protocols.keys():
            self._connect_to_service(params['host'], params['port'], params['node_id'], params['type'])
        self._handle_pending_registrations()
        self._inform_consumers(service)

    def _inform_consumers(self, service: Service):
        """Tell every active consumer about a newly registered instance."""
        consumers = self._repository.get_consumers(service.name, service.version)
        for service_name, service_version in consumers:
            if not self._repository.is_pending(service_name, service_version):
                instances = self._repository.get_instances(service_name, service_version)
                for host, port, node, type in instances:
                    protocol = self._client_protocols[node]
                    protocol.send(ControlPacket.new_instance(
                        service.name, service.version, service.host, service.port, service.node_id, service.type))

    def _send_activated_packet(self, name, version, node):
        protocol = self._client_protocols.get(node, None)
        if protocol:
            packet = self._make_activated_packet(name, version)
            protocol.send(packet)

    def _handle_pending_registrations(self):
        """Activate every pending service whose dependencies all have at
        least one live TCP instance; log the blocker otherwise."""
        for name, version in self._repository.get_pending_services():
            dependencies = self._repository.get_dependencies(name, version)  # list
            should_activate = True
            for dependency in dependencies:
                instances = self._repository.get_versioned_instances(dependency['name'], dependency['version'])  # list
                tcp_instances = [instance for instance in instances if instance[3] == 'tcp']
                if not len(
                        tcp_instances):  # means the dependency doesn't have an activated tcp service, so registration
                    # pending
                    should_activate = False
                    break
            for node in self._repository.get_pending_instances(name, version):  # node is node id
                if should_activate:
                    self._send_activated_packet(name, version, node)
                    self._repository.remove_pending_instance(name, version, node)
                    self.logger.info('%s activated', (name, version))
                else:
                    self.logger.info('%s can\'t register because it depends on %s', (name, version), dependency)

    def _make_activated_packet(self, name, version):
        dependencies = self._repository.get_dependencies(name, version)
        instances = {
            (dependency['name'], dependency['version']): self._repository.get_versioned_instances(dependency['name'],
                                                                                                  dependency['version'])
            for dependency in dependencies}
        return ControlPacket.activated(instances)

    def _connect_to_service(self, host, port, node_id, service_type):
        """Open an outbound connection to a TCP service so we can ping it."""
        if service_type == 'tcp':
            if node_id not in self._service_protocols:
                coroutine = self._loop.create_connection(partial(get_trellio_protocol, self), host, port)
                future = asyncio.ensure_future(coroutine)
                future.add_done_callback(partial(self._handle_service_connection, node_id, host, port))
        elif service_type == 'http':
            pass
            # if not (host, port) in self._http_pingers:
            #     pinger = HTTPPinger(host, port, node_id, self)
            #     self._http_pingers[(host, port)] = pinger
            #     pinger.ping()

    def _handle_service_connection(self, node_id, host, port, future):
        transport, protocol = future.result()
        self._service_protocols[node_id] = protocol
        pinger = TCPPinger(host, port, node_id, protocol, self)
        self._tcp_pingers[node_id] = pinger
        pinger.ping()

    def _notify_consumers(self, name, version, node_id):
        packet = ControlPacket.deregister(name, version, node_id)
        for consumer_name, consumer_version in self._repository.get_consumers(name, version):
            for host, port, node, service_type in self._repository.get_instances(consumer_name, consumer_version):
                protocol = self._client_protocols[node]
                protocol.send(packet)

    def get_service_instances(self, packet, registry_protocol):
        params = packet['params']
        name, version = params['name'].lower(), params['version']
        instances = self._repository.get_instances(name, version)
        instance_packet = ControlPacket.send_instances(name, version, packet['request_id'], instances)
        registry_protocol.send(instance_packet)

    def get_subscribers(self, packet, protocol):
        params = packet['params']
        request_id = packet['request_id']
        name, version, endpoint = params['name'].lower(), params['version'], params['endpoint']
        subscribers = self._repository.get_subscribers(name, version, endpoint)
        packet = ControlPacket.subscribers(name, version, endpoint, request_id, subscribers)
        protocol.send(packet)

    def on_timeout(self, host, port, node_id):
        """Pinger callback: a node stopped answering — deregister it."""
        service = self._repository.get_node(node_id)
        self.logger.debug('%s timed out', service)
        self.deregister_service(host, port, node_id)

    def _ping(self, packet):
        # invoked for incoming 'pong' packets: acknowledge on the pinger
        pinger = self._tcp_pingers[packet['node_id']]
        pinger.pong_received()

    def _pong(self, packet, protocol):
        protocol.send(ControlPacket.pong(packet['node_id']))

    def _xsubscribe(self, packet):
        params = packet['params']
        name, version, host, port, node_id = (
            params['name'], params['version'], params['host'], params['port'], params['node_id'])
        endpoints = params['events']
        self._repository.xsubscribe(name, version, host, port, node_id, endpoints)

    def _get_uptime_report(self, packet, protocol):
        uptimes = self._repository.get_uptimes()
        protocol.send(ControlPacket.uptime(uptimes))

    def periodic_uptime_logger(self):
        """Log uptimes now and reschedule itself every 300 seconds."""
        self._repository.log_uptimes()
        asyncio.get_event_loop().call_later(300, self.periodic_uptime_logger)

    def _handle_ping(self, packet, protocol):
        """ Responds to pings from registry_client only if the node_ids present in the ping payload are registered

        :param packet: The 'ping' packet received
        :param protocol: The protocol on which the pong should be sent
        """
        if 'payload' in packet:
            is_valid_node = True
            node_ids = list(packet['payload'].values())
            for node_id in node_ids:
                if self._repository.get_node(node_id) is None:
                    is_valid_node = False
                    break
            if is_valid_node:
                self._pong(packet, protocol)
        else:
            # no payload to validate — answer unconditionally
            self._pong(packet, protocol)
if __name__ == '__main__':
    # Standalone entry point: run the registry on port 4500.
    from setproctitle import setproctitle

    setproctitle("trellio-registry")
    # None makes the listener bind on all available interfaces.
    REGISTRY_HOST = None
    REGISTRY_PORT = 4500
    registry = Registry(REGISTRY_HOST, REGISTRY_PORT, Repository())
    registry.periodic_uptime_logger()
    registry.start()
|
quikmile/trellio | trellio/registry.py | Registry._handle_ping | python | def _handle_ping(self, packet, protocol):
if 'payload' in packet:
is_valid_node = True
node_ids = list(packet['payload'].values())
for node_id in node_ids:
if self._repository.get_node(node_id) is None:
is_valid_node = False
break
if is_valid_node:
self._pong(packet, protocol)
else:
self._pong(packet, protocol) | Responds to pings from registry_client only if the node_ids present in the ping payload are registered
:param packet: The 'ping' packet received
:param protocol: The protocol on which the pong should be sent | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/registry.py#L379-L395 | [
"def _pong(self, packet, protocol):\n protocol.send(ControlPacket.pong(packet['node_id']))\n"
] | class Registry:
def __init__(self, ip, port, repository: Repository):
self._ip = ip
self._port = port
self._loop = asyncio.get_event_loop()
self._client_protocols = {}
self._service_protocols = {}
self._repository = repository
self._tcp_pingers = {}
self._http_pingers = {}
self.logger = logging.getLogger()
try:
config = json_file_to_dict('./config.json')
self._ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
self._ssl_context.load_cert_chain(config['SSL_CERTIFICATE'], config['SSL_KEY'])
except:
self._ssl_context = None
def _create_http_app(self):
app = web.Application()
registry_dump_handle.registry = self
app.router.add_get('/registry/', registry_dump_handle)
handler = app.make_handler(access_log=self.logger)
task = asyncio.get_event_loop().create_server(handler, self._ip, os.environ.get('TRELLIO_HTTP_PORT', 4501))
http_server = asyncio.get_event_loop().run_until_complete(task)
return http_server
def start(self):
setup_logging("registry")
self._loop.add_signal_handler(getattr(signal, 'SIGINT'), partial(self._stop, 'SIGINT'))
self._loop.add_signal_handler(getattr(signal, 'SIGTERM'), partial(self._stop, 'SIGTERM'))
registry_coroutine = self._loop.create_server(
partial(get_trellio_protocol, self), self._ip, self._port, ssl=self._ssl_context)
server = self._loop.run_until_complete(registry_coroutine)
http_server = self._create_http_app()
try:
self._loop.run_forever()
except Exception as e:
print(e)
finally:
server.close()
http_server.close()
self._loop.run_until_complete(server.wait_closed())
self._loop.close()
def _stop(self, signame: str):
print('\ngot signal {} - exiting'.format(signame))
self._loop.stop()
def receive(self, packet: dict, protocol, transport):
request_type = packet['type']
if request_type in ['register', 'get_instances', 'xsubscribe', 'get_subscribers']:
for_log = {}
params = packet['params']
for_log["caller_name"] = params['name'] + '/' + params['version']
for_log["caller_address"] = transport.get_extra_info("peername")[0]
for_log["request_type"] = request_type
self.logger.debug(for_log)
if request_type == 'register':
packet['params']['host'] = transport.get_extra_info("peername")[0]
self.register_service(packet, protocol)
elif request_type == 'get_instances':
self.get_service_instances(packet, protocol)
elif request_type == 'xsubscribe':
self._xsubscribe(packet)
elif request_type == 'get_subscribers':
self.get_subscribers(packet, protocol)
elif request_type == 'pong':
self._ping(packet)
elif request_type == 'ping':
self._handle_ping(packet, protocol)
elif request_type == 'uptime_report':
self._get_uptime_report(packet, protocol)
def deregister_service(self, host, port, node_id):
service = self._repository.get_node(node_id)
self._tcp_pingers.pop(node_id, None)
self._http_pingers.pop((host, port), None)
if service:
for_log = {"caller_name": service.name + '/' + service.version, "caller_address": service.host,
"request_type": 'deregister'}
self.logger.debug(for_log)
self._repository.remove_node(node_id)
if service is not None:
self._service_protocols.pop(node_id, None)
self._client_protocols.pop(node_id, None)
self._notify_consumers(service.name, service.version, node_id)
if not len(self._repository.get_instances(service.name, service.version)):
consumers = self._repository.get_consumers(service.name, service.version)
for consumer_name, consumer_version in consumers:
for _, _, node_id, _ in self._repository.get_instances(consumer_name, consumer_version):
self._repository.add_pending_service(consumer_name, consumer_version, node_id)
def register_service(self, packet: dict, registry_protocol):
params = packet['params']
service = Service(params['name'], params['version'], params['dependencies'], params['host'], params['port'],
params['node_id'], params['type'])
self._repository.register_service(service)
self._client_protocols[params['node_id']] = registry_protocol
if params['node_id'] not in self._service_protocols.keys():
self._connect_to_service(params['host'], params['port'], params['node_id'], params['type'])
self._handle_pending_registrations()
self._inform_consumers(service)
def _inform_consumers(self, service: Service):
consumers = self._repository.get_consumers(service.name, service.version)
for service_name, service_version in consumers:
if not self._repository.is_pending(service_name, service_version):
instances = self._repository.get_instances(service_name, service_version)
for host, port, node, type in instances:
protocol = self._client_protocols[node]
protocol.send(ControlPacket.new_instance(
service.name, service.version, service.host, service.port, service.node_id, service.type))
def _send_activated_packet(self, name, version, node):
protocol = self._client_protocols.get(node, None)
if protocol:
packet = self._make_activated_packet(name, version)
protocol.send(packet)
def _handle_pending_registrations(self):
for name, version in self._repository.get_pending_services():
dependencies = self._repository.get_dependencies(name, version) # list
should_activate = True
for dependency in dependencies:
instances = self._repository.get_versioned_instances(dependency['name'], dependency['version']) # list
tcp_instances = [instance for instance in instances if instance[3] == 'tcp']
if not len(
tcp_instances): # means the dependency doesn't have an activated tcp service, so registration
# pending
should_activate = False
break
for node in self._repository.get_pending_instances(name, version): # node is node id
if should_activate:
self._send_activated_packet(name, version, node)
self._repository.remove_pending_instance(name, version, node)
self.logger.info('%s activated', (name, version))
else:
self.logger.info('%s can\'t register because it depends on %s', (name, version), dependency)
def _make_activated_packet(self, name, version):
dependencies = self._repository.get_dependencies(name, version)
instances = {
(dependency['name'], dependency['version']): self._repository.get_versioned_instances(dependency['name'],
dependency['version'])
for dependency in dependencies}
return ControlPacket.activated(instances)
def _connect_to_service(self, host, port, node_id, service_type):
if service_type == 'tcp':
if node_id not in self._service_protocols:
coroutine = self._loop.create_connection(partial(get_trellio_protocol, self), host, port)
future = asyncio.ensure_future(coroutine)
future.add_done_callback(partial(self._handle_service_connection, node_id, host, port))
elif service_type == 'http':
pass
# if not (host, port) in self._http_pingers:
# pinger = HTTPPinger(host, port, node_id, self)
# self._http_pingers[(host, port)] = pinger
# pinger.ping()
def _handle_service_connection(self, node_id, host, port, future):
transport, protocol = future.result()
self._service_protocols[node_id] = protocol
pinger = TCPPinger(host, port, node_id, protocol, self)
self._tcp_pingers[node_id] = pinger
pinger.ping()
def _notify_consumers(self, name, version, node_id):
packet = ControlPacket.deregister(name, version, node_id)
for consumer_name, consumer_version in self._repository.get_consumers(name, version):
for host, port, node, service_type in self._repository.get_instances(consumer_name, consumer_version):
protocol = self._client_protocols[node]
protocol.send(packet)
def get_service_instances(self, packet, registry_protocol):
params = packet['params']
name, version = params['name'].lower(), params['version']
instances = self._repository.get_instances(name, version)
instance_packet = ControlPacket.send_instances(name, version, packet['request_id'], instances)
registry_protocol.send(instance_packet)
def get_subscribers(self, packet, protocol):
params = packet['params']
request_id = packet['request_id']
name, version, endpoint = params['name'].lower(), params['version'], params['endpoint']
subscribers = self._repository.get_subscribers(name, version, endpoint)
packet = ControlPacket.subscribers(name, version, endpoint, request_id, subscribers)
protocol.send(packet)
def on_timeout(self, host, port, node_id):
service = self._repository.get_node(node_id)
self.logger.debug('%s timed out', service)
self.deregister_service(host, port, node_id)
def _ping(self, packet):
pinger = self._tcp_pingers[packet['node_id']]
pinger.pong_received()
def _pong(self, packet, protocol):
protocol.send(ControlPacket.pong(packet['node_id']))
def _xsubscribe(self, packet):
params = packet['params']
name, version, host, port, node_id = (
params['name'], params['version'], params['host'], params['port'], params['node_id'])
endpoints = params['events']
self._repository.xsubscribe(name, version, host, port, node_id, endpoints)
def _get_uptime_report(self, packet, protocol):
uptimes = self._repository.get_uptimes()
protocol.send(ControlPacket.uptime(uptimes))
def periodic_uptime_logger(self):
self._repository.log_uptimes()
asyncio.get_event_loop().call_later(300, self.periodic_uptime_logger)
|
quikmile/trellio | trellio/utils/log.py | log | python | def log(fn=None, logger=logging.getLogger(), debug_level=logging.DEBUG):
if fn is None:
return partial(log, logger=logger, debug_level=debug_level)
@wraps(fn)
def func(*args, **kwargs):
arg_string = ""
for i in range(0, len(args)):
var_name = fn.__code__.co_varnames[i]
if var_name not in ['self', 'cls']:
arg_string += var_name + ":" + str(args[i]) + ","
arg_string = arg_string[0:len(arg_string) - 1]
string = (RED + BOLD + '>> ' + END + 'Calling {0}({1})'.format(fn.__name__, arg_string))
if len(kwargs):
string = (
RED + BOLD + '>> ' + END + 'Calling {0} with args {1} and kwargs {2}'.format(fn.__name__, arg_string,
kwargs))
logger.log(debug_level, string)
wrapped_fn = fn
if not asyncio.iscoroutine(fn):
wrapped_fn = asyncio.coroutine(fn)
try:
result = yield from wrapped_fn(*args, **kwargs)
string = BLUE + BOLD + '<< ' + END + 'Return {0} with result :{1}'.format(fn.__name__, result)
logger.log(debug_level, string)
return result
except Exception as e:
string = (RED + BOLD + '>> ' + END + '{0} raised exception :{1}'.format(fn.__name__, str(e)))
logger.log(debug_level, string)
raise e
return func | logs parameters and result - takes no arguments | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/utils/log.py#L143-L177 | null | import asyncio
import datetime
import logging
import logging.config
import sys
from functools import partial, wraps
from logging import Handler
from queue import Queue
from threading import Thread
import yaml
from pythonjsonlogger import jsonlogger
RED = '\033[91m'
BLUE = '\033[94m'
BOLD = '\033[1m'
END = '\033[0m'
class CustomTimeLoggingFormatter(logging.Formatter):
def formatTime(self, record, datefmt=None): # noqa
"""
Overrides formatTime method to use datetime module instead of time module
to display time in microseconds. Time module by default does not resolve
time to microseconds.
"""
if datefmt:
s = datetime.datetime.now().strftime(datefmt)
else:
t = datetime.datetime.now().strftime(self.default_time_format)
s = self.default_msec_format % (t, record.msecs)
return s
class CustomJsonFormatter(jsonlogger.JsonFormatter):
def __init__(self, *args, **kwargs):
self.extrad = kwargs.pop('extrad', {})
super().__init__(*args, **kwargs)
def add_fields(self, log_record, record, message_dict):
message_dict.update(self.extrad)
super().add_fields(log_record, record, message_dict)
def patch_async_emit(handler: Handler):
base_emit = handler.emit
queue = Queue()
def loop():
while True:
record = queue.get()
try:
base_emit(record)
except:
print(sys.exc_info())
def async_emit(record):
queue.put(record)
thread = Thread(target=loop)
thread.daemon = True
thread.start()
handler.emit = async_emit
return handler
def patch_add_handler(logger):
base_add_handler = logger.addHandler
def async_add_handler(handler):
async_handler = patch_async_emit(handler)
base_add_handler(async_handler)
return async_add_handler
DEFAULT_CONFIG_YAML = """
# logging config
version: 1
disable_existing_loggers: False
handlers:
stream:
class: logging.StreamHandler
level: INFO
formatter: ctf
stream: ext://sys.stdout
stats:
class: logging.StreamHandler
level: INFO
formatter: cjf
stream: ext://sys.stdout
service:
class: logging.StreamHandler
level: INFO
formatter: ctf
stream: ext://sys.stdout
formatters:
ctf:
(): trellio.utils.log.CustomTimeLoggingFormatter
format: '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
datefmt: '%Y-%m-%d %H:%M:%S,%f'
cjf:
(): trellio.utils.log.CustomJsonFormatter
format: '{ "timestamp":"%(asctime)s", "message":"%(message)s"}'
datefmt: '%Y-%m-%d %H:%M:%S,%f'
root:
handlers: [stream]
level: INFO
loggers:
registry:
handlers: [stream]
level: INFO
stats:
handlers: [stats]
level: INFO
"""
def setup_logging(_):
try:
with open('config_log.json', 'r') as f:
config_dict = yaml.load(f.read())
except:
config_dict = yaml.load(DEFAULT_CONFIG_YAML)
logging.getLogger('asyncio').setLevel(logging.DEBUG)
logger = logging.getLogger()
logger.propagate = False
logger.handlers = []
logger.addHandler = patch_add_handler(logger)
logging.config.dictConfig(config_dict)
def logx(supress_args=[], supress_all_args=False, supress_result=False, logger=logging.getLogger(),
debug_level=logging.DEBUG):
"""
logs parameters and result
takes arguments
supress_args - list of parameter names to supress
supress_all_args - boolean to supress all arguments
supress_result - boolean to supress result
receiver - custom logging function which takes a string as input; defaults to logging on stdout
"""
def decorator(fn):
def func(*args, **kwargs):
if not supress_all_args:
arg_string = ""
for i in range(0, len(args)):
var_name = fn.__code__.co_varnames[i]
if var_name != "self" and var_name not in supress_args:
arg_string += var_name + ":" + str(args[i]) + ","
arg_string = arg_string[0:len(arg_string) - 1]
string = (RED + BOLD + '>> ' + END + 'Calling {0}({1})'.format(fn.__name__, arg_string))
if len(kwargs):
string = (
RED + BOLD + '>> ' + END + 'Calling {0} with args {1} and kwargs {2}'.format(
fn.__name__,
arg_string, kwargs))
logger.log(debug_level, string)
wrapped_fn = fn
if not asyncio.iscoroutine(fn):
wrapped_fn = asyncio.coroutine(fn)
result = yield from wrapped_fn(*args, **kwargs)
if not supress_result:
string = BLUE + BOLD + '<< ' + END + 'Return {0} with result : {1}'.format(fn.__name__, result)
logger.log(debug_level, string)
return result
return func
return decorator
|
quikmile/trellio | trellio/utils/log.py | logx | python | def logx(supress_args=[], supress_all_args=False, supress_result=False, logger=logging.getLogger(),
debug_level=logging.DEBUG):
def decorator(fn):
def func(*args, **kwargs):
if not supress_all_args:
arg_string = ""
for i in range(0, len(args)):
var_name = fn.__code__.co_varnames[i]
if var_name != "self" and var_name not in supress_args:
arg_string += var_name + ":" + str(args[i]) + ","
arg_string = arg_string[0:len(arg_string) - 1]
string = (RED + BOLD + '>> ' + END + 'Calling {0}({1})'.format(fn.__name__, arg_string))
if len(kwargs):
string = (
RED + BOLD + '>> ' + END + 'Calling {0} with args {1} and kwargs {2}'.format(
fn.__name__,
arg_string, kwargs))
logger.log(debug_level, string)
wrapped_fn = fn
if not asyncio.iscoroutine(fn):
wrapped_fn = asyncio.coroutine(fn)
result = yield from wrapped_fn(*args, **kwargs)
if not supress_result:
string = BLUE + BOLD + '<< ' + END + 'Return {0} with result : {1}'.format(fn.__name__, result)
logger.log(debug_level, string)
return result
return func
return decorator | logs parameters and result
takes arguments
supress_args - list of parameter names to supress
supress_all_args - boolean to supress all arguments
supress_result - boolean to supress result
receiver - custom logging function which takes a string as input; defaults to logging on stdout | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/utils/log.py#L180-L220 | null | import asyncio
import datetime
import logging
import logging.config
import sys
from functools import partial, wraps
from logging import Handler
from queue import Queue
from threading import Thread
import yaml
from pythonjsonlogger import jsonlogger
RED = '\033[91m'
BLUE = '\033[94m'
BOLD = '\033[1m'
END = '\033[0m'
class CustomTimeLoggingFormatter(logging.Formatter):
def formatTime(self, record, datefmt=None): # noqa
"""
Overrides formatTime method to use datetime module instead of time module
to display time in microseconds. Time module by default does not resolve
time to microseconds.
"""
if datefmt:
s = datetime.datetime.now().strftime(datefmt)
else:
t = datetime.datetime.now().strftime(self.default_time_format)
s = self.default_msec_format % (t, record.msecs)
return s
class CustomJsonFormatter(jsonlogger.JsonFormatter):
def __init__(self, *args, **kwargs):
self.extrad = kwargs.pop('extrad', {})
super().__init__(*args, **kwargs)
def add_fields(self, log_record, record, message_dict):
message_dict.update(self.extrad)
super().add_fields(log_record, record, message_dict)
def patch_async_emit(handler: Handler):
base_emit = handler.emit
queue = Queue()
def loop():
while True:
record = queue.get()
try:
base_emit(record)
except:
print(sys.exc_info())
def async_emit(record):
queue.put(record)
thread = Thread(target=loop)
thread.daemon = True
thread.start()
handler.emit = async_emit
return handler
def patch_add_handler(logger):
base_add_handler = logger.addHandler
def async_add_handler(handler):
async_handler = patch_async_emit(handler)
base_add_handler(async_handler)
return async_add_handler
DEFAULT_CONFIG_YAML = """
# logging config
version: 1
disable_existing_loggers: False
handlers:
stream:
class: logging.StreamHandler
level: INFO
formatter: ctf
stream: ext://sys.stdout
stats:
class: logging.StreamHandler
level: INFO
formatter: cjf
stream: ext://sys.stdout
service:
class: logging.StreamHandler
level: INFO
formatter: ctf
stream: ext://sys.stdout
formatters:
ctf:
(): trellio.utils.log.CustomTimeLoggingFormatter
format: '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
datefmt: '%Y-%m-%d %H:%M:%S,%f'
cjf:
(): trellio.utils.log.CustomJsonFormatter
format: '{ "timestamp":"%(asctime)s", "message":"%(message)s"}'
datefmt: '%Y-%m-%d %H:%M:%S,%f'
root:
handlers: [stream]
level: INFO
loggers:
registry:
handlers: [stream]
level: INFO
stats:
handlers: [stats]
level: INFO
"""
def setup_logging(_):
try:
with open('config_log.json', 'r') as f:
config_dict = yaml.load(f.read())
except:
config_dict = yaml.load(DEFAULT_CONFIG_YAML)
logging.getLogger('asyncio').setLevel(logging.DEBUG)
logger = logging.getLogger()
logger.propagate = False
logger.handlers = []
logger.addHandler = patch_add_handler(logger)
logging.config.dictConfig(config_dict)
def log(fn=None, logger=logging.getLogger(), debug_level=logging.DEBUG):
"""
logs parameters and result - takes no arguments
"""
if fn is None:
return partial(log, logger=logger, debug_level=debug_level)
@wraps(fn)
def func(*args, **kwargs):
arg_string = ""
for i in range(0, len(args)):
var_name = fn.__code__.co_varnames[i]
if var_name not in ['self', 'cls']:
arg_string += var_name + ":" + str(args[i]) + ","
arg_string = arg_string[0:len(arg_string) - 1]
string = (RED + BOLD + '>> ' + END + 'Calling {0}({1})'.format(fn.__name__, arg_string))
if len(kwargs):
string = (
RED + BOLD + '>> ' + END + 'Calling {0} with args {1} and kwargs {2}'.format(fn.__name__, arg_string,
kwargs))
logger.log(debug_level, string)
wrapped_fn = fn
if not asyncio.iscoroutine(fn):
wrapped_fn = asyncio.coroutine(fn)
try:
result = yield from wrapped_fn(*args, **kwargs)
string = BLUE + BOLD + '<< ' + END + 'Return {0} with result :{1}'.format(fn.__name__, result)
logger.log(debug_level, string)
return result
except Exception as e:
string = (RED + BOLD + '>> ' + END + '{0} raised exception :{1}'.format(fn.__name__, str(e)))
logger.log(debug_level, string)
raise e
return func
|
quikmile/trellio | trellio/utils/log.py | CustomTimeLoggingFormatter.formatTime | python | def formatTime(self, record, datefmt=None): # noqa
if datefmt:
s = datetime.datetime.now().strftime(datefmt)
else:
t = datetime.datetime.now().strftime(self.default_time_format)
s = self.default_msec_format % (t, record.msecs)
return s | Overrides formatTime method to use datetime module instead of time module
to display time in microseconds. Time module by default does not resolve
time to microseconds. | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/utils/log.py#L21-L32 | null | class CustomTimeLoggingFormatter(logging.Formatter):
|
quikmile/trellio | trellio/pinger.py | Pinger.send_ping | python | def send_ping(self, payload=None):
yield from asyncio.sleep(self._interval)
self._handler.send_ping(payload=payload)
self._start_timer(payload=payload) | Sends the ping after the interval specified when initializing | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/pinger.py#L38-L44 | [
"def _start_timer(self, payload=None):\n self._timer = self._loop.call_later(self._timeout, functools.partial(self._on_timeout, payload=payload))\n"
] | class Pinger:
"""
Pinger to send ping packets to an endpoint and inform if the timeout has occurred
"""
def __init__(self, handler, interval, timeout, loop=None, max_failures=5):
"""
Aysncio based pinger
:param handler: Pinger uses it to send a ping and inform when timeout occurs.
Must implement send_ping() and on_timeout() methods
:param int interval: time interval between ping after a pong
:param loop: Optional event loop
"""
self._handler = handler
self._interval = interval
self._timeout = timeout
self._loop = loop or asyncio.get_event_loop()
self._timer = None
self._failures = 0
self._max_failures = max_failures
self.logger = logging.getLogger()
@asyncio.coroutine
def pong_received(self, payload=None):
"""
Called when a pong is received. So the timer is cancelled
"""
if self._timer is not None:
self._timer.cancel()
self._failures = 0
asyncio.async(self.send_ping(payload=payload))
def _start_timer(self, payload=None):
self._timer = self._loop.call_later(self._timeout, functools.partial(self._on_timeout, payload=payload))
def stop(self):
if self._timer is not None:
self._timer.cancel()
def _on_timeout(self, payload=None):
if self._failures < self._max_failures:
self._failures += 1
asyncio.ensure_future(self.send_ping(payload=payload))
else:
self._handler.on_timeout()
|
quikmile/trellio | trellio/pinger.py | Pinger.pong_received | python | def pong_received(self, payload=None):
if self._timer is not None:
self._timer.cancel()
self._failures = 0
asyncio.async(self.send_ping(payload=payload)) | Called when a pong is received. So the timer is cancelled | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/pinger.py#L46-L53 | null | class Pinger:
"""
Pinger to send ping packets to an endpoint and inform if the timeout has occurred
"""
def __init__(self, handler, interval, timeout, loop=None, max_failures=5):
"""
Aysncio based pinger
:param handler: Pinger uses it to send a ping and inform when timeout occurs.
Must implement send_ping() and on_timeout() methods
:param int interval: time interval between ping after a pong
:param loop: Optional event loop
"""
self._handler = handler
self._interval = interval
self._timeout = timeout
self._loop = loop or asyncio.get_event_loop()
self._timer = None
self._failures = 0
self._max_failures = max_failures
self.logger = logging.getLogger()
@asyncio.coroutine
def send_ping(self, payload=None):
"""
Sends the ping after the interval specified when initializing
"""
yield from asyncio.sleep(self._interval)
self._handler.send_ping(payload=payload)
self._start_timer(payload=payload)
def _start_timer(self, payload=None):
self._timer = self._loop.call_later(self._timeout, functools.partial(self._on_timeout, payload=payload))
def stop(self):
if self._timer is not None:
self._timer.cancel()
def _on_timeout(self, payload=None):
if self._failures < self._max_failures:
self._failures += 1
asyncio.ensure_future(self.send_ping(payload=payload))
else:
self._handler.on_timeout()
|
quikmile/trellio | trellio/conf_manager/conf_client.py | ConfigHandler.enable_signals | python | def enable_signals(self):
'''
e.g signal_dict = {signal_path:signal_receiver_path_list, ....}
:return:
'''
signal_dict = self.settings[self.signal_key] or {}
for i in signal_dict.keys():
sig_module, signal_class = self.import_class_from_path(i)
for j in signal_dict[i]:
recv_module, recv_coro = self.import_class_from_path(j)
signal_class.register(recv_coro) | e.g signal_dict = {signal_path:signal_receiver_path_list, ....}
:return: | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/conf_manager/conf_client.py#L288-L298 | [
"def import_class_from_path(self, path):\n broken = path.split('.')\n class_name = broken[-1]\n module_name = '.'.join(broken[:-1])\n module = importlib.import_module(module_name)\n class_value = getattr(module, class_name)\n return module, class_value\n"
] | class ConfigHandler:
smtp_host = 'SMTP_HOST'
smtp_user = 'SMTP_USER'
smtp_port = 'SMTP_PORT'
smtp_password = 'SMTP_PASSWORD'
admin_emails = 'ADMIN_EMAILS'
middleware_key = 'MIDDLEWARES'
signal_key = 'SIGNALS'
service_name_key = 'SERVICE_NAME'
host_name_key = 'HOST_NAME'
service_version_key = 'SERVICE_VERSION'
reg_host_key = "REGISTRY_HOST"
reg_port_key = "REGISTRY_PORT"
redis_host_key = "REDIS_HOST"
redis_port_key = "REDIS_PORT"
http_host_key = "HTTP_HOST"
tcp_host_key = "TCP_HOST"
http_port_key = "HTTP_PORT"
tcp_port_key = "TCP_PORT"
database_key = 'DATABASE_SETTINGS'
ronin_key = "RONIN"
smtp_key = 'SMTP_SETTINGS'
apps_key = 'APPS'
# service_path_key = "SERVICE_PATH"
def __init__(self, host_class):
self.settings = None
self.host = host_class
@property
def service_name(self):
return self.settings[self.service_name_key]
def get_tcp_clients(self):
from trellio.services import TCPServiceClient
tcp_clients = self.inheritors(TCPServiceClient)
return tcp_clients
def get_http_clients(self):
from trellio.services import HTTPServiceClient
http_clients = self.inheritors(HTTPServiceClient)
return http_clients
def get_subscribers(self):
from trellio.pubsub import Subscriber
subscriber_classes = self.inheritors(Subscriber)
subscribers = []
for subs in subscriber_classes:
s = subs()
s.pubsub_host = self.settings[self.redis_host_key]
s.pubsub_port = self.settings[self.redis_port_key]
subscribers.append(s)
return subscribers
def configure_host(self, host):
host.configure(
host_name=self.settings[self.host_name_key],
service_name=self.settings[self.service_name_key],
service_version=self.settings[self.service_version_key],
http_host=self.settings[self.http_host_key],
http_port=self.settings[self.http_port_key],
tcp_host=self.settings[self.tcp_host_key],
tcp_port=self.settings[self.tcp_port_key],
registry_host=self.settings[self.reg_host_key],
registry_port=self.settings[self.reg_port_key],
pubsub_host=self.settings[self.redis_host_key],
pubsub_port=self.settings[self.reg_port_key],
ronin=self.settings[self.ronin_key]
)
def setup_host(self):
host = self.host
self.configure_host(host)
publisher = self.get_publisher()
subscribers = self.get_subscribers()
if publisher:
host.attach_publisher(publisher)
if subscribers:
host.attach_subscribers(subscribers)
http_service = self.get_http_service()
tcp_service = self.get_tcp_service()
tcp_clients = self.get_tcp_clients()
http_clients = self.get_http_clients()
http_views = self.get_http_views()
tcp_views = self.get_tcp_views()
if not http_service:
http_service = HTTPService(host.service_name, host.service_version, host.http_host, host.http_port)
if not tcp_service:
tcp_service = TCPService(host.service_name, host.service_version, host.tcp_host, host.tcp_port)
self.enable_signals()
self.enable_middlewares(http_service=http_service, http_views=http_views)
if http_service:
# self.register_http_views(http_service)
host.attach_service(http_service)
http_service.clients = [i() for i in http_clients + tcp_clients]
# self.register_tcp_views(tcp_service)
host.attach_service(tcp_service)
if http_service:
tcp_service.clients = http_service.clients
if http_views:
host.attach_http_views(http_views)
for view_inst in host.get_tcp_views():
pass
if tcp_views:
host.attach_tcp_views(tcp_views)
_tcp_service = host.get_tcp_service()
_tcp_service.tcp_views = host._tcp_views
host._smtp_handler = self.get_smtp_logging_handler()
def get_database_settings(self):
return self.settings[self.database_key]
def set_config(self, config_path):
settings = None
with open(config_path) as f:
settings = json.load(f)
new_settings = copy.deepcopy(GLOBAL_CONFIG)
new_settings.update(settings)
self.settings = new_settings
parent_dir = os.getcwd().split('/')[-1]
client_path = parent_dir + '.clients'
service_path = parent_dir + '.service'
try:
importlib.import_module(client_path)
except:
logger.warning('No clients found')
service_imported = True
service_exception = None
try:
importlib.import_module(service_path)
except Exception as e:
service_imported = False
service_exception = e.__traceback__
if self.settings.get(self.apps_key):
apps = self.settings[self.apps_key]
for app in apps:
views_path = parent_dir + '.{}.views'.format(app)
try:
importlib.import_module(views_path)
except Exception as e:
print(e.__traceback__.__str__())
else:
if not service_imported:
print(service_exception.__str__())
def get_smtp_logging_handler(self):
if self.settings.get(self.smtp_key):
keys = ["smtp_host", "smtp_port", "smtp_user", "smtp_password"]
setting_keys = self.settings[self.smtp_key].keys()
missing_keys = list(filter(lambda x: x not in setting_keys, keys))
if not missing_keys:
handler = BufferingSMTPHandler(mailhost=self.settings[self.smtp_key]['smtp_host'],
mailport=self.settings[self.smtp_key]['smtp_port'],
fromaddr=self.settings[self.smtp_key]['smtp_user'],
toaddrs=self.settings[self.admin_emails],
subject='Error {} {}:{}'.format(self.settings[self.host_name_key],
self.settings[
self.service_name_key].upper(),
self.settings[self.service_version_key]),
capacity=1,
password=self.settings[self.smtp_key]['smtp_password'])
handler.setLevel(logging.ERROR)
if not self.settings[self.ronin_key]:
return handler
def get_http_service(self):
from trellio.services import HTTPService
http_service = None
if HTTPService.__subclasses__():
service_sub_class = HTTPService.__subclasses__()[0]
http_service = service_sub_class(self.settings[self.service_name_key],
self.settings[self.service_version_key],
self.settings[self.http_host_key],
self.settings[self.http_port_key])
return http_service
def get_tcp_service(self):
from trellio.services import TCPService
tcp_service = None
if TCPService.__subclasses__():
service_sub_class = TCPService.__subclasses__()[0]
tcp_service = service_sub_class(self.settings[self.service_name_key],
self.settings[self.service_version_key],
self.settings[self.tcp_host_key],
self.settings[self.tcp_port_key])
return tcp_service
def get_publisher(self):
from trellio.pubsub import Publisher
publisher = None
if Publisher.__subclasses__():
publisher_sub_class = Publisher.__subclasses__()[0]
publisher = publisher_sub_class(self.settings[self.service_name_key],
self.settings[self.service_version_key],
self.settings[self.redis_host_key],
self.settings[self.redis_port_key])
return publisher
def get_http_views(self):
from trellio.views import HTTPView
return self.inheritors(HTTPView)
def get_tcp_views(self):
from trellio.views import TCPView
return self.inheritors(TCPView)
def import_class_from_path(self, path):
broken = path.split('.')
class_name = broken[-1]
module_name = '.'.join(broken[:-1])
module = importlib.import_module(module_name)
class_value = getattr(module, class_name)
return module, class_value
def enable_middlewares(self, http_service=None, http_views=()):
middlewares = self.settings[self.middleware_key] or []
middle_cls = []
for i in middlewares:
module, class_value = self.import_class_from_path(i)
if not class_value:
raise InvalidConfigurationError
else:
middle_cls.append(class_value())
if http_service:
http_service.middlewares = middle_cls
for view in http_views:
view.middlewares = middle_cls
# registering reciever
@staticmethod
def inheritors(klass):
subclasses = set()
work = [klass]
while work:
parent = work.pop()
for child in parent.__subclasses__():
if child not in subclasses:
subclasses.add(child)
work.append(child)
return list(subclasses)
|
quikmile/trellio | trellio/utils/decorators.py | retry | python | def retry(exceptions, tries=5, delay=1, backoff=2, logger=None):
def deco_retry(func):
@wraps(func)
async def f_retry(self, *args, **kwargs):
if not iscoroutine(func):
f = coroutine(func)
else:
f = func
mtries, mdelay = tries, delay
while mtries > 1:
try:
return await f(self, *args, **kwargs)
except exceptions:
if logger:
logger.info('Retrying %s after %s seconds', f.__name__, mdelay)
sleep(mdelay)
mtries -= 1
mdelay *= backoff
return await f(self, *args, **kwargs)
return f_retry
return deco_retry | Retry calling the decorated function using an exponential backoff.
Args:
exceptions: The exception to check. may be a tuple of
exceptions to check.
tries: Number of times to try (not retry) before giving up.
delay: Initial delay between retries in seconds.
backoff: Backoff multiplier (e.g. value of 2 will double the delay
each retry).
logger: Logger to use. If None, print. | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/utils/decorators.py#L21-L57 | null | import warnings
from asyncio.coroutines import iscoroutine, coroutine
from asyncio.tasks import sleep
from functools import wraps
def deprecated(func):
"""
Generates a deprecation warning
"""
@wraps(func)
def wrapper(*args, **kwargs):
msg = "'{}' is deprecated".format(func.__name__)
warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
return func(*args, **kwargs)
return wrapper
|
quikmile/trellio | trellio/pubsub.py | PubSub.publish | python | async def publish(self, endpoint: str, payload: str):
if self._conn is not None:
try:
await self._conn.publish(endpoint, payload)
return True
except redis.Error as e:
self._logger.error('Publish failed with error %s', repr(e))
return False | Publish to an endpoint.
:param str endpoint: Key by which the endpoint is recognised.
Subscribers will use this key to listen to events
:param str payload: Payload to publish with the event
:return: A boolean indicating if the publish was successful | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/pubsub.py#L36-L50 | null | class PubSub:
"""
Pub sub handler which uses redis.
Can be used to publish an event or subscribe to a list of endpoints
"""
def __init__(self, redis_host, redis_port):
"""
Create in instance of Pub Sub handler
:param str redis_host: Redis Host address
:param redis_port: Redis port number
"""
self._redis_host = redis_host
self._redis_port = redis_port
self._conn = None
self._logger = logging.getLogger(__name__)
async def connect(self):
"""
Connect to the redis server and return the connection
:return:
"""
self._conn = await self._get_conn()
return self._conn
async def subscribe(self, endpoints: list, handler):
"""
Subscribe to a list of endpoints
:param endpoints: List of endpoints the subscribers is interested to subscribe to
:type endpoints: list
:param handler: The callback to call when a particular event is published.
Must take two arguments, a channel to which the event was published
and the payload.
:return:
"""
connection = await self._get_conn()
subscriber = await connection.start_subscribe()
await subscriber.subscribe(endpoints)
while True:
payload = await subscriber.next_published()
handler(payload.channel, payload.value)
async def _get_conn(self):
    # Create a fresh auto-reconnecting connection to the configured redis host/port.
    return await redis.Connection.create(self._redis_host, self._redis_port, auto_reconnect=True)
|
quikmile/trellio | trellio/pubsub.py | PubSub.subscribe | python | async def subscribe(self, endpoints: list, handler):
connection = await self._get_conn()
subscriber = await connection.start_subscribe()
await subscriber.subscribe(endpoints)
while True:
payload = await subscriber.next_published()
handler(payload.channel, payload.value) | Subscribe to a list of endpoints
:param endpoints: List of endpoints the subscribers is interested to subscribe to
:type endpoints: list
:param handler: The callback to call when a particular event is published.
Must take two arguments, a channel to which the event was published
and the payload.
:return: | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/pubsub.py#L52-L67 | [
"async def _get_conn(self):\n return await redis.Connection.create(self._redis_host, self._redis_port, auto_reconnect=True)\n"
] | class PubSub:
"""
Pub sub handler which uses redis.
Can be used to publish an event or subscribe to a list of endpoints
"""
def __init__(self, redis_host, redis_port):
"""
Create in instance of Pub Sub handler
:param str redis_host: Redis Host address
:param redis_port: Redis port number
"""
self._redis_host = redis_host
self._redis_port = redis_port
self._conn = None
self._logger = logging.getLogger(__name__)
async def connect(self):
"""
Connect to the redis server and return the connection
:return:
"""
self._conn = await self._get_conn()
return self._conn
async def publish(self, endpoint: str, payload: str):
"""
Publish to an endpoint.
:param str endpoint: Key by which the endpoint is recognised.
Subscribers will use this key to listen to events
:param str payload: Payload to publish with the event
:return: A boolean indicating if the publish was successful
"""
if self._conn is not None:
try:
await self._conn.publish(endpoint, payload)
return True
except redis.Error as e:
self._logger.error('Publish failed with error %s', repr(e))
return False
async def _get_conn(self):
return await redis.Connection.create(self._redis_host, self._redis_port, auto_reconnect=True)
|
quikmile/trellio | trellio/bus.py | HTTPBus.send_http_request | python | def send_http_request(self, app: str, service: str, version: str, method: str, entity: str, params: dict):
host, port, node_id, service_type = self._registry_client.resolve(service, version, entity, HTTP)
url = 'http://{}:{}{}'.format(host, port, params.pop('path'))
http_keys = ['data', 'headers', 'cookies', 'auth', 'allow_redirects', 'compress', 'chunked']
kwargs = {k: params[k] for k in http_keys if k in params}
query_params = params.pop('params', {})
if app is not None:
query_params['app'] = app
query_params['version'] = version
query_params['service'] = service
response = yield from aiohttp.request(method, url, params=query_params, **kwargs)
return response | A convenience method that allows you to send a well formatted http request to another service | train | https://github.com/quikmile/trellio/blob/e8b050077562acf32805fcbb9c0c162248a23c62/trellio/bus.py#L31-L51 | null | class HTTPBus:
def __init__(self, registry_client):
self._registry_client = registry_client
|
whiteclover/dbpy | db/query/delete.py | DeleteQuery.compile | python | def compile(self):
sql = ''
sql += 'DELETE FROM ' + self.dialect.quote_table(self._table)
if self._where:
sql += ' WHERE ' + self.compile_condition(self._where)
if self._order_by:
sql += ' ' + self.compile_order_by(self._order_by)
if self._limit:
sql += ' LIMIT ' + self._limit
return sql | Compiles the delete sql statement | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/query/delete.py#L40-L51 | [
"def compile_condition(self, condition):\n condition.compile(self.dialect, self)\n sql = condition.to_sql()\n self.bind.extend(condition.bind)\n return sql\n",
"def compile_order_by(self, columns, direction=''):\n sorts = []\n for column, direction in columns:\n column = self.dialect.quote_identifier(column) if isinstance(column, list) else \\\n self.dialect.quote_column(column)\n if direction:\n direction = ' ' + direction.upper()\n sorts.append(column + direction)\n return 'ORDER BY ' + ', '.join(sorts)\n"
] | class DeleteQuery(WhereQuery):
"""Delete operator query builder"""
def __init__(self, table, dialect, db):
"""Constructor
:param table: table name
:type table: str
:param dialect: the sql dialect instance
:param db: the database connection instance
"""
if table:
self._table = table
self._db = db
WhereQuery.__init__(self, dialect)
def table(self, table):
    """Set the target table name; returns self so calls can be chained."""
    self._table = table
    return self
def clear(self):
    """Reset the builder back to its original (empty) state."""
    WhereQuery.clear(self)
    # Drop everything accumulated by previous builder calls.
    self._table = None
    self._parameters = []
    self._sql = None
def execute(self):
"""Execute the sql for delete operator"""
return self._db.execute(self.to_sql(), self.bind)
|
whiteclover/dbpy | db/query/delete.py | DeleteQuery.clear | python | def clear(self):
WhereQuery.clear(self)
self._table = None
self._parameters = []
self._sql = None | Clear and reset to orignal state | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/query/delete.py#L53-L58 | [
"def clear(self):\n self._where = QueryCondition('AND')\n self._order_by = []\n self._limit = None\n self.bind = []\n"
] | class DeleteQuery(WhereQuery):
"""Delete operator query builder"""
def __init__(self, table, dialect, db):
"""Constructor
:param table: table name
:type table: str
:param dialect: the sql dialect instance
:param db: the database connection instance
"""
if table:
self._table = table
self._db = db
WhereQuery.__init__(self, dialect)
def table(self, table):
"""Sets table name"""
self._table = table
return self
def compile(self):
"""Compiles the delete sql statement"""
sql = ''
sql += 'DELETE FROM ' + self.dialect.quote_table(self._table)
if self._where:
sql += ' WHERE ' + self.compile_condition(self._where)
if self._order_by:
sql += ' ' + self.compile_order_by(self._order_by)
if self._limit:
sql += ' LIMIT ' + self._limit
return sql
def execute(self):
"""Execute the sql for delete operator"""
return self._db.execute(self.to_sql(), self.bind)
|
whiteclover/dbpy | db/query/expr.py | Expr.compile | python | def compile(self, db):
sql = self.expression
if self.alias:
sql += (' AS ' + db.quote_column(self.alias))
return sql | Building the sql expression
:param db: the database instance | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/query/expr.py#L26-L34 | null | class Expr(object):
"""Sql expresion builder"""
def __init__(self, expression, alias=None):
    """Store the raw SQL expression and its optional alias.

    :param expression: raw SQL expression string
    :param alias: optional field name the expression is aliased to
    """
    #: sql expression
    self.expression = expression
    #: expression field name (used as the AS alias when set)
    self.alias = alias
|
whiteclover/dbpy | samples/orm.py | UserMapper.find | python | def find(self, uid):
data = (db.select(self.table).select('username', 'email', 'real_name',
'password', 'bio', 'status', 'role', 'uid').
condition('uid', uid).execute()
)
if data:
logger.info('data %s', data)
return self.load(data[0], self.model) | Find and load the user from database by uid(user id) | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/samples/orm.py#L46-L54 | [
"def select(table, key='default'):\n \"\"\"Select dialect\n\n\n :param key: a key for your dabtabase you wanna use\n \"\"\"\n database = choice(__db[key + '.slave'])\n return database.select(table)\n",
"def load(self, data, o):\n return o(*data)\n"
] | class UserMapper(BaseMapper, PrimaryTrait):
model = User
table = 'users'
def find_by_username(self, username):
"""Return user by username if find in database otherwise None"""
data = (db.select(self.table).select('username', 'email', 'real_name',
'password', 'bio', 'status', 'role', 'uid').
condition('username', username).execute()
)
if data:
return self.load(data[0], self.model)
def create(self, user):
return db.execute("INSERT INTO users(username, email, real_name, password, bio, status, role) \
VALUES(%s, %s, %s, %s, %s, %s, %s)",
(user.username, user.email, user.real_name, user.password, user.bio, user.status, user.role))
def search(self, **kw):
"""Find the users match the condition in kw"""
q = db.select(self.table).condition('status', 'active')
for k, v in kw:
q.condition(k, v)
data = q.execute()
users = []
for user in data:
users.append(self.load(user, self.model))
return users
def count(self):
return db.query('SELECT COUNT(*) FROM ' + self.table)[0][0]
def paginate(self, page=1, perpage=10):
count = self.count()
q = db.select(self.table).select('username', 'email', 'real_name',
'password', 'bio', 'status', 'role', 'uid')
results = q.limit(perpage).offset((page - 1) * perpage).order_by('real_name', 'desc').execute()
return [self.load(user, self.model) for user in results]
def save(self, user):
q = db.update(self.table)
data = dict((_, getattr(user, _)) for _ in ('username', 'email', 'real_name',
'password', 'bio', 'status', 'role'))
q.mset(data)
return q.condition('uid', user.uid).execute()
def delete(self, user):
return db.delete(self.table).condition('uid', user.uid).execute()
|
whiteclover/dbpy | samples/orm.py | UserMapper.find_by_username | python | def find_by_username(self, username):
data = (db.select(self.table).select('username', 'email', 'real_name',
'password', 'bio', 'status', 'role', 'uid').
condition('username', username).execute()
)
if data:
return self.load(data[0], self.model) | Return user by username if find in database otherwise None | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/samples/orm.py#L56-L63 | [
"def select(table, key='default'):\n \"\"\"Select dialect\n\n\n :param key: a key for your dabtabase you wanna use\n \"\"\"\n database = choice(__db[key + '.slave'])\n return database.select(table)\n",
"def load(self, data, o):\n return o(*data)\n"
] | class UserMapper(BaseMapper, PrimaryTrait):
model = User
table = 'users'
def find(self, uid):
"""Find and load the user from database by uid(user id)"""
data = (db.select(self.table).select('username', 'email', 'real_name',
'password', 'bio', 'status', 'role', 'uid').
condition('uid', uid).execute()
)
if data:
logger.info('data %s', data)
return self.load(data[0], self.model)
def create(self, user):
return db.execute("INSERT INTO users(username, email, real_name, password, bio, status, role) \
VALUES(%s, %s, %s, %s, %s, %s, %s)",
(user.username, user.email, user.real_name, user.password, user.bio, user.status, user.role))
def search(self, **kw):
"""Find the users match the condition in kw"""
q = db.select(self.table).condition('status', 'active')
for k, v in kw:
q.condition(k, v)
data = q.execute()
users = []
for user in data:
users.append(self.load(user, self.model))
return users
def count(self):
return db.query('SELECT COUNT(*) FROM ' + self.table)[0][0]
def paginate(self, page=1, perpage=10):
count = self.count()
q = db.select(self.table).select('username', 'email', 'real_name',
'password', 'bio', 'status', 'role', 'uid')
results = q.limit(perpage).offset((page - 1) * perpage).order_by('real_name', 'desc').execute()
return [self.load(user, self.model) for user in results]
def save(self, user):
q = db.update(self.table)
data = dict((_, getattr(user, _)) for _ in ('username', 'email', 'real_name',
'password', 'bio', 'status', 'role'))
q.mset(data)
return q.condition('uid', user.uid).execute()
def delete(self, user):
return db.delete(self.table).condition('uid', user.uid).execute()
|
whiteclover/dbpy | samples/orm.py | UserMapper.search | python | def search(self, **kw):
q = db.select(self.table).condition('status', 'active')
for k, v in kw:
q.condition(k, v)
data = q.execute()
users = []
for user in data:
users.append(self.load(user, self.model))
return users | Find the users match the condition in kw | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/samples/orm.py#L70-L79 | [
"def select(table, key='default'):\n \"\"\"Select dialect\n\n\n :param key: a key for your dabtabase you wanna use\n \"\"\"\n database = choice(__db[key + '.slave'])\n return database.select(table)\n",
"def load(self, data, o):\n return o(*data)\n"
] | class UserMapper(BaseMapper, PrimaryTrait):
model = User
table = 'users'
def find(self, uid):
"""Find and load the user from database by uid(user id)"""
data = (db.select(self.table).select('username', 'email', 'real_name',
'password', 'bio', 'status', 'role', 'uid').
condition('uid', uid).execute()
)
if data:
logger.info('data %s', data)
return self.load(data[0], self.model)
def find_by_username(self, username):
"""Return user by username if find in database otherwise None"""
data = (db.select(self.table).select('username', 'email', 'real_name',
'password', 'bio', 'status', 'role', 'uid').
condition('username', username).execute()
)
if data:
return self.load(data[0], self.model)
def create(self, user):
return db.execute("INSERT INTO users(username, email, real_name, password, bio, status, role) \
VALUES(%s, %s, %s, %s, %s, %s, %s)",
(user.username, user.email, user.real_name, user.password, user.bio, user.status, user.role))
def count(self):
return db.query('SELECT COUNT(*) FROM ' + self.table)[0][0]
def paginate(self, page=1, perpage=10):
count = self.count()
q = db.select(self.table).select('username', 'email', 'real_name',
'password', 'bio', 'status', 'role', 'uid')
results = q.limit(perpage).offset((page - 1) * perpage).order_by('real_name', 'desc').execute()
return [self.load(user, self.model) for user in results]
def save(self, user):
q = db.update(self.table)
data = dict((_, getattr(user, _)) for _ in ('username', 'email', 'real_name',
'password', 'bio', 'status', 'role'))
q.mset(data)
return q.condition('uid', user.uid).execute()
def delete(self, user):
return db.delete(self.table).condition('uid', user.uid).execute()
|
whiteclover/dbpy | samples/orm.py | PostMapper.paginate | python | def paginate(self, page=1, perpage=10, category=None):
q = db.select(self.table).fields('title', 'slug', 'description', 'html', 'css', 'js',
'category', 'status', 'comments', 'author', 'created', 'pid')
if category:
q.condition('category', category)
results = (q.limit(perpage).offset((page - 1) * perpage)
.order_by('created', 'DESC').execute())
return [self.load(data, self.model) for data in results] | Paginate the posts | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/samples/orm.py#L124-L132 | [
"def select(table, key='default'):\n \"\"\"Select dialect\n\n\n :param key: a key for your dabtabase you wanna use\n \"\"\"\n database = choice(__db[key + '.slave'])\n return database.select(table)\n"
] | class PostMapper(BaseMapper):
table = 'posts'
model = Post
def find(self, pid):
data = db.select(self.table).fields('title', 'slug', 'description', 'html', 'css', 'js',
'category', 'status', 'comments', 'author', 'created', 'pid').condition('pid', pid).execute()
if data:
return self.load(data[0], self.model)
def count(self):
return db.select(self.table).fields(db.expr('COUNT(*)')).execute()[0][0]
def create(self, post):
row = []
for _ in ('title', 'slug', 'description', 'created', 'html', 'css', 'js',
'category', 'status', 'comments', 'author'):
row.append(getattr(post, _))
return db.insert(self.table).columns('title', 'slug', 'description', 'created', 'html', 'css', 'js',
'category', 'status', 'comments', 'author').values(row).execute()
def save(self, page):
q = db.update(self.table)
data = dict((_, getattr(page, _)) for _ in ('title', 'slug', 'description', 'html', 'css', 'js',
'category', 'status', 'comments'))
q.mset(data)
return q.condition('pid', page.pid).execute()
def delete(self, page_id):
return db.delete(self.table).condition('pid', page_id).execute()
def category_count(self, category_id):
return db.select(self.table).fields(db.expr('count(*)',
'total')).condition('category', category_id).condition('status', 'published').execute()[0][0]
|
whiteclover/dbpy | db/__init__.py | setup | python | def setup(config, minconn=5, maxconn=10, adapter='mysql', key='default', slave=False):
global __db
if '.' in key:
raise TypeError('The DB Key: "%s" Can\'t Contain dot' % (key))
if slave == False and key in __db:
raise DBError('The Key: "%s" was set' % (key))
database = DB(config, minconn, maxconn, key, adapter)
master_key = key
slave_key = key + '.slave'
if not slave:
__db[master_key] = database
if slave_key not in __db:
__db[slave_key] = [database]
else:
if key in __db:
databases = __db[slave_key]
if len(databases) == 1 and __db[master_key] == databases[0]:
__db[slave_key] = [database]
else:
__db[slave_key].append(database)
else:
__db[slave_key] = [database] | Setup database
:param config dict: is the db adapter config
:param key string: the key to identify the database
:param adapter string: the database adapter; currently only mysql is supported
:param minconn int: the min connection for connection pool
:param maxconn int: the max connection for connection pool
:param slave boolean: If True the database can be read only. | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/__init__.py#L54-L91 | null | #!/usr/bin/env python
# Copyright (C) 2014-2015 Thomas Huang
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__version__ = '0.1.3'
VERSION = tuple(map(int, __version__.split('.')))
__all__ = [
'and_',
'or_',
'expr',
'query',
'execute',
'transaction',
'setup',
'select',
'insert',
'update',
'delete',
'database',
'DBError'
]
from db.query.select import QueryCondition
from db.query.expr import Expr as expr
from db._db import DB
from db.errors import DBError
from random import choice
def and_():
    """Return a query-condition group whose parts are joined with AND."""
    group = QueryCondition('AND')
    return group
def or_():
    """Return a query-condition group whose parts are joined with OR."""
    group = QueryCondition('OR')
    return group
__db = {}
def query(sql, args=None, many=None, as_dict=False, key='default'):
    """Run a raw read-only SQL statement (select, show, ...) on a slave.

    Compatible with the dbi execute method.

    :param sql string: the SQL statement, like 'select * from %s'
    :param args list: when None, dbi execute(sql) is used; otherwise
        dbi execute(sql, args) -- args should be a tuple or a list of lists
    :param many int: when set, the method returns a generator to iterate over
    :param as_dict bool: when True each row is a dict, otherwise a tuple
    :param key: the key of the database to use
    """
    # Reads are load-balanced over the slave pool registered for this key.
    slave_pool = __db[key + '.slave']
    return choice(slave_pool).query(sql, args, many, as_dict)
def execute(sql, args=None, key='default'):
    """Run a write statement (insert/update/delete) on the master database.

    :param sql string: the SQL statement, like 'delete from users'
    :param args list: when None, dbi execute(sql) is used; otherwise
        dbi execute(sql, args) -- args should be a tuple or a list of lists
    :param key: the key of the database to use

    eg::

        execute('insert into users values(%s, %s)', [(1, 'blablabla'), (2, 'animer')])
        execute('delete from users')
    """
    return __db[key].execute(sql, args)
def transaction(key='default'):
    """Return a transaction context from the master database.

    :param key: the key of the database to use
    """
    master = __db[key]
    return master.transaction()
def select(table, key='default'):
    """Build a SELECT query against a randomly chosen slave.

    :param table: table name to select from
    :param key: the key of the database to use
    """
    # Reads go to a random member of the slave pool for this key.
    return choice(__db[key + '.slave']).select(table)
def insert(table, key='default'):
    """Build an INSERT query against the master database.

    :param table: table name to insert into
    :param key: the key of the database to use
    """
    return __db[key].insert(table)
def update(table, key='default'):
    """Build an UPDATE query against the master database.

    :param table: table name to update
    :param key: the key of the database to use
    """
    return __db[key].update(table)
def delete(table, key='default'):
    """Build a DELETE query against the master database.

    :param table: table name to delete from
    :param key: the key of the database to use
    """
    return __db[key].delete(table)
def database(key='default', slave=False):
    """Return a registered database handle.

    :param key: the key of the database to use
    :param slave boolean: If True, pick a read-only slave connection
        at random. Defaults to False.
    """
    if not slave:
        return __db.get(key)
    # Slave reads are balanced by picking a random pool member.
    return choice(__db[key + '.slave'])
|
whiteclover/dbpy | db/__init__.py | query | python | def query(sql, args=None, many=None, as_dict=False, key='default'):
database = choice(__db[key + '.slave'])
return database.query(sql, args, many, as_dict) | The connection raw sql query, when select table, show table
to fetch records, it is compatible the dbi execute method::
:param sql string: the sql statement, like 'select * from %s'
:param args list: when set to None, dbi execute(sql) is used; otherwise
dbi execute(sql, args); the args follow the original rules: a tuple or a list of lists
:param many int: when set, the query method returns a generator to iterate over
:param as_dict bool: when True, each row is a dict; otherwise a tuple
:param key: a key for your dabtabase you wanna use | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/__init__.py#L94-L107 | null | #!/usr/bin/env python
# Copyright (C) 2014-2015 Thomas Huang
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__version__ = '0.1.3'
VERSION = tuple(map(int, __version__.split('.')))
__all__ = [
'and_',
'or_',
'expr',
'query',
'execute',
'transaction',
'setup',
'select',
'insert',
'update',
'delete',
'database',
'DBError'
]
from db.query.select import QueryCondition
from db.query.expr import Expr as expr
from db._db import DB
from db.errors import DBError
from random import choice
def and_():
return QueryCondition('AND')
def or_():
return QueryCondition('OR')
__db = {}
def setup(config, minconn=5, maxconn=10, adapter='mysql', key='default', slave=False):
"""Setup database
:param config dict: is the db adapter config
:param key string: the key to identify dabtabase
:param adapter string: the dabtabase adapter current support mysql only
:param minconn int: the min connection for connection pool
:param maxconn int: the max connection for connection pool
:param slave boolean: If True the database can be read only.
"""
global __db
if '.' in key:
raise TypeError('The DB Key: "%s" Can\'t Contain dot' % (key))
if slave == False and key in __db:
raise DBError('The Key: "%s" was set' % (key))
database = DB(config, minconn, maxconn, key, adapter)
master_key = key
slave_key = key + '.slave'
if not slave:
__db[master_key] = database
if slave_key not in __db:
__db[slave_key] = [database]
else:
if key in __db:
databases = __db[slave_key]
if len(databases) == 1 and __db[master_key] == databases[0]:
__db[slave_key] = [database]
else:
__db[slave_key].append(database)
else:
__db[slave_key] = [database]
def execute(sql, args=None, key='default'):
"""It is used for update, delete records.
:param sql string: the sql stamtement like 'select * from %s'
:param args list: Wen set None, will use dbi execute(sql), else
dbi execute(sql, args), the args keep the original rules, it shuld be tuple or list of list
:param key: a key for your dabtabase you wanna use
eg::
execute('insert into users values(%s, %s)', [(1L, 'blablabla'), (2L, 'animer')])
execute('delete from users')
"""
database = __db[key]
return database.execute(sql, args)
def transaction(key='default'):
"""transaction wrapper
:param key: a key for your dabtabase you wanna use
"""
database = __db[key]
return database.transaction()
def select(table, key='default'):
"""Select dialect
:param key: a key for your dabtabase you wanna use
"""
database = choice(__db[key + '.slave'])
return database.select(table)
def insert(table, key='default'):
"""insert dialect
:param key: a key for your dabtabase you wanna use
"""
database = __db[key]
return database.insert(table)
def update(table, key='default'):
"""update dialect
:param key: a key for your dabtabase you wanna use
"""
database = __db[key]
return database.update(table)
def delete(table, key='default'):
"""delete dialect
:param key: a key for your dabtabase you wanna use
"""
database = __db[key]
return database.delete(table)
def database(key='default', slave=False):
"""datbase dialect
:param key: a key for your dabtabase you wanna use
:param slave boolean: If True the database can be read only, Defaults False.
"""
if slave:
key += '.slave'
return choice(__db[key])
return __db.get(key)
|
whiteclover/dbpy | db/__init__.py | execute | python | def execute(sql, args=None, key='default'):
database = __db[key]
return database.execute(sql, args) | It is used for update, delete records.
:param sql string: the sql statement, like 'select * from %s'
:param args list: when set to None, dbi execute(sql) is used; otherwise
dbi execute(sql, args); the args follow the original rules: a tuple or a list of lists
:param key: the key of the database to use
eg::
execute('insert into users values(%s, %s)', [(1L, 'blablabla'), (2L, 'animer')])
execute('delete from users') | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/__init__.py#L110-L124 | null | #!/usr/bin/env python
# Copyright (C) 2014-2015 Thomas Huang
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__version__ = '0.1.3'
VERSION = tuple(map(int, __version__.split('.')))
__all__ = [
'and_',
'or_',
'expr',
'query',
'execute',
'transaction',
'setup',
'select',
'insert',
'update',
'delete',
'database',
'DBError'
]
from db.query.select import QueryCondition
from db.query.expr import Expr as expr
from db._db import DB
from db.errors import DBError
from random import choice
def and_():
return QueryCondition('AND')
def or_():
return QueryCondition('OR')
__db = {}
def setup(config, minconn=5, maxconn=10, adapter='mysql', key='default', slave=False):
"""Setup database
:param config dict: is the db adapter config
:param key string: the key to identify dabtabase
:param adapter string: the dabtabase adapter current support mysql only
:param minconn int: the min connection for connection pool
:param maxconn int: the max connection for connection pool
:param slave boolean: If True the database can be read only.
"""
global __db
if '.' in key:
raise TypeError('The DB Key: "%s" Can\'t Contain dot' % (key))
if slave == False and key in __db:
raise DBError('The Key: "%s" was set' % (key))
database = DB(config, minconn, maxconn, key, adapter)
master_key = key
slave_key = key + '.slave'
if not slave:
__db[master_key] = database
if slave_key not in __db:
__db[slave_key] = [database]
else:
if key in __db:
databases = __db[slave_key]
if len(databases) == 1 and __db[master_key] == databases[0]:
__db[slave_key] = [database]
else:
__db[slave_key].append(database)
else:
__db[slave_key] = [database]
def query(sql, args=None, many=None, as_dict=False, key='default'):
"""The connection raw sql query, when select table, show table
to fetch records, it is compatible the dbi execute method::
:param sql string: the sql stamtement like 'select * from %s'
:param args list: Wen set None, will use dbi execute(sql), else
dbi execute(sql, args), the args keep the original rules, it shuld be tuple or list of list
:param many int: when set, the query method will return genarate an iterate
:param as_dict bool: when is True, the type of row will be dict, otherwise is tuple
:param key: a key for your dabtabase you wanna use
"""
database = choice(__db[key + '.slave'])
return database.query(sql, args, many, as_dict)
def transaction(key='default'):
"""transaction wrapper
:param key: a key for your dabtabase you wanna use
"""
database = __db[key]
return database.transaction()
def select(table, key='default'):
"""Select dialect
:param key: a key for your dabtabase you wanna use
"""
database = choice(__db[key + '.slave'])
return database.select(table)
def insert(table, key='default'):
"""insert dialect
:param key: a key for your dabtabase you wanna use
"""
database = __db[key]
return database.insert(table)
def update(table, key='default'):
"""update dialect
:param key: a key for your dabtabase you wanna use
"""
database = __db[key]
return database.update(table)
def delete(table, key='default'):
"""delete dialect
:param key: a key for your dabtabase you wanna use
"""
database = __db[key]
return database.delete(table)
def database(key='default', slave=False):
"""datbase dialect
:param key: a key for your dabtabase you wanna use
:param slave boolean: If True the database can be read only, Defaults False.
"""
if slave:
key += '.slave'
return choice(__db[key])
return __db.get(key)
|
whiteclover/dbpy | db/__init__.py | select | python | def select(table, key='default'):
database = choice(__db[key + '.slave'])
return database.select(table) | Select dialect
:param key: a key for your dabtabase you wanna use | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/__init__.py#L136-L143 | null | #!/usr/bin/env python
# Copyright (C) 2014-2015 Thomas Huang
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__version__ = '0.1.3'
VERSION = tuple(map(int, __version__.split('.')))
__all__ = [
'and_',
'or_',
'expr',
'query',
'execute',
'transaction',
'setup',
'select',
'insert',
'update',
'delete',
'database',
'DBError'
]
from db.query.select import QueryCondition
from db.query.expr import Expr as expr
from db._db import DB
from db.errors import DBError
from random import choice
def and_():
return QueryCondition('AND')
def or_():
return QueryCondition('OR')
__db = {}
def setup(config, minconn=5, maxconn=10, adapter='mysql', key='default', slave=False):
    """Set up a database and register it in the module registry under *key*.

    :param config dict: the db adapter connection config
    :param minconn int: the min connection count for the connection pool
    :param maxconn int: the max connection count for the connection pool
    :param adapter string: the database adapter, currently mysql only
    :param key string: the key used to identify the database in the registry
    :param slave boolean: if True, register the database as a read-only slave
    :raises TypeError: if *key* contains a dot (reserved for the slave suffix)
    :raises DBError: if a master database is already registered under *key*
    """
    global __db
    if '.' in key:
        raise TypeError('The DB Key: "%s" Can\'t Contain dot' % (key))
    if not slave and key in __db:
        raise DBError('The Key: "%s" was set' % (key))
    database = DB(config, minconn, maxconn, key, adapter)
    master_key = key
    slave_key = key + '.slave'
    if not slave:
        # Register the master; it also serves reads until a real slave exists.
        __db[master_key] = database
        if slave_key not in __db:
            __db[slave_key] = [database]
    elif master_key in __db:
        databases = __db[slave_key]
        if len(databases) == 1 and __db[master_key] == databases[0]:
            # The slave list still only holds the master fallback: replace it
            # with the first real slave.
            __db[slave_key] = [database]
        else:
            __db[slave_key].append(database)
    else:
        # Slave registered before (or without) a master under this key.
        __db[slave_key] = [database]
def query(sql, args=None, many=None, as_dict=False, key='default'):
    """Run a raw read query (select, show ...) on a slave of database *key*.

    :param sql string: the sql statement, e.g. ``'select * from users'``
    :param args list: when None the bare statement is executed, otherwise
        passed through to the dbi ``execute(sql, args)`` (tuple or list)
    :param many int: when set, rows are yielded lazily in batches of *many*
        instead of being returned as one list
    :param as_dict bool: when True each row is a dict, otherwise a tuple
    :param key: the key of the database you want to use
    """
    slave = choice(__db[key + '.slave'])
    return slave.query(sql, args, many, as_dict)
def execute(sql, args=None, key='default'):
    """Run a write statement (insert, update, delete) on master database *key*.

    :param sql string: the sql statement, e.g. ``'delete from users'``
    :param args list: when None the bare statement is executed, otherwise
        passed through to the dbi; a list of tuples/lists triggers executemany
    :param key: the key of the database you want to use

    eg::

        execute('insert into users values(%s, %s)', [(1L, 'blablabla'), (2L, 'animer')])
        execute('delete from users')
    """
    master = __db[key]
    return master.execute(sql, args)
def transaction(key='default'):
    """Return a transaction wrapper bound to master database *key*.

    :param key: the key of the database you want to use
    """
    database = __db[key]
    return database.transaction()
def insert(table, key='default'):
    """Return an INSERT query builder for *table* on master database *key*.

    :param key: the key of the database you want to use
    """
    database = __db[key]
    return database.insert(table)
def update(table, key='default'):
    """Return an UPDATE query builder for *table* on master database *key*.

    :param key: the key of the database you want to use
    """
    database = __db[key]
    return database.update(table)
def delete(table, key='default'):
    """Return a DELETE query builder for *table* on master database *key*.

    :param key: the key of the database you want to use
    """
    database = __db[key]
    return database.delete(table)
def database(key='default', slave=False):
    """Return the registered database object for *key*.

    :param key: the key of the database you want to use
    :param slave boolean: if True, return a randomly chosen read-only slave;
        defaults to False (return the master)
    """
    if slave:
        key += '.slave'
        # NOTE(review): this path raises KeyError for an unknown slave key,
        # while the master path below returns None via .get() -- confirm the
        # asymmetry is intended.
        return choice(__db[key])
    return __db.get(key)
|
whiteclover/dbpy | db/__init__.py | database | python | def database(key='default', slave=False):
if slave:
key += '.slave'
return choice(__db[key])
return __db.get(key) | datbase dialect
:param key: a key for your dabtabase you wanna use
:param slave boolean: If True the database can be read only, Defaults False. | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/__init__.py#L173-L182 | null | #!/usr/bin/env python
# Copyright (C) 2014-2015 Thomas Huang
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__version__ = '0.1.3'
VERSION = tuple(map(int, __version__.split('.')))
__all__ = [
'and_',
'or_',
'expr',
'query',
'execute',
'transaction',
'setup',
'select',
'insert',
'update',
'delete',
'database',
'DBError'
]
from db.query.select import QueryCondition
from db.query.expr import Expr as expr
from db._db import DB
from db.errors import DBError
from random import choice
def and_():
return QueryCondition('AND')
def or_():
return QueryCondition('OR')
__db = {}
def setup(config, minconn=5, maxconn=10, adapter='mysql', key='default', slave=False):
"""Setup database
:param config dict: is the db adapter config
:param key string: the key to identify dabtabase
:param adapter string: the dabtabase adapter current support mysql only
:param minconn int: the min connection for connection pool
:param maxconn int: the max connection for connection pool
:param slave boolean: If True the database can be read only.
"""
global __db
if '.' in key:
raise TypeError('The DB Key: "%s" Can\'t Contain dot' % (key))
if slave == False and key in __db:
raise DBError('The Key: "%s" was set' % (key))
database = DB(config, minconn, maxconn, key, adapter)
master_key = key
slave_key = key + '.slave'
if not slave:
__db[master_key] = database
if slave_key not in __db:
__db[slave_key] = [database]
else:
if key in __db:
databases = __db[slave_key]
if len(databases) == 1 and __db[master_key] == databases[0]:
__db[slave_key] = [database]
else:
__db[slave_key].append(database)
else:
__db[slave_key] = [database]
def query(sql, args=None, many=None, as_dict=False, key='default'):
"""The connection raw sql query, when select table, show table
to fetch records, it is compatible the dbi execute method::
:param sql string: the sql stamtement like 'select * from %s'
:param args list: Wen set None, will use dbi execute(sql), else
dbi execute(sql, args), the args keep the original rules, it shuld be tuple or list of list
:param many int: when set, the query method will return genarate an iterate
:param as_dict bool: when is True, the type of row will be dict, otherwise is tuple
:param key: a key for your dabtabase you wanna use
"""
database = choice(__db[key + '.slave'])
return database.query(sql, args, many, as_dict)
def execute(sql, args=None, key='default'):
"""It is used for update, delete records.
:param sql string: the sql stamtement like 'select * from %s'
:param args list: Wen set None, will use dbi execute(sql), else
dbi execute(sql, args), the args keep the original rules, it shuld be tuple or list of list
:param key: a key for your dabtabase you wanna use
eg::
execute('insert into users values(%s, %s)', [(1L, 'blablabla'), (2L, 'animer')])
execute('delete from users')
"""
database = __db[key]
return database.execute(sql, args)
def transaction(key='default'):
"""transaction wrapper
:param key: a key for your dabtabase you wanna use
"""
database = __db[key]
return database.transaction()
def select(table, key='default'):
"""Select dialect
:param key: a key for your dabtabase you wanna use
"""
database = choice(__db[key + '.slave'])
return database.select(table)
def insert(table, key='default'):
"""insert dialect
:param key: a key for your dabtabase you wanna use
"""
database = __db[key]
return database.insert(table)
def update(table, key='default'):
"""update dialect
:param key: a key for your dabtabase you wanna use
"""
database = __db[key]
return database.update(table)
def delete(table, key='default'):
"""delete dialect
:param key: a key for your dabtabase you wanna use
"""
database = __db[key]
return database.delete(table)
|
whiteclover/dbpy | db/_db.py | DB.query | python | def query(self, sql, args=None, many=None, as_dict=False):
con = self.pool.pop()
c = None
try:
c = con.cursor(as_dict)
LOGGER.debug("Query sql: " + sql + " args:" + str(args))
c.execute(sql, args)
if many and many > 0:
return self._yield(con, c, many)
else:
return c.fetchall()
except Exception as e:
LOGGER.error("Error Qeury on %s", str(e))
raise DBError(e.args[0], e.args[1])
finally:
many or (c and c.close())
many or (con and self.pool.push(con)) | The connection raw sql query, when select table, show table
to fetch records, it is compatible the dbi execute method.
:param sql string: the sql stamtement like 'select * from %s'
:param args list: Wen set None, will use dbi execute(sql), else
dbi execute(sql, args), the args keep the original rules, it shuld be tuple or list of list
:param many int: when set, the query method will return genarate an iterate
:param as_dict bool: when is true, the type of row will be dict, otherwise is tuple | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/_db.py#L85-L112 | [
"def _yield(self, con, cursor, many):\n try:\n result = cursor.fetchmany(many)\n while result:\n for row in result:\n yield row\n result = cursor.fetchmany(many)\n finally:\n cursor and cursor.close()\n con and self.pool.push(con)\n"
class DB(object):
    """Database facade: owns a connection pool and a SQL dialect builder."""

    # Adapter-name -> class caches shared by all DB instances, so the
    # dynamic imports in connection_class/dialect_class run once per adapter.
    adapters = {}
    dialects = {}

    def __init__(self, config, minconn=5, maxconn=10, key='defalut', adapter='mysql'):
        """Set up the DB.

        :param config dict: the db adapter connection config
        :param minconn int: the min connection count for the connection pool
        :param maxconn int: the max connection count for the connection pool
        :param key string: the key to identify the database
            (NOTE(review): the default ``'defalut'`` looks like a typo of
            ``'default'``; left as-is since callers may rely on it)
        :param adapter string: the adapter name, currently mysql only
        """
        adapter = adapter or 'mysql'  # tolerate adapter=None/'' from callers
        self.key = key
        self.adapter = adapter
        self.pool = ConnectionPool(minconn, maxconn, self.connection_class(adapter), config)
        # Dialect instances are bound to this DB so built queries execute here.
        self.dialect = self.dialect_class(adapter)(self)

    def select(self, table):
        """Return a SELECT query builder for *table*.

        :param table: table name
        :type table: str
        :returns: select query instance
        """
        return self.dialect.select(table)

    def insert(self, table):
        """Return an INSERT query builder for *table*.

        :param table: table name
        :type table: str
        :returns: insert query instance
        """
        return self.dialect.insert(table)

    def update(self, table):
        """Return an UPDATE query builder for *table*.

        :param table: table name
        :type table: str
        :returns: update query instance
        """
        return self.dialect.update(table)

    def delete(self, table):
        """Return a DELETE query builder for *table*.

        :param table: table name
        :type table: str
        :returns: delete query instance
        """
        return self.dialect.delete(table)

    def query(self, sql, args=None, many=None, as_dict=False):
        """Run a raw read query (select, show ...) and fetch records.

        :param sql string: the sql statement, e.g. ``'select * from users'``
        :param args list: when None the bare statement is executed, otherwise
            passed to the dbi ``execute(sql, args)`` (tuple or list)
        :param many int: when set (> 0), return a generator yielding rows in
            batches of *many* instead of the full result list
        :param as_dict bool: when True each row is a dict, otherwise a tuple
        :raises DBError: wrapping any dbi-level failure
        """
        con = self.pool.pop()
        c = None
        try:
            c = con.cursor(as_dict)
            LOGGER.debug("Query sql: " + sql + " args:" + str(args))
            c.execute(sql, args)
            if many and many > 0:
                # Streaming mode: the generator takes ownership of the cursor
                # and connection and releases them when exhausted (see _yield).
                return self._yield(con, c, many)
            else:
                return c.fetchall()
        except Exception as e:
            LOGGER.error("Error Qeury on %s", str(e))
            raise DBError(e.args[0], e.args[1])
        finally:
            # Only clean up here in non-streaming mode; _yield does it otherwise.
            many or (c and c.close())
            many or (con and self.pool.push(con))

    def _yield(self, con, cursor, many):
        # Generator that drains *cursor* in batches of *many* rows, then
        # closes the cursor and returns the connection to the pool.
        try:
            result = cursor.fetchmany(many)
            while result:
                for row in result:
                    yield row
                result = cursor.fetchmany(many)
        finally:
            cursor and cursor.close()
            con and self.pool.push(con)

    def execute(self, sql, args=None):
        """Run a write statement (insert, update, delete).

        :param sql string: the sql statement, e.g. ``'delete from users'``
        :param args list: None, a tuple, a flat list, or a list of
            tuples/lists (the latter triggers ``executemany``)
        :returns: ``lastrowid`` for INSERT statements, else the row count
        :raises DBError: wrapping any dbi-level failure

        eg::

            execute('insert into users values(%s, %s)', [(1L, 'blablabla'), (2L, 'animer')])
            execute('delete from users')
        """
        con = self.pool.pop()
        c = None
        try:
            c = con.cursor()
            LOGGER.debug("Execute sql: " + sql + " args:" + str(args))
            if type(args) is tuple:
                c.execute(sql, args)
            elif type(args) is list:
                if len(args) > 1 and type(args[0]) in (list, tuple):
                    c.executemany(sql, args)
                else:
                    c.execute(sql, args)
            elif args is None:
                c.execute(sql)
            # NOTE(review): any other args type (e.g. dict) silently skips
            # execution and falls through to the return -- confirm intended.
            if sql.lstrip()[:6].upper() == 'INSERT':
                return c.lastrowid
            return c.rowcount
        except Exception as e:
            LOGGER.error("Error Execute on %s", str(e))
            raise DBError(str(e))
        finally:
            c and c.close()
            con and self.pool.push(con)

    def transaction(self):
        """Return a new Transaction bound to this DB."""
        return Transaction(self)

    def connection_class(self, adapter):
        """Resolve (and cache) the connection class for *adapter*."""
        if self.adapters.get(adapter):
            return self.adapters[adapter]
        try:
            # Each adapter package exports __class_prefix__ (e.g. 'MySQL')
            # naming its Connection/Dialect classes.
            class_prefix = getattr(
                __import__('db.' + adapter, globals(), locals(),
                           ['__class_prefix__']), '__class_prefix__')
            driver = self._import_class('db.' + adapter + '.connection.' +
                                        class_prefix + 'Connection')
        except ImportError:
            raise DBError("Must install adapter `%s` or doesn't support" %
                          (adapter))
        self.adapters[adapter] = driver
        return driver

    def dialect_class(self, adapter):
        """Resolve (and cache) the SQL dialect class for *adapter*."""
        if self.dialects.get(adapter):
            return self.dialects[adapter]
        try:
            class_prefix = getattr(
                __import__('db.' + adapter, globals(), locals(),
                           ['__class_prefix__']), '__class_prefix__')
            driver = self._import_class('db.' + adapter + '.dialect.' +
                                        class_prefix + 'Dialect')
        except ImportError:
            raise DBError("Must install adapter `%s` or doesn't support" %
                          (adapter))
        self.dialects[adapter] = driver
        return driver

    def _import_class(self, module2cls):
        """Import and return a class given its dotted ``package.module.Class`` path."""
        d = module2cls.rfind(".")
        classname = module2cls[d + 1: len(module2cls)]
        m = __import__(module2cls[0:d], globals(), locals(), [classname])
        return getattr(m, classname)
|
whiteclover/dbpy | db/_db.py | DB.execute | python | def execute(self, sql, args=None):
con = self.pool.pop()
c = None
try:
c = con.cursor()
LOGGER.debug("Execute sql: " + sql + " args:" + str(args))
if type(args) is tuple:
c.execute(sql, args)
elif type(args) is list:
if len(args) > 1 and type(args[0]) in (list, tuple):
c.executemany(sql, args)
else:
c.execute(sql, args)
elif args is None:
c.execute(sql)
if sql.lstrip()[:6].upper() == 'INSERT':
return c.lastrowid
return c.rowcount
except Exception as e:
LOGGER.error("Error Execute on %s", str(e))
raise DBError(str(e))
finally:
c and c.close()
con and self.pool.push(con) | It is used for update, delete records.
:param sql string: the sql stamtement like 'select * from %s'
:param args list: Wen set None, will use dbi execute(sql), else
dbi execute(sql, args), the args keep the original rules, it shuld be tuple or list of list
eg::
execute('insert into users values(%s, %s)', [(1L, 'blablabla'), (2L, 'animer')])
execute('delete from users') | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/_db.py#L125-L159 | null | class DB(object):
adapters = {}
dialects = {}
def __init__(self, config, minconn=5, maxconn=10, key='defalut', adapter='mysql'):
""" Setup DB::
param config dict: is the db adapter config
param key string: the key to identify dabtabase
param adapter string: the dabtabase adapter current support mysql only
param minconn int: the min connection for connection pool
param maxconn int: the max connection for connection pool
"""
adapter = adapter or 'mysql'
self.key = key
self.adapter = adapter
self.pool = ConnectionPool(minconn, maxconn, self.connection_class(adapter), config)
self.dialect = self.dialect_class(adapter)(self)
def select(self, table):
"""Select sql executor
:param table: table name
:type table: str
:returns: select query instance
"""
return self.dialect.select(table)
def insert(self, table):
"""insert sql executor
:param table: table name
:type table: str
:returns: insert query instance
"""
return self.dialect.insert(table)
def update(self, table):
"""update sql executor
:param table: table name
:type table: str
:returns: update query instance
"""
return self.dialect.update(table)
def delete(self, table):
"""delete sql executor
:param table: table name
:type table: str
:returns: delete query instance
"""
return self.dialect.delete(table)
def query(self, sql, args=None, many=None, as_dict=False):
"""The connection raw sql query, when select table, show table
to fetch records, it is compatible the dbi execute method.
:param sql string: the sql stamtement like 'select * from %s'
:param args list: Wen set None, will use dbi execute(sql), else
dbi execute(sql, args), the args keep the original rules, it shuld be tuple or list of list
:param many int: when set, the query method will return genarate an iterate
:param as_dict bool: when is true, the type of row will be dict, otherwise is tuple
"""
con = self.pool.pop()
c = None
try:
c = con.cursor(as_dict)
LOGGER.debug("Query sql: " + sql + " args:" + str(args))
c.execute(sql, args)
if many and many > 0:
return self._yield(con, c, many)
else:
return c.fetchall()
except Exception as e:
LOGGER.error("Error Qeury on %s", str(e))
raise DBError(e.args[0], e.args[1])
finally:
many or (c and c.close())
many or (con and self.pool.push(con))
def _yield(self, con, cursor, many):
try:
result = cursor.fetchmany(many)
while result:
for row in result:
yield row
result = cursor.fetchmany(many)
finally:
cursor and cursor.close()
con and self.pool.push(con)
def transaction(self):
return Transaction(self)
def connection_class(self, adapter):
"""Get connection class by adapter"""
if self.adapters.get(adapter):
return self.adapters[adapter]
try:
class_prefix = getattr(
__import__('db.' + adapter, globals(), locals(),
['__class_prefix__']), '__class_prefix__')
driver = self._import_class('db.' + adapter + '.connection.' +
class_prefix + 'Connection')
except ImportError:
raise DBError("Must install adapter `%s` or doesn't support" %
(adapter))
self.adapters[adapter] = driver
return driver
def dialect_class(self, adapter):
"""Get dialect sql class by adapter"""
if self.dialects.get(adapter):
return self.dialects[adapter]
try:
class_prefix = getattr(
__import__('db.' + adapter, globals(), locals(),
['__class_prefix__']), '__class_prefix__')
driver = self._import_class('db.' + adapter + '.dialect.' +
class_prefix + 'Dialect')
except ImportError:
raise DBError("Must install adapter `%s` or doesn't support" %
(adapter))
self.dialects[adapter] = driver
return driver
def _import_class(self, module2cls):
"""Import class by module dot split string"""
d = module2cls.rfind(".")
classname = module2cls[d + 1: len(module2cls)]
m = __import__(module2cls[0:d], globals(), locals(), [classname])
return getattr(m, classname)
|
whiteclover/dbpy | db/_db.py | DB.connection_class | python | def connection_class(self, adapter):
if self.adapters.get(adapter):
return self.adapters[adapter]
try:
class_prefix = getattr(
__import__('db.' + adapter, globals(), locals(),
['__class_prefix__']), '__class_prefix__')
driver = self._import_class('db.' + adapter + '.connection.' +
class_prefix + 'Connection')
except ImportError:
raise DBError("Must install adapter `%s` or doesn't support" %
(adapter))
self.adapters[adapter] = driver
return driver | Get connection class by adapter | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/_db.py#L164-L179 | null | class DB(object):
adapters = {}
dialects = {}
def __init__(self, config, minconn=5, maxconn=10, key='defalut', adapter='mysql'):
""" Setup DB::
param config dict: is the db adapter config
param key string: the key to identify dabtabase
param adapter string: the dabtabase adapter current support mysql only
param minconn int: the min connection for connection pool
param maxconn int: the max connection for connection pool
"""
adapter = adapter or 'mysql'
self.key = key
self.adapter = adapter
self.pool = ConnectionPool(minconn, maxconn, self.connection_class(adapter), config)
self.dialect = self.dialect_class(adapter)(self)
def select(self, table):
"""Select sql executor
:param table: table name
:type table: str
:returns: select query instance
"""
return self.dialect.select(table)
def insert(self, table):
"""insert sql executor
:param table: table name
:type table: str
:returns: insert query instance
"""
return self.dialect.insert(table)
def update(self, table):
"""update sql executor
:param table: table name
:type table: str
:returns: update query instance
"""
return self.dialect.update(table)
def delete(self, table):
"""delete sql executor
:param table: table name
:type table: str
:returns: delete query instance
"""
return self.dialect.delete(table)
def query(self, sql, args=None, many=None, as_dict=False):
"""The connection raw sql query, when select table, show table
to fetch records, it is compatible the dbi execute method.
:param sql string: the sql stamtement like 'select * from %s'
:param args list: Wen set None, will use dbi execute(sql), else
dbi execute(sql, args), the args keep the original rules, it shuld be tuple or list of list
:param many int: when set, the query method will return genarate an iterate
:param as_dict bool: when is true, the type of row will be dict, otherwise is tuple
"""
con = self.pool.pop()
c = None
try:
c = con.cursor(as_dict)
LOGGER.debug("Query sql: " + sql + " args:" + str(args))
c.execute(sql, args)
if many and many > 0:
return self._yield(con, c, many)
else:
return c.fetchall()
except Exception as e:
LOGGER.error("Error Qeury on %s", str(e))
raise DBError(e.args[0], e.args[1])
finally:
many or (c and c.close())
many or (con and self.pool.push(con))
def _yield(self, con, cursor, many):
try:
result = cursor.fetchmany(many)
while result:
for row in result:
yield row
result = cursor.fetchmany(many)
finally:
cursor and cursor.close()
con and self.pool.push(con)
def execute(self, sql, args=None):
"""It is used for update, delete records.
:param sql string: the sql stamtement like 'select * from %s'
:param args list: Wen set None, will use dbi execute(sql), else
dbi execute(sql, args), the args keep the original rules, it shuld be tuple or list of list
eg::
execute('insert into users values(%s, %s)', [(1L, 'blablabla'), (2L, 'animer')])
execute('delete from users')
"""
con = self.pool.pop()
c = None
try:
c = con.cursor()
LOGGER.debug("Execute sql: " + sql + " args:" + str(args))
if type(args) is tuple:
c.execute(sql, args)
elif type(args) is list:
if len(args) > 1 and type(args[0]) in (list, tuple):
c.executemany(sql, args)
else:
c.execute(sql, args)
elif args is None:
c.execute(sql)
if sql.lstrip()[:6].upper() == 'INSERT':
return c.lastrowid
return c.rowcount
except Exception as e:
LOGGER.error("Error Execute on %s", str(e))
raise DBError(str(e))
finally:
c and c.close()
con and self.pool.push(con)
def transaction(self):
return Transaction(self)
def dialect_class(self, adapter):
"""Get dialect sql class by adapter"""
if self.dialects.get(adapter):
return self.dialects[adapter]
try:
class_prefix = getattr(
__import__('db.' + adapter, globals(), locals(),
['__class_prefix__']), '__class_prefix__')
driver = self._import_class('db.' + adapter + '.dialect.' +
class_prefix + 'Dialect')
except ImportError:
raise DBError("Must install adapter `%s` or doesn't support" %
(adapter))
self.dialects[adapter] = driver
return driver
def _import_class(self, module2cls):
"""Import class by module dot split string"""
d = module2cls.rfind(".")
classname = module2cls[d + 1: len(module2cls)]
m = __import__(module2cls[0:d], globals(), locals(), [classname])
return getattr(m, classname)
|
whiteclover/dbpy | db/_db.py | DB.dialect_class | python | def dialect_class(self, adapter):
if self.dialects.get(adapter):
return self.dialects[adapter]
try:
class_prefix = getattr(
__import__('db.' + adapter, globals(), locals(),
['__class_prefix__']), '__class_prefix__')
driver = self._import_class('db.' + adapter + '.dialect.' +
class_prefix + 'Dialect')
except ImportError:
raise DBError("Must install adapter `%s` or doesn't support" %
(adapter))
self.dialects[adapter] = driver
return driver | Get dialect sql class by adapter | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/_db.py#L181-L196 | null | class DB(object):
adapters = {}
dialects = {}
def __init__(self, config, minconn=5, maxconn=10, key='defalut', adapter='mysql'):
""" Setup DB::
param config dict: is the db adapter config
param key string: the key to identify dabtabase
param adapter string: the dabtabase adapter current support mysql only
param minconn int: the min connection for connection pool
param maxconn int: the max connection for connection pool
"""
adapter = adapter or 'mysql'
self.key = key
self.adapter = adapter
self.pool = ConnectionPool(minconn, maxconn, self.connection_class(adapter), config)
self.dialect = self.dialect_class(adapter)(self)
def select(self, table):
"""Select sql executor
:param table: table name
:type table: str
:returns: select query instance
"""
return self.dialect.select(table)
def insert(self, table):
"""insert sql executor
:param table: table name
:type table: str
:returns: insert query instance
"""
return self.dialect.insert(table)
def update(self, table):
"""update sql executor
:param table: table name
:type table: str
:returns: update query instance
"""
return self.dialect.update(table)
def delete(self, table):
"""delete sql executor
:param table: table name
:type table: str
:returns: delete query instance
"""
return self.dialect.delete(table)
def query(self, sql, args=None, many=None, as_dict=False):
"""The connection raw sql query, when select table, show table
to fetch records, it is compatible the dbi execute method.
:param sql string: the sql stamtement like 'select * from %s'
:param args list: Wen set None, will use dbi execute(sql), else
dbi execute(sql, args), the args keep the original rules, it shuld be tuple or list of list
:param many int: when set, the query method will return genarate an iterate
:param as_dict bool: when is true, the type of row will be dict, otherwise is tuple
"""
con = self.pool.pop()
c = None
try:
c = con.cursor(as_dict)
LOGGER.debug("Query sql: " + sql + " args:" + str(args))
c.execute(sql, args)
if many and many > 0:
return self._yield(con, c, many)
else:
return c.fetchall()
except Exception as e:
LOGGER.error("Error Qeury on %s", str(e))
raise DBError(e.args[0], e.args[1])
finally:
many or (c and c.close())
many or (con and self.pool.push(con))
def _yield(self, con, cursor, many):
try:
result = cursor.fetchmany(many)
while result:
for row in result:
yield row
result = cursor.fetchmany(many)
finally:
cursor and cursor.close()
con and self.pool.push(con)
def execute(self, sql, args=None):
"""It is used for update, delete records.
:param sql string: the sql stamtement like 'select * from %s'
:param args list: Wen set None, will use dbi execute(sql), else
dbi execute(sql, args), the args keep the original rules, it shuld be tuple or list of list
eg::
execute('insert into users values(%s, %s)', [(1L, 'blablabla'), (2L, 'animer')])
execute('delete from users')
"""
con = self.pool.pop()
c = None
try:
c = con.cursor()
LOGGER.debug("Execute sql: " + sql + " args:" + str(args))
if type(args) is tuple:
c.execute(sql, args)
elif type(args) is list:
if len(args) > 1 and type(args[0]) in (list, tuple):
c.executemany(sql, args)
else:
c.execute(sql, args)
elif args is None:
c.execute(sql)
if sql.lstrip()[:6].upper() == 'INSERT':
return c.lastrowid
return c.rowcount
except Exception as e:
LOGGER.error("Error Execute on %s", str(e))
raise DBError(str(e))
finally:
c and c.close()
con and self.pool.push(con)
def transaction(self):
return Transaction(self)
def connection_class(self, adapter):
"""Get connection class by adapter"""
if self.adapters.get(adapter):
return self.adapters[adapter]
try:
class_prefix = getattr(
__import__('db.' + adapter, globals(), locals(),
['__class_prefix__']), '__class_prefix__')
driver = self._import_class('db.' + adapter + '.connection.' +
class_prefix + 'Connection')
except ImportError:
raise DBError("Must install adapter `%s` or doesn't support" %
(adapter))
self.adapters[adapter] = driver
return driver
def _import_class(self, module2cls):
"""Import class by module dot split string"""
d = module2cls.rfind(".")
classname = module2cls[d + 1: len(module2cls)]
m = __import__(module2cls[0:d], globals(), locals(), [classname])
return getattr(m, classname)
|
whiteclover/dbpy | db/_db.py | DB._import_class | python | def _import_class(self, module2cls):
d = module2cls.rfind(".")
classname = module2cls[d + 1: len(module2cls)]
m = __import__(module2cls[0:d], globals(), locals(), [classname])
return getattr(m, classname) | Import class by module dot split string | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/_db.py#L198-L203 | null | class DB(object):
adapters = {}
dialects = {}
def __init__(self, config, minconn=5, maxconn=10, key='defalut', adapter='mysql'):
""" Setup DB::
param config dict: is the db adapter config
param key string: the key to identify dabtabase
param adapter string: the dabtabase adapter current support mysql only
param minconn int: the min connection for connection pool
param maxconn int: the max connection for connection pool
"""
adapter = adapter or 'mysql'
self.key = key
self.adapter = adapter
self.pool = ConnectionPool(minconn, maxconn, self.connection_class(adapter), config)
self.dialect = self.dialect_class(adapter)(self)
def select(self, table):
"""Select sql executor
:param table: table name
:type table: str
:returns: select query instance
"""
return self.dialect.select(table)
def insert(self, table):
"""insert sql executor
:param table: table name
:type table: str
:returns: insert query instance
"""
return self.dialect.insert(table)
def update(self, table):
"""update sql executor
:param table: table name
:type table: str
:returns: update query instance
"""
return self.dialect.update(table)
def delete(self, table):
"""delete sql executor
:param table: table name
:type table: str
:returns: delete query instance
"""
return self.dialect.delete(table)
def query(self, sql, args=None, many=None, as_dict=False):
"""The connection raw sql query, when select table, show table
to fetch records, it is compatible the dbi execute method.
:param sql string: the sql stamtement like 'select * from %s'
:param args list: Wen set None, will use dbi execute(sql), else
dbi execute(sql, args), the args keep the original rules, it shuld be tuple or list of list
:param many int: when set, the query method will return genarate an iterate
:param as_dict bool: when is true, the type of row will be dict, otherwise is tuple
"""
con = self.pool.pop()
c = None
try:
c = con.cursor(as_dict)
LOGGER.debug("Query sql: " + sql + " args:" + str(args))
c.execute(sql, args)
if many and many > 0:
return self._yield(con, c, many)
else:
return c.fetchall()
except Exception as e:
LOGGER.error("Error Qeury on %s", str(e))
raise DBError(e.args[0], e.args[1])
finally:
many or (c and c.close())
many or (con and self.pool.push(con))
def _yield(self, con, cursor, many):
try:
result = cursor.fetchmany(many)
while result:
for row in result:
yield row
result = cursor.fetchmany(many)
finally:
cursor and cursor.close()
con and self.pool.push(con)
def execute(self, sql, args=None):
"""It is used for update, delete records.
:param sql string: the sql stamtement like 'select * from %s'
:param args list: Wen set None, will use dbi execute(sql), else
dbi execute(sql, args), the args keep the original rules, it shuld be tuple or list of list
eg::
execute('insert into users values(%s, %s)', [(1L, 'blablabla'), (2L, 'animer')])
execute('delete from users')
"""
con = self.pool.pop()
c = None
try:
c = con.cursor()
LOGGER.debug("Execute sql: " + sql + " args:" + str(args))
if type(args) is tuple:
c.execute(sql, args)
elif type(args) is list:
if len(args) > 1 and type(args[0]) in (list, tuple):
c.executemany(sql, args)
else:
c.execute(sql, args)
elif args is None:
c.execute(sql)
if sql.lstrip()[:6].upper() == 'INSERT':
return c.lastrowid
return c.rowcount
except Exception as e:
LOGGER.error("Error Execute on %s", str(e))
raise DBError(str(e))
finally:
c and c.close()
con and self.pool.push(con)
def transaction(self):
return Transaction(self)
def connection_class(self, adapter):
"""Get connection class by adapter"""
if self.adapters.get(adapter):
return self.adapters[adapter]
try:
class_prefix = getattr(
__import__('db.' + adapter, globals(), locals(),
['__class_prefix__']), '__class_prefix__')
driver = self._import_class('db.' + adapter + '.connection.' +
class_prefix + 'Connection')
except ImportError:
raise DBError("Must install adapter `%s` or doesn't support" %
(adapter))
self.adapters[adapter] = driver
return driver
def dialect_class(self, adapter):
"""Get dialect sql class by adapter"""
if self.dialects.get(adapter):
return self.dialects[adapter]
try:
class_prefix = getattr(
__import__('db.' + adapter, globals(), locals(),
['__class_prefix__']), '__class_prefix__')
driver = self._import_class('db.' + adapter + '.dialect.' +
class_prefix + 'Dialect')
except ImportError:
raise DBError("Must install adapter `%s` or doesn't support" %
(adapter))
self.dialects[adapter] = driver
return driver
|
whiteclover/dbpy | db/_db.py | Transaction.commit | python | def commit(self):
try:
self._con.commit()
self._con.autocommit(True)
except Exception as e:
try:
self._con.rollback()
except Exception as e_:
LOGGER.error('When transaction happend error: %s', e_)
raise e
finally:
self._db.pool.push(self._con)
self._con = None
self._db = None | Commits transaction | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/_db.py#L261-L275 | null | class Transaction(object):
"""Database sql Transaction"""
def __init__(self, db):
self._db = db
self._con = None
@lazy_attr
def dialect(self):
return self._db.dialects.get(self._db.adapter)(self)
def __enter__(self):
self._con = self._db.pool.pop()
self._con.ensure_connect()
self._con.autocommit(False)
return self
def __exit__(self, exc_type, exc_value, traceback):
try:
self._con.commit()
self._con.autocommit(True)
except Exception as e:
try:
self._con.rollback()
except Exception as e_:
LOGGER.error('When transaction happend error: %s', e_)
raise e
finally:
self._db.pool.push(self._con)
self._con = None
self._db = None
def begin(self):
"""Begins transaction"""
self._con = self._db.pool.pop()
self._con.ensure_connect()
self._con.autocommit(False)
def execute(self, sql, args):
"""Execute sql
:param sql string: the sql stamtement like 'select * from %s'
:param args list: Wen set None, will use dbi execute(sql), else
db execute(sql, args), the args keep the original rules, it shuld be tuple or list of list
"""
c = None
try:
c = self._con.cursor()
LOGGER.debug("execute sql: " + sql + " args:" + str(args))
if type(args) is tuple:
c.execute(sql, args)
elif type(args) is list:
if len(args) > 1 and type(args[0]) in (list, tuple):
c.executemany(sql, args)
else:
c.execute(sql, args)
elif args is None:
c.execute(sql)
if sql.lstrip()[:6].upper() == 'INSERT':
return c.lastrowid
return c.rowcount
finally:
c and c.close()
def insert(self, table):
"""Insert sql diaect"""
return self.dialect.insert(table)
def update(self, table):
"""update sql diaect"""
return self.dialect.update(table)
def delete(self, table):
"""delete sql diaect"""
return self.dialect.delete(table)
|
whiteclover/dbpy | db/_db.py | Transaction.execute | python | def execute(self, sql, args):
c = None
try:
c = self._con.cursor()
LOGGER.debug("execute sql: " + sql + " args:" + str(args))
if type(args) is tuple:
c.execute(sql, args)
elif type(args) is list:
if len(args) > 1 and type(args[0]) in (list, tuple):
c.executemany(sql, args)
else:
c.execute(sql, args)
elif args is None:
c.execute(sql)
if sql.lstrip()[:6].upper() == 'INSERT':
return c.lastrowid
return c.rowcount
finally:
c and c.close() | Execute sql
:param sql string: the sql stamtement like 'select * from %s'
:param args list: Wen set None, will use dbi execute(sql), else
db execute(sql, args), the args keep the original rules, it shuld be tuple or list of list | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/_db.py#L277-L301 | null | class Transaction(object):
"""Database sql Transaction"""
def __init__(self, db):
self._db = db
self._con = None
@lazy_attr
def dialect(self):
return self._db.dialects.get(self._db.adapter)(self)
def __enter__(self):
self._con = self._db.pool.pop()
self._con.ensure_connect()
self._con.autocommit(False)
return self
def __exit__(self, exc_type, exc_value, traceback):
try:
self._con.commit()
self._con.autocommit(True)
except Exception as e:
try:
self._con.rollback()
except Exception as e_:
LOGGER.error('When transaction happend error: %s', e_)
raise e
finally:
self._db.pool.push(self._con)
self._con = None
self._db = None
def begin(self):
"""Begins transaction"""
self._con = self._db.pool.pop()
self._con.ensure_connect()
self._con.autocommit(False)
def commit(self):
"""Commits transaction"""
try:
self._con.commit()
self._con.autocommit(True)
except Exception as e:
try:
self._con.rollback()
except Exception as e_:
LOGGER.error('When transaction happend error: %s', e_)
raise e
finally:
self._db.pool.push(self._con)
self._con = None
self._db = None
def insert(self, table):
"""Insert sql diaect"""
return self.dialect.insert(table)
def update(self, table):
"""update sql diaect"""
return self.dialect.update(table)
def delete(self, table):
"""delete sql diaect"""
return self.dialect.delete(table)
|
whiteclover/dbpy | db/connection.py | Connection.cursor | python | def cursor(self, as_dict=False):
self.ensure_connect()
ctype = self.real_ctype(as_dict)
return self._connect.cursor(ctype) | Gets the cursor by type , if ``as_dict is ture, make a dict sql connection cursor | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/connection.py#L57-L61 | [
"def ensure_connect(self):\n \"\"\"Ensure the connetion is useable\"\"\"\n raise NotImplementedError('Must implement ensure_connect in Subclass')\n",
"def real_ctype(self, as_dict):\n \"\"\"The real sql cursor type\"\"\"\n raise NotImplementedError('Must implement real_ctype in Subclass')\n"
] | class Connection(object):
"""Base Database Connection class
:param db_options: db optional configuration, defaults to None
:type db_options: dict, optional
"""
def __init__(self, db_options=None):
db_options = db_options or {}
#: database optional configuration, defaults to None
self._db_options = self.default_options()
self._db_options.update(db_options)
#: database real connection
self._connect = None
self.initialize()
def initialize(self):
"""Initialize customize configuration in subclass"""
pass
def default_options(self):
"""Defalut options for intailize sql connection"""
return {}
def connect(self):
"""connects database"""
raise NotImplementedError('Must implement connect in Subclass')
def close(self):
"""Close connect"""
if self._connect is not None:
self._connect.close()
self._connect = None
def ensure_connect(self):
"""Ensure the connetion is useable"""
raise NotImplementedError('Must implement ensure_connect in Subclass')
def real_ctype(self, as_dict):
"""The real sql cursor type"""
raise NotImplementedError('Must implement real_ctype in Subclass')
def driver(self):
"""Get database driver"""
return None
def commit(self):
"""Commit batch execute"""
self._connect.commit()
def rollback(self):
"""Rollback database process"""
self._connect.rollback()
def autocommit(self, enable=True):
"""Sets commit to auto if True"""
self._connect.autocommit(enable)
|
whiteclover/dbpy | db/query/insert.py | InsertQuery.values | python | def values(self, values):
if isinstance(values, dict):
l = []
for column in self._columns:
l.append(values[column])
self._values.append(tuple(l))
else:
self._values.append(values)
return self | The values for insert ,
it can be a dict row or list tuple row. | train | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/query/insert.py#L36-L47 | null | class InsertQuery(Query):
def __init__(self, table, dialect, db, columns=[]):
self._columns = columns or []
self._values = []
self._table = table
self._db = db
Query.__init__(self, dialect)
def table(self, table):
self._table = table
return self
def fields(self, *columns):
self._columns.extend(columns)
return self
def compile(self):
sql = 'INSERT INTO ' + self.dialect.quote_table(self._table)
if self._columns:
sql += ' (' + ', '.join([self.dialect.quote_column(_) for _ in self._columns]) + ')'
sql += ' VALUES(' + ', '.join(['%s' for _ in range(len(self._values[0]))]) + ')'
return sql
def execute(self):
bind = self._values[0] if len(self._values) == 1 else self._values
return self._db.execute(self.to_sql(), bind)
def clear(self):
self._columns = []
self._values = []
return self
|
jslang/responsys | responsys/client.py | InteractClient.call | python | def call(self, method, *args):
try:
response = getattr(self.client.service, method)(*args)
except (URLError, SSLError) as e:
log.exception('Failed to connect to responsys service')
raise ConnectError("Request to service timed out")
except WebFault as web_fault:
fault_name = getattr(web_fault.fault, 'faultstring', None)
error = str(web_fault.fault.detail)
if fault_name == 'TableFault':
raise TableFault(error)
if fault_name == 'ListFault':
raise ListFault(error)
if fault_name == 'API_LIMIT_EXCEEDED':
raise ApiLimitError(error)
if fault_name == 'AccountFault':
raise AccountFault(error)
raise ServiceError(web_fault.fault, web_fault.document)
return response | Calls the service method defined with the arguments provided | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L110-L131 | null | class InteractClient(object):
""" Interact Client Class
Provides access to the methods defined by the Responsys Interact API. Example setup:
>>> client = InteractClient(username, password, pod)
>>> client.connect()
>>> client.merge_list_members(interact_object, records, merge_rules)
>>> client.disconnect()
Using the client class as a context manager will automatically connect using the credentials
provided, and disconnect upon context exit:
>>> with InteractClient(username, password, pod) as client:
... client.merge_list_members(interact_object, records, merge_rules)
Since responsys limits the number of active sessions per account, this can help ensure you
don't leave unused connections open.
"""
DEFAULT_SESSION_LIFETIME = 60 * 10
WSDLS = {
'2': 'https://ws2.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'5': 'https://ws5.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'rtm4': 'https://rtm4.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
'rtm4b': 'https://rtm4b.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
}
ENDPOINTS = {
'2': 'https://ws2.responsys.net/webservices/services/ResponsysWSService',
'5': 'https://ws5.responsys.net/webservices/services/ResponsysWSService',
'rtm4': 'http://rtm4.responsys.net:80/tmws/services/TriggeredMessageWS',
'rtm4b': 'http://rtm4b.responsys.net:80/tmws/services/TriggeredMessageWS',
}
@property
def wsdl(self):
return self.WSDLS[self.pod]
@property
def endpoint(self):
return self.ENDPOINTS[self.pod]
@property
def client(self):
if self._client is None:
self._client = Client(self.wsdl, location=self.endpoint, timeout=self.timeout)
return self._client
@property
def connected(self):
return getattr(self, '_connected', False)
@connected.setter
def connected(self, value):
self._connected = value
@property
def session(self):
return getattr(self, '_session', None)
@session.setter
def session(self, session_id):
self._session = type(
'Session', (tuple,), {
'is_expired': property(lambda s: s[1] + self.session_lifetime <= time()),
})([session_id, time()])
session_header = self.client.factory.create('SessionHeader')
session_header.sessionId = session_id
self.client.set_options(soapheaders=session_header)
@session.deleter
def session(self):
self._session = None
self.client.set_options(soapheaders=())
def __init__(self, username, password, pod, client=None, session_lifetime=600, timeout=5):
self.username = username
self.password = password
self.pod = pod
self.session_lifetime = session_lifetime
self.timeout = timeout
self._client = client
def __enter__(self):
self.connect()
return self
def __exit__(self, type_, value, traceback):
self.disconnect()
def connect(self):
""" Connects to the Responsys soap service
Uses the credentials passed to the client init to login and setup the session id returned.
Returns True on successful connection, otherwise False.
"""
if self.session and self.session.is_expired:
# Close the session to avoid max concurrent session errors
self.disconnect(abandon_session=True)
if not self.session:
try:
login_result = self.login(self.username, self.password)
except AccountFault:
log.error('Login failed, invalid username or password')
raise
else:
self.session = login_result.session_id
self.connected = time()
return self.connected
def disconnect(self, abandon_session=False):
""" Disconnects from the Responsys soap service
Calls the service logout method and destroys the client's session information. Returns
True on success, False otherwise.
"""
self.connected = False
if (self.session and self.session.is_expired) or abandon_session:
try:
self.logout()
except:
log.warning(
'Logout call to responsys failed, session may have not been terminated',
exc_info=True
)
del self.session
return True
# Session Management Methods
def login(self, username, password):
""" Responsys.login soap call
Accepts username and password for authentication, returns a LoginResult object.
"""
return LoginResult(self.call('login', username, password))
def logout(self):
""" Responsys.logout soap call
Returns True on success, False otherwise.
"""
return self.call('logout')
def login_with_certificate(self, encrypted_server_challenge):
""" Responsys.loginWithCertificate soap call
Accepts encrypted_server_challenge for login. Returns LoginResult.
"""
return LoginResult(self.call('loginWithCertificate', encrypted_server_challenge))
def authenticate_server(self, username, client_challenge):
""" Responsys.authenticateServer soap call
Accepts username and client_challenge to authenciate. Returns ServerAuthResult.
"""
return ServerAuthResult(self.call('authenticateServer', username, client_challenge))
# List Management Methods
def merge_list_members(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembers call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a MergeResult
"""
list_ = list_.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
merge_rule = merge_rule.get_soap_object(self.client)
return MergeResult(self.call('mergeListMembers', list_, record_data, merge_rule))
def merge_list_members_RIID(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembersRIID call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a RecipientResult
"""
list_ = list_.get_soap_object(self.client)
result = self.call('mergeListMembersRIID', list_, record_data, merge_rule)
return RecipientResult(result.recipientResult)
def delete_list_members(self, list_, query_column, ids_to_delete):
""" Responsys.deleteListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
list_ = list_.get_soap_object(self.client)
result = self.call('deleteListMembers', list_, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_list_members(self, list_, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData instance
"""
list_ = list_.get_soap_object(self.client)
result = self.call('retrieveListMembers', list_, query_column, field_list, ids_to_retrieve)
return RecordData.from_soap_type(result.recordData)
# Table Management Methods
def create_table(self, table, fields):
""" Responsys.createTable call
Accepts:
InteractObject table
list fields
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTable', table, fields)
def create_table_with_pk(self, table, fields, primary_keys):
""" Responsys.createTableWithPK call
Accepts:
InteractObject table
list fields
list primary_keys
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTableWithPK', table, fields, primary_keys)
def delete_table(self, table):
""" Responsys.deleteTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('deleteTable', table)
def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
""" Responsys.deleteProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns list of DeleteResults
"""
profile_extension = profile_extension.get_soap_object(self.client)
result = self.call(
'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_profile_extension_records(self, profile_extension, field_list, ids_to_retrieve,
query_column='RIID'):
""" Responsys.retrieveProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns RecordData
"""
profile_extension = profile_extension.get_soap_object(self.client)
return RecordData.from_soap_type(
self.call('retrieveProfileExtensionRecords',
profile_extension, query_column, field_list, ids_to_retrieve))
def truncate_table(self, table):
""" Responsys.truncateTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('truncateTable', table)
def delete_table_records(self, table, query_column, ids_to_delete):
""" Responsys.deleteTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
table = table.get_soap_object(self.client)
result = self.call('deleteTableRecords', table, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def merge_table_records(self, table, record_data, match_column_names):
""" Responsys.mergeTableRecords call
Accepts:
InteractObject table
RecordData record_data
list match_column_names
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecords', table, record_data, match_column_names))
def merge_table_records_with_pk(self, table, record_data, insert_on_no_match, update_on_match):
""" Responsys.mergeTableRecordsWithPK call
Accepts:
InteractObject table
RecordData record_data
string insert_on_no_match
string update_on_match
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecordsWithPK', table, record_data, insert_on_no_match, update_on_match))
def merge_into_profile_extension(self, profile_extension, record_data, match_column,
insert_on_no_match, update_on_match):
""" Responsys.mergeIntoProfileExtension call
Accepts:
InteractObject profile_extension
RecordData record_data
string match_column
string insert_on_no_match
string update_on_match
Returns a RecipientResult
"""
profile_extension = profile_extension.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
results = self.call(
'mergeIntoProfileExtension', profile_extension, record_data, match_column,
insert_on_no_match, update_on_match)
return [RecipientResult(result) for result in results]
def retrieve_table_records(self, table, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData
"""
table = table.get_soap_object(self.client)
return RecordData.from_soap_type(self.call(
'retrieveTableRecords', table, query_column, field_list, ids_to_retrieve))
# Campaign Management Methods
# TODO: implement
# GetLaunchStatus
# LaunchCampaign
# MergeTriggerEmail
# ScheduleCampaignLaunch
# TriggerCampaignMessage
def trigger_custom_event(self, custom_event, recipient_data=None):
custom_event = custom_event.get_soap_object(self.client)
recipient_data = [rdata.get_soap_object(self.client) for rdata in recipient_data]
results = self.call('triggerCustomEvent', custom_event, recipient_data)
return [TriggerResult(result) for result in results]
|
jslang/responsys | responsys/client.py | InteractClient.connect | python | def connect(self):
if self.session and self.session.is_expired:
# Close the session to avoid max concurrent session errors
self.disconnect(abandon_session=True)
if not self.session:
try:
login_result = self.login(self.username, self.password)
except AccountFault:
log.error('Login failed, invalid username or password')
raise
else:
self.session = login_result.session_id
self.connected = time()
return self.connected | Connects to the Responsys soap service
Uses the credentials passed to the client init to login and setup the session id returned.
Returns True on successful connection, otherwise False. | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L133-L155 | [
"def disconnect(self, abandon_session=False):\n \"\"\" Disconnects from the Responsys soap service\n\n Calls the service logout method and destroys the client's session information. Returns\n True on success, False otherwise.\n \"\"\"\n self.connected = False\n if (self.session and self.session.is_expired) or abandon_session:\n try:\n self.logout()\n except:\n log.warning(\n 'Logout call to responsys failed, session may have not been terminated',\n exc_info=True\n )\n del self.session\n return True\n",
"def login(self, username, password):\n \"\"\" Responsys.login soap call\n\n Accepts username and password for authentication, returns a LoginResult object.\n \"\"\"\n return LoginResult(self.call('login', username, password))\n"
] | class InteractClient(object):
""" Interact Client Class
Provides access to the methods defined by the Responsys Interact API. Example setup:
>>> client = InteractClient(username, password, pod)
>>> client.connect()
>>> client.merge_list_members(interact_object, records, merge_rules)
>>> client.disconnect()
Using the client class as a context manager will automatically connect using the credentials
provided, and disconnect upon context exit:
>>> with InteractClient(username, password, pod) as client:
... client.merge_list_members(interact_object, records, merge_rules)
Since responsys limits the number of active sessions per account, this can help ensure you
don't leave unused connections open.
"""
DEFAULT_SESSION_LIFETIME = 60 * 10
WSDLS = {
'2': 'https://ws2.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'5': 'https://ws5.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'rtm4': 'https://rtm4.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
'rtm4b': 'https://rtm4b.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
}
ENDPOINTS = {
'2': 'https://ws2.responsys.net/webservices/services/ResponsysWSService',
'5': 'https://ws5.responsys.net/webservices/services/ResponsysWSService',
'rtm4': 'http://rtm4.responsys.net:80/tmws/services/TriggeredMessageWS',
'rtm4b': 'http://rtm4b.responsys.net:80/tmws/services/TriggeredMessageWS',
}
@property
def wsdl(self):
return self.WSDLS[self.pod]
@property
def endpoint(self):
return self.ENDPOINTS[self.pod]
@property
def client(self):
if self._client is None:
self._client = Client(self.wsdl, location=self.endpoint, timeout=self.timeout)
return self._client
@property
def connected(self):
return getattr(self, '_connected', False)
@connected.setter
def connected(self, value):
self._connected = value
@property
def session(self):
return getattr(self, '_session', None)
@session.setter
def session(self, session_id):
self._session = type(
'Session', (tuple,), {
'is_expired': property(lambda s: s[1] + self.session_lifetime <= time()),
})([session_id, time()])
session_header = self.client.factory.create('SessionHeader')
session_header.sessionId = session_id
self.client.set_options(soapheaders=session_header)
@session.deleter
def session(self):
self._session = None
self.client.set_options(soapheaders=())
def __init__(self, username, password, pod, client=None, session_lifetime=600, timeout=5):
self.username = username
self.password = password
self.pod = pod
self.session_lifetime = session_lifetime
self.timeout = timeout
self._client = client
def __enter__(self):
self.connect()
return self
def __exit__(self, type_, value, traceback):
self.disconnect()
def call(self, method, *args):
""" Calls the service method defined with the arguments provided """
try:
response = getattr(self.client.service, method)(*args)
except (URLError, SSLError) as e:
log.exception('Failed to connect to responsys service')
raise ConnectError("Request to service timed out")
except WebFault as web_fault:
fault_name = getattr(web_fault.fault, 'faultstring', None)
error = str(web_fault.fault.detail)
if fault_name == 'TableFault':
raise TableFault(error)
if fault_name == 'ListFault':
raise ListFault(error)
if fault_name == 'API_LIMIT_EXCEEDED':
raise ApiLimitError(error)
if fault_name == 'AccountFault':
raise AccountFault(error)
raise ServiceError(web_fault.fault, web_fault.document)
return response
def disconnect(self, abandon_session=False):
""" Disconnects from the Responsys soap service
Calls the service logout method and destroys the client's session information. Returns
True on success, False otherwise.
"""
self.connected = False
if (self.session and self.session.is_expired) or abandon_session:
try:
self.logout()
except:
log.warning(
'Logout call to responsys failed, session may have not been terminated',
exc_info=True
)
del self.session
return True
# Session Management Methods
def login(self, username, password):
""" Responsys.login soap call
Accepts username and password for authentication, returns a LoginResult object.
"""
return LoginResult(self.call('login', username, password))
def logout(self):
""" Responsys.logout soap call
Returns True on success, False otherwise.
"""
return self.call('logout')
def login_with_certificate(self, encrypted_server_challenge):
""" Responsys.loginWithCertificate soap call
Accepts encrypted_server_challenge for login. Returns LoginResult.
"""
return LoginResult(self.call('loginWithCertificate', encrypted_server_challenge))
def authenticate_server(self, username, client_challenge):
""" Responsys.authenticateServer soap call
Accepts username and client_challenge to authenciate. Returns ServerAuthResult.
"""
return ServerAuthResult(self.call('authenticateServer', username, client_challenge))
# List Management Methods
def merge_list_members(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembers call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a MergeResult
"""
list_ = list_.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
merge_rule = merge_rule.get_soap_object(self.client)
return MergeResult(self.call('mergeListMembers', list_, record_data, merge_rule))
def merge_list_members_RIID(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembersRIID call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a RecipientResult
"""
list_ = list_.get_soap_object(self.client)
result = self.call('mergeListMembersRIID', list_, record_data, merge_rule)
return RecipientResult(result.recipientResult)
def delete_list_members(self, list_, query_column, ids_to_delete):
""" Responsys.deleteListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
list_ = list_.get_soap_object(self.client)
result = self.call('deleteListMembers', list_, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_list_members(self, list_, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData instance
"""
list_ = list_.get_soap_object(self.client)
result = self.call('retrieveListMembers', list_, query_column, field_list, ids_to_retrieve)
return RecordData.from_soap_type(result.recordData)
# Table Management Methods
def create_table(self, table, fields):
""" Responsys.createTable call
Accepts:
InteractObject table
list fields
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTable', table, fields)
def create_table_with_pk(self, table, fields, primary_keys):
""" Responsys.createTableWithPK call
Accepts:
InteractObject table
list fields
list primary_keys
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTableWithPK', table, fields, primary_keys)
def delete_table(self, table):
""" Responsys.deleteTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('deleteTable', table)
def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
""" Responsys.deleteProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns list of DeleteResults
"""
profile_extension = profile_extension.get_soap_object(self.client)
result = self.call(
'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_profile_extension_records(self, profile_extension, field_list, ids_to_retrieve,
query_column='RIID'):
""" Responsys.retrieveProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns RecordData
"""
profile_extension = profile_extension.get_soap_object(self.client)
return RecordData.from_soap_type(
self.call('retrieveProfileExtensionRecords',
profile_extension, query_column, field_list, ids_to_retrieve))
def truncate_table(self, table):
""" Responsys.truncateTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('truncateTable', table)
def delete_table_records(self, table, query_column, ids_to_delete):
""" Responsys.deleteTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
table = table.get_soap_object(self.client)
result = self.call('deleteTableRecords', table, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def merge_table_records(self, table, record_data, match_column_names):
""" Responsys.mergeTableRecords call
Accepts:
InteractObject table
RecordData record_data
list match_column_names
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecords', table, record_data, match_column_names))
def merge_table_records_with_pk(self, table, record_data, insert_on_no_match, update_on_match):
""" Responsys.mergeTableRecordsWithPK call
Accepts:
InteractObject table
RecordData record_data
string insert_on_no_match
string update_on_match
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecordsWithPK', table, record_data, insert_on_no_match, update_on_match))
def merge_into_profile_extension(self, profile_extension, record_data, match_column,
insert_on_no_match, update_on_match):
""" Responsys.mergeIntoProfileExtension call
Accepts:
InteractObject profile_extension
RecordData record_data
string match_column
string insert_on_no_match
string update_on_match
Returns a RecipientResult
"""
profile_extension = profile_extension.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
results = self.call(
'mergeIntoProfileExtension', profile_extension, record_data, match_column,
insert_on_no_match, update_on_match)
return [RecipientResult(result) for result in results]
def retrieve_table_records(self, table, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData
"""
table = table.get_soap_object(self.client)
return RecordData.from_soap_type(self.call(
'retrieveTableRecords', table, query_column, field_list, ids_to_retrieve))
# Campaign Management Methods
# TODO: implement
# GetLaunchStatus
# LaunchCampaign
# MergeTriggerEmail
# ScheduleCampaignLaunch
# TriggerCampaignMessage
def trigger_custom_event(self, custom_event, recipient_data=None):
custom_event = custom_event.get_soap_object(self.client)
recipient_data = [rdata.get_soap_object(self.client) for rdata in recipient_data]
results = self.call('triggerCustomEvent', custom_event, recipient_data)
return [TriggerResult(result) for result in results]
|
jslang/responsys | responsys/client.py | InteractClient.disconnect | python | def disconnect(self, abandon_session=False):
self.connected = False
if (self.session and self.session.is_expired) or abandon_session:
try:
self.logout()
except:
log.warning(
'Logout call to responsys failed, session may have not been terminated',
exc_info=True
)
del self.session
return True | Disconnects from the Responsys soap service
Calls the service logout method and destroys the client's session information. Returns
True on success, False otherwise. | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L157-L173 | [
"def logout(self):\n \"\"\" Responsys.logout soap call\n\n Returns True on success, False otherwise.\n \"\"\"\n return self.call('logout')\n"
] | class InteractClient(object):
""" Interact Client Class
Provides access to the methods defined by the Responsys Interact API. Example setup:
>>> client = InteractClient(username, password, pod)
>>> client.connect()
>>> client.merge_list_members(interact_object, records, merge_rules)
>>> client.disconnect()
Using the client class as a context manager will automatically connect using the credentials
provided, and disconnect upon context exit:
>>> with InteractClient(username, password, pod) as client:
... client.merge_list_members(interact_object, records, merge_rules)
Since responsys limits the number of active sessions per account, this can help ensure you
don't leave unused connections open.
"""
DEFAULT_SESSION_LIFETIME = 60 * 10
WSDLS = {
'2': 'https://ws2.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'5': 'https://ws5.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'rtm4': 'https://rtm4.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
'rtm4b': 'https://rtm4b.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
}
ENDPOINTS = {
'2': 'https://ws2.responsys.net/webservices/services/ResponsysWSService',
'5': 'https://ws5.responsys.net/webservices/services/ResponsysWSService',
'rtm4': 'http://rtm4.responsys.net:80/tmws/services/TriggeredMessageWS',
'rtm4b': 'http://rtm4b.responsys.net:80/tmws/services/TriggeredMessageWS',
}
@property
def wsdl(self):
return self.WSDLS[self.pod]
@property
def endpoint(self):
return self.ENDPOINTS[self.pod]
@property
def client(self):
if self._client is None:
self._client = Client(self.wsdl, location=self.endpoint, timeout=self.timeout)
return self._client
@property
def connected(self):
return getattr(self, '_connected', False)
@connected.setter
def connected(self, value):
self._connected = value
@property
def session(self):
return getattr(self, '_session', None)
@session.setter
def session(self, session_id):
self._session = type(
'Session', (tuple,), {
'is_expired': property(lambda s: s[1] + self.session_lifetime <= time()),
})([session_id, time()])
session_header = self.client.factory.create('SessionHeader')
session_header.sessionId = session_id
self.client.set_options(soapheaders=session_header)
@session.deleter
def session(self):
self._session = None
self.client.set_options(soapheaders=())
def __init__(self, username, password, pod, client=None, session_lifetime=600, timeout=5):
self.username = username
self.password = password
self.pod = pod
self.session_lifetime = session_lifetime
self.timeout = timeout
self._client = client
def __enter__(self):
self.connect()
return self
def __exit__(self, type_, value, traceback):
self.disconnect()
def call(self, method, *args):
""" Calls the service method defined with the arguments provided """
try:
response = getattr(self.client.service, method)(*args)
except (URLError, SSLError) as e:
log.exception('Failed to connect to responsys service')
raise ConnectError("Request to service timed out")
except WebFault as web_fault:
fault_name = getattr(web_fault.fault, 'faultstring', None)
error = str(web_fault.fault.detail)
if fault_name == 'TableFault':
raise TableFault(error)
if fault_name == 'ListFault':
raise ListFault(error)
if fault_name == 'API_LIMIT_EXCEEDED':
raise ApiLimitError(error)
if fault_name == 'AccountFault':
raise AccountFault(error)
raise ServiceError(web_fault.fault, web_fault.document)
return response
def connect(self):
""" Connects to the Responsys soap service
Uses the credentials passed to the client init to login and setup the session id returned.
Returns True on successful connection, otherwise False.
"""
if self.session and self.session.is_expired:
# Close the session to avoid max concurrent session errors
self.disconnect(abandon_session=True)
if not self.session:
try:
login_result = self.login(self.username, self.password)
except AccountFault:
log.error('Login failed, invalid username or password')
raise
else:
self.session = login_result.session_id
self.connected = time()
return self.connected
# Session Management Methods
def login(self, username, password):
""" Responsys.login soap call
Accepts username and password for authentication, returns a LoginResult object.
"""
return LoginResult(self.call('login', username, password))
def logout(self):
""" Responsys.logout soap call
Returns True on success, False otherwise.
"""
return self.call('logout')
def login_with_certificate(self, encrypted_server_challenge):
""" Responsys.loginWithCertificate soap call
Accepts encrypted_server_challenge for login. Returns LoginResult.
"""
return LoginResult(self.call('loginWithCertificate', encrypted_server_challenge))
def authenticate_server(self, username, client_challenge):
""" Responsys.authenticateServer soap call
Accepts username and client_challenge to authenciate. Returns ServerAuthResult.
"""
return ServerAuthResult(self.call('authenticateServer', username, client_challenge))
# List Management Methods
def merge_list_members(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembers call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a MergeResult
"""
list_ = list_.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
merge_rule = merge_rule.get_soap_object(self.client)
return MergeResult(self.call('mergeListMembers', list_, record_data, merge_rule))
def merge_list_members_RIID(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembersRIID call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a RecipientResult
"""
list_ = list_.get_soap_object(self.client)
result = self.call('mergeListMembersRIID', list_, record_data, merge_rule)
return RecipientResult(result.recipientResult)
def delete_list_members(self, list_, query_column, ids_to_delete):
""" Responsys.deleteListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
list_ = list_.get_soap_object(self.client)
result = self.call('deleteListMembers', list_, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_list_members(self, list_, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData instance
"""
list_ = list_.get_soap_object(self.client)
result = self.call('retrieveListMembers', list_, query_column, field_list, ids_to_retrieve)
return RecordData.from_soap_type(result.recordData)
# Table Management Methods
def create_table(self, table, fields):
""" Responsys.createTable call
Accepts:
InteractObject table
list fields
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTable', table, fields)
def create_table_with_pk(self, table, fields, primary_keys):
""" Responsys.createTableWithPK call
Accepts:
InteractObject table
list fields
list primary_keys
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTableWithPK', table, fields, primary_keys)
def delete_table(self, table):
""" Responsys.deleteTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('deleteTable', table)
def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
""" Responsys.deleteProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns list of DeleteResults
"""
profile_extension = profile_extension.get_soap_object(self.client)
result = self.call(
'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_profile_extension_records(self, profile_extension, field_list, ids_to_retrieve,
query_column='RIID'):
""" Responsys.retrieveProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns RecordData
"""
profile_extension = profile_extension.get_soap_object(self.client)
return RecordData.from_soap_type(
self.call('retrieveProfileExtensionRecords',
profile_extension, query_column, field_list, ids_to_retrieve))
def truncate_table(self, table):
""" Responsys.truncateTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('truncateTable', table)
def delete_table_records(self, table, query_column, ids_to_delete):
""" Responsys.deleteTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
table = table.get_soap_object(self.client)
result = self.call('deleteTableRecords', table, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def merge_table_records(self, table, record_data, match_column_names):
""" Responsys.mergeTableRecords call
Accepts:
InteractObject table
RecordData record_data
list match_column_names
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecords', table, record_data, match_column_names))
def merge_table_records_with_pk(self, table, record_data, insert_on_no_match, update_on_match):
""" Responsys.mergeTableRecordsWithPK call
Accepts:
InteractObject table
RecordData record_data
string insert_on_no_match
string update_on_match
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecordsWithPK', table, record_data, insert_on_no_match, update_on_match))
def merge_into_profile_extension(self, profile_extension, record_data, match_column,
insert_on_no_match, update_on_match):
""" Responsys.mergeIntoProfileExtension call
Accepts:
InteractObject profile_extension
RecordData record_data
string match_column
string insert_on_no_match
string update_on_match
Returns a RecipientResult
"""
profile_extension = profile_extension.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
results = self.call(
'mergeIntoProfileExtension', profile_extension, record_data, match_column,
insert_on_no_match, update_on_match)
return [RecipientResult(result) for result in results]
def retrieve_table_records(self, table, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData
"""
table = table.get_soap_object(self.client)
return RecordData.from_soap_type(self.call(
'retrieveTableRecords', table, query_column, field_list, ids_to_retrieve))
# Campaign Management Methods
# TODO: implement
# GetLaunchStatus
# LaunchCampaign
# MergeTriggerEmail
# ScheduleCampaignLaunch
# TriggerCampaignMessage
def trigger_custom_event(self, custom_event, recipient_data=None):
custom_event = custom_event.get_soap_object(self.client)
recipient_data = [rdata.get_soap_object(self.client) for rdata in recipient_data]
results = self.call('triggerCustomEvent', custom_event, recipient_data)
return [TriggerResult(result) for result in results]
|
jslang/responsys | responsys/client.py | InteractClient.merge_list_members | python | def merge_list_members(self, list_, record_data, merge_rule):
list_ = list_.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
merge_rule = merge_rule.get_soap_object(self.client)
return MergeResult(self.call('mergeListMembers', list_, record_data, merge_rule)) | Responsys.mergeListMembers call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a MergeResult | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L205-L218 | [
"def call(self, method, *args):\n \"\"\" Calls the service method defined with the arguments provided \"\"\"\n try:\n response = getattr(self.client.service, method)(*args)\n except (URLError, SSLError) as e:\n log.exception('Failed to connect to responsys service')\n raise ConnectError(\"Request to service timed out\")\n except WebFault as web_fault:\n fault_name = getattr(web_fault.fault, 'faultstring', None)\n error = str(web_fault.fault.detail)\n\n if fault_name == 'TableFault':\n raise TableFault(error)\n if fault_name == 'ListFault':\n raise ListFault(error)\n if fault_name == 'API_LIMIT_EXCEEDED':\n raise ApiLimitError(error)\n if fault_name == 'AccountFault':\n raise AccountFault(error)\n\n raise ServiceError(web_fault.fault, web_fault.document)\n return response\n"
] | class InteractClient(object):
""" Interact Client Class
Provides access to the methods defined by the Responsys Interact API. Example setup:
>>> client = InteractClient(username, password, pod)
>>> client.connect()
>>> client.merge_list_members(interact_object, records, merge_rules)
>>> client.disconnect()
Using the client class as a context manager will automatically connect using the credentials
provided, and disconnect upon context exit:
>>> with InteractClient(username, password, pod) as client:
... client.merge_list_members(interact_object, records, merge_rules)
Since responsys limits the number of active sessions per account, this can help ensure you
don't leave unused connections open.
"""
DEFAULT_SESSION_LIFETIME = 60 * 10
WSDLS = {
'2': 'https://ws2.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'5': 'https://ws5.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'rtm4': 'https://rtm4.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
'rtm4b': 'https://rtm4b.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
}
ENDPOINTS = {
'2': 'https://ws2.responsys.net/webservices/services/ResponsysWSService',
'5': 'https://ws5.responsys.net/webservices/services/ResponsysWSService',
'rtm4': 'http://rtm4.responsys.net:80/tmws/services/TriggeredMessageWS',
'rtm4b': 'http://rtm4b.responsys.net:80/tmws/services/TriggeredMessageWS',
}
@property
def wsdl(self):
return self.WSDLS[self.pod]
@property
def endpoint(self):
return self.ENDPOINTS[self.pod]
@property
def client(self):
if self._client is None:
self._client = Client(self.wsdl, location=self.endpoint, timeout=self.timeout)
return self._client
@property
def connected(self):
return getattr(self, '_connected', False)
@connected.setter
def connected(self, value):
self._connected = value
@property
def session(self):
return getattr(self, '_session', None)
@session.setter
def session(self, session_id):
self._session = type(
'Session', (tuple,), {
'is_expired': property(lambda s: s[1] + self.session_lifetime <= time()),
})([session_id, time()])
session_header = self.client.factory.create('SessionHeader')
session_header.sessionId = session_id
self.client.set_options(soapheaders=session_header)
@session.deleter
def session(self):
self._session = None
self.client.set_options(soapheaders=())
def __init__(self, username, password, pod, client=None, session_lifetime=600, timeout=5):
self.username = username
self.password = password
self.pod = pod
self.session_lifetime = session_lifetime
self.timeout = timeout
self._client = client
def __enter__(self):
self.connect()
return self
def __exit__(self, type_, value, traceback):
self.disconnect()
def call(self, method, *args):
""" Calls the service method defined with the arguments provided """
try:
response = getattr(self.client.service, method)(*args)
except (URLError, SSLError) as e:
log.exception('Failed to connect to responsys service')
raise ConnectError("Request to service timed out")
except WebFault as web_fault:
fault_name = getattr(web_fault.fault, 'faultstring', None)
error = str(web_fault.fault.detail)
if fault_name == 'TableFault':
raise TableFault(error)
if fault_name == 'ListFault':
raise ListFault(error)
if fault_name == 'API_LIMIT_EXCEEDED':
raise ApiLimitError(error)
if fault_name == 'AccountFault':
raise AccountFault(error)
raise ServiceError(web_fault.fault, web_fault.document)
return response
def connect(self):
""" Connects to the Responsys soap service
Uses the credentials passed to the client init to login and setup the session id returned.
Returns True on successful connection, otherwise False.
"""
if self.session and self.session.is_expired:
# Close the session to avoid max concurrent session errors
self.disconnect(abandon_session=True)
if not self.session:
try:
login_result = self.login(self.username, self.password)
except AccountFault:
log.error('Login failed, invalid username or password')
raise
else:
self.session = login_result.session_id
self.connected = time()
return self.connected
def disconnect(self, abandon_session=False):
""" Disconnects from the Responsys soap service
Calls the service logout method and destroys the client's session information. Returns
True on success, False otherwise.
"""
self.connected = False
if (self.session and self.session.is_expired) or abandon_session:
try:
self.logout()
except:
log.warning(
'Logout call to responsys failed, session may have not been terminated',
exc_info=True
)
del self.session
return True
# Session Management Methods
def login(self, username, password):
""" Responsys.login soap call
Accepts username and password for authentication, returns a LoginResult object.
"""
return LoginResult(self.call('login', username, password))
def logout(self):
""" Responsys.logout soap call
Returns True on success, False otherwise.
"""
return self.call('logout')
def login_with_certificate(self, encrypted_server_challenge):
""" Responsys.loginWithCertificate soap call
Accepts encrypted_server_challenge for login. Returns LoginResult.
"""
return LoginResult(self.call('loginWithCertificate', encrypted_server_challenge))
def authenticate_server(self, username, client_challenge):
""" Responsys.authenticateServer soap call
Accepts username and client_challenge to authenciate. Returns ServerAuthResult.
"""
return ServerAuthResult(self.call('authenticateServer', username, client_challenge))
# List Management Methods
def merge_list_members_RIID(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembersRIID call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a RecipientResult
"""
list_ = list_.get_soap_object(self.client)
result = self.call('mergeListMembersRIID', list_, record_data, merge_rule)
return RecipientResult(result.recipientResult)
def delete_list_members(self, list_, query_column, ids_to_delete):
""" Responsys.deleteListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
list_ = list_.get_soap_object(self.client)
result = self.call('deleteListMembers', list_, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_list_members(self, list_, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData instance
"""
list_ = list_.get_soap_object(self.client)
result = self.call('retrieveListMembers', list_, query_column, field_list, ids_to_retrieve)
return RecordData.from_soap_type(result.recordData)
# Table Management Methods
def create_table(self, table, fields):
""" Responsys.createTable call
Accepts:
InteractObject table
list fields
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTable', table, fields)
def create_table_with_pk(self, table, fields, primary_keys):
""" Responsys.createTableWithPK call
Accepts:
InteractObject table
list fields
list primary_keys
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTableWithPK', table, fields, primary_keys)
def delete_table(self, table):
""" Responsys.deleteTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('deleteTable', table)
def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
""" Responsys.deleteProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns list of DeleteResults
"""
profile_extension = profile_extension.get_soap_object(self.client)
result = self.call(
'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_profile_extension_records(self, profile_extension, field_list, ids_to_retrieve,
query_column='RIID'):
""" Responsys.retrieveProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns RecordData
"""
profile_extension = profile_extension.get_soap_object(self.client)
return RecordData.from_soap_type(
self.call('retrieveProfileExtensionRecords',
profile_extension, query_column, field_list, ids_to_retrieve))
def truncate_table(self, table):
""" Responsys.truncateTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('truncateTable', table)
def delete_table_records(self, table, query_column, ids_to_delete):
""" Responsys.deleteTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
table = table.get_soap_object(self.client)
result = self.call('deleteTableRecords', table, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def merge_table_records(self, table, record_data, match_column_names):
""" Responsys.mergeTableRecords call
Accepts:
InteractObject table
RecordData record_data
list match_column_names
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecords', table, record_data, match_column_names))
def merge_table_records_with_pk(self, table, record_data, insert_on_no_match, update_on_match):
""" Responsys.mergeTableRecordsWithPK call
Accepts:
InteractObject table
RecordData record_data
string insert_on_no_match
string update_on_match
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecordsWithPK', table, record_data, insert_on_no_match, update_on_match))
def merge_into_profile_extension(self, profile_extension, record_data, match_column,
insert_on_no_match, update_on_match):
""" Responsys.mergeIntoProfileExtension call
Accepts:
InteractObject profile_extension
RecordData record_data
string match_column
string insert_on_no_match
string update_on_match
Returns a RecipientResult
"""
profile_extension = profile_extension.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
results = self.call(
'mergeIntoProfileExtension', profile_extension, record_data, match_column,
insert_on_no_match, update_on_match)
return [RecipientResult(result) for result in results]
def retrieve_table_records(self, table, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData
"""
table = table.get_soap_object(self.client)
return RecordData.from_soap_type(self.call(
'retrieveTableRecords', table, query_column, field_list, ids_to_retrieve))
# Campaign Management Methods
# TODO: implement
# GetLaunchStatus
# LaunchCampaign
# MergeTriggerEmail
# ScheduleCampaignLaunch
# TriggerCampaignMessage
def trigger_custom_event(self, custom_event, recipient_data=None):
custom_event = custom_event.get_soap_object(self.client)
recipient_data = [rdata.get_soap_object(self.client) for rdata in recipient_data]
results = self.call('triggerCustomEvent', custom_event, recipient_data)
return [TriggerResult(result) for result in results]
|
jslang/responsys | responsys/client.py | InteractClient.merge_list_members_RIID | python | def merge_list_members_RIID(self, list_, record_data, merge_rule):
list_ = list_.get_soap_object(self.client)
result = self.call('mergeListMembersRIID', list_, record_data, merge_rule)
return RecipientResult(result.recipientResult) | Responsys.mergeListMembersRIID call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a RecipientResult | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L220-L232 | [
"def call(self, method, *args):\n \"\"\" Calls the service method defined with the arguments provided \"\"\"\n try:\n response = getattr(self.client.service, method)(*args)\n except (URLError, SSLError) as e:\n log.exception('Failed to connect to responsys service')\n raise ConnectError(\"Request to service timed out\")\n except WebFault as web_fault:\n fault_name = getattr(web_fault.fault, 'faultstring', None)\n error = str(web_fault.fault.detail)\n\n if fault_name == 'TableFault':\n raise TableFault(error)\n if fault_name == 'ListFault':\n raise ListFault(error)\n if fault_name == 'API_LIMIT_EXCEEDED':\n raise ApiLimitError(error)\n if fault_name == 'AccountFault':\n raise AccountFault(error)\n\n raise ServiceError(web_fault.fault, web_fault.document)\n return response\n"
] | class InteractClient(object):
""" Interact Client Class
Provides access to the methods defined by the Responsys Interact API. Example setup:
>>> client = InteractClient(username, password, pod)
>>> client.connect()
>>> client.merge_list_members(interact_object, records, merge_rules)
>>> client.disconnect()
Using the client class as a context manager will automatically connect using the credentials
provided, and disconnect upon context exit:
>>> with InteractClient(username, password, pod) as client:
... client.merge_list_members(interact_object, records, merge_rules)
Since responsys limits the number of active sessions per account, this can help ensure you
don't leave unused connections open.
"""
DEFAULT_SESSION_LIFETIME = 60 * 10
WSDLS = {
'2': 'https://ws2.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'5': 'https://ws5.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'rtm4': 'https://rtm4.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
'rtm4b': 'https://rtm4b.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
}
ENDPOINTS = {
'2': 'https://ws2.responsys.net/webservices/services/ResponsysWSService',
'5': 'https://ws5.responsys.net/webservices/services/ResponsysWSService',
'rtm4': 'http://rtm4.responsys.net:80/tmws/services/TriggeredMessageWS',
'rtm4b': 'http://rtm4b.responsys.net:80/tmws/services/TriggeredMessageWS',
}
@property
def wsdl(self):
return self.WSDLS[self.pod]
@property
def endpoint(self):
return self.ENDPOINTS[self.pod]
@property
def client(self):
if self._client is None:
self._client = Client(self.wsdl, location=self.endpoint, timeout=self.timeout)
return self._client
@property
def connected(self):
return getattr(self, '_connected', False)
@connected.setter
def connected(self, value):
self._connected = value
@property
def session(self):
return getattr(self, '_session', None)
@session.setter
def session(self, session_id):
self._session = type(
'Session', (tuple,), {
'is_expired': property(lambda s: s[1] + self.session_lifetime <= time()),
})([session_id, time()])
session_header = self.client.factory.create('SessionHeader')
session_header.sessionId = session_id
self.client.set_options(soapheaders=session_header)
@session.deleter
def session(self):
self._session = None
self.client.set_options(soapheaders=())
def __init__(self, username, password, pod, client=None, session_lifetime=600, timeout=5):
self.username = username
self.password = password
self.pod = pod
self.session_lifetime = session_lifetime
self.timeout = timeout
self._client = client
def __enter__(self):
self.connect()
return self
def __exit__(self, type_, value, traceback):
self.disconnect()
def call(self, method, *args):
""" Calls the service method defined with the arguments provided """
try:
response = getattr(self.client.service, method)(*args)
except (URLError, SSLError) as e:
log.exception('Failed to connect to responsys service')
raise ConnectError("Request to service timed out")
except WebFault as web_fault:
fault_name = getattr(web_fault.fault, 'faultstring', None)
error = str(web_fault.fault.detail)
if fault_name == 'TableFault':
raise TableFault(error)
if fault_name == 'ListFault':
raise ListFault(error)
if fault_name == 'API_LIMIT_EXCEEDED':
raise ApiLimitError(error)
if fault_name == 'AccountFault':
raise AccountFault(error)
raise ServiceError(web_fault.fault, web_fault.document)
return response
def connect(self):
""" Connects to the Responsys soap service
Uses the credentials passed to the client init to login and setup the session id returned.
Returns True on successful connection, otherwise False.
"""
if self.session and self.session.is_expired:
# Close the session to avoid max concurrent session errors
self.disconnect(abandon_session=True)
if not self.session:
try:
login_result = self.login(self.username, self.password)
except AccountFault:
log.error('Login failed, invalid username or password')
raise
else:
self.session = login_result.session_id
self.connected = time()
return self.connected
def disconnect(self, abandon_session=False):
""" Disconnects from the Responsys soap service
Calls the service logout method and destroys the client's session information. Returns
True on success, False otherwise.
"""
self.connected = False
if (self.session and self.session.is_expired) or abandon_session:
try:
self.logout()
except:
log.warning(
'Logout call to responsys failed, session may have not been terminated',
exc_info=True
)
del self.session
return True
# Session Management Methods
def login(self, username, password):
""" Responsys.login soap call
Accepts username and password for authentication, returns a LoginResult object.
"""
return LoginResult(self.call('login', username, password))
def logout(self):
""" Responsys.logout soap call
Returns True on success, False otherwise.
"""
return self.call('logout')
def login_with_certificate(self, encrypted_server_challenge):
""" Responsys.loginWithCertificate soap call
Accepts encrypted_server_challenge for login. Returns LoginResult.
"""
return LoginResult(self.call('loginWithCertificate', encrypted_server_challenge))
def authenticate_server(self, username, client_challenge):
""" Responsys.authenticateServer soap call
Accepts username and client_challenge to authenciate. Returns ServerAuthResult.
"""
return ServerAuthResult(self.call('authenticateServer', username, client_challenge))
# List Management Methods
def merge_list_members(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembers call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a MergeResult
"""
list_ = list_.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
merge_rule = merge_rule.get_soap_object(self.client)
return MergeResult(self.call('mergeListMembers', list_, record_data, merge_rule))
def delete_list_members(self, list_, query_column, ids_to_delete):
""" Responsys.deleteListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
list_ = list_.get_soap_object(self.client)
result = self.call('deleteListMembers', list_, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_list_members(self, list_, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData instance
"""
list_ = list_.get_soap_object(self.client)
result = self.call('retrieveListMembers', list_, query_column, field_list, ids_to_retrieve)
return RecordData.from_soap_type(result.recordData)
# Table Management Methods
def create_table(self, table, fields):
""" Responsys.createTable call
Accepts:
InteractObject table
list fields
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTable', table, fields)
def create_table_with_pk(self, table, fields, primary_keys):
""" Responsys.createTableWithPK call
Accepts:
InteractObject table
list fields
list primary_keys
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTableWithPK', table, fields, primary_keys)
def delete_table(self, table):
""" Responsys.deleteTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('deleteTable', table)
def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
""" Responsys.deleteProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns list of DeleteResults
"""
profile_extension = profile_extension.get_soap_object(self.client)
result = self.call(
'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_profile_extension_records(self, profile_extension, field_list, ids_to_retrieve,
query_column='RIID'):
""" Responsys.retrieveProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns RecordData
"""
profile_extension = profile_extension.get_soap_object(self.client)
return RecordData.from_soap_type(
self.call('retrieveProfileExtensionRecords',
profile_extension, query_column, field_list, ids_to_retrieve))
def truncate_table(self, table):
""" Responsys.truncateTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('truncateTable', table)
def delete_table_records(self, table, query_column, ids_to_delete):
""" Responsys.deleteTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
table = table.get_soap_object(self.client)
result = self.call('deleteTableRecords', table, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def merge_table_records(self, table, record_data, match_column_names):
""" Responsys.mergeTableRecords call
Accepts:
InteractObject table
RecordData record_data
list match_column_names
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecords', table, record_data, match_column_names))
def merge_table_records_with_pk(self, table, record_data, insert_on_no_match, update_on_match):
""" Responsys.mergeTableRecordsWithPK call
Accepts:
InteractObject table
RecordData record_data
string insert_on_no_match
string update_on_match
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecordsWithPK', table, record_data, insert_on_no_match, update_on_match))
def merge_into_profile_extension(self, profile_extension, record_data, match_column,
insert_on_no_match, update_on_match):
""" Responsys.mergeIntoProfileExtension call
Accepts:
InteractObject profile_extension
RecordData record_data
string match_column
string insert_on_no_match
string update_on_match
Returns a RecipientResult
"""
profile_extension = profile_extension.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
results = self.call(
'mergeIntoProfileExtension', profile_extension, record_data, match_column,
insert_on_no_match, update_on_match)
return [RecipientResult(result) for result in results]
def retrieve_table_records(self, table, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData
"""
table = table.get_soap_object(self.client)
return RecordData.from_soap_type(self.call(
'retrieveTableRecords', table, query_column, field_list, ids_to_retrieve))
# Campaign Management Methods
# TODO: implement
# GetLaunchStatus
# LaunchCampaign
# MergeTriggerEmail
# ScheduleCampaignLaunch
# TriggerCampaignMessage
def trigger_custom_event(self, custom_event, recipient_data=None):
custom_event = custom_event.get_soap_object(self.client)
recipient_data = [rdata.get_soap_object(self.client) for rdata in recipient_data]
results = self.call('triggerCustomEvent', custom_event, recipient_data)
return [TriggerResult(result) for result in results]
|
jslang/responsys | responsys/client.py | InteractClient.delete_list_members | python | def delete_list_members(self, list_, query_column, ids_to_delete):
list_ = list_.get_soap_object(self.client)
result = self.call('deleteListMembers', list_, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)] | Responsys.deleteListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L234-L249 | [
"def call(self, method, *args):\n \"\"\" Calls the service method defined with the arguments provided \"\"\"\n try:\n response = getattr(self.client.service, method)(*args)\n except (URLError, SSLError) as e:\n log.exception('Failed to connect to responsys service')\n raise ConnectError(\"Request to service timed out\")\n except WebFault as web_fault:\n fault_name = getattr(web_fault.fault, 'faultstring', None)\n error = str(web_fault.fault.detail)\n\n if fault_name == 'TableFault':\n raise TableFault(error)\n if fault_name == 'ListFault':\n raise ListFault(error)\n if fault_name == 'API_LIMIT_EXCEEDED':\n raise ApiLimitError(error)\n if fault_name == 'AccountFault':\n raise AccountFault(error)\n\n raise ServiceError(web_fault.fault, web_fault.document)\n return response\n"
] | class InteractClient(object):
""" Interact Client Class
Provides access to the methods defined by the Responsys Interact API. Example setup:
>>> client = InteractClient(username, password, pod)
>>> client.connect()
>>> client.merge_list_members(interact_object, records, merge_rules)
>>> client.disconnect()
Using the client class as a context manager will automatically connect using the credentials
provided, and disconnect upon context exit:
>>> with InteractClient(username, password, pod) as client:
... client.merge_list_members(interact_object, records, merge_rules)
Since responsys limits the number of active sessions per account, this can help ensure you
don't leave unused connections open.
"""
DEFAULT_SESSION_LIFETIME = 60 * 10
WSDLS = {
'2': 'https://ws2.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'5': 'https://ws5.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'rtm4': 'https://rtm4.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
'rtm4b': 'https://rtm4b.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
}
ENDPOINTS = {
'2': 'https://ws2.responsys.net/webservices/services/ResponsysWSService',
'5': 'https://ws5.responsys.net/webservices/services/ResponsysWSService',
'rtm4': 'http://rtm4.responsys.net:80/tmws/services/TriggeredMessageWS',
'rtm4b': 'http://rtm4b.responsys.net:80/tmws/services/TriggeredMessageWS',
}
@property
def wsdl(self):
return self.WSDLS[self.pod]
@property
def endpoint(self):
return self.ENDPOINTS[self.pod]
@property
def client(self):
if self._client is None:
self._client = Client(self.wsdl, location=self.endpoint, timeout=self.timeout)
return self._client
@property
def connected(self):
return getattr(self, '_connected', False)
@connected.setter
def connected(self, value):
self._connected = value
@property
def session(self):
return getattr(self, '_session', None)
@session.setter
def session(self, session_id):
self._session = type(
'Session', (tuple,), {
'is_expired': property(lambda s: s[1] + self.session_lifetime <= time()),
})([session_id, time()])
session_header = self.client.factory.create('SessionHeader')
session_header.sessionId = session_id
self.client.set_options(soapheaders=session_header)
@session.deleter
def session(self):
self._session = None
self.client.set_options(soapheaders=())
def __init__(self, username, password, pod, client=None, session_lifetime=600, timeout=5):
self.username = username
self.password = password
self.pod = pod
self.session_lifetime = session_lifetime
self.timeout = timeout
self._client = client
def __enter__(self):
self.connect()
return self
def __exit__(self, type_, value, traceback):
self.disconnect()
def call(self, method, *args):
""" Calls the service method defined with the arguments provided """
try:
response = getattr(self.client.service, method)(*args)
except (URLError, SSLError) as e:
log.exception('Failed to connect to responsys service')
raise ConnectError("Request to service timed out")
except WebFault as web_fault:
fault_name = getattr(web_fault.fault, 'faultstring', None)
error = str(web_fault.fault.detail)
if fault_name == 'TableFault':
raise TableFault(error)
if fault_name == 'ListFault':
raise ListFault(error)
if fault_name == 'API_LIMIT_EXCEEDED':
raise ApiLimitError(error)
if fault_name == 'AccountFault':
raise AccountFault(error)
raise ServiceError(web_fault.fault, web_fault.document)
return response
def connect(self):
""" Connects to the Responsys soap service
Uses the credentials passed to the client init to login and setup the session id returned.
Returns True on successful connection, otherwise False.
"""
if self.session and self.session.is_expired:
# Close the session to avoid max concurrent session errors
self.disconnect(abandon_session=True)
if not self.session:
try:
login_result = self.login(self.username, self.password)
except AccountFault:
log.error('Login failed, invalid username or password')
raise
else:
self.session = login_result.session_id
self.connected = time()
return self.connected
def disconnect(self, abandon_session=False):
""" Disconnects from the Responsys soap service
Calls the service logout method and destroys the client's session information. Returns
True on success, False otherwise.
"""
self.connected = False
if (self.session and self.session.is_expired) or abandon_session:
try:
self.logout()
except:
log.warning(
'Logout call to responsys failed, session may have not been terminated',
exc_info=True
)
del self.session
return True
# Session Management Methods
def login(self, username, password):
""" Responsys.login soap call
Accepts username and password for authentication, returns a LoginResult object.
"""
return LoginResult(self.call('login', username, password))
def logout(self):
""" Responsys.logout soap call
Returns True on success, False otherwise.
"""
return self.call('logout')
def login_with_certificate(self, encrypted_server_challenge):
""" Responsys.loginWithCertificate soap call
Accepts encrypted_server_challenge for login. Returns LoginResult.
"""
return LoginResult(self.call('loginWithCertificate', encrypted_server_challenge))
def authenticate_server(self, username, client_challenge):
""" Responsys.authenticateServer soap call
Accepts username and client_challenge to authenciate. Returns ServerAuthResult.
"""
return ServerAuthResult(self.call('authenticateServer', username, client_challenge))
# List Management Methods
def merge_list_members(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembers call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a MergeResult
"""
list_ = list_.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
merge_rule = merge_rule.get_soap_object(self.client)
return MergeResult(self.call('mergeListMembers', list_, record_data, merge_rule))
def merge_list_members_RIID(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembersRIID call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a RecipientResult
"""
list_ = list_.get_soap_object(self.client)
result = self.call('mergeListMembersRIID', list_, record_data, merge_rule)
return RecipientResult(result.recipientResult)
def retrieve_list_members(self, list_, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData instance
"""
list_ = list_.get_soap_object(self.client)
result = self.call('retrieveListMembers', list_, query_column, field_list, ids_to_retrieve)
return RecordData.from_soap_type(result.recordData)
# Table Management Methods
def create_table(self, table, fields):
""" Responsys.createTable call
Accepts:
InteractObject table
list fields
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTable', table, fields)
def create_table_with_pk(self, table, fields, primary_keys):
""" Responsys.createTableWithPK call
Accepts:
InteractObject table
list fields
list primary_keys
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTableWithPK', table, fields, primary_keys)
def delete_table(self, table):
""" Responsys.deleteTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('deleteTable', table)
def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
""" Responsys.deleteProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns list of DeleteResults
"""
profile_extension = profile_extension.get_soap_object(self.client)
result = self.call(
'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_profile_extension_records(self, profile_extension, field_list, ids_to_retrieve,
query_column='RIID'):
""" Responsys.retrieveProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns RecordData
"""
profile_extension = profile_extension.get_soap_object(self.client)
return RecordData.from_soap_type(
self.call('retrieveProfileExtensionRecords',
profile_extension, query_column, field_list, ids_to_retrieve))
def truncate_table(self, table):
""" Responsys.truncateTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('truncateTable', table)
def delete_table_records(self, table, query_column, ids_to_delete):
""" Responsys.deleteTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
table = table.get_soap_object(self.client)
result = self.call('deleteTableRecords', table, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def merge_table_records(self, table, record_data, match_column_names):
""" Responsys.mergeTableRecords call
Accepts:
InteractObject table
RecordData record_data
list match_column_names
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecords', table, record_data, match_column_names))
def merge_table_records_with_pk(self, table, record_data, insert_on_no_match, update_on_match):
""" Responsys.mergeTableRecordsWithPK call
Accepts:
InteractObject table
RecordData record_data
string insert_on_no_match
string update_on_match
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecordsWithPK', table, record_data, insert_on_no_match, update_on_match))
def merge_into_profile_extension(self, profile_extension, record_data, match_column,
insert_on_no_match, update_on_match):
""" Responsys.mergeIntoProfileExtension call
Accepts:
InteractObject profile_extension
RecordData record_data
string match_column
string insert_on_no_match
string update_on_match
Returns a RecipientResult
"""
profile_extension = profile_extension.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
results = self.call(
'mergeIntoProfileExtension', profile_extension, record_data, match_column,
insert_on_no_match, update_on_match)
return [RecipientResult(result) for result in results]
def retrieve_table_records(self, table, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData
"""
table = table.get_soap_object(self.client)
return RecordData.from_soap_type(self.call(
'retrieveTableRecords', table, query_column, field_list, ids_to_retrieve))
# Campaign Management Methods
# TODO: implement
# GetLaunchStatus
# LaunchCampaign
# MergeTriggerEmail
# ScheduleCampaignLaunch
# TriggerCampaignMessage
def trigger_custom_event(self, custom_event, recipient_data=None):
custom_event = custom_event.get_soap_object(self.client)
recipient_data = [rdata.get_soap_object(self.client) for rdata in recipient_data]
results = self.call('triggerCustomEvent', custom_event, recipient_data)
return [TriggerResult(result) for result in results]
|
jslang/responsys | responsys/client.py | InteractClient.retrieve_list_members | python | def retrieve_list_members(self, list_, query_column, field_list, ids_to_retrieve):
list_ = list_.get_soap_object(self.client)
result = self.call('retrieveListMembers', list_, query_column, field_list, ids_to_retrieve)
return RecordData.from_soap_type(result.recordData) | Responsys.retrieveListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData instance | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L251-L265 | [
"def call(self, method, *args):\n \"\"\" Calls the service method defined with the arguments provided \"\"\"\n try:\n response = getattr(self.client.service, method)(*args)\n except (URLError, SSLError) as e:\n log.exception('Failed to connect to responsys service')\n raise ConnectError(\"Request to service timed out\")\n except WebFault as web_fault:\n fault_name = getattr(web_fault.fault, 'faultstring', None)\n error = str(web_fault.fault.detail)\n\n if fault_name == 'TableFault':\n raise TableFault(error)\n if fault_name == 'ListFault':\n raise ListFault(error)\n if fault_name == 'API_LIMIT_EXCEEDED':\n raise ApiLimitError(error)\n if fault_name == 'AccountFault':\n raise AccountFault(error)\n\n raise ServiceError(web_fault.fault, web_fault.document)\n return response\n",
"def from_soap_type(cls, record_data):\n record_data = [\n dict(zip(record_data.fieldNames, r.fieldValues)) for r in record_data.records]\n return cls(record_data)\n"
] | class InteractClient(object):
""" Interact Client Class
Provides access to the methods defined by the Responsys Interact API. Example setup:
>>> client = InteractClient(username, password, pod)
>>> client.connect()
>>> client.merge_list_members(interact_object, records, merge_rules)
>>> client.disconnect()
Using the client class as a context manager will automatically connect using the credentials
provided, and disconnect upon context exit:
>>> with InteractClient(username, password, pod) as client:
... client.merge_list_members(interact_object, records, merge_rules)
Since responsys limits the number of active sessions per account, this can help ensure you
don't leave unused connections open.
"""
DEFAULT_SESSION_LIFETIME = 60 * 10
WSDLS = {
'2': 'https://ws2.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'5': 'https://ws5.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'rtm4': 'https://rtm4.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
'rtm4b': 'https://rtm4b.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
}
ENDPOINTS = {
'2': 'https://ws2.responsys.net/webservices/services/ResponsysWSService',
'5': 'https://ws5.responsys.net/webservices/services/ResponsysWSService',
'rtm4': 'http://rtm4.responsys.net:80/tmws/services/TriggeredMessageWS',
'rtm4b': 'http://rtm4b.responsys.net:80/tmws/services/TriggeredMessageWS',
}
@property
def wsdl(self):
return self.WSDLS[self.pod]
@property
def endpoint(self):
return self.ENDPOINTS[self.pod]
@property
def client(self):
if self._client is None:
self._client = Client(self.wsdl, location=self.endpoint, timeout=self.timeout)
return self._client
@property
def connected(self):
return getattr(self, '_connected', False)
@connected.setter
def connected(self, value):
self._connected = value
@property
def session(self):
return getattr(self, '_session', None)
@session.setter
def session(self, session_id):
self._session = type(
'Session', (tuple,), {
'is_expired': property(lambda s: s[1] + self.session_lifetime <= time()),
})([session_id, time()])
session_header = self.client.factory.create('SessionHeader')
session_header.sessionId = session_id
self.client.set_options(soapheaders=session_header)
@session.deleter
def session(self):
self._session = None
self.client.set_options(soapheaders=())
def __init__(self, username, password, pod, client=None, session_lifetime=600, timeout=5):
self.username = username
self.password = password
self.pod = pod
self.session_lifetime = session_lifetime
self.timeout = timeout
self._client = client
def __enter__(self):
self.connect()
return self
def __exit__(self, type_, value, traceback):
self.disconnect()
def call(self, method, *args):
""" Calls the service method defined with the arguments provided """
try:
response = getattr(self.client.service, method)(*args)
except (URLError, SSLError) as e:
log.exception('Failed to connect to responsys service')
raise ConnectError("Request to service timed out")
except WebFault as web_fault:
fault_name = getattr(web_fault.fault, 'faultstring', None)
error = str(web_fault.fault.detail)
if fault_name == 'TableFault':
raise TableFault(error)
if fault_name == 'ListFault':
raise ListFault(error)
if fault_name == 'API_LIMIT_EXCEEDED':
raise ApiLimitError(error)
if fault_name == 'AccountFault':
raise AccountFault(error)
raise ServiceError(web_fault.fault, web_fault.document)
return response
def connect(self):
""" Connects to the Responsys soap service
Uses the credentials passed to the client init to login and setup the session id returned.
Returns True on successful connection, otherwise False.
"""
if self.session and self.session.is_expired:
# Close the session to avoid max concurrent session errors
self.disconnect(abandon_session=True)
if not self.session:
try:
login_result = self.login(self.username, self.password)
except AccountFault:
log.error('Login failed, invalid username or password')
raise
else:
self.session = login_result.session_id
self.connected = time()
return self.connected
def disconnect(self, abandon_session=False):
""" Disconnects from the Responsys soap service
Calls the service logout method and destroys the client's session information. Returns
True on success, False otherwise.
"""
self.connected = False
if (self.session and self.session.is_expired) or abandon_session:
try:
self.logout()
except:
log.warning(
'Logout call to responsys failed, session may have not been terminated',
exc_info=True
)
del self.session
return True
# Session Management Methods
def login(self, username, password):
""" Responsys.login soap call
Accepts username and password for authentication, returns a LoginResult object.
"""
return LoginResult(self.call('login', username, password))
def logout(self):
""" Responsys.logout soap call
Returns True on success, False otherwise.
"""
return self.call('logout')
def login_with_certificate(self, encrypted_server_challenge):
""" Responsys.loginWithCertificate soap call
Accepts encrypted_server_challenge for login. Returns LoginResult.
"""
return LoginResult(self.call('loginWithCertificate', encrypted_server_challenge))
def authenticate_server(self, username, client_challenge):
""" Responsys.authenticateServer soap call
Accepts username and client_challenge to authenciate. Returns ServerAuthResult.
"""
return ServerAuthResult(self.call('authenticateServer', username, client_challenge))
# List Management Methods
def merge_list_members(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembers call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a MergeResult
"""
list_ = list_.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
merge_rule = merge_rule.get_soap_object(self.client)
return MergeResult(self.call('mergeListMembers', list_, record_data, merge_rule))
def merge_list_members_RIID(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembersRIID call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a RecipientResult
"""
list_ = list_.get_soap_object(self.client)
result = self.call('mergeListMembersRIID', list_, record_data, merge_rule)
return RecipientResult(result.recipientResult)
def delete_list_members(self, list_, query_column, ids_to_delete):
""" Responsys.deleteListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
list_ = list_.get_soap_object(self.client)
result = self.call('deleteListMembers', list_, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
# Table Management Methods
def create_table(self, table, fields):
""" Responsys.createTable call
Accepts:
InteractObject table
list fields
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTable', table, fields)
def create_table_with_pk(self, table, fields, primary_keys):
""" Responsys.createTableWithPK call
Accepts:
InteractObject table
list fields
list primary_keys
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTableWithPK', table, fields, primary_keys)
def delete_table(self, table):
""" Responsys.deleteTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('deleteTable', table)
def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
""" Responsys.deleteProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns list of DeleteResults
"""
profile_extension = profile_extension.get_soap_object(self.client)
result = self.call(
'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_profile_extension_records(self, profile_extension, field_list, ids_to_retrieve,
query_column='RIID'):
""" Responsys.retrieveProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns RecordData
"""
profile_extension = profile_extension.get_soap_object(self.client)
return RecordData.from_soap_type(
self.call('retrieveProfileExtensionRecords',
profile_extension, query_column, field_list, ids_to_retrieve))
def truncate_table(self, table):
""" Responsys.truncateTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('truncateTable', table)
def delete_table_records(self, table, query_column, ids_to_delete):
""" Responsys.deleteTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
table = table.get_soap_object(self.client)
result = self.call('deleteTableRecords', table, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def merge_table_records(self, table, record_data, match_column_names):
""" Responsys.mergeTableRecords call
Accepts:
InteractObject table
RecordData record_data
list match_column_names
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecords', table, record_data, match_column_names))
def merge_table_records_with_pk(self, table, record_data, insert_on_no_match, update_on_match):
""" Responsys.mergeTableRecordsWithPK call
Accepts:
InteractObject table
RecordData record_data
string insert_on_no_match
string update_on_match
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecordsWithPK', table, record_data, insert_on_no_match, update_on_match))
def merge_into_profile_extension(self, profile_extension, record_data, match_column,
insert_on_no_match, update_on_match):
""" Responsys.mergeIntoProfileExtension call
Accepts:
InteractObject profile_extension
RecordData record_data
string match_column
string insert_on_no_match
string update_on_match
Returns a RecipientResult
"""
profile_extension = profile_extension.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
results = self.call(
'mergeIntoProfileExtension', profile_extension, record_data, match_column,
insert_on_no_match, update_on_match)
return [RecipientResult(result) for result in results]
def retrieve_table_records(self, table, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData
"""
table = table.get_soap_object(self.client)
return RecordData.from_soap_type(self.call(
'retrieveTableRecords', table, query_column, field_list, ids_to_retrieve))
# Campaign Management Methods
# TODO: implement
# GetLaunchStatus
# LaunchCampaign
# MergeTriggerEmail
# ScheduleCampaignLaunch
# TriggerCampaignMessage
def trigger_custom_event(self, custom_event, recipient_data=None):
custom_event = custom_event.get_soap_object(self.client)
recipient_data = [rdata.get_soap_object(self.client) for rdata in recipient_data]
results = self.call('triggerCustomEvent', custom_event, recipient_data)
return [TriggerResult(result) for result in results]
|
jslang/responsys | responsys/client.py | InteractClient.create_table | python | def create_table(self, table, fields):
table = table.get_soap_object(self.client)
return self.call('createTable', table, fields) | Responsys.createTable call
Accepts:
InteractObject table
list fields
Returns True on success | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L268-L278 | [
"def call(self, method, *args):\n \"\"\" Calls the service method defined with the arguments provided \"\"\"\n try:\n response = getattr(self.client.service, method)(*args)\n except (URLError, SSLError) as e:\n log.exception('Failed to connect to responsys service')\n raise ConnectError(\"Request to service timed out\")\n except WebFault as web_fault:\n fault_name = getattr(web_fault.fault, 'faultstring', None)\n error = str(web_fault.fault.detail)\n\n if fault_name == 'TableFault':\n raise TableFault(error)\n if fault_name == 'ListFault':\n raise ListFault(error)\n if fault_name == 'API_LIMIT_EXCEEDED':\n raise ApiLimitError(error)\n if fault_name == 'AccountFault':\n raise AccountFault(error)\n\n raise ServiceError(web_fault.fault, web_fault.document)\n return response\n"
] | class InteractClient(object):
""" Interact Client Class
Provides access to the methods defined by the Responsys Interact API. Example setup:
>>> client = InteractClient(username, password, pod)
>>> client.connect()
>>> client.merge_list_members(interact_object, records, merge_rules)
>>> client.disconnect()
Using the client class as a context manager will automatically connect using the credentials
provided, and disconnect upon context exit:
>>> with InteractClient(username, password, pod) as client:
... client.merge_list_members(interact_object, records, merge_rules)
Since responsys limits the number of active sessions per account, this can help ensure you
don't leave unused connections open.
"""
DEFAULT_SESSION_LIFETIME = 60 * 10
WSDLS = {
'2': 'https://ws2.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'5': 'https://ws5.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'rtm4': 'https://rtm4.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
'rtm4b': 'https://rtm4b.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
}
ENDPOINTS = {
'2': 'https://ws2.responsys.net/webservices/services/ResponsysWSService',
'5': 'https://ws5.responsys.net/webservices/services/ResponsysWSService',
'rtm4': 'http://rtm4.responsys.net:80/tmws/services/TriggeredMessageWS',
'rtm4b': 'http://rtm4b.responsys.net:80/tmws/services/TriggeredMessageWS',
}
@property
def wsdl(self):
return self.WSDLS[self.pod]
@property
def endpoint(self):
return self.ENDPOINTS[self.pod]
@property
def client(self):
if self._client is None:
self._client = Client(self.wsdl, location=self.endpoint, timeout=self.timeout)
return self._client
@property
def connected(self):
return getattr(self, '_connected', False)
@connected.setter
def connected(self, value):
self._connected = value
@property
def session(self):
return getattr(self, '_session', None)
@session.setter
def session(self, session_id):
self._session = type(
'Session', (tuple,), {
'is_expired': property(lambda s: s[1] + self.session_lifetime <= time()),
})([session_id, time()])
session_header = self.client.factory.create('SessionHeader')
session_header.sessionId = session_id
self.client.set_options(soapheaders=session_header)
@session.deleter
def session(self):
self._session = None
self.client.set_options(soapheaders=())
def __init__(self, username, password, pod, client=None, session_lifetime=600, timeout=5):
self.username = username
self.password = password
self.pod = pod
self.session_lifetime = session_lifetime
self.timeout = timeout
self._client = client
def __enter__(self):
self.connect()
return self
def __exit__(self, type_, value, traceback):
self.disconnect()
def call(self, method, *args):
""" Calls the service method defined with the arguments provided """
try:
response = getattr(self.client.service, method)(*args)
except (URLError, SSLError) as e:
log.exception('Failed to connect to responsys service')
raise ConnectError("Request to service timed out")
except WebFault as web_fault:
fault_name = getattr(web_fault.fault, 'faultstring', None)
error = str(web_fault.fault.detail)
if fault_name == 'TableFault':
raise TableFault(error)
if fault_name == 'ListFault':
raise ListFault(error)
if fault_name == 'API_LIMIT_EXCEEDED':
raise ApiLimitError(error)
if fault_name == 'AccountFault':
raise AccountFault(error)
raise ServiceError(web_fault.fault, web_fault.document)
return response
def connect(self):
""" Connects to the Responsys soap service
Uses the credentials passed to the client init to login and setup the session id returned.
Returns True on successful connection, otherwise False.
"""
if self.session and self.session.is_expired:
# Close the session to avoid max concurrent session errors
self.disconnect(abandon_session=True)
if not self.session:
try:
login_result = self.login(self.username, self.password)
except AccountFault:
log.error('Login failed, invalid username or password')
raise
else:
self.session = login_result.session_id
self.connected = time()
return self.connected
def disconnect(self, abandon_session=False):
""" Disconnects from the Responsys soap service
Calls the service logout method and destroys the client's session information. Returns
True on success, False otherwise.
"""
self.connected = False
if (self.session and self.session.is_expired) or abandon_session:
try:
self.logout()
except:
log.warning(
'Logout call to responsys failed, session may have not been terminated',
exc_info=True
)
del self.session
return True
# Session Management Methods
def login(self, username, password):
""" Responsys.login soap call
Accepts username and password for authentication, returns a LoginResult object.
"""
return LoginResult(self.call('login', username, password))
def logout(self):
""" Responsys.logout soap call
Returns True on success, False otherwise.
"""
return self.call('logout')
def login_with_certificate(self, encrypted_server_challenge):
""" Responsys.loginWithCertificate soap call
Accepts encrypted_server_challenge for login. Returns LoginResult.
"""
return LoginResult(self.call('loginWithCertificate', encrypted_server_challenge))
def authenticate_server(self, username, client_challenge):
""" Responsys.authenticateServer soap call
Accepts username and client_challenge to authenciate. Returns ServerAuthResult.
"""
return ServerAuthResult(self.call('authenticateServer', username, client_challenge))
# List Management Methods
def merge_list_members(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembers call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a MergeResult
"""
list_ = list_.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
merge_rule = merge_rule.get_soap_object(self.client)
return MergeResult(self.call('mergeListMembers', list_, record_data, merge_rule))
def merge_list_members_RIID(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembersRIID call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a RecipientResult
"""
list_ = list_.get_soap_object(self.client)
result = self.call('mergeListMembersRIID', list_, record_data, merge_rule)
return RecipientResult(result.recipientResult)
def delete_list_members(self, list_, query_column, ids_to_delete):
""" Responsys.deleteListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
list_ = list_.get_soap_object(self.client)
result = self.call('deleteListMembers', list_, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_list_members(self, list_, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData instance
"""
list_ = list_.get_soap_object(self.client)
result = self.call('retrieveListMembers', list_, query_column, field_list, ids_to_retrieve)
return RecordData.from_soap_type(result.recordData)
# Table Management Methods
def create_table_with_pk(self, table, fields, primary_keys):
""" Responsys.createTableWithPK call
Accepts:
InteractObject table
list fields
list primary_keys
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTableWithPK', table, fields, primary_keys)
def delete_table(self, table):
""" Responsys.deleteTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('deleteTable', table)
def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
""" Responsys.deleteProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns list of DeleteResults
"""
profile_extension = profile_extension.get_soap_object(self.client)
result = self.call(
'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_profile_extension_records(self, profile_extension, field_list, ids_to_retrieve,
query_column='RIID'):
""" Responsys.retrieveProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns RecordData
"""
profile_extension = profile_extension.get_soap_object(self.client)
return RecordData.from_soap_type(
self.call('retrieveProfileExtensionRecords',
profile_extension, query_column, field_list, ids_to_retrieve))
def truncate_table(self, table):
""" Responsys.truncateTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('truncateTable', table)
def delete_table_records(self, table, query_column, ids_to_delete):
""" Responsys.deleteTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
table = table.get_soap_object(self.client)
result = self.call('deleteTableRecords', table, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def merge_table_records(self, table, record_data, match_column_names):
""" Responsys.mergeTableRecords call
Accepts:
InteractObject table
RecordData record_data
list match_column_names
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecords', table, record_data, match_column_names))
def merge_table_records_with_pk(self, table, record_data, insert_on_no_match, update_on_match):
""" Responsys.mergeTableRecordsWithPK call
Accepts:
InteractObject table
RecordData record_data
string insert_on_no_match
string update_on_match
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecordsWithPK', table, record_data, insert_on_no_match, update_on_match))
def merge_into_profile_extension(self, profile_extension, record_data, match_column,
insert_on_no_match, update_on_match):
""" Responsys.mergeIntoProfileExtension call
Accepts:
InteractObject profile_extension
RecordData record_data
string match_column
string insert_on_no_match
string update_on_match
Returns a RecipientResult
"""
profile_extension = profile_extension.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
results = self.call(
'mergeIntoProfileExtension', profile_extension, record_data, match_column,
insert_on_no_match, update_on_match)
return [RecipientResult(result) for result in results]
def retrieve_table_records(self, table, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData
"""
table = table.get_soap_object(self.client)
return RecordData.from_soap_type(self.call(
'retrieveTableRecords', table, query_column, field_list, ids_to_retrieve))
# Campaign Management Methods
# TODO: implement
# GetLaunchStatus
# LaunchCampaign
# MergeTriggerEmail
# ScheduleCampaignLaunch
# TriggerCampaignMessage
def trigger_custom_event(self, custom_event, recipient_data=None):
custom_event = custom_event.get_soap_object(self.client)
recipient_data = [rdata.get_soap_object(self.client) for rdata in recipient_data]
results = self.call('triggerCustomEvent', custom_event, recipient_data)
return [TriggerResult(result) for result in results]
|
jslang/responsys | responsys/client.py | InteractClient.create_table_with_pk | python | def create_table_with_pk(self, table, fields, primary_keys):
table = table.get_soap_object(self.client)
return self.call('createTableWithPK', table, fields, primary_keys) | Responsys.createTableWithPK call
Accepts:
InteractObject table
list fields
list primary_keys
Returns True on success | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L280-L291 | [
"def call(self, method, *args):\n \"\"\" Calls the service method defined with the arguments provided \"\"\"\n try:\n response = getattr(self.client.service, method)(*args)\n except (URLError, SSLError) as e:\n log.exception('Failed to connect to responsys service')\n raise ConnectError(\"Request to service timed out\")\n except WebFault as web_fault:\n fault_name = getattr(web_fault.fault, 'faultstring', None)\n error = str(web_fault.fault.detail)\n\n if fault_name == 'TableFault':\n raise TableFault(error)\n if fault_name == 'ListFault':\n raise ListFault(error)\n if fault_name == 'API_LIMIT_EXCEEDED':\n raise ApiLimitError(error)\n if fault_name == 'AccountFault':\n raise AccountFault(error)\n\n raise ServiceError(web_fault.fault, web_fault.document)\n return response\n"
] | class InteractClient(object):
""" Interact Client Class
Provides access to the methods defined by the Responsys Interact API. Example setup:
>>> client = InteractClient(username, password, pod)
>>> client.connect()
>>> client.merge_list_members(interact_object, records, merge_rules)
>>> client.disconnect()
Using the client class as a context manager will automatically connect using the credentials
provided, and disconnect upon context exit:
>>> with InteractClient(username, password, pod) as client:
... client.merge_list_members(interact_object, records, merge_rules)
Since responsys limits the number of active sessions per account, this can help ensure you
don't leave unused connections open.
"""
DEFAULT_SESSION_LIFETIME = 60 * 10
WSDLS = {
'2': 'https://ws2.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'5': 'https://ws5.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'rtm4': 'https://rtm4.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
'rtm4b': 'https://rtm4b.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
}
ENDPOINTS = {
'2': 'https://ws2.responsys.net/webservices/services/ResponsysWSService',
'5': 'https://ws5.responsys.net/webservices/services/ResponsysWSService',
'rtm4': 'http://rtm4.responsys.net:80/tmws/services/TriggeredMessageWS',
'rtm4b': 'http://rtm4b.responsys.net:80/tmws/services/TriggeredMessageWS',
}
@property
def wsdl(self):
return self.WSDLS[self.pod]
@property
def endpoint(self):
return self.ENDPOINTS[self.pod]
@property
def client(self):
if self._client is None:
self._client = Client(self.wsdl, location=self.endpoint, timeout=self.timeout)
return self._client
@property
def connected(self):
return getattr(self, '_connected', False)
@connected.setter
def connected(self, value):
self._connected = value
@property
def session(self):
return getattr(self, '_session', None)
@session.setter
def session(self, session_id):
self._session = type(
'Session', (tuple,), {
'is_expired': property(lambda s: s[1] + self.session_lifetime <= time()),
})([session_id, time()])
session_header = self.client.factory.create('SessionHeader')
session_header.sessionId = session_id
self.client.set_options(soapheaders=session_header)
@session.deleter
def session(self):
self._session = None
self.client.set_options(soapheaders=())
def __init__(self, username, password, pod, client=None, session_lifetime=600, timeout=5):
self.username = username
self.password = password
self.pod = pod
self.session_lifetime = session_lifetime
self.timeout = timeout
self._client = client
def __enter__(self):
self.connect()
return self
def __exit__(self, type_, value, traceback):
self.disconnect()
def call(self, method, *args):
""" Calls the service method defined with the arguments provided """
try:
response = getattr(self.client.service, method)(*args)
except (URLError, SSLError) as e:
log.exception('Failed to connect to responsys service')
raise ConnectError("Request to service timed out")
except WebFault as web_fault:
fault_name = getattr(web_fault.fault, 'faultstring', None)
error = str(web_fault.fault.detail)
if fault_name == 'TableFault':
raise TableFault(error)
if fault_name == 'ListFault':
raise ListFault(error)
if fault_name == 'API_LIMIT_EXCEEDED':
raise ApiLimitError(error)
if fault_name == 'AccountFault':
raise AccountFault(error)
raise ServiceError(web_fault.fault, web_fault.document)
return response
def connect(self):
""" Connects to the Responsys soap service
Uses the credentials passed to the client init to login and setup the session id returned.
Returns True on successful connection, otherwise False.
"""
if self.session and self.session.is_expired:
# Close the session to avoid max concurrent session errors
self.disconnect(abandon_session=True)
if not self.session:
try:
login_result = self.login(self.username, self.password)
except AccountFault:
log.error('Login failed, invalid username or password')
raise
else:
self.session = login_result.session_id
self.connected = time()
return self.connected
def disconnect(self, abandon_session=False):
""" Disconnects from the Responsys soap service
Calls the service logout method and destroys the client's session information. Returns
True on success, False otherwise.
"""
self.connected = False
if (self.session and self.session.is_expired) or abandon_session:
try:
self.logout()
except:
log.warning(
'Logout call to responsys failed, session may have not been terminated',
exc_info=True
)
del self.session
return True
# Session Management Methods
def login(self, username, password):
""" Responsys.login soap call
Accepts username and password for authentication, returns a LoginResult object.
"""
return LoginResult(self.call('login', username, password))
def logout(self):
""" Responsys.logout soap call
Returns True on success, False otherwise.
"""
return self.call('logout')
def login_with_certificate(self, encrypted_server_challenge):
""" Responsys.loginWithCertificate soap call
Accepts encrypted_server_challenge for login. Returns LoginResult.
"""
return LoginResult(self.call('loginWithCertificate', encrypted_server_challenge))
def authenticate_server(self, username, client_challenge):
""" Responsys.authenticateServer soap call
Accepts username and client_challenge to authenciate. Returns ServerAuthResult.
"""
return ServerAuthResult(self.call('authenticateServer', username, client_challenge))
# List Management Methods
def merge_list_members(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembers call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a MergeResult
"""
list_ = list_.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
merge_rule = merge_rule.get_soap_object(self.client)
return MergeResult(self.call('mergeListMembers', list_, record_data, merge_rule))
def merge_list_members_RIID(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembersRIID call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a RecipientResult
"""
list_ = list_.get_soap_object(self.client)
result = self.call('mergeListMembersRIID', list_, record_data, merge_rule)
return RecipientResult(result.recipientResult)
def delete_list_members(self, list_, query_column, ids_to_delete):
""" Responsys.deleteListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
list_ = list_.get_soap_object(self.client)
result = self.call('deleteListMembers', list_, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_list_members(self, list_, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData instance
"""
list_ = list_.get_soap_object(self.client)
result = self.call('retrieveListMembers', list_, query_column, field_list, ids_to_retrieve)
return RecordData.from_soap_type(result.recordData)
# Table Management Methods
def create_table(self, table, fields):
""" Responsys.createTable call
Accepts:
InteractObject table
list fields
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTable', table, fields)
def delete_table(self, table):
""" Responsys.deleteTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('deleteTable', table)
def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
""" Responsys.deleteProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns list of DeleteResults
"""
profile_extension = profile_extension.get_soap_object(self.client)
result = self.call(
'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_profile_extension_records(self, profile_extension, field_list, ids_to_retrieve,
query_column='RIID'):
""" Responsys.retrieveProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns RecordData
"""
profile_extension = profile_extension.get_soap_object(self.client)
return RecordData.from_soap_type(
self.call('retrieveProfileExtensionRecords',
profile_extension, query_column, field_list, ids_to_retrieve))
def truncate_table(self, table):
""" Responsys.truncateTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('truncateTable', table)
def delete_table_records(self, table, query_column, ids_to_delete):
""" Responsys.deleteTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
table = table.get_soap_object(self.client)
result = self.call('deleteTableRecords', table, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def merge_table_records(self, table, record_data, match_column_names):
""" Responsys.mergeTableRecords call
Accepts:
InteractObject table
RecordData record_data
list match_column_names
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecords', table, record_data, match_column_names))
def merge_table_records_with_pk(self, table, record_data, insert_on_no_match, update_on_match):
""" Responsys.mergeTableRecordsWithPK call
Accepts:
InteractObject table
RecordData record_data
string insert_on_no_match
string update_on_match
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecordsWithPK', table, record_data, insert_on_no_match, update_on_match))
def merge_into_profile_extension(self, profile_extension, record_data, match_column,
insert_on_no_match, update_on_match):
""" Responsys.mergeIntoProfileExtension call
Accepts:
InteractObject profile_extension
RecordData record_data
string match_column
string insert_on_no_match
string update_on_match
Returns a RecipientResult
"""
profile_extension = profile_extension.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
results = self.call(
'mergeIntoProfileExtension', profile_extension, record_data, match_column,
insert_on_no_match, update_on_match)
return [RecipientResult(result) for result in results]
def retrieve_table_records(self, table, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData
"""
table = table.get_soap_object(self.client)
return RecordData.from_soap_type(self.call(
'retrieveTableRecords', table, query_column, field_list, ids_to_retrieve))
# Campaign Management Methods
# TODO: implement
# GetLaunchStatus
# LaunchCampaign
# MergeTriggerEmail
# ScheduleCampaignLaunch
# TriggerCampaignMessage
def trigger_custom_event(self, custom_event, recipient_data=None):
custom_event = custom_event.get_soap_object(self.client)
recipient_data = [rdata.get_soap_object(self.client) for rdata in recipient_data]
results = self.call('triggerCustomEvent', custom_event, recipient_data)
return [TriggerResult(result) for result in results]
|
jslang/responsys | responsys/client.py | InteractClient.delete_table | python | def delete_table(self, table):
table = table.get_soap_object(self.client)
return self.call('deleteTable', table) | Responsys.deleteTable call
Accepts:
InteractObject table
Returns True on success | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L293-L302 | [
"def call(self, method, *args):\n \"\"\" Calls the service method defined with the arguments provided \"\"\"\n try:\n response = getattr(self.client.service, method)(*args)\n except (URLError, SSLError) as e:\n log.exception('Failed to connect to responsys service')\n raise ConnectError(\"Request to service timed out\")\n except WebFault as web_fault:\n fault_name = getattr(web_fault.fault, 'faultstring', None)\n error = str(web_fault.fault.detail)\n\n if fault_name == 'TableFault':\n raise TableFault(error)\n if fault_name == 'ListFault':\n raise ListFault(error)\n if fault_name == 'API_LIMIT_EXCEEDED':\n raise ApiLimitError(error)\n if fault_name == 'AccountFault':\n raise AccountFault(error)\n\n raise ServiceError(web_fault.fault, web_fault.document)\n return response\n"
] | class InteractClient(object):
""" Interact Client Class
Provides access to the methods defined by the Responsys Interact API. Example setup:
>>> client = InteractClient(username, password, pod)
>>> client.connect()
>>> client.merge_list_members(interact_object, records, merge_rules)
>>> client.disconnect()
Using the client class as a context manager will automatically connect using the credentials
provided, and disconnect upon context exit:
>>> with InteractClient(username, password, pod) as client:
... client.merge_list_members(interact_object, records, merge_rules)
Since responsys limits the number of active sessions per account, this can help ensure you
don't leave unused connections open.
"""
DEFAULT_SESSION_LIFETIME = 60 * 10
WSDLS = {
'2': 'https://ws2.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'5': 'https://ws5.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'rtm4': 'https://rtm4.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
'rtm4b': 'https://rtm4b.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
}
ENDPOINTS = {
'2': 'https://ws2.responsys.net/webservices/services/ResponsysWSService',
'5': 'https://ws5.responsys.net/webservices/services/ResponsysWSService',
'rtm4': 'http://rtm4.responsys.net:80/tmws/services/TriggeredMessageWS',
'rtm4b': 'http://rtm4b.responsys.net:80/tmws/services/TriggeredMessageWS',
}
@property
def wsdl(self):
return self.WSDLS[self.pod]
@property
def endpoint(self):
return self.ENDPOINTS[self.pod]
@property
def client(self):
if self._client is None:
self._client = Client(self.wsdl, location=self.endpoint, timeout=self.timeout)
return self._client
@property
def connected(self):
return getattr(self, '_connected', False)
@connected.setter
def connected(self, value):
self._connected = value
@property
def session(self):
return getattr(self, '_session', None)
@session.setter
def session(self, session_id):
self._session = type(
'Session', (tuple,), {
'is_expired': property(lambda s: s[1] + self.session_lifetime <= time()),
})([session_id, time()])
session_header = self.client.factory.create('SessionHeader')
session_header.sessionId = session_id
self.client.set_options(soapheaders=session_header)
@session.deleter
def session(self):
self._session = None
self.client.set_options(soapheaders=())
def __init__(self, username, password, pod, client=None, session_lifetime=600, timeout=5):
self.username = username
self.password = password
self.pod = pod
self.session_lifetime = session_lifetime
self.timeout = timeout
self._client = client
def __enter__(self):
self.connect()
return self
def __exit__(self, type_, value, traceback):
self.disconnect()
def call(self, method, *args):
""" Calls the service method defined with the arguments provided """
try:
response = getattr(self.client.service, method)(*args)
except (URLError, SSLError) as e:
log.exception('Failed to connect to responsys service')
raise ConnectError("Request to service timed out")
except WebFault as web_fault:
fault_name = getattr(web_fault.fault, 'faultstring', None)
error = str(web_fault.fault.detail)
if fault_name == 'TableFault':
raise TableFault(error)
if fault_name == 'ListFault':
raise ListFault(error)
if fault_name == 'API_LIMIT_EXCEEDED':
raise ApiLimitError(error)
if fault_name == 'AccountFault':
raise AccountFault(error)
raise ServiceError(web_fault.fault, web_fault.document)
return response
def connect(self):
""" Connects to the Responsys soap service
Uses the credentials passed to the client init to login and setup the session id returned.
Returns True on successful connection, otherwise False.
"""
if self.session and self.session.is_expired:
# Close the session to avoid max concurrent session errors
self.disconnect(abandon_session=True)
if not self.session:
try:
login_result = self.login(self.username, self.password)
except AccountFault:
log.error('Login failed, invalid username or password')
raise
else:
self.session = login_result.session_id
self.connected = time()
return self.connected
def disconnect(self, abandon_session=False):
""" Disconnects from the Responsys soap service
Calls the service logout method and destroys the client's session information. Returns
True on success, False otherwise.
"""
self.connected = False
if (self.session and self.session.is_expired) or abandon_session:
try:
self.logout()
except:
log.warning(
'Logout call to responsys failed, session may have not been terminated',
exc_info=True
)
del self.session
return True
# Session Management Methods
def login(self, username, password):
""" Responsys.login soap call
Accepts username and password for authentication, returns a LoginResult object.
"""
return LoginResult(self.call('login', username, password))
def logout(self):
""" Responsys.logout soap call
Returns True on success, False otherwise.
"""
return self.call('logout')
def login_with_certificate(self, encrypted_server_challenge):
""" Responsys.loginWithCertificate soap call
Accepts encrypted_server_challenge for login. Returns LoginResult.
"""
return LoginResult(self.call('loginWithCertificate', encrypted_server_challenge))
def authenticate_server(self, username, client_challenge):
""" Responsys.authenticateServer soap call
Accepts username and client_challenge to authenciate. Returns ServerAuthResult.
"""
return ServerAuthResult(self.call('authenticateServer', username, client_challenge))
# List Management Methods
def merge_list_members(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembers call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a MergeResult
"""
list_ = list_.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
merge_rule = merge_rule.get_soap_object(self.client)
return MergeResult(self.call('mergeListMembers', list_, record_data, merge_rule))
def merge_list_members_RIID(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembersRIID call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a RecipientResult
"""
list_ = list_.get_soap_object(self.client)
result = self.call('mergeListMembersRIID', list_, record_data, merge_rule)
return RecipientResult(result.recipientResult)
def delete_list_members(self, list_, query_column, ids_to_delete):
""" Responsys.deleteListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
list_ = list_.get_soap_object(self.client)
result = self.call('deleteListMembers', list_, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_list_members(self, list_, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData instance
"""
list_ = list_.get_soap_object(self.client)
result = self.call('retrieveListMembers', list_, query_column, field_list, ids_to_retrieve)
return RecordData.from_soap_type(result.recordData)
# Table Management Methods
def create_table(self, table, fields):
""" Responsys.createTable call
Accepts:
InteractObject table
list fields
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTable', table, fields)
def create_table_with_pk(self, table, fields, primary_keys):
""" Responsys.createTableWithPK call
Accepts:
InteractObject table
list fields
list primary_keys
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTableWithPK', table, fields, primary_keys)
def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
""" Responsys.deleteProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns list of DeleteResults
"""
profile_extension = profile_extension.get_soap_object(self.client)
result = self.call(
'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_profile_extension_records(self, profile_extension, field_list, ids_to_retrieve,
query_column='RIID'):
""" Responsys.retrieveProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns RecordData
"""
profile_extension = profile_extension.get_soap_object(self.client)
return RecordData.from_soap_type(
self.call('retrieveProfileExtensionRecords',
profile_extension, query_column, field_list, ids_to_retrieve))
def truncate_table(self, table):
""" Responsys.truncateTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('truncateTable', table)
def delete_table_records(self, table, query_column, ids_to_delete):
""" Responsys.deleteTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
table = table.get_soap_object(self.client)
result = self.call('deleteTableRecords', table, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def merge_table_records(self, table, record_data, match_column_names):
""" Responsys.mergeTableRecords call
Accepts:
InteractObject table
RecordData record_data
list match_column_names
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecords', table, record_data, match_column_names))
def merge_table_records_with_pk(self, table, record_data, insert_on_no_match, update_on_match):
""" Responsys.mergeTableRecordsWithPK call
Accepts:
InteractObject table
RecordData record_data
string insert_on_no_match
string update_on_match
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecordsWithPK', table, record_data, insert_on_no_match, update_on_match))
def merge_into_profile_extension(self, profile_extension, record_data, match_column,
insert_on_no_match, update_on_match):
""" Responsys.mergeIntoProfileExtension call
Accepts:
InteractObject profile_extension
RecordData record_data
string match_column
string insert_on_no_match
string update_on_match
Returns a RecipientResult
"""
profile_extension = profile_extension.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
results = self.call(
'mergeIntoProfileExtension', profile_extension, record_data, match_column,
insert_on_no_match, update_on_match)
return [RecipientResult(result) for result in results]
def retrieve_table_records(self, table, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData
"""
table = table.get_soap_object(self.client)
return RecordData.from_soap_type(self.call(
'retrieveTableRecords', table, query_column, field_list, ids_to_retrieve))
# Campaign Management Methods
# TODO: implement
# GetLaunchStatus
# LaunchCampaign
# MergeTriggerEmail
# ScheduleCampaignLaunch
# TriggerCampaignMessage
def trigger_custom_event(self, custom_event, recipient_data=None):
custom_event = custom_event.get_soap_object(self.client)
recipient_data = [rdata.get_soap_object(self.client) for rdata in recipient_data]
results = self.call('triggerCustomEvent', custom_event, recipient_data)
return [TriggerResult(result) for result in results]
|
jslang/responsys | responsys/client.py | InteractClient.delete_profile_extension_members | python | def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
profile_extension = profile_extension.get_soap_object(self.client)
result = self.call(
'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)] | Responsys.deleteProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns list of DeleteResults | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L304-L321 | [
"def call(self, method, *args):\n \"\"\" Calls the service method defined with the arguments provided \"\"\"\n try:\n response = getattr(self.client.service, method)(*args)\n except (URLError, SSLError) as e:\n log.exception('Failed to connect to responsys service')\n raise ConnectError(\"Request to service timed out\")\n except WebFault as web_fault:\n fault_name = getattr(web_fault.fault, 'faultstring', None)\n error = str(web_fault.fault.detail)\n\n if fault_name == 'TableFault':\n raise TableFault(error)\n if fault_name == 'ListFault':\n raise ListFault(error)\n if fault_name == 'API_LIMIT_EXCEEDED':\n raise ApiLimitError(error)\n if fault_name == 'AccountFault':\n raise AccountFault(error)\n\n raise ServiceError(web_fault.fault, web_fault.document)\n return response\n"
] | class InteractClient(object):
""" Interact Client Class
Provides access to the methods defined by the Responsys Interact API. Example setup:
>>> client = InteractClient(username, password, pod)
>>> client.connect()
>>> client.merge_list_members(interact_object, records, merge_rules)
>>> client.disconnect()
Using the client class as a context manager will automatically connect using the credentials
provided, and disconnect upon context exit:
>>> with InteractClient(username, password, pod) as client:
... client.merge_list_members(interact_object, records, merge_rules)
Since responsys limits the number of active sessions per account, this can help ensure you
don't leave unused connections open.
"""
DEFAULT_SESSION_LIFETIME = 60 * 10
WSDLS = {
'2': 'https://ws2.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'5': 'https://ws5.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'rtm4': 'https://rtm4.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
'rtm4b': 'https://rtm4b.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
}
ENDPOINTS = {
'2': 'https://ws2.responsys.net/webservices/services/ResponsysWSService',
'5': 'https://ws5.responsys.net/webservices/services/ResponsysWSService',
'rtm4': 'http://rtm4.responsys.net:80/tmws/services/TriggeredMessageWS',
'rtm4b': 'http://rtm4b.responsys.net:80/tmws/services/TriggeredMessageWS',
}
@property
def wsdl(self):
return self.WSDLS[self.pod]
@property
def endpoint(self):
return self.ENDPOINTS[self.pod]
@property
def client(self):
if self._client is None:
self._client = Client(self.wsdl, location=self.endpoint, timeout=self.timeout)
return self._client
@property
def connected(self):
return getattr(self, '_connected', False)
@connected.setter
def connected(self, value):
self._connected = value
@property
def session(self):
return getattr(self, '_session', None)
@session.setter
def session(self, session_id):
self._session = type(
'Session', (tuple,), {
'is_expired': property(lambda s: s[1] + self.session_lifetime <= time()),
})([session_id, time()])
session_header = self.client.factory.create('SessionHeader')
session_header.sessionId = session_id
self.client.set_options(soapheaders=session_header)
@session.deleter
def session(self):
self._session = None
self.client.set_options(soapheaders=())
def __init__(self, username, password, pod, client=None, session_lifetime=600, timeout=5):
self.username = username
self.password = password
self.pod = pod
self.session_lifetime = session_lifetime
self.timeout = timeout
self._client = client
def __enter__(self):
self.connect()
return self
def __exit__(self, type_, value, traceback):
self.disconnect()
def call(self, method, *args):
""" Calls the service method defined with the arguments provided """
try:
response = getattr(self.client.service, method)(*args)
except (URLError, SSLError) as e:
log.exception('Failed to connect to responsys service')
raise ConnectError("Request to service timed out")
except WebFault as web_fault:
fault_name = getattr(web_fault.fault, 'faultstring', None)
error = str(web_fault.fault.detail)
if fault_name == 'TableFault':
raise TableFault(error)
if fault_name == 'ListFault':
raise ListFault(error)
if fault_name == 'API_LIMIT_EXCEEDED':
raise ApiLimitError(error)
if fault_name == 'AccountFault':
raise AccountFault(error)
raise ServiceError(web_fault.fault, web_fault.document)
return response
def connect(self):
""" Connects to the Responsys soap service
Uses the credentials passed to the client init to login and setup the session id returned.
Returns True on successful connection, otherwise False.
"""
if self.session and self.session.is_expired:
# Close the session to avoid max concurrent session errors
self.disconnect(abandon_session=True)
if not self.session:
try:
login_result = self.login(self.username, self.password)
except AccountFault:
log.error('Login failed, invalid username or password')
raise
else:
self.session = login_result.session_id
self.connected = time()
return self.connected
def disconnect(self, abandon_session=False):
""" Disconnects from the Responsys soap service
Calls the service logout method and destroys the client's session information. Returns
True on success, False otherwise.
"""
self.connected = False
if (self.session and self.session.is_expired) or abandon_session:
try:
self.logout()
except:
log.warning(
'Logout call to responsys failed, session may have not been terminated',
exc_info=True
)
del self.session
return True
# Session Management Methods
def login(self, username, password):
""" Responsys.login soap call
Accepts username and password for authentication, returns a LoginResult object.
"""
return LoginResult(self.call('login', username, password))
def logout(self):
""" Responsys.logout soap call
Returns True on success, False otherwise.
"""
return self.call('logout')
def login_with_certificate(self, encrypted_server_challenge):
""" Responsys.loginWithCertificate soap call
Accepts encrypted_server_challenge for login. Returns LoginResult.
"""
return LoginResult(self.call('loginWithCertificate', encrypted_server_challenge))
def authenticate_server(self, username, client_challenge):
""" Responsys.authenticateServer soap call
Accepts username and client_challenge to authenciate. Returns ServerAuthResult.
"""
return ServerAuthResult(self.call('authenticateServer', username, client_challenge))
# List Management Methods
def merge_list_members(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembers call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a MergeResult
"""
list_ = list_.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
merge_rule = merge_rule.get_soap_object(self.client)
return MergeResult(self.call('mergeListMembers', list_, record_data, merge_rule))
def merge_list_members_RIID(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembersRIID call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a RecipientResult
"""
list_ = list_.get_soap_object(self.client)
result = self.call('mergeListMembersRIID', list_, record_data, merge_rule)
return RecipientResult(result.recipientResult)
def delete_list_members(self, list_, query_column, ids_to_delete):
""" Responsys.deleteListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
list_ = list_.get_soap_object(self.client)
result = self.call('deleteListMembers', list_, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_list_members(self, list_, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData instance
"""
list_ = list_.get_soap_object(self.client)
result = self.call('retrieveListMembers', list_, query_column, field_list, ids_to_retrieve)
return RecordData.from_soap_type(result.recordData)
# Table Management Methods
def create_table(self, table, fields):
""" Responsys.createTable call
Accepts:
InteractObject table
list fields
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTable', table, fields)
def create_table_with_pk(self, table, fields, primary_keys):
""" Responsys.createTableWithPK call
Accepts:
InteractObject table
list fields
list primary_keys
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTableWithPK', table, fields, primary_keys)
def delete_table(self, table):
""" Responsys.deleteTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('deleteTable', table)
def retrieve_profile_extension_records(self, profile_extension, field_list, ids_to_retrieve,
query_column='RIID'):
""" Responsys.retrieveProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns RecordData
"""
profile_extension = profile_extension.get_soap_object(self.client)
return RecordData.from_soap_type(
self.call('retrieveProfileExtensionRecords',
profile_extension, query_column, field_list, ids_to_retrieve))
def truncate_table(self, table):
""" Responsys.truncateTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('truncateTable', table)
def delete_table_records(self, table, query_column, ids_to_delete):
""" Responsys.deleteTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
table = table.get_soap_object(self.client)
result = self.call('deleteTableRecords', table, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def merge_table_records(self, table, record_data, match_column_names):
""" Responsys.mergeTableRecords call
Accepts:
InteractObject table
RecordData record_data
list match_column_names
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecords', table, record_data, match_column_names))
def merge_table_records_with_pk(self, table, record_data, insert_on_no_match, update_on_match):
""" Responsys.mergeTableRecordsWithPK call
Accepts:
InteractObject table
RecordData record_data
string insert_on_no_match
string update_on_match
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecordsWithPK', table, record_data, insert_on_no_match, update_on_match))
def merge_into_profile_extension(self, profile_extension, record_data, match_column,
insert_on_no_match, update_on_match):
""" Responsys.mergeIntoProfileExtension call
Accepts:
InteractObject profile_extension
RecordData record_data
string match_column
string insert_on_no_match
string update_on_match
Returns a RecipientResult
"""
profile_extension = profile_extension.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
results = self.call(
'mergeIntoProfileExtension', profile_extension, record_data, match_column,
insert_on_no_match, update_on_match)
return [RecipientResult(result) for result in results]
def retrieve_table_records(self, table, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData
"""
table = table.get_soap_object(self.client)
return RecordData.from_soap_type(self.call(
'retrieveTableRecords', table, query_column, field_list, ids_to_retrieve))
# Campaign Management Methods
# TODO: implement
# GetLaunchStatus
# LaunchCampaign
# MergeTriggerEmail
# ScheduleCampaignLaunch
# TriggerCampaignMessage
def trigger_custom_event(self, custom_event, recipient_data=None):
custom_event = custom_event.get_soap_object(self.client)
recipient_data = [rdata.get_soap_object(self.client) for rdata in recipient_data]
results = self.call('triggerCustomEvent', custom_event, recipient_data)
return [TriggerResult(result) for result in results]
|
jslang/responsys | responsys/client.py | InteractClient.retrieve_profile_extension_records | python | def retrieve_profile_extension_records(self, profile_extension, field_list, ids_to_retrieve,
query_column='RIID'):
profile_extension = profile_extension.get_soap_object(self.client)
return RecordData.from_soap_type(
self.call('retrieveProfileExtensionRecords',
profile_extension, query_column, field_list, ids_to_retrieve)) | Responsys.retrieveProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns RecordData | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L323-L339 | [
"def call(self, method, *args):\n \"\"\" Calls the service method defined with the arguments provided \"\"\"\n try:\n response = getattr(self.client.service, method)(*args)\n except (URLError, SSLError) as e:\n log.exception('Failed to connect to responsys service')\n raise ConnectError(\"Request to service timed out\")\n except WebFault as web_fault:\n fault_name = getattr(web_fault.fault, 'faultstring', None)\n error = str(web_fault.fault.detail)\n\n if fault_name == 'TableFault':\n raise TableFault(error)\n if fault_name == 'ListFault':\n raise ListFault(error)\n if fault_name == 'API_LIMIT_EXCEEDED':\n raise ApiLimitError(error)\n if fault_name == 'AccountFault':\n raise AccountFault(error)\n\n raise ServiceError(web_fault.fault, web_fault.document)\n return response\n",
"def from_soap_type(cls, record_data):\n record_data = [\n dict(zip(record_data.fieldNames, r.fieldValues)) for r in record_data.records]\n return cls(record_data)\n"
] | class InteractClient(object):
""" Interact Client Class
Provides access to the methods defined by the Responsys Interact API. Example setup:
>>> client = InteractClient(username, password, pod)
>>> client.connect()
>>> client.merge_list_members(interact_object, records, merge_rules)
>>> client.disconnect()
Using the client class as a context manager will automatically connect using the credentials
provided, and disconnect upon context exit:
>>> with InteractClient(username, password, pod) as client:
... client.merge_list_members(interact_object, records, merge_rules)
Since responsys limits the number of active sessions per account, this can help ensure you
don't leave unused connections open.
"""
DEFAULT_SESSION_LIFETIME = 60 * 10
WSDLS = {
'2': 'https://ws2.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'5': 'https://ws5.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'rtm4': 'https://rtm4.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
'rtm4b': 'https://rtm4b.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
}
ENDPOINTS = {
'2': 'https://ws2.responsys.net/webservices/services/ResponsysWSService',
'5': 'https://ws5.responsys.net/webservices/services/ResponsysWSService',
'rtm4': 'http://rtm4.responsys.net:80/tmws/services/TriggeredMessageWS',
'rtm4b': 'http://rtm4b.responsys.net:80/tmws/services/TriggeredMessageWS',
}
@property
def wsdl(self):
return self.WSDLS[self.pod]
@property
def endpoint(self):
return self.ENDPOINTS[self.pod]
@property
def client(self):
if self._client is None:
self._client = Client(self.wsdl, location=self.endpoint, timeout=self.timeout)
return self._client
@property
def connected(self):
return getattr(self, '_connected', False)
@connected.setter
def connected(self, value):
self._connected = value
@property
def session(self):
return getattr(self, '_session', None)
@session.setter
def session(self, session_id):
self._session = type(
'Session', (tuple,), {
'is_expired': property(lambda s: s[1] + self.session_lifetime <= time()),
})([session_id, time()])
session_header = self.client.factory.create('SessionHeader')
session_header.sessionId = session_id
self.client.set_options(soapheaders=session_header)
@session.deleter
def session(self):
self._session = None
self.client.set_options(soapheaders=())
def __init__(self, username, password, pod, client=None, session_lifetime=600, timeout=5):
self.username = username
self.password = password
self.pod = pod
self.session_lifetime = session_lifetime
self.timeout = timeout
self._client = client
def __enter__(self):
self.connect()
return self
def __exit__(self, type_, value, traceback):
self.disconnect()
def call(self, method, *args):
""" Calls the service method defined with the arguments provided """
try:
response = getattr(self.client.service, method)(*args)
except (URLError, SSLError) as e:
log.exception('Failed to connect to responsys service')
raise ConnectError("Request to service timed out")
except WebFault as web_fault:
fault_name = getattr(web_fault.fault, 'faultstring', None)
error = str(web_fault.fault.detail)
if fault_name == 'TableFault':
raise TableFault(error)
if fault_name == 'ListFault':
raise ListFault(error)
if fault_name == 'API_LIMIT_EXCEEDED':
raise ApiLimitError(error)
if fault_name == 'AccountFault':
raise AccountFault(error)
raise ServiceError(web_fault.fault, web_fault.document)
return response
def connect(self):
""" Connects to the Responsys soap service
Uses the credentials passed to the client init to login and setup the session id returned.
Returns True on successful connection, otherwise False.
"""
if self.session and self.session.is_expired:
# Close the session to avoid max concurrent session errors
self.disconnect(abandon_session=True)
if not self.session:
try:
login_result = self.login(self.username, self.password)
except AccountFault:
log.error('Login failed, invalid username or password')
raise
else:
self.session = login_result.session_id
self.connected = time()
return self.connected
def disconnect(self, abandon_session=False):
""" Disconnects from the Responsys soap service
Calls the service logout method and destroys the client's session information. Returns
True on success, False otherwise.
"""
self.connected = False
if (self.session and self.session.is_expired) or abandon_session:
try:
self.logout()
except:
log.warning(
'Logout call to responsys failed, session may have not been terminated',
exc_info=True
)
del self.session
return True
# Session Management Methods
def login(self, username, password):
""" Responsys.login soap call
Accepts username and password for authentication, returns a LoginResult object.
"""
return LoginResult(self.call('login', username, password))
def logout(self):
""" Responsys.logout soap call
Returns True on success, False otherwise.
"""
return self.call('logout')
def login_with_certificate(self, encrypted_server_challenge):
""" Responsys.loginWithCertificate soap call
Accepts encrypted_server_challenge for login. Returns LoginResult.
"""
return LoginResult(self.call('loginWithCertificate', encrypted_server_challenge))
def authenticate_server(self, username, client_challenge):
""" Responsys.authenticateServer soap call
Accepts username and client_challenge to authenciate. Returns ServerAuthResult.
"""
return ServerAuthResult(self.call('authenticateServer', username, client_challenge))
# List Management Methods
def merge_list_members(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembers call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a MergeResult
"""
list_ = list_.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
merge_rule = merge_rule.get_soap_object(self.client)
return MergeResult(self.call('mergeListMembers', list_, record_data, merge_rule))
def merge_list_members_RIID(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembersRIID call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a RecipientResult
"""
list_ = list_.get_soap_object(self.client)
result = self.call('mergeListMembersRIID', list_, record_data, merge_rule)
return RecipientResult(result.recipientResult)
def delete_list_members(self, list_, query_column, ids_to_delete):
""" Responsys.deleteListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
list_ = list_.get_soap_object(self.client)
result = self.call('deleteListMembers', list_, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_list_members(self, list_, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData instance
"""
list_ = list_.get_soap_object(self.client)
result = self.call('retrieveListMembers', list_, query_column, field_list, ids_to_retrieve)
return RecordData.from_soap_type(result.recordData)
# Table Management Methods
def create_table(self, table, fields):
""" Responsys.createTable call
Accepts:
InteractObject table
list fields
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTable', table, fields)
def create_table_with_pk(self, table, fields, primary_keys):
""" Responsys.createTableWithPK call
Accepts:
InteractObject table
list fields
list primary_keys
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTableWithPK', table, fields, primary_keys)
def delete_table(self, table):
""" Responsys.deleteTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('deleteTable', table)
def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
""" Responsys.deleteProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns list of DeleteResults
"""
profile_extension = profile_extension.get_soap_object(self.client)
result = self.call(
'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def truncate_table(self, table):
""" Responsys.truncateTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('truncateTable', table)
def delete_table_records(self, table, query_column, ids_to_delete):
""" Responsys.deleteTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
table = table.get_soap_object(self.client)
result = self.call('deleteTableRecords', table, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def merge_table_records(self, table, record_data, match_column_names):
""" Responsys.mergeTableRecords call
Accepts:
InteractObject table
RecordData record_data
list match_column_names
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecords', table, record_data, match_column_names))
def merge_table_records_with_pk(self, table, record_data, insert_on_no_match, update_on_match):
""" Responsys.mergeTableRecordsWithPK call
Accepts:
InteractObject table
RecordData record_data
string insert_on_no_match
string update_on_match
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecordsWithPK', table, record_data, insert_on_no_match, update_on_match))
def merge_into_profile_extension(self, profile_extension, record_data, match_column,
insert_on_no_match, update_on_match):
""" Responsys.mergeIntoProfileExtension call
Accepts:
InteractObject profile_extension
RecordData record_data
string match_column
string insert_on_no_match
string update_on_match
Returns a RecipientResult
"""
profile_extension = profile_extension.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
results = self.call(
'mergeIntoProfileExtension', profile_extension, record_data, match_column,
insert_on_no_match, update_on_match)
return [RecipientResult(result) for result in results]
def retrieve_table_records(self, table, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData
"""
table = table.get_soap_object(self.client)
return RecordData.from_soap_type(self.call(
'retrieveTableRecords', table, query_column, field_list, ids_to_retrieve))
# Campaign Management Methods
# TODO: implement
# GetLaunchStatus
# LaunchCampaign
# MergeTriggerEmail
# ScheduleCampaignLaunch
# TriggerCampaignMessage
def trigger_custom_event(self, custom_event, recipient_data=None):
custom_event = custom_event.get_soap_object(self.client)
recipient_data = [rdata.get_soap_object(self.client) for rdata in recipient_data]
results = self.call('triggerCustomEvent', custom_event, recipient_data)
return [TriggerResult(result) for result in results]
|
jslang/responsys | responsys/client.py | InteractClient.truncate_table | python | def truncate_table(self, table):
table = table.get_soap_object(self.client)
return self.call('truncateTable', table) | Responsys.truncateTable call
Accepts:
InteractObject table
Returns True on success | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L341-L350 | [
"def call(self, method, *args):\n \"\"\" Calls the service method defined with the arguments provided \"\"\"\n try:\n response = getattr(self.client.service, method)(*args)\n except (URLError, SSLError) as e:\n log.exception('Failed to connect to responsys service')\n raise ConnectError(\"Request to service timed out\")\n except WebFault as web_fault:\n fault_name = getattr(web_fault.fault, 'faultstring', None)\n error = str(web_fault.fault.detail)\n\n if fault_name == 'TableFault':\n raise TableFault(error)\n if fault_name == 'ListFault':\n raise ListFault(error)\n if fault_name == 'API_LIMIT_EXCEEDED':\n raise ApiLimitError(error)\n if fault_name == 'AccountFault':\n raise AccountFault(error)\n\n raise ServiceError(web_fault.fault, web_fault.document)\n return response\n"
] | class InteractClient(object):
""" Interact Client Class
Provides access to the methods defined by the Responsys Interact API. Example setup:
>>> client = InteractClient(username, password, pod)
>>> client.connect()
>>> client.merge_list_members(interact_object, records, merge_rules)
>>> client.disconnect()
Using the client class as a context manager will automatically connect using the credentials
provided, and disconnect upon context exit:
>>> with InteractClient(username, password, pod) as client:
... client.merge_list_members(interact_object, records, merge_rules)
Since responsys limits the number of active sessions per account, this can help ensure you
don't leave unused connections open.
"""
DEFAULT_SESSION_LIFETIME = 60 * 10
WSDLS = {
'2': 'https://ws2.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'5': 'https://ws5.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'rtm4': 'https://rtm4.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
'rtm4b': 'https://rtm4b.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
}
ENDPOINTS = {
'2': 'https://ws2.responsys.net/webservices/services/ResponsysWSService',
'5': 'https://ws5.responsys.net/webservices/services/ResponsysWSService',
'rtm4': 'http://rtm4.responsys.net:80/tmws/services/TriggeredMessageWS',
'rtm4b': 'http://rtm4b.responsys.net:80/tmws/services/TriggeredMessageWS',
}
@property
def wsdl(self):
return self.WSDLS[self.pod]
@property
def endpoint(self):
return self.ENDPOINTS[self.pod]
@property
def client(self):
if self._client is None:
self._client = Client(self.wsdl, location=self.endpoint, timeout=self.timeout)
return self._client
@property
def connected(self):
return getattr(self, '_connected', False)
@connected.setter
def connected(self, value):
self._connected = value
@property
def session(self):
return getattr(self, '_session', None)
@session.setter
def session(self, session_id):
self._session = type(
'Session', (tuple,), {
'is_expired': property(lambda s: s[1] + self.session_lifetime <= time()),
})([session_id, time()])
session_header = self.client.factory.create('SessionHeader')
session_header.sessionId = session_id
self.client.set_options(soapheaders=session_header)
@session.deleter
def session(self):
self._session = None
self.client.set_options(soapheaders=())
def __init__(self, username, password, pod, client=None, session_lifetime=600, timeout=5):
self.username = username
self.password = password
self.pod = pod
self.session_lifetime = session_lifetime
self.timeout = timeout
self._client = client
def __enter__(self):
self.connect()
return self
def __exit__(self, type_, value, traceback):
self.disconnect()
def call(self, method, *args):
""" Calls the service method defined with the arguments provided """
try:
response = getattr(self.client.service, method)(*args)
except (URLError, SSLError) as e:
log.exception('Failed to connect to responsys service')
raise ConnectError("Request to service timed out")
except WebFault as web_fault:
fault_name = getattr(web_fault.fault, 'faultstring', None)
error = str(web_fault.fault.detail)
if fault_name == 'TableFault':
raise TableFault(error)
if fault_name == 'ListFault':
raise ListFault(error)
if fault_name == 'API_LIMIT_EXCEEDED':
raise ApiLimitError(error)
if fault_name == 'AccountFault':
raise AccountFault(error)
raise ServiceError(web_fault.fault, web_fault.document)
return response
def connect(self):
""" Connects to the Responsys soap service
Uses the credentials passed to the client init to login and setup the session id returned.
Returns True on successful connection, otherwise False.
"""
if self.session and self.session.is_expired:
# Close the session to avoid max concurrent session errors
self.disconnect(abandon_session=True)
if not self.session:
try:
login_result = self.login(self.username, self.password)
except AccountFault:
log.error('Login failed, invalid username or password')
raise
else:
self.session = login_result.session_id
self.connected = time()
return self.connected
def disconnect(self, abandon_session=False):
""" Disconnects from the Responsys soap service
Calls the service logout method and destroys the client's session information. Returns
True on success, False otherwise.
"""
self.connected = False
if (self.session and self.session.is_expired) or abandon_session:
try:
self.logout()
except:
log.warning(
'Logout call to responsys failed, session may have not been terminated',
exc_info=True
)
del self.session
return True
# Session Management Methods
def login(self, username, password):
""" Responsys.login soap call
Accepts username and password for authentication, returns a LoginResult object.
"""
return LoginResult(self.call('login', username, password))
def logout(self):
""" Responsys.logout soap call
Returns True on success, False otherwise.
"""
return self.call('logout')
def login_with_certificate(self, encrypted_server_challenge):
""" Responsys.loginWithCertificate soap call
Accepts encrypted_server_challenge for login. Returns LoginResult.
"""
return LoginResult(self.call('loginWithCertificate', encrypted_server_challenge))
def authenticate_server(self, username, client_challenge):
""" Responsys.authenticateServer soap call
Accepts username and client_challenge to authenciate. Returns ServerAuthResult.
"""
return ServerAuthResult(self.call('authenticateServer', username, client_challenge))
# List Management Methods
def merge_list_members(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembers call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a MergeResult
"""
list_ = list_.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
merge_rule = merge_rule.get_soap_object(self.client)
return MergeResult(self.call('mergeListMembers', list_, record_data, merge_rule))
def merge_list_members_RIID(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembersRIID call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a RecipientResult
"""
list_ = list_.get_soap_object(self.client)
result = self.call('mergeListMembersRIID', list_, record_data, merge_rule)
return RecipientResult(result.recipientResult)
def delete_list_members(self, list_, query_column, ids_to_delete):
""" Responsys.deleteListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
list_ = list_.get_soap_object(self.client)
result = self.call('deleteListMembers', list_, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_list_members(self, list_, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData instance
"""
list_ = list_.get_soap_object(self.client)
result = self.call('retrieveListMembers', list_, query_column, field_list, ids_to_retrieve)
return RecordData.from_soap_type(result.recordData)
# Table Management Methods
def create_table(self, table, fields):
""" Responsys.createTable call
Accepts:
InteractObject table
list fields
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTable', table, fields)
def create_table_with_pk(self, table, fields, primary_keys):
""" Responsys.createTableWithPK call
Accepts:
InteractObject table
list fields
list primary_keys
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTableWithPK', table, fields, primary_keys)
def delete_table(self, table):
""" Responsys.deleteTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('deleteTable', table)
def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
""" Responsys.deleteProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns list of DeleteResults
"""
profile_extension = profile_extension.get_soap_object(self.client)
result = self.call(
'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_profile_extension_records(self, profile_extension, field_list, ids_to_retrieve,
query_column='RIID'):
""" Responsys.retrieveProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns RecordData
"""
profile_extension = profile_extension.get_soap_object(self.client)
return RecordData.from_soap_type(
self.call('retrieveProfileExtensionRecords',
profile_extension, query_column, field_list, ids_to_retrieve))
def delete_table_records(self, table, query_column, ids_to_delete):
""" Responsys.deleteTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
table = table.get_soap_object(self.client)
result = self.call('deleteTableRecords', table, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def merge_table_records(self, table, record_data, match_column_names):
""" Responsys.mergeTableRecords call
Accepts:
InteractObject table
RecordData record_data
list match_column_names
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecords', table, record_data, match_column_names))
def merge_table_records_with_pk(self, table, record_data, insert_on_no_match, update_on_match):
""" Responsys.mergeTableRecordsWithPK call
Accepts:
InteractObject table
RecordData record_data
string insert_on_no_match
string update_on_match
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecordsWithPK', table, record_data, insert_on_no_match, update_on_match))
def merge_into_profile_extension(self, profile_extension, record_data, match_column,
insert_on_no_match, update_on_match):
""" Responsys.mergeIntoProfileExtension call
Accepts:
InteractObject profile_extension
RecordData record_data
string match_column
string insert_on_no_match
string update_on_match
Returns a RecipientResult
"""
profile_extension = profile_extension.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
results = self.call(
'mergeIntoProfileExtension', profile_extension, record_data, match_column,
insert_on_no_match, update_on_match)
return [RecipientResult(result) for result in results]
def retrieve_table_records(self, table, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData
"""
table = table.get_soap_object(self.client)
return RecordData.from_soap_type(self.call(
'retrieveTableRecords', table, query_column, field_list, ids_to_retrieve))
# Campaign Management Methods
# TODO: implement
# GetLaunchStatus
# LaunchCampaign
# MergeTriggerEmail
# ScheduleCampaignLaunch
# TriggerCampaignMessage
def trigger_custom_event(self, custom_event, recipient_data=None):
custom_event = custom_event.get_soap_object(self.client)
recipient_data = [rdata.get_soap_object(self.client) for rdata in recipient_data]
results = self.call('triggerCustomEvent', custom_event, recipient_data)
return [TriggerResult(result) for result in results]
|
jslang/responsys | responsys/client.py | InteractClient.delete_table_records | python | def delete_table_records(self, table, query_column, ids_to_delete):
table = table.get_soap_object(self.client)
result = self.call('deleteTableRecords', table, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)] | Responsys.deleteTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L352-L367 | [
"def call(self, method, *args):\n \"\"\" Calls the service method defined with the arguments provided \"\"\"\n try:\n response = getattr(self.client.service, method)(*args)\n except (URLError, SSLError) as e:\n log.exception('Failed to connect to responsys service')\n raise ConnectError(\"Request to service timed out\")\n except WebFault as web_fault:\n fault_name = getattr(web_fault.fault, 'faultstring', None)\n error = str(web_fault.fault.detail)\n\n if fault_name == 'TableFault':\n raise TableFault(error)\n if fault_name == 'ListFault':\n raise ListFault(error)\n if fault_name == 'API_LIMIT_EXCEEDED':\n raise ApiLimitError(error)\n if fault_name == 'AccountFault':\n raise AccountFault(error)\n\n raise ServiceError(web_fault.fault, web_fault.document)\n return response\n"
] | class InteractClient(object):
""" Interact Client Class
Provides access to the methods defined by the Responsys Interact API. Example setup:
>>> client = InteractClient(username, password, pod)
>>> client.connect()
>>> client.merge_list_members(interact_object, records, merge_rules)
>>> client.disconnect()
Using the client class as a context manager will automatically connect using the credentials
provided, and disconnect upon context exit:
>>> with InteractClient(username, password, pod) as client:
... client.merge_list_members(interact_object, records, merge_rules)
Since responsys limits the number of active sessions per account, this can help ensure you
don't leave unused connections open.
"""
DEFAULT_SESSION_LIFETIME = 60 * 10
WSDLS = {
'2': 'https://ws2.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'5': 'https://ws5.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'rtm4': 'https://rtm4.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
'rtm4b': 'https://rtm4b.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
}
ENDPOINTS = {
'2': 'https://ws2.responsys.net/webservices/services/ResponsysWSService',
'5': 'https://ws5.responsys.net/webservices/services/ResponsysWSService',
'rtm4': 'http://rtm4.responsys.net:80/tmws/services/TriggeredMessageWS',
'rtm4b': 'http://rtm4b.responsys.net:80/tmws/services/TriggeredMessageWS',
}
@property
def wsdl(self):
return self.WSDLS[self.pod]
@property
def endpoint(self):
return self.ENDPOINTS[self.pod]
@property
def client(self):
if self._client is None:
self._client = Client(self.wsdl, location=self.endpoint, timeout=self.timeout)
return self._client
@property
def connected(self):
return getattr(self, '_connected', False)
@connected.setter
def connected(self, value):
self._connected = value
@property
def session(self):
return getattr(self, '_session', None)
@session.setter
def session(self, session_id):
self._session = type(
'Session', (tuple,), {
'is_expired': property(lambda s: s[1] + self.session_lifetime <= time()),
})([session_id, time()])
session_header = self.client.factory.create('SessionHeader')
session_header.sessionId = session_id
self.client.set_options(soapheaders=session_header)
@session.deleter
def session(self):
self._session = None
self.client.set_options(soapheaders=())
def __init__(self, username, password, pod, client=None, session_lifetime=600, timeout=5):
self.username = username
self.password = password
self.pod = pod
self.session_lifetime = session_lifetime
self.timeout = timeout
self._client = client
def __enter__(self):
self.connect()
return self
def __exit__(self, type_, value, traceback):
self.disconnect()
def call(self, method, *args):
""" Calls the service method defined with the arguments provided """
try:
response = getattr(self.client.service, method)(*args)
except (URLError, SSLError) as e:
log.exception('Failed to connect to responsys service')
raise ConnectError("Request to service timed out")
except WebFault as web_fault:
fault_name = getattr(web_fault.fault, 'faultstring', None)
error = str(web_fault.fault.detail)
if fault_name == 'TableFault':
raise TableFault(error)
if fault_name == 'ListFault':
raise ListFault(error)
if fault_name == 'API_LIMIT_EXCEEDED':
raise ApiLimitError(error)
if fault_name == 'AccountFault':
raise AccountFault(error)
raise ServiceError(web_fault.fault, web_fault.document)
return response
def connect(self):
""" Connects to the Responsys soap service
Uses the credentials passed to the client init to login and setup the session id returned.
Returns True on successful connection, otherwise False.
"""
if self.session and self.session.is_expired:
# Close the session to avoid max concurrent session errors
self.disconnect(abandon_session=True)
if not self.session:
try:
login_result = self.login(self.username, self.password)
except AccountFault:
log.error('Login failed, invalid username or password')
raise
else:
self.session = login_result.session_id
self.connected = time()
return self.connected
def disconnect(self, abandon_session=False):
""" Disconnects from the Responsys soap service
Calls the service logout method and destroys the client's session information. Returns
True on success, False otherwise.
"""
self.connected = False
if (self.session and self.session.is_expired) or abandon_session:
try:
self.logout()
except:
log.warning(
'Logout call to responsys failed, session may have not been terminated',
exc_info=True
)
del self.session
return True
# Session Management Methods
def login(self, username, password):
""" Responsys.login soap call
Accepts username and password for authentication, returns a LoginResult object.
"""
return LoginResult(self.call('login', username, password))
def logout(self):
""" Responsys.logout soap call
Returns True on success, False otherwise.
"""
return self.call('logout')
def login_with_certificate(self, encrypted_server_challenge):
""" Responsys.loginWithCertificate soap call
Accepts encrypted_server_challenge for login. Returns LoginResult.
"""
return LoginResult(self.call('loginWithCertificate', encrypted_server_challenge))
def authenticate_server(self, username, client_challenge):
""" Responsys.authenticateServer soap call
Accepts username and client_challenge to authenciate. Returns ServerAuthResult.
"""
return ServerAuthResult(self.call('authenticateServer', username, client_challenge))
# List Management Methods
def merge_list_members(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembers call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a MergeResult
"""
list_ = list_.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
merge_rule = merge_rule.get_soap_object(self.client)
return MergeResult(self.call('mergeListMembers', list_, record_data, merge_rule))
def merge_list_members_RIID(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembersRIID call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a RecipientResult
"""
list_ = list_.get_soap_object(self.client)
result = self.call('mergeListMembersRIID', list_, record_data, merge_rule)
return RecipientResult(result.recipientResult)
def delete_list_members(self, list_, query_column, ids_to_delete):
""" Responsys.deleteListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
list_ = list_.get_soap_object(self.client)
result = self.call('deleteListMembers', list_, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_list_members(self, list_, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData instance
"""
list_ = list_.get_soap_object(self.client)
result = self.call('retrieveListMembers', list_, query_column, field_list, ids_to_retrieve)
return RecordData.from_soap_type(result.recordData)
# Table Management Methods
def create_table(self, table, fields):
""" Responsys.createTable call
Accepts:
InteractObject table
list fields
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTable', table, fields)
def create_table_with_pk(self, table, fields, primary_keys):
""" Responsys.createTableWithPK call
Accepts:
InteractObject table
list fields
list primary_keys
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTableWithPK', table, fields, primary_keys)
def delete_table(self, table):
""" Responsys.deleteTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('deleteTable', table)
def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
""" Responsys.deleteProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns list of DeleteResults
"""
profile_extension = profile_extension.get_soap_object(self.client)
result = self.call(
'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_profile_extension_records(self, profile_extension, field_list, ids_to_retrieve,
query_column='RIID'):
""" Responsys.retrieveProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns RecordData
"""
profile_extension = profile_extension.get_soap_object(self.client)
return RecordData.from_soap_type(
self.call('retrieveProfileExtensionRecords',
profile_extension, query_column, field_list, ids_to_retrieve))
def truncate_table(self, table):
""" Responsys.truncateTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('truncateTable', table)
def merge_table_records(self, table, record_data, match_column_names):
""" Responsys.mergeTableRecords call
Accepts:
InteractObject table
RecordData record_data
list match_column_names
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecords', table, record_data, match_column_names))
def merge_table_records_with_pk(self, table, record_data, insert_on_no_match, update_on_match):
""" Responsys.mergeTableRecordsWithPK call
Accepts:
InteractObject table
RecordData record_data
string insert_on_no_match
string update_on_match
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecordsWithPK', table, record_data, insert_on_no_match, update_on_match))
def merge_into_profile_extension(self, profile_extension, record_data, match_column,
insert_on_no_match, update_on_match):
""" Responsys.mergeIntoProfileExtension call
Accepts:
InteractObject profile_extension
RecordData record_data
string match_column
string insert_on_no_match
string update_on_match
Returns a RecipientResult
"""
profile_extension = profile_extension.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
results = self.call(
'mergeIntoProfileExtension', profile_extension, record_data, match_column,
insert_on_no_match, update_on_match)
return [RecipientResult(result) for result in results]
def retrieve_table_records(self, table, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData
"""
table = table.get_soap_object(self.client)
return RecordData.from_soap_type(self.call(
'retrieveTableRecords', table, query_column, field_list, ids_to_retrieve))
# Campaign Management Methods
# TODO: implement
# GetLaunchStatus
# LaunchCampaign
# MergeTriggerEmail
# ScheduleCampaignLaunch
# TriggerCampaignMessage
def trigger_custom_event(self, custom_event, recipient_data=None):
custom_event = custom_event.get_soap_object(self.client)
recipient_data = [rdata.get_soap_object(self.client) for rdata in recipient_data]
results = self.call('triggerCustomEvent', custom_event, recipient_data)
return [TriggerResult(result) for result in results]
|
jslang/responsys | responsys/client.py | InteractClient.merge_table_records | python | def merge_table_records(self, table, record_data, match_column_names):
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecords', table, record_data, match_column_names)) | Responsys.mergeTableRecords call
Accepts:
InteractObject table
RecordData record_data
list match_column_names
Returns a MergeResult | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L369-L382 | [
"def call(self, method, *args):\n \"\"\" Calls the service method defined with the arguments provided \"\"\"\n try:\n response = getattr(self.client.service, method)(*args)\n except (URLError, SSLError) as e:\n log.exception('Failed to connect to responsys service')\n raise ConnectError(\"Request to service timed out\")\n except WebFault as web_fault:\n fault_name = getattr(web_fault.fault, 'faultstring', None)\n error = str(web_fault.fault.detail)\n\n if fault_name == 'TableFault':\n raise TableFault(error)\n if fault_name == 'ListFault':\n raise ListFault(error)\n if fault_name == 'API_LIMIT_EXCEEDED':\n raise ApiLimitError(error)\n if fault_name == 'AccountFault':\n raise AccountFault(error)\n\n raise ServiceError(web_fault.fault, web_fault.document)\n return response\n"
] | class InteractClient(object):
""" Interact Client Class
Provides access to the methods defined by the Responsys Interact API. Example setup:
>>> client = InteractClient(username, password, pod)
>>> client.connect()
>>> client.merge_list_members(interact_object, records, merge_rules)
>>> client.disconnect()
Using the client class as a context manager will automatically connect using the credentials
provided, and disconnect upon context exit:
>>> with InteractClient(username, password, pod) as client:
... client.merge_list_members(interact_object, records, merge_rules)
Since responsys limits the number of active sessions per account, this can help ensure you
don't leave unused connections open.
"""
DEFAULT_SESSION_LIFETIME = 60 * 10
WSDLS = {
'2': 'https://ws2.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'5': 'https://ws5.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'rtm4': 'https://rtm4.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
'rtm4b': 'https://rtm4b.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
}
ENDPOINTS = {
'2': 'https://ws2.responsys.net/webservices/services/ResponsysWSService',
'5': 'https://ws5.responsys.net/webservices/services/ResponsysWSService',
'rtm4': 'http://rtm4.responsys.net:80/tmws/services/TriggeredMessageWS',
'rtm4b': 'http://rtm4b.responsys.net:80/tmws/services/TriggeredMessageWS',
}
@property
def wsdl(self):
return self.WSDLS[self.pod]
@property
def endpoint(self):
return self.ENDPOINTS[self.pod]
@property
def client(self):
if self._client is None:
self._client = Client(self.wsdl, location=self.endpoint, timeout=self.timeout)
return self._client
@property
def connected(self):
return getattr(self, '_connected', False)
@connected.setter
def connected(self, value):
self._connected = value
@property
def session(self):
return getattr(self, '_session', None)
@session.setter
def session(self, session_id):
self._session = type(
'Session', (tuple,), {
'is_expired': property(lambda s: s[1] + self.session_lifetime <= time()),
})([session_id, time()])
session_header = self.client.factory.create('SessionHeader')
session_header.sessionId = session_id
self.client.set_options(soapheaders=session_header)
@session.deleter
def session(self):
self._session = None
self.client.set_options(soapheaders=())
def __init__(self, username, password, pod, client=None, session_lifetime=600, timeout=5):
self.username = username
self.password = password
self.pod = pod
self.session_lifetime = session_lifetime
self.timeout = timeout
self._client = client
def __enter__(self):
self.connect()
return self
def __exit__(self, type_, value, traceback):
self.disconnect()
def call(self, method, *args):
""" Calls the service method defined with the arguments provided """
try:
response = getattr(self.client.service, method)(*args)
except (URLError, SSLError) as e:
log.exception('Failed to connect to responsys service')
raise ConnectError("Request to service timed out")
except WebFault as web_fault:
fault_name = getattr(web_fault.fault, 'faultstring', None)
error = str(web_fault.fault.detail)
if fault_name == 'TableFault':
raise TableFault(error)
if fault_name == 'ListFault':
raise ListFault(error)
if fault_name == 'API_LIMIT_EXCEEDED':
raise ApiLimitError(error)
if fault_name == 'AccountFault':
raise AccountFault(error)
raise ServiceError(web_fault.fault, web_fault.document)
return response
def connect(self):
""" Connects to the Responsys soap service
Uses the credentials passed to the client init to login and setup the session id returned.
Returns True on successful connection, otherwise False.
"""
if self.session and self.session.is_expired:
# Close the session to avoid max concurrent session errors
self.disconnect(abandon_session=True)
if not self.session:
try:
login_result = self.login(self.username, self.password)
except AccountFault:
log.error('Login failed, invalid username or password')
raise
else:
self.session = login_result.session_id
self.connected = time()
return self.connected
def disconnect(self, abandon_session=False):
""" Disconnects from the Responsys soap service
Calls the service logout method and destroys the client's session information. Returns
True on success, False otherwise.
"""
self.connected = False
if (self.session and self.session.is_expired) or abandon_session:
try:
self.logout()
except:
log.warning(
'Logout call to responsys failed, session may have not been terminated',
exc_info=True
)
del self.session
return True
# Session Management Methods
def login(self, username, password):
""" Responsys.login soap call
Accepts username and password for authentication, returns a LoginResult object.
"""
return LoginResult(self.call('login', username, password))
def logout(self):
""" Responsys.logout soap call
Returns True on success, False otherwise.
"""
return self.call('logout')
def login_with_certificate(self, encrypted_server_challenge):
""" Responsys.loginWithCertificate soap call
Accepts encrypted_server_challenge for login. Returns LoginResult.
"""
return LoginResult(self.call('loginWithCertificate', encrypted_server_challenge))
def authenticate_server(self, username, client_challenge):
""" Responsys.authenticateServer soap call
Accepts username and client_challenge to authenciate. Returns ServerAuthResult.
"""
return ServerAuthResult(self.call('authenticateServer', username, client_challenge))
# List Management Methods
def merge_list_members(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembers call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a MergeResult
"""
list_ = list_.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
merge_rule = merge_rule.get_soap_object(self.client)
return MergeResult(self.call('mergeListMembers', list_, record_data, merge_rule))
def merge_list_members_RIID(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembersRIID call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a RecipientResult
"""
list_ = list_.get_soap_object(self.client)
result = self.call('mergeListMembersRIID', list_, record_data, merge_rule)
return RecipientResult(result.recipientResult)
def delete_list_members(self, list_, query_column, ids_to_delete):
""" Responsys.deleteListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
list_ = list_.get_soap_object(self.client)
result = self.call('deleteListMembers', list_, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_list_members(self, list_, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData instance
"""
list_ = list_.get_soap_object(self.client)
result = self.call('retrieveListMembers', list_, query_column, field_list, ids_to_retrieve)
return RecordData.from_soap_type(result.recordData)
# Table Management Methods
def create_table(self, table, fields):
""" Responsys.createTable call
Accepts:
InteractObject table
list fields
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTable', table, fields)
def create_table_with_pk(self, table, fields, primary_keys):
""" Responsys.createTableWithPK call
Accepts:
InteractObject table
list fields
list primary_keys
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTableWithPK', table, fields, primary_keys)
def delete_table(self, table):
""" Responsys.deleteTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('deleteTable', table)
def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
""" Responsys.deleteProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns list of DeleteResults
"""
profile_extension = profile_extension.get_soap_object(self.client)
result = self.call(
'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_profile_extension_records(self, profile_extension, field_list, ids_to_retrieve,
query_column='RIID'):
""" Responsys.retrieveProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns RecordData
"""
profile_extension = profile_extension.get_soap_object(self.client)
return RecordData.from_soap_type(
self.call('retrieveProfileExtensionRecords',
profile_extension, query_column, field_list, ids_to_retrieve))
def truncate_table(self, table):
""" Responsys.truncateTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('truncateTable', table)
def delete_table_records(self, table, query_column, ids_to_delete):
""" Responsys.deleteTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
table = table.get_soap_object(self.client)
result = self.call('deleteTableRecords', table, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def merge_table_records_with_pk(self, table, record_data, insert_on_no_match, update_on_match):
""" Responsys.mergeTableRecordsWithPK call
Accepts:
InteractObject table
RecordData record_data
string insert_on_no_match
string update_on_match
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecordsWithPK', table, record_data, insert_on_no_match, update_on_match))
def merge_into_profile_extension(self, profile_extension, record_data, match_column,
insert_on_no_match, update_on_match):
""" Responsys.mergeIntoProfileExtension call
Accepts:
InteractObject profile_extension
RecordData record_data
string match_column
string insert_on_no_match
string update_on_match
Returns a RecipientResult
"""
profile_extension = profile_extension.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
results = self.call(
'mergeIntoProfileExtension', profile_extension, record_data, match_column,
insert_on_no_match, update_on_match)
return [RecipientResult(result) for result in results]
def retrieve_table_records(self, table, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData
"""
table = table.get_soap_object(self.client)
return RecordData.from_soap_type(self.call(
'retrieveTableRecords', table, query_column, field_list, ids_to_retrieve))
# Campaign Management Methods
# TODO: implement
# GetLaunchStatus
# LaunchCampaign
# MergeTriggerEmail
# ScheduleCampaignLaunch
# TriggerCampaignMessage
def trigger_custom_event(self, custom_event, recipient_data=None):
custom_event = custom_event.get_soap_object(self.client)
recipient_data = [rdata.get_soap_object(self.client) for rdata in recipient_data]
results = self.call('triggerCustomEvent', custom_event, recipient_data)
return [TriggerResult(result) for result in results]
|
jslang/responsys | responsys/client.py | InteractClient.merge_table_records_with_pk | python | def merge_table_records_with_pk(self, table, record_data, insert_on_no_match, update_on_match):
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecordsWithPK', table, record_data, insert_on_no_match, update_on_match)) | Responsys.mergeTableRecordsWithPK call
Accepts:
InteractObject table
RecordData record_data
string insert_on_no_match
string update_on_match
Returns a MergeResult | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L384-L398 | [
"def call(self, method, *args):\n \"\"\" Calls the service method defined with the arguments provided \"\"\"\n try:\n response = getattr(self.client.service, method)(*args)\n except (URLError, SSLError) as e:\n log.exception('Failed to connect to responsys service')\n raise ConnectError(\"Request to service timed out\")\n except WebFault as web_fault:\n fault_name = getattr(web_fault.fault, 'faultstring', None)\n error = str(web_fault.fault.detail)\n\n if fault_name == 'TableFault':\n raise TableFault(error)\n if fault_name == 'ListFault':\n raise ListFault(error)\n if fault_name == 'API_LIMIT_EXCEEDED':\n raise ApiLimitError(error)\n if fault_name == 'AccountFault':\n raise AccountFault(error)\n\n raise ServiceError(web_fault.fault, web_fault.document)\n return response\n"
] | class InteractClient(object):
""" Interact Client Class
Provides access to the methods defined by the Responsys Interact API. Example setup:
>>> client = InteractClient(username, password, pod)
>>> client.connect()
>>> client.merge_list_members(interact_object, records, merge_rules)
>>> client.disconnect()
Using the client class as a context manager will automatically connect using the credentials
provided, and disconnect upon context exit:
>>> with InteractClient(username, password, pod) as client:
... client.merge_list_members(interact_object, records, merge_rules)
Since responsys limits the number of active sessions per account, this can help ensure you
don't leave unused connections open.
"""
DEFAULT_SESSION_LIFETIME = 60 * 10
WSDLS = {
'2': 'https://ws2.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'5': 'https://ws5.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'rtm4': 'https://rtm4.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
'rtm4b': 'https://rtm4b.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
}
ENDPOINTS = {
'2': 'https://ws2.responsys.net/webservices/services/ResponsysWSService',
'5': 'https://ws5.responsys.net/webservices/services/ResponsysWSService',
'rtm4': 'http://rtm4.responsys.net:80/tmws/services/TriggeredMessageWS',
'rtm4b': 'http://rtm4b.responsys.net:80/tmws/services/TriggeredMessageWS',
}
@property
def wsdl(self):
return self.WSDLS[self.pod]
@property
def endpoint(self):
return self.ENDPOINTS[self.pod]
@property
def client(self):
if self._client is None:
self._client = Client(self.wsdl, location=self.endpoint, timeout=self.timeout)
return self._client
@property
def connected(self):
return getattr(self, '_connected', False)
@connected.setter
def connected(self, value):
self._connected = value
@property
def session(self):
return getattr(self, '_session', None)
@session.setter
def session(self, session_id):
self._session = type(
'Session', (tuple,), {
'is_expired': property(lambda s: s[1] + self.session_lifetime <= time()),
})([session_id, time()])
session_header = self.client.factory.create('SessionHeader')
session_header.sessionId = session_id
self.client.set_options(soapheaders=session_header)
@session.deleter
def session(self):
self._session = None
self.client.set_options(soapheaders=())
def __init__(self, username, password, pod, client=None, session_lifetime=600, timeout=5):
self.username = username
self.password = password
self.pod = pod
self.session_lifetime = session_lifetime
self.timeout = timeout
self._client = client
def __enter__(self):
self.connect()
return self
def __exit__(self, type_, value, traceback):
self.disconnect()
def call(self, method, *args):
""" Calls the service method defined with the arguments provided """
try:
response = getattr(self.client.service, method)(*args)
except (URLError, SSLError) as e:
log.exception('Failed to connect to responsys service')
raise ConnectError("Request to service timed out")
except WebFault as web_fault:
fault_name = getattr(web_fault.fault, 'faultstring', None)
error = str(web_fault.fault.detail)
if fault_name == 'TableFault':
raise TableFault(error)
if fault_name == 'ListFault':
raise ListFault(error)
if fault_name == 'API_LIMIT_EXCEEDED':
raise ApiLimitError(error)
if fault_name == 'AccountFault':
raise AccountFault(error)
raise ServiceError(web_fault.fault, web_fault.document)
return response
def connect(self):
""" Connects to the Responsys soap service
Uses the credentials passed to the client init to login and setup the session id returned.
Returns True on successful connection, otherwise False.
"""
if self.session and self.session.is_expired:
# Close the session to avoid max concurrent session errors
self.disconnect(abandon_session=True)
if not self.session:
try:
login_result = self.login(self.username, self.password)
except AccountFault:
log.error('Login failed, invalid username or password')
raise
else:
self.session = login_result.session_id
self.connected = time()
return self.connected
def disconnect(self, abandon_session=False):
""" Disconnects from the Responsys soap service
Calls the service logout method and destroys the client's session information. Returns
True on success, False otherwise.
"""
self.connected = False
if (self.session and self.session.is_expired) or abandon_session:
try:
self.logout()
except:
log.warning(
'Logout call to responsys failed, session may have not been terminated',
exc_info=True
)
del self.session
return True
# Session Management Methods
def login(self, username, password):
""" Responsys.login soap call
Accepts username and password for authentication, returns a LoginResult object.
"""
return LoginResult(self.call('login', username, password))
def logout(self):
""" Responsys.logout soap call
Returns True on success, False otherwise.
"""
return self.call('logout')
def login_with_certificate(self, encrypted_server_challenge):
""" Responsys.loginWithCertificate soap call
Accepts encrypted_server_challenge for login. Returns LoginResult.
"""
return LoginResult(self.call('loginWithCertificate', encrypted_server_challenge))
def authenticate_server(self, username, client_challenge):
""" Responsys.authenticateServer soap call
Accepts username and client_challenge to authenciate. Returns ServerAuthResult.
"""
return ServerAuthResult(self.call('authenticateServer', username, client_challenge))
# List Management Methods
def merge_list_members(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembers call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a MergeResult
"""
list_ = list_.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
merge_rule = merge_rule.get_soap_object(self.client)
return MergeResult(self.call('mergeListMembers', list_, record_data, merge_rule))
def merge_list_members_RIID(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembersRIID call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a RecipientResult
"""
list_ = list_.get_soap_object(self.client)
result = self.call('mergeListMembersRIID', list_, record_data, merge_rule)
return RecipientResult(result.recipientResult)
def delete_list_members(self, list_, query_column, ids_to_delete):
""" Responsys.deleteListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
list_ = list_.get_soap_object(self.client)
result = self.call('deleteListMembers', list_, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_list_members(self, list_, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData instance
"""
list_ = list_.get_soap_object(self.client)
result = self.call('retrieveListMembers', list_, query_column, field_list, ids_to_retrieve)
return RecordData.from_soap_type(result.recordData)
# Table Management Methods
def create_table(self, table, fields):
""" Responsys.createTable call
Accepts:
InteractObject table
list fields
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTable', table, fields)
def create_table_with_pk(self, table, fields, primary_keys):
""" Responsys.createTableWithPK call
Accepts:
InteractObject table
list fields
list primary_keys
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTableWithPK', table, fields, primary_keys)
def delete_table(self, table):
""" Responsys.deleteTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('deleteTable', table)
def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
""" Responsys.deleteProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns list of DeleteResults
"""
profile_extension = profile_extension.get_soap_object(self.client)
result = self.call(
'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_profile_extension_records(self, profile_extension, field_list, ids_to_retrieve,
query_column='RIID'):
""" Responsys.retrieveProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns RecordData
"""
profile_extension = profile_extension.get_soap_object(self.client)
return RecordData.from_soap_type(
self.call('retrieveProfileExtensionRecords',
profile_extension, query_column, field_list, ids_to_retrieve))
def truncate_table(self, table):
""" Responsys.truncateTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('truncateTable', table)
def delete_table_records(self, table, query_column, ids_to_delete):
""" Responsys.deleteTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
table = table.get_soap_object(self.client)
result = self.call('deleteTableRecords', table, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def merge_table_records(self, table, record_data, match_column_names):
""" Responsys.mergeTableRecords call
Accepts:
InteractObject table
RecordData record_data
list match_column_names
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecords', table, record_data, match_column_names))
def merge_into_profile_extension(self, profile_extension, record_data, match_column,
insert_on_no_match, update_on_match):
""" Responsys.mergeIntoProfileExtension call
Accepts:
InteractObject profile_extension
RecordData record_data
string match_column
string insert_on_no_match
string update_on_match
Returns a RecipientResult
"""
profile_extension = profile_extension.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
results = self.call(
'mergeIntoProfileExtension', profile_extension, record_data, match_column,
insert_on_no_match, update_on_match)
return [RecipientResult(result) for result in results]
def retrieve_table_records(self, table, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData
"""
table = table.get_soap_object(self.client)
return RecordData.from_soap_type(self.call(
'retrieveTableRecords', table, query_column, field_list, ids_to_retrieve))
# Campaign Management Methods
# TODO: implement
# GetLaunchStatus
# LaunchCampaign
# MergeTriggerEmail
# ScheduleCampaignLaunch
# TriggerCampaignMessage
def trigger_custom_event(self, custom_event, recipient_data=None):
custom_event = custom_event.get_soap_object(self.client)
recipient_data = [rdata.get_soap_object(self.client) for rdata in recipient_data]
results = self.call('triggerCustomEvent', custom_event, recipient_data)
return [TriggerResult(result) for result in results]
|
jslang/responsys | responsys/client.py | InteractClient.merge_into_profile_extension | python | def merge_into_profile_extension(self, profile_extension, record_data, match_column,
insert_on_no_match, update_on_match):
profile_extension = profile_extension.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
results = self.call(
'mergeIntoProfileExtension', profile_extension, record_data, match_column,
insert_on_no_match, update_on_match)
return [RecipientResult(result) for result in results] | Responsys.mergeIntoProfileExtension call
Accepts:
InteractObject profile_extension
RecordData record_data
string match_column
string insert_on_no_match
string update_on_match
Returns a RecipientResult | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L400-L418 | [
"def call(self, method, *args):\n \"\"\" Calls the service method defined with the arguments provided \"\"\"\n try:\n response = getattr(self.client.service, method)(*args)\n except (URLError, SSLError) as e:\n log.exception('Failed to connect to responsys service')\n raise ConnectError(\"Request to service timed out\")\n except WebFault as web_fault:\n fault_name = getattr(web_fault.fault, 'faultstring', None)\n error = str(web_fault.fault.detail)\n\n if fault_name == 'TableFault':\n raise TableFault(error)\n if fault_name == 'ListFault':\n raise ListFault(error)\n if fault_name == 'API_LIMIT_EXCEEDED':\n raise ApiLimitError(error)\n if fault_name == 'AccountFault':\n raise AccountFault(error)\n\n raise ServiceError(web_fault.fault, web_fault.document)\n return response\n"
] | class InteractClient(object):
""" Interact Client Class
Provides access to the methods defined by the Responsys Interact API. Example setup:
>>> client = InteractClient(username, password, pod)
>>> client.connect()
>>> client.merge_list_members(interact_object, records, merge_rules)
>>> client.disconnect()
Using the client class as a context manager will automatically connect using the credentials
provided, and disconnect upon context exit:
>>> with InteractClient(username, password, pod) as client:
... client.merge_list_members(interact_object, records, merge_rules)
Since responsys limits the number of active sessions per account, this can help ensure you
don't leave unused connections open.
"""
DEFAULT_SESSION_LIFETIME = 60 * 10
WSDLS = {
'2': 'https://ws2.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'5': 'https://ws5.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'rtm4': 'https://rtm4.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
'rtm4b': 'https://rtm4b.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
}
ENDPOINTS = {
'2': 'https://ws2.responsys.net/webservices/services/ResponsysWSService',
'5': 'https://ws5.responsys.net/webservices/services/ResponsysWSService',
'rtm4': 'http://rtm4.responsys.net:80/tmws/services/TriggeredMessageWS',
'rtm4b': 'http://rtm4b.responsys.net:80/tmws/services/TriggeredMessageWS',
}
@property
def wsdl(self):
return self.WSDLS[self.pod]
@property
def endpoint(self):
return self.ENDPOINTS[self.pod]
@property
def client(self):
if self._client is None:
self._client = Client(self.wsdl, location=self.endpoint, timeout=self.timeout)
return self._client
@property
def connected(self):
return getattr(self, '_connected', False)
@connected.setter
def connected(self, value):
self._connected = value
@property
def session(self):
return getattr(self, '_session', None)
@session.setter
def session(self, session_id):
self._session = type(
'Session', (tuple,), {
'is_expired': property(lambda s: s[1] + self.session_lifetime <= time()),
})([session_id, time()])
session_header = self.client.factory.create('SessionHeader')
session_header.sessionId = session_id
self.client.set_options(soapheaders=session_header)
@session.deleter
def session(self):
self._session = None
self.client.set_options(soapheaders=())
def __init__(self, username, password, pod, client=None, session_lifetime=600, timeout=5):
self.username = username
self.password = password
self.pod = pod
self.session_lifetime = session_lifetime
self.timeout = timeout
self._client = client
def __enter__(self):
self.connect()
return self
def __exit__(self, type_, value, traceback):
self.disconnect()
def call(self, method, *args):
""" Calls the service method defined with the arguments provided """
try:
response = getattr(self.client.service, method)(*args)
except (URLError, SSLError) as e:
log.exception('Failed to connect to responsys service')
raise ConnectError("Request to service timed out")
except WebFault as web_fault:
fault_name = getattr(web_fault.fault, 'faultstring', None)
error = str(web_fault.fault.detail)
if fault_name == 'TableFault':
raise TableFault(error)
if fault_name == 'ListFault':
raise ListFault(error)
if fault_name == 'API_LIMIT_EXCEEDED':
raise ApiLimitError(error)
if fault_name == 'AccountFault':
raise AccountFault(error)
raise ServiceError(web_fault.fault, web_fault.document)
return response
def connect(self):
""" Connects to the Responsys soap service
Uses the credentials passed to the client init to login and setup the session id returned.
Returns True on successful connection, otherwise False.
"""
if self.session and self.session.is_expired:
# Close the session to avoid max concurrent session errors
self.disconnect(abandon_session=True)
if not self.session:
try:
login_result = self.login(self.username, self.password)
except AccountFault:
log.error('Login failed, invalid username or password')
raise
else:
self.session = login_result.session_id
self.connected = time()
return self.connected
def disconnect(self, abandon_session=False):
""" Disconnects from the Responsys soap service
Calls the service logout method and destroys the client's session information. Returns
True on success, False otherwise.
"""
self.connected = False
if (self.session and self.session.is_expired) or abandon_session:
try:
self.logout()
except:
log.warning(
'Logout call to responsys failed, session may have not been terminated',
exc_info=True
)
del self.session
return True
# Session Management Methods
def login(self, username, password):
""" Responsys.login soap call
Accepts username and password for authentication, returns a LoginResult object.
"""
return LoginResult(self.call('login', username, password))
def logout(self):
""" Responsys.logout soap call
Returns True on success, False otherwise.
"""
return self.call('logout')
def login_with_certificate(self, encrypted_server_challenge):
""" Responsys.loginWithCertificate soap call
Accepts encrypted_server_challenge for login. Returns LoginResult.
"""
return LoginResult(self.call('loginWithCertificate', encrypted_server_challenge))
def authenticate_server(self, username, client_challenge):
""" Responsys.authenticateServer soap call
Accepts username and client_challenge to authenciate. Returns ServerAuthResult.
"""
return ServerAuthResult(self.call('authenticateServer', username, client_challenge))
# List Management Methods
def merge_list_members(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembers call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a MergeResult
"""
list_ = list_.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
merge_rule = merge_rule.get_soap_object(self.client)
return MergeResult(self.call('mergeListMembers', list_, record_data, merge_rule))
def merge_list_members_RIID(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembersRIID call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a RecipientResult
"""
list_ = list_.get_soap_object(self.client)
result = self.call('mergeListMembersRIID', list_, record_data, merge_rule)
return RecipientResult(result.recipientResult)
def delete_list_members(self, list_, query_column, ids_to_delete):
""" Responsys.deleteListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
list_ = list_.get_soap_object(self.client)
result = self.call('deleteListMembers', list_, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_list_members(self, list_, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData instance
"""
list_ = list_.get_soap_object(self.client)
result = self.call('retrieveListMembers', list_, query_column, field_list, ids_to_retrieve)
return RecordData.from_soap_type(result.recordData)
# Table Management Methods
def create_table(self, table, fields):
""" Responsys.createTable call
Accepts:
InteractObject table
list fields
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTable', table, fields)
def create_table_with_pk(self, table, fields, primary_keys):
""" Responsys.createTableWithPK call
Accepts:
InteractObject table
list fields
list primary_keys
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTableWithPK', table, fields, primary_keys)
def delete_table(self, table):
""" Responsys.deleteTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('deleteTable', table)
def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
""" Responsys.deleteProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns list of DeleteResults
"""
profile_extension = profile_extension.get_soap_object(self.client)
result = self.call(
'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_profile_extension_records(self, profile_extension, field_list, ids_to_retrieve,
query_column='RIID'):
""" Responsys.retrieveProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns RecordData
"""
profile_extension = profile_extension.get_soap_object(self.client)
return RecordData.from_soap_type(
self.call('retrieveProfileExtensionRecords',
profile_extension, query_column, field_list, ids_to_retrieve))
def truncate_table(self, table):
""" Responsys.truncateTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('truncateTable', table)
def delete_table_records(self, table, query_column, ids_to_delete):
""" Responsys.deleteTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
table = table.get_soap_object(self.client)
result = self.call('deleteTableRecords', table, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def merge_table_records(self, table, record_data, match_column_names):
""" Responsys.mergeTableRecords call
Accepts:
InteractObject table
RecordData record_data
list match_column_names
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecords', table, record_data, match_column_names))
def merge_table_records_with_pk(self, table, record_data, insert_on_no_match, update_on_match):
""" Responsys.mergeTableRecordsWithPK call
Accepts:
InteractObject table
RecordData record_data
string insert_on_no_match
string update_on_match
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecordsWithPK', table, record_data, insert_on_no_match, update_on_match))
def retrieve_table_records(self, table, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData
"""
table = table.get_soap_object(self.client)
return RecordData.from_soap_type(self.call(
'retrieveTableRecords', table, query_column, field_list, ids_to_retrieve))
# Campaign Management Methods
# TODO: implement
# GetLaunchStatus
# LaunchCampaign
# MergeTriggerEmail
# ScheduleCampaignLaunch
# TriggerCampaignMessage
def trigger_custom_event(self, custom_event, recipient_data=None):
custom_event = custom_event.get_soap_object(self.client)
recipient_data = [rdata.get_soap_object(self.client) for rdata in recipient_data]
results = self.call('triggerCustomEvent', custom_event, recipient_data)
return [TriggerResult(result) for result in results]
|
jslang/responsys | responsys/client.py | InteractClient.retrieve_table_records | python | def retrieve_table_records(self, table, query_column, field_list, ids_to_retrieve):
table = table.get_soap_object(self.client)
return RecordData.from_soap_type(self.call(
'retrieveTableRecords', table, query_column, field_list, ids_to_retrieve)) | Responsys.retrieveTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L420-L434 | [
"def call(self, method, *args):\n \"\"\" Calls the service method defined with the arguments provided \"\"\"\n try:\n response = getattr(self.client.service, method)(*args)\n except (URLError, SSLError) as e:\n log.exception('Failed to connect to responsys service')\n raise ConnectError(\"Request to service timed out\")\n except WebFault as web_fault:\n fault_name = getattr(web_fault.fault, 'faultstring', None)\n error = str(web_fault.fault.detail)\n\n if fault_name == 'TableFault':\n raise TableFault(error)\n if fault_name == 'ListFault':\n raise ListFault(error)\n if fault_name == 'API_LIMIT_EXCEEDED':\n raise ApiLimitError(error)\n if fault_name == 'AccountFault':\n raise AccountFault(error)\n\n raise ServiceError(web_fault.fault, web_fault.document)\n return response\n",
"def from_soap_type(cls, record_data):\n record_data = [\n dict(zip(record_data.fieldNames, r.fieldValues)) for r in record_data.records]\n return cls(record_data)\n"
] | class InteractClient(object):
""" Interact Client Class
Provides access to the methods defined by the Responsys Interact API. Example setup:
>>> client = InteractClient(username, password, pod)
>>> client.connect()
>>> client.merge_list_members(interact_object, records, merge_rules)
>>> client.disconnect()
Using the client class as a context manager will automatically connect using the credentials
provided, and disconnect upon context exit:
>>> with InteractClient(username, password, pod) as client:
... client.merge_list_members(interact_object, records, merge_rules)
Since responsys limits the number of active sessions per account, this can help ensure you
don't leave unused connections open.
"""
DEFAULT_SESSION_LIFETIME = 60 * 10
WSDLS = {
'2': 'https://ws2.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'5': 'https://ws5.responsys.net/webservices/wsdl/ResponsysWS_Level1.wsdl',
'rtm4': 'https://rtm4.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
'rtm4b': 'https://rtm4b.responsys.net/tmws/services/TriggeredMessageWS?wsdl',
}
ENDPOINTS = {
'2': 'https://ws2.responsys.net/webservices/services/ResponsysWSService',
'5': 'https://ws5.responsys.net/webservices/services/ResponsysWSService',
'rtm4': 'http://rtm4.responsys.net:80/tmws/services/TriggeredMessageWS',
'rtm4b': 'http://rtm4b.responsys.net:80/tmws/services/TriggeredMessageWS',
}
@property
def wsdl(self):
return self.WSDLS[self.pod]
@property
def endpoint(self):
return self.ENDPOINTS[self.pod]
@property
def client(self):
if self._client is None:
self._client = Client(self.wsdl, location=self.endpoint, timeout=self.timeout)
return self._client
@property
def connected(self):
return getattr(self, '_connected', False)
@connected.setter
def connected(self, value):
self._connected = value
@property
def session(self):
return getattr(self, '_session', None)
@session.setter
def session(self, session_id):
self._session = type(
'Session', (tuple,), {
'is_expired': property(lambda s: s[1] + self.session_lifetime <= time()),
})([session_id, time()])
session_header = self.client.factory.create('SessionHeader')
session_header.sessionId = session_id
self.client.set_options(soapheaders=session_header)
@session.deleter
def session(self):
self._session = None
self.client.set_options(soapheaders=())
def __init__(self, username, password, pod, client=None, session_lifetime=600, timeout=5):
self.username = username
self.password = password
self.pod = pod
self.session_lifetime = session_lifetime
self.timeout = timeout
self._client = client
def __enter__(self):
self.connect()
return self
def __exit__(self, type_, value, traceback):
self.disconnect()
def call(self, method, *args):
""" Calls the service method defined with the arguments provided """
try:
response = getattr(self.client.service, method)(*args)
except (URLError, SSLError) as e:
log.exception('Failed to connect to responsys service')
raise ConnectError("Request to service timed out")
except WebFault as web_fault:
fault_name = getattr(web_fault.fault, 'faultstring', None)
error = str(web_fault.fault.detail)
if fault_name == 'TableFault':
raise TableFault(error)
if fault_name == 'ListFault':
raise ListFault(error)
if fault_name == 'API_LIMIT_EXCEEDED':
raise ApiLimitError(error)
if fault_name == 'AccountFault':
raise AccountFault(error)
raise ServiceError(web_fault.fault, web_fault.document)
return response
def connect(self):
""" Connects to the Responsys soap service
Uses the credentials passed to the client init to login and setup the session id returned.
Returns True on successful connection, otherwise False.
"""
if self.session and self.session.is_expired:
# Close the session to avoid max concurrent session errors
self.disconnect(abandon_session=True)
if not self.session:
try:
login_result = self.login(self.username, self.password)
except AccountFault:
log.error('Login failed, invalid username or password')
raise
else:
self.session = login_result.session_id
self.connected = time()
return self.connected
def disconnect(self, abandon_session=False):
""" Disconnects from the Responsys soap service
Calls the service logout method and destroys the client's session information. Returns
True on success, False otherwise.
"""
self.connected = False
if (self.session and self.session.is_expired) or abandon_session:
try:
self.logout()
except:
log.warning(
'Logout call to responsys failed, session may have not been terminated',
exc_info=True
)
del self.session
return True
# Session Management Methods
def login(self, username, password):
""" Responsys.login soap call
Accepts username and password for authentication, returns a LoginResult object.
"""
return LoginResult(self.call('login', username, password))
def logout(self):
""" Responsys.logout soap call
Returns True on success, False otherwise.
"""
return self.call('logout')
def login_with_certificate(self, encrypted_server_challenge):
""" Responsys.loginWithCertificate soap call
Accepts encrypted_server_challenge for login. Returns LoginResult.
"""
return LoginResult(self.call('loginWithCertificate', encrypted_server_challenge))
def authenticate_server(self, username, client_challenge):
""" Responsys.authenticateServer soap call
Accepts username and client_challenge to authenciate. Returns ServerAuthResult.
"""
return ServerAuthResult(self.call('authenticateServer', username, client_challenge))
# List Management Methods
def merge_list_members(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembers call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a MergeResult
"""
list_ = list_.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
merge_rule = merge_rule.get_soap_object(self.client)
return MergeResult(self.call('mergeListMembers', list_, record_data, merge_rule))
def merge_list_members_RIID(self, list_, record_data, merge_rule):
""" Responsys.mergeListMembersRIID call
Accepts:
InteractObject list_
RecordData record_data
ListMergeRule merge_rule
Returns a RecipientResult
"""
list_ = list_.get_soap_object(self.client)
result = self.call('mergeListMembersRIID', list_, record_data, merge_rule)
return RecipientResult(result.recipientResult)
def delete_list_members(self, list_, query_column, ids_to_delete):
""" Responsys.deleteListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
list_ = list_.get_soap_object(self.client)
result = self.call('deleteListMembers', list_, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_list_members(self, list_, query_column, field_list, ids_to_retrieve):
""" Responsys.retrieveListMembers call
Accepts:
InteractObject list_
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list field_list
list ids_to_retrieve
Returns a RecordData instance
"""
list_ = list_.get_soap_object(self.client)
result = self.call('retrieveListMembers', list_, query_column, field_list, ids_to_retrieve)
return RecordData.from_soap_type(result.recordData)
# Table Management Methods
def create_table(self, table, fields):
""" Responsys.createTable call
Accepts:
InteractObject table
list fields
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTable', table, fields)
def create_table_with_pk(self, table, fields, primary_keys):
""" Responsys.createTableWithPK call
Accepts:
InteractObject table
list fields
list primary_keys
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('createTableWithPK', table, fields, primary_keys)
def delete_table(self, table):
""" Responsys.deleteTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('deleteTable', table)
def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
""" Responsys.deleteProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns list of DeleteResults
"""
profile_extension = profile_extension.get_soap_object(self.client)
result = self.call(
'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def retrieve_profile_extension_records(self, profile_extension, field_list, ids_to_retrieve,
query_column='RIID'):
""" Responsys.retrieveProfileExtensionRecords call
Accepts:
InteractObject profile_extension
list field_list
list ids_to_retrieve
string query_column
default: 'RIID'
Returns RecordData
"""
profile_extension = profile_extension.get_soap_object(self.client)
return RecordData.from_soap_type(
self.call('retrieveProfileExtensionRecords',
profile_extension, query_column, field_list, ids_to_retrieve))
def truncate_table(self, table):
""" Responsys.truncateTable call
Accepts:
InteractObject table
Returns True on success
"""
table = table.get_soap_object(self.client)
return self.call('truncateTable', table)
def delete_table_records(self, table, query_column, ids_to_delete):
""" Responsys.deleteTableRecords call
Accepts:
InteractObject table
string query_column
possible values: 'RIID'|'EMAIL_ADDRESS'|'CUSTOMER_ID'|'MOBILE_NUMBER'
list ids_to_delete
Returns a list of DeleteResult instances
"""
table = table.get_soap_object(self.client)
result = self.call('deleteTableRecords', table, query_column, ids_to_delete)
if hasattr(result, '__iter__'):
return [DeleteResult(delete_result) for delete_result in result]
return [DeleteResult(result)]
def merge_table_records(self, table, record_data, match_column_names):
""" Responsys.mergeTableRecords call
Accepts:
InteractObject table
RecordData record_data
list match_column_names
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecords', table, record_data, match_column_names))
def merge_table_records_with_pk(self, table, record_data, insert_on_no_match, update_on_match):
""" Responsys.mergeTableRecordsWithPK call
Accepts:
InteractObject table
RecordData record_data
string insert_on_no_match
string update_on_match
Returns a MergeResult
"""
table = table.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
return MergeResult(self.call(
'mergeTableRecordsWithPK', table, record_data, insert_on_no_match, update_on_match))
def merge_into_profile_extension(self, profile_extension, record_data, match_column,
insert_on_no_match, update_on_match):
""" Responsys.mergeIntoProfileExtension call
Accepts:
InteractObject profile_extension
RecordData record_data
string match_column
string insert_on_no_match
string update_on_match
Returns a RecipientResult
"""
profile_extension = profile_extension.get_soap_object(self.client)
record_data = record_data.get_soap_object(self.client)
results = self.call(
'mergeIntoProfileExtension', profile_extension, record_data, match_column,
insert_on_no_match, update_on_match)
return [RecipientResult(result) for result in results]
# Campaign Management Methods
# TODO: implement
# GetLaunchStatus
# LaunchCampaign
# MergeTriggerEmail
# ScheduleCampaignLaunch
# TriggerCampaignMessage
def trigger_custom_event(self, custom_event, recipient_data=None):
custom_event = custom_event.get_soap_object(self.client)
recipient_data = [rdata.get_soap_object(self.client) for rdata in recipient_data]
results = self.call('triggerCustomEvent', custom_event, recipient_data)
return [TriggerResult(result) for result in results]
|
jslang/responsys | responsys/types.py | InteractType.soap_attribute | python | def soap_attribute(self, name, value):
setattr(self, name, value)
self._attributes.add(name) | Marks an attribute as being a part of the data defined by the soap datatype | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/types.py#L32-L35 | null | class InteractType(object):
""" InteractType class
Provides base interact type functionality. Interact types should register their WSDL defined
attributes via the soap_attribute method. This allows interact types to provide their own
soap friendly objects for use with the suds client used by the InteractClient.
Interact type attributes can be accessed via dictionary lookup, for example:
>>> InteractType(foo=1)
>>> InteractType['foo'] == InteractType.foo
... True
"""
def __init__(self, *args, **kwargs):
self._attributes = set()
self.set_attributes(*args, **kwargs)
def __getitem__(self, name):
return getattr(self, name)
@property
def soap_name(self):
""" Provide the WSDL defined name for this class. """
return self.__class__.__name__
def get_soap_object(self, client):
""" Create and return a soap service type defined for this instance """
def to_soap_attribute(attr):
words = attr.split('_')
words = words[:1] + [word.capitalize() for word in words[1:]]
return ''.join(words)
soap_object = client.factory.create(self.soap_name)
for attr in self._attributes:
value = getattr(self, attr)
setattr(soap_object, to_soap_attribute(attr), value)
return soap_object
def set_attributes(self, *args, **kwargs):
for name, value in list(kwargs.items()):
self.soap_attribute(name, value)
def __eq__(self, a):
attr_equal = lambda attr: getattr(self, attr) == getattr(a, attr)
return all([attr_equal(attr) for attr in self._attributes])
|
jslang/responsys | responsys/types.py | InteractType.get_soap_object | python | def get_soap_object(self, client):
def to_soap_attribute(attr):
words = attr.split('_')
words = words[:1] + [word.capitalize() for word in words[1:]]
return ''.join(words)
soap_object = client.factory.create(self.soap_name)
for attr in self._attributes:
value = getattr(self, attr)
setattr(soap_object, to_soap_attribute(attr), value)
return soap_object | Create and return a soap service type defined for this instance | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/types.py#L37-L49 | [
"def to_soap_attribute(attr):\n words = attr.split('_')\n words = words[:1] + [word.capitalize() for word in words[1:]]\n return ''.join(words)\n"
] | class InteractType(object):
""" InteractType class
Provides base interact type functionality. Interact types should register their WSDL defined
attributes via the soap_attribute method. This allows interact types to provide their own
soap friendly objects for use with the suds client used by the InteractClient.
Interact type attributes can be accessed via dictionary lookup, for example:
>>> InteractType(foo=1)
>>> InteractType['foo'] == InteractType.foo
... True
"""
def __init__(self, *args, **kwargs):
self._attributes = set()
self.set_attributes(*args, **kwargs)
def __getitem__(self, name):
return getattr(self, name)
@property
def soap_name(self):
""" Provide the WSDL defined name for this class. """
return self.__class__.__name__
def soap_attribute(self, name, value):
""" Marks an attribute as being a part of the data defined by the soap datatype"""
setattr(self, name, value)
self._attributes.add(name)
def set_attributes(self, *args, **kwargs):
for name, value in list(kwargs.items()):
self.soap_attribute(name, value)
def __eq__(self, a):
attr_equal = lambda attr: getattr(self, attr) == getattr(a, attr)
return all([attr_equal(attr) for attr in self._attributes])
|
jslang/responsys | responsys/types.py | RecordData.get_soap_object | python | def get_soap_object(self, client):
record_data = super().get_soap_object(client)
record_data.records = [Record(r).get_soap_object(client) for r in record_data.records]
return record_data | Override default get_soap_object behavior to account for child Record types | train | https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/types.py#L158-L162 | [
"def get_soap_object(self, client):\n \"\"\" Create and return a soap service type defined for this instance \"\"\"\n def to_soap_attribute(attr):\n words = attr.split('_')\n words = words[:1] + [word.capitalize() for word in words[1:]]\n return ''.join(words)\n\n soap_object = client.factory.create(self.soap_name)\n for attr in self._attributes:\n value = getattr(self, attr)\n setattr(soap_object, to_soap_attribute(attr), value)\n\n return soap_object\n"
] | class RecordData(InteractType):
""" Responsys RecordData Type
Responsys type representing a mapping of field names to values. Accepts a list of dictionary
like objects for init.
"""
@classmethod
def from_soap_type(cls, record_data):
record_data = [
dict(zip(record_data.fieldNames, r.fieldValues)) for r in record_data.records]
return cls(record_data)
def set_attributes(self, record_data):
assert len(record_data), "Record list length must be non-zero"
field_names = list(record_data[0].keys())
records = []
for record in record_data:
records.append([record[field_name] for field_name in field_names])
self.soap_attribute('field_names', field_names)
self.soap_attribute('records', records)
def __iter__(self):
for record in self.records:
yield dict(zip(self.field_names, record.field_values))
def __len__(self):
return len(self.records)
|
eventifyio/eventify | eventify/drivers/base.py | BaseComponent.onConnect | python | async def onConnect(self):
# Add extra attribute
# This allows for following crossbar/autobahn spec
# without changing legacy configuration
if not hasattr(self.config, 'extra'):
original_config = {'config': self.config}
self.config = objdict(self.config)
setattr(self.config, 'extra', original_config)
self.config.extra['handlers'] = self.handlers
# setup transport host
self.transport_host = self.config.extra['config']['transport_host']
# subscription setup
self.subscribe_options = SubscribeOptions(**self.config.extra['config']['sub_options'])
self.replay_events = self.config.extra['config']['replay_events']
# publishing setup
self.publish_topic = self.config.extra['config']['publish_topic']['topic']
self.publish_options = PublishOptions(**self.config.extra['config']['pub_options'])
# setup callback
self.handlers = self.config.extra['handlers']
# optional subscribed topics from config.json
self.subscribed_topics = self.config.extra['config']['subscribed_topics']
# put name on session
self.name = self.config.extra['config']['name']
# setup db pool - optionally
if self.config.extra['config']['pub_options']['retain'] is True:
self.pool = await asyncpg.create_pool(
user=EVENT_DB_USER,
password=EVENT_DB_PASS,
host=EVENT_DB_HOST,
database=EVENT_DB_NAME
)
# Handle non crossbar drivers
try:
self.join(self.config.realm)
except AttributeError:
pass | Configure the component | train | https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/drivers/base.py#L20-L66 | null | class BaseComponent(object):
"""
Base class for driver components
"""
|
eventifyio/eventify | eventify/tracking/__init__.py | track_event | python | async def track_event(event, state, service_name):
redis = await aioredis.create_redis(
(EVENT_TRACKING_HOST, 6379), loop=loop)
now = datetime.utcnow()
event_id = event.event_id
tracking_data = json.dumps({
"event_id": event_id,
"timestamp": str(now),
"state": state
})
await redis.rpush(service_name, tracking_data)
redis.close()
await redis.wait_closed() | Store state of events in memory
:param event: Event object
:param state: EventState object
:param service_name: Name of service name | train | https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/tracking/__init__.py#L17-L37 | null | """
Event Tracking Module
"""
from datetime import datetime
import asyncio
import json
import aioredis
from eventify.tracking.constants import EVENT_TRACKING_HOST
loop = asyncio.get_event_loop()
|
eventifyio/eventify | eventify/service.py | event_tracker | python | def event_tracker(func):
@wraps(func)
async def wrapper(*args, **kwargs):
"""
Wraps function to provide redis
tracking
"""
event = Event(args[0])
session = kwargs['session']
service_name = session.name
await track_event(event, EventState.started, service_name)
await func(*args, **kwargs)
await track_event(event, EventState.completed, service_name)
return wrapper | Event tracking handler | train | https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/service.py#L19-L35 | null | """
Service Module
"""
from __future__ import print_function
import logging
from functools import wraps
from eventify.event import Event
from eventify.tracking import track_event
from eventify.tracking.constants import EventState
from eventify.drivers.crossbar import Service as CrossbarService
logger = logging.getLogger('eventify.service')
class Service(CrossbarService):
"""
Crossbar Service
"""
pass
|
eventifyio/eventify | eventify/persist/__init__.py | persist_event | python | async def persist_event(topic, event, pool):
# Event to json
json_event = json.dumps(event.__dict__)
# Connect to database or create and connect if non existent
conn = await pool.acquire()
# Insert event if not processed
try:
query = """
CREATE TABLE IF NOT EXISTS public."topic_placeholder"
(
id SERIAL PRIMARY KEY,
event json NOT NULL,
issued_at timestamp without time zone NOT NULL
)
WITH (
OIDS=FALSE
);
ALTER TABLE public."topic_placeholder"
OWNER TO root;
"""
query = query.replace('topic_placeholder', topic)
await conn.execute(query)
issued_at = datetime.utcnow()
query = 'INSERT INTO "%s" (event, issued_at) VALUES ($1, $2)' % topic
await conn.execute(query, json_event, issued_at)
finally:
await pool.release(conn) | Track event to prevent duplication of work
and potential loss of event
:param topic: The event topic
:param event: The event object | train | https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/persist/__init__.py#L16-L50 | null | """
Persist Helper Module
"""
from __future__ import print_function
from datetime import datetime
import json
import os
import asyncio
import asyncpg
from eventify.exceptions import EventifySanityError
|
eventifyio/eventify | eventify/drivers/zeromq.py | Component.run | python | def run(self):
loop = asyncio.get_event_loop()
if loop.is_closed():
asyncio.set_event_loop(asyncio.new_event_loop())
loop = asyncio.get_event_loop()
txaio.start_logging()
loop.run_until_complete(self.onConnect()) | start component | train | https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/drivers/zeromq.py#L40-L50 | [
"async def onConnect(self):\n \"\"\"\n Inherited from BaseComponent\n \"\"\"\n await super().onConnect()\n self.log.info(\"connected\")\n await self.onJoin()\n"
] | class Component(BaseComponent):
"""
Handle subscribing to topics
"""
log = logging.getLogger("eventify.drivers.zeromq")
def __init__(self, config, handlers):
self.config = config
self.handlers = handlers
async def onConnect(self):
"""
Inherited from BaseComponent
"""
await super().onConnect()
self.log.info("connected")
await self.onJoin()
async def emit_event(self, event):
"""
Publish an event
:param event: Event object
"""
self.log.info("publishing event on %s", self.publish_topic)
if self.config.extra['config']['pub_options']['retain']:
try:
await persist_event(
self.publish_topic,
event,
self.pool
)
except SystemError as error:
self.log.error(error)
return
await asyncio.sleep(1)
async def onJoin(self):
self.log.info("connected to zmq")
for handler in self.handlers:
# initialize handler
handler_instance = handler()
handler_instance.set_session(self)
if hasattr(handler_instance, 'init'):
await handler_instance.init()
if hasattr(handler_instance, 'on_event'):
self.log.debug("subscribing to topic %s", handler_instance.subscribe_topic)
# Used with base handler defined subscribe_topic
if handler_instance.subscribe_topic is not None:
session = ctx.socket(zmq.SUB)
session.connect(self.transport_host)
session.subscribe(handler_instance.subscribe_topic)
self.log.debug("subscribed to topic: %s", handler_instance.subscribe_topic)
while True:
msg = await session.recv_multipart()
await handler_instance.on_event(msg)
else:
# Used with config.json defined topics
if self.subscribed_topics is not None:
session = ctx.socket(zmq.SUB)
session.connect(self.transport_host)
for topic in self.subscribed_topics:
session.subscribe(topic)
self.log.info("subscribed to topic: %s", topic)
while True:
msg = await session.recv_multipart()
self.log.info('got msg %s', msg)
await handler_instance.on_event(msg)
if hasattr(handler_instance, 'worker'):
while True:
try:
await handler_instance.worker()
except Exception as error:
self.log.error("Operation failed. %s", error)
traceback.print_exc(file=sys.stdout)
continue
|
eventifyio/eventify | eventify/drivers/zeromq.py | Component.emit_event | python | async def emit_event(self, event):
self.log.info("publishing event on %s", self.publish_topic)
if self.config.extra['config']['pub_options']['retain']:
try:
await persist_event(
self.publish_topic,
event,
self.pool
)
except SystemError as error:
self.log.error(error)
return
await asyncio.sleep(1) | Publish an event
:param event: Event object | train | https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/drivers/zeromq.py#L60-L77 | [
"async def persist_event(topic, event, pool):\n \"\"\"\n Track event to prevent duplication of work\n and potential loss of event\n :param topic: The event topic\n :param event: The event object\n \"\"\"\n # Event to json\n json_event = json.dumps(event.__dict__)\n\n # Connect to database or create and connect if non existent\n conn = await pool.acquire()\n\n # Insert event if not processed\n try:\n query = \"\"\"\n CREATE TABLE IF NOT EXISTS public.\"topic_placeholder\"\n (\n id SERIAL PRIMARY KEY,\n event json NOT NULL,\n issued_at timestamp without time zone NOT NULL\n )\n WITH (\n OIDS=FALSE\n );\n ALTER TABLE public.\"topic_placeholder\"\n OWNER TO root;\n \"\"\"\n query = query.replace('topic_placeholder', topic)\n await conn.execute(query)\n issued_at = datetime.utcnow()\n query = 'INSERT INTO \"%s\" (event, issued_at) VALUES ($1, $2)' % topic\n await conn.execute(query, json_event, issued_at)\n finally:\n await pool.release(conn)\n"
] | class Component(BaseComponent):
"""
Handle subscribing to topics
"""
log = logging.getLogger("eventify.drivers.zeromq")
def __init__(self, config, handlers):
self.config = config
self.handlers = handlers
def run(self):
"""
start component
"""
loop = asyncio.get_event_loop()
if loop.is_closed():
asyncio.set_event_loop(asyncio.new_event_loop())
loop = asyncio.get_event_loop()
txaio.start_logging()
loop.run_until_complete(self.onConnect())
async def onConnect(self):
"""
Inherited from BaseComponent
"""
await super().onConnect()
self.log.info("connected")
await self.onJoin()
async def onJoin(self):
self.log.info("connected to zmq")
for handler in self.handlers:
# initialize handler
handler_instance = handler()
handler_instance.set_session(self)
if hasattr(handler_instance, 'init'):
await handler_instance.init()
if hasattr(handler_instance, 'on_event'):
self.log.debug("subscribing to topic %s", handler_instance.subscribe_topic)
# Used with base handler defined subscribe_topic
if handler_instance.subscribe_topic is not None:
session = ctx.socket(zmq.SUB)
session.connect(self.transport_host)
session.subscribe(handler_instance.subscribe_topic)
self.log.debug("subscribed to topic: %s", handler_instance.subscribe_topic)
while True:
msg = await session.recv_multipart()
await handler_instance.on_event(msg)
else:
# Used with config.json defined topics
if self.subscribed_topics is not None:
session = ctx.socket(zmq.SUB)
session.connect(self.transport_host)
for topic in self.subscribed_topics:
session.subscribe(topic)
self.log.info("subscribed to topic: %s", topic)
while True:
msg = await session.recv_multipart()
self.log.info('got msg %s', msg)
await handler_instance.on_event(msg)
if hasattr(handler_instance, 'worker'):
while True:
try:
await handler_instance.worker()
except Exception as error:
self.log.error("Operation failed. %s", error)
traceback.print_exc(file=sys.stdout)
continue
|
eventifyio/eventify | eventify/drivers/zeromq.py | Service.check_transport_host | python | def check_transport_host(self):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
result = sock.connect_ex(('events-server', 8080))
if result == 0:
logging.info('port 8080 on zmq is open!')
return True
return False | Check if zeromq socket is available
on transport host | train | https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/drivers/zeromq.py#L130-L140 | null | class Service(Eventify):
"""
Create zeromq service
"""
def start(self):
"""
Start a producer/consumer service
"""
component = Component(self.config, self.handlers)
component.run()
|
eventifyio/eventify | eventify/drivers/zeromq.py | Service.start | python | def start(self):
component = Component(self.config, self.handlers)
component.run() | Start a producer/consumer service | train | https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/drivers/zeromq.py#L142-L147 | [
"def run(self):\n \"\"\"\n start component\n \"\"\"\n loop = asyncio.get_event_loop()\n if loop.is_closed():\n asyncio.set_event_loop(asyncio.new_event_loop())\n loop = asyncio.get_event_loop()\n\n txaio.start_logging()\n loop.run_until_complete(self.onConnect())\n"
] | class Service(Eventify):
"""
Create zeromq service
"""
def check_transport_host(self):
"""
Check if zeromq socket is available
on transport host
"""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
result = sock.connect_ex(('events-server', 8080))
if result == 0:
logging.info('port 8080 on zmq is open!')
return True
return False
|
eventifyio/eventify | eventify/drivers/kafka.py | Component.emit_event | python | async def emit_event(self, event):
self.log.info("publishing event on %s", self.publish_topic)
if self.config.extra['config']['pub_options']['retain']:
try:
await persist_event(
self.publish_topic,
event,
self.pool
)
except SystemError as error:
self.log.error(error)
return
loop = asyncio.get_event_loop()
producer = AIOKafkaProducer(
loop=loop,
bootstrap_servers=self.transport_host
)
await producer.start()
try:
event = json.dumps(event.__dict__).encode()
await producer.send_and_wait(
self.publish_topic,
event
)
finally:
await producer.stop() | Publish an event
:param event: Event object | train | https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/drivers/kafka.py#L56-L87 | [
"async def persist_event(topic, event, pool):\n \"\"\"\n Track event to prevent duplication of work\n and potential loss of event\n :param topic: The event topic\n :param event: The event object\n \"\"\"\n # Event to json\n json_event = json.dumps(event.__dict__)\n\n # Connect to database or create and connect if non existent\n conn = await pool.acquire()\n\n # Insert event if not processed\n try:\n query = \"\"\"\n CREATE TABLE IF NOT EXISTS public.\"topic_placeholder\"\n (\n id SERIAL PRIMARY KEY,\n event json NOT NULL,\n issued_at timestamp without time zone NOT NULL\n )\n WITH (\n OIDS=FALSE\n );\n ALTER TABLE public.\"topic_placeholder\"\n OWNER TO root;\n \"\"\"\n query = query.replace('topic_placeholder', topic)\n await conn.execute(query)\n issued_at = datetime.utcnow()\n query = 'INSERT INTO \"%s\" (event, issued_at) VALUES ($1, $2)' % topic\n await conn.execute(query, json_event, issued_at)\n finally:\n await pool.release(conn)\n"
] | class Component(BaseComponent):
"""
Handle subscribing to topics
"""
log = logging.getLogger("eventify.drivers.kafka")
def __init__(self, config, handlers):
self.config = config
self.handlers = handlers
def run(self):
"""
start component
"""
loop = asyncio.get_event_loop()
if loop.is_closed():
asyncio.set_event_loop(asyncio.new_event_loop())
loop = asyncio.get_event_loop()
txaio.start_logging()
loop.run_until_complete(self.onConnect())
async def onConnect(self):
"""
Inherited from BaseComponent
"""
await super().onConnect()
await self.onJoin()
async def onJoin(self):
loop = asyncio.get_event_loop()
for handler in self.handlers:
# initialize handler
handler_instance = handler()
handler_instance.set_session(self)
if hasattr(handler_instance, 'init'):
await handler_instance.init()
if hasattr(handler_instance, 'on_event'):
self.log.debug("subscribing to topic %s", handler_instance.subscribe_topic)
# Used with base handler defined subscribe_topic
if handler_instance.subscribe_topic is not None:
consumer = AIOKafkaConsumer(
handler_instance.subscribe_topic,
bootstrap_servers=self.transport_host,
loop=loop
)
await consumer.start()
self.log.debug("subscribed to topic: %s", handler_instance.subscribe_topic)
try:
async for msg in consumer:
await handler_instance.on_event(msg.value)
finally:
await consumer.stop()
else:
# Used with config.json defined topics
if self.subscribed_topics is not None:
consumer = AIOKafkaConsumer(
bootstrap_servers=self.transport_host,
loop=loop,
group_id='my-group'
)
await consumer.start()
# Subscribe to all topics
for topic in self.subscribed_topics:
consumer.subscribe(topic)
try:
async for msg in consumer:
value = msg.value.decode()
await handler_instance.on_event(value)
except Exception as error:
self.log.error("Consumer error. %s", error)
await asyncio.sleep(0)
if hasattr(handler_instance, 'worker'):
while True:
try:
await handler_instance.worker()
except Exception as error:
self.log.error("Operation failed. %s", error)
traceback.print_exc(file=sys.stdout)
continue
|
eventifyio/eventify | eventify/drivers/crossbar.py | Component.emit_event | python | async def emit_event(self, event):
self.log.debug("publishing event on %s", self.publish_topic)
if self.config.extra['config']['pub_options']['retain']:
try:
await persist_event(
self.publish_topic,
event,
self.pool
)
except SystemError as error:
self.log.error(error)
return
try:
await self.publish(
self.publish_topic,
event.__dict__,
options=self.publish_options
)
except TransportLost as error:
for task in asyncio.Task.all_tasks():
task.cancel()
asyncio.get_event_loop().stop()
self.log.error(error) | Publish an event back to crossbar
:param event: Event object | train | https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/drivers/crossbar.py#L34-L61 | [
"async def persist_event(topic, event, pool):\n \"\"\"\n Track event to prevent duplication of work\n and potential loss of event\n :param topic: The event topic\n :param event: The event object\n \"\"\"\n # Event to json\n json_event = json.dumps(event.__dict__)\n\n # Connect to database or create and connect if non existent\n conn = await pool.acquire()\n\n # Insert event if not processed\n try:\n query = \"\"\"\n CREATE TABLE IF NOT EXISTS public.\"topic_placeholder\"\n (\n id SERIAL PRIMARY KEY,\n event json NOT NULL,\n issued_at timestamp without time zone NOT NULL\n )\n WITH (\n OIDS=FALSE\n );\n ALTER TABLE public.\"topic_placeholder\"\n OWNER TO root;\n \"\"\"\n query = query.replace('topic_placeholder', topic)\n await conn.execute(query)\n issued_at = datetime.utcnow()\n query = 'INSERT INTO \"%s\" (event, issued_at) VALUES ($1, $2)' % topic\n await conn.execute(query, json_event, issued_at)\n finally:\n await pool.release(conn)\n"
] | class Component(BaseComponent, ApplicationSession):
"""
Handle subscribing to topics
"""
log = logging.getLogger("eventify.drivers.crossbar")
def onClose(self, wasClean):
"""
Disconnect when connection to message
broker is lost
"""
self.log.error('lost connection to crossbar on session %' + str(self.session_id))
for task in asyncio.Task.all_tasks():
task.cancel()
asyncio.get_event_loop().stop()
def onDisconnect(self):
"""
Event fired when transport is lost
"""
self.log.error('onDisconnect event fired')
def onLeave(self, reason=None, message=None):
"""
:param reason:
:param message:
"""
self.log.info('Leaving realm; reason: %s', reason)
def onUserError(self, fail, message):
"""
Handle user errors
"""
self.log.error(fail)
self.log.error(message)
async def onJoin(self, details):
self.log.debug("joined websocket realm: %s", details)
# set session_id for reconnect
self.session_id = details.session
self.realm_id = details.realm
for handler in self.handlers:
# initialize handler
handler_instance = handler()
handler_instance.set_session(self)
if hasattr(handler_instance, 'init'):
await handler_instance.init()
if hasattr(handler_instance, 'on_event'):
self.log.debug("subscribing to topic %s", handler_instance.subscribe_topic)
# Used with base handler defined subscribe_topic
if handler_instance.subscribe_topic is not None:
await self.subscribe(
handler_instance.on_event,
handler_instance.subscribe_topic,
)
self.log.debug("subscribed to topic: %s", handler_instance.subscribe_topic)
else:
# Used with config.json defined topics
if self.subscribed_topics is not None:
for topic in self.subscribed_topics:
await self.subscribe(
handler_instance.on_event,
topic
)
self.log.debug("subscribed to topic: %s", topic)
if hasattr(handler_instance, 'worker'):
# or just await handler.worker()
while True:
try:
await handler_instance.worker()
except Exception as error:
self.log.error("Operation failed. %s", error)
traceback.print_exc(file=sys.stdout)
continue
async def show_sessions(self):
"""
Returns an object with a lists of the session IDs
for all sessions currently attached to the realm
http://crossbar.io/docs/Session-Metaevents-and-Procedures/
"""
res = await self.call("wamp.session.list")
for session_id in res:
session = await self.call("wamp.session.get", session_id)
self.log.info(session)
async def total_sessions(self):
"""
Returns the number of sessions currently attached to the realm.
http://crossbar.io/docs/Session-Metaevents-and-Procedures/
"""
res = await self.call("wamp.session.count")
self.log.info(res)
async def lookup_session(self, topic_name):
"""
Attempts to find the session id for a given topic
http://crossbar.io/docs/Subscription-Meta-Events-and-Procedures/
"""
res = await self.call("wamp.subscription.lookup", topic_name)
self.log.info(res)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.