code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def __start(self): # pragma: no cover
    """Starts the real-time engine that captures events.

    Spawns a daemon thread running ``__run_listener`` and blocks the
    caller until the listener signals readiness on ``_wait_event``.
    Must only be called once per session (asserted below).
    """
    # guard against double start; the listener thread must not exist yet
    assert not self._listener_thread
    self._listener_thread = threading.Thread(target=self.__run_listener,
                                             name='clearly-listener')
    # daemon thread: do not block interpreter shutdown
    self._listener_thread.daemon = True
    self._listener_thread.start()
    # block until the listener thread signals it is up and running
    self._wait_event.wait()
self._wait_event.clear() | def function[__start, parameter[self]]:
constant[Starts the real-time engine that captures events.]
assert[<ast.UnaryOp object at 0x7da1b12aaf80>]
name[self]._listener_thread assign[=] call[name[threading].Thread, parameter[]]
name[self]._listener_thread.daemon assign[=] constant[True]
call[name[self]._listener_thread.start, parameter[]]
call[name[self]._wait_event.wait, parameter[]]
call[name[self]._wait_event.clear, parameter[]] | keyword[def] identifier[__start] ( identifier[self] ):
literal[string]
keyword[assert] keyword[not] identifier[self] . identifier[_listener_thread]
identifier[self] . identifier[_listener_thread] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[self] . identifier[__run_listener] ,
identifier[name] = literal[string] )
identifier[self] . identifier[_listener_thread] . identifier[daemon] = keyword[True]
identifier[self] . identifier[_listener_thread] . identifier[start] ()
identifier[self] . identifier[_wait_event] . identifier[wait] ()
identifier[self] . identifier[_wait_event] . identifier[clear] () | def __start(self): # pragma: no cover
'Starts the real-time engine that captures events.'
assert not self._listener_thread
self._listener_thread = threading.Thread(target=self.__run_listener, name='clearly-listener')
self._listener_thread.daemon = True
self._listener_thread.start()
self._wait_event.wait()
self._wait_event.clear() |
def construct_xml_tree(self):
    """Build and return the basic XML report tree for this complex."""

    def _leaf(parent, tag, content, **attrs):
        # Append <tag ...>content</tag> under parent and hand back the node.
        node = et.SubElement(parent, tag, **attrs)
        node.text = content
        return node

    report = et.Element('report')
    _leaf(report, 'plipversion', __version__)
    _leaf(report, 'date_of_creation', time.strftime("%Y/%m/%d"))
    _leaf(report, 'citation_information',
          "Salentin,S. et al. PLIP: fully automated protein-ligand interaction profiler. "
          "Nucl. Acids Res. (1 July 2015) 43 (W1): W443-W447. doi: 10.1093/nar/gkv315")
    _leaf(report, 'mode', 'dna_receptor' if config.DNARECEPTOR else 'default')
    _leaf(report, 'pdbid', self.mol.pymol_name.upper())
    _leaf(report, 'filetype', self.mol.filetype.upper())
    _leaf(report, 'pdbfile', self.mol.sourcefiles['pdbcomplex'])
    _leaf(report, 'pdbfixes', str(self.mol.information['pdbfixes']))
    # str() keeps the historic behaviour of emitting the text 'None'
    # when no filename entry is present.
    _leaf(report, 'filename', str(self.mol.sourcefiles.get('filename') or None))
    exligs = et.SubElement(report, 'excluded_ligands')
    for num, ligname in enumerate(self.excluded, start=1):
        _leaf(exligs, 'excluded_ligand', ligname, id=str(num))
    covalent = et.SubElement(report, 'covlinkages')
    for num, link in enumerate(self.mol.covalent, start=1):
        entry = et.SubElement(covalent, 'covlinkage', id=str(num))
        _leaf(entry, 'res1', ":".join([link.id1, link.chain1, str(link.pos1)]))
        _leaf(entry, 'res2', ":".join([link.id2, link.chain2, str(link.pos2)]))
    return report
return report | def function[construct_xml_tree, parameter[self]]:
constant[Construct the basic XML tree]
variable[report] assign[=] call[name[et].Element, parameter[constant[report]]]
variable[plipversion] assign[=] call[name[et].SubElement, parameter[name[report], constant[plipversion]]]
name[plipversion].text assign[=] name[__version__]
variable[date_of_creation] assign[=] call[name[et].SubElement, parameter[name[report], constant[date_of_creation]]]
name[date_of_creation].text assign[=] call[name[time].strftime, parameter[constant[%Y/%m/%d]]]
variable[citation_information] assign[=] call[name[et].SubElement, parameter[name[report], constant[citation_information]]]
name[citation_information].text assign[=] constant[Salentin,S. et al. PLIP: fully automated protein-ligand interaction profiler. Nucl. Acids Res. (1 July 2015) 43 (W1): W443-W447. doi: 10.1093/nar/gkv315]
variable[mode] assign[=] call[name[et].SubElement, parameter[name[report], constant[mode]]]
if name[config].DNARECEPTOR begin[:]
name[mode].text assign[=] constant[dna_receptor]
variable[pdbid] assign[=] call[name[et].SubElement, parameter[name[report], constant[pdbid]]]
name[pdbid].text assign[=] call[name[self].mol.pymol_name.upper, parameter[]]
variable[filetype] assign[=] call[name[et].SubElement, parameter[name[report], constant[filetype]]]
name[filetype].text assign[=] call[name[self].mol.filetype.upper, parameter[]]
variable[pdbfile] assign[=] call[name[et].SubElement, parameter[name[report], constant[pdbfile]]]
name[pdbfile].text assign[=] call[name[self].mol.sourcefiles][constant[pdbcomplex]]
variable[pdbfixes] assign[=] call[name[et].SubElement, parameter[name[report], constant[pdbfixes]]]
name[pdbfixes].text assign[=] call[name[str], parameter[call[name[self].mol.information][constant[pdbfixes]]]]
variable[filename] assign[=] call[name[et].SubElement, parameter[name[report], constant[filename]]]
name[filename].text assign[=] call[name[str], parameter[<ast.BoolOp object at 0x7da2041d9000>]]
variable[exligs] assign[=] call[name[et].SubElement, parameter[name[report], constant[excluded_ligands]]]
for taget[tuple[[<ast.Name object at 0x7da2041db2b0>, <ast.Name object at 0x7da2041d84f0>]]] in starred[call[name[enumerate], parameter[name[self].excluded]]] begin[:]
variable[e] assign[=] call[name[et].SubElement, parameter[name[exligs], constant[excluded_ligand]]]
name[e].text assign[=] name[exlig]
variable[covalent] assign[=] call[name[et].SubElement, parameter[name[report], constant[covlinkages]]]
for taget[tuple[[<ast.Name object at 0x7da2041dbdc0>, <ast.Name object at 0x7da2041da980>]]] in starred[call[name[enumerate], parameter[name[self].mol.covalent]]] begin[:]
variable[e] assign[=] call[name[et].SubElement, parameter[name[covalent], constant[covlinkage]]]
variable[f1] assign[=] call[name[et].SubElement, parameter[name[e], constant[res1]]]
variable[f2] assign[=] call[name[et].SubElement, parameter[name[e], constant[res2]]]
name[f1].text assign[=] call[constant[:].join, parameter[list[[<ast.Attribute object at 0x7da2041da200>, <ast.Attribute object at 0x7da2041d9030>, <ast.Call object at 0x7da2041d94e0>]]]]
name[f2].text assign[=] call[constant[:].join, parameter[list[[<ast.Attribute object at 0x7da18bcc85e0>, <ast.Attribute object at 0x7da18bcca8c0>, <ast.Call object at 0x7da18bcc96c0>]]]]
return[name[report]] | keyword[def] identifier[construct_xml_tree] ( identifier[self] ):
literal[string]
identifier[report] = identifier[et] . identifier[Element] ( literal[string] )
identifier[plipversion] = identifier[et] . identifier[SubElement] ( identifier[report] , literal[string] )
identifier[plipversion] . identifier[text] = identifier[__version__]
identifier[date_of_creation] = identifier[et] . identifier[SubElement] ( identifier[report] , literal[string] )
identifier[date_of_creation] . identifier[text] = identifier[time] . identifier[strftime] ( literal[string] )
identifier[citation_information] = identifier[et] . identifier[SubElement] ( identifier[report] , literal[string] )
identifier[citation_information] . identifier[text] = literal[string] literal[string]
identifier[mode] = identifier[et] . identifier[SubElement] ( identifier[report] , literal[string] )
keyword[if] identifier[config] . identifier[DNARECEPTOR] :
identifier[mode] . identifier[text] = literal[string]
keyword[else] :
identifier[mode] . identifier[text] = literal[string]
identifier[pdbid] = identifier[et] . identifier[SubElement] ( identifier[report] , literal[string] )
identifier[pdbid] . identifier[text] = identifier[self] . identifier[mol] . identifier[pymol_name] . identifier[upper] ()
identifier[filetype] = identifier[et] . identifier[SubElement] ( identifier[report] , literal[string] )
identifier[filetype] . identifier[text] = identifier[self] . identifier[mol] . identifier[filetype] . identifier[upper] ()
identifier[pdbfile] = identifier[et] . identifier[SubElement] ( identifier[report] , literal[string] )
identifier[pdbfile] . identifier[text] = identifier[self] . identifier[mol] . identifier[sourcefiles] [ literal[string] ]
identifier[pdbfixes] = identifier[et] . identifier[SubElement] ( identifier[report] , literal[string] )
identifier[pdbfixes] . identifier[text] = identifier[str] ( identifier[self] . identifier[mol] . identifier[information] [ literal[string] ])
identifier[filename] = identifier[et] . identifier[SubElement] ( identifier[report] , literal[string] )
identifier[filename] . identifier[text] = identifier[str] ( identifier[self] . identifier[mol] . identifier[sourcefiles] . identifier[get] ( literal[string] ) keyword[or] keyword[None] )
identifier[exligs] = identifier[et] . identifier[SubElement] ( identifier[report] , literal[string] )
keyword[for] identifier[i] , identifier[exlig] keyword[in] identifier[enumerate] ( identifier[self] . identifier[excluded] ):
identifier[e] = identifier[et] . identifier[SubElement] ( identifier[exligs] , literal[string] , identifier[id] = identifier[str] ( identifier[i] + literal[int] ))
identifier[e] . identifier[text] = identifier[exlig]
identifier[covalent] = identifier[et] . identifier[SubElement] ( identifier[report] , literal[string] )
keyword[for] identifier[i] , identifier[covlinkage] keyword[in] identifier[enumerate] ( identifier[self] . identifier[mol] . identifier[covalent] ):
identifier[e] = identifier[et] . identifier[SubElement] ( identifier[covalent] , literal[string] , identifier[id] = identifier[str] ( identifier[i] + literal[int] ))
identifier[f1] = identifier[et] . identifier[SubElement] ( identifier[e] , literal[string] )
identifier[f2] = identifier[et] . identifier[SubElement] ( identifier[e] , literal[string] )
identifier[f1] . identifier[text] = literal[string] . identifier[join] ([ identifier[covlinkage] . identifier[id1] , identifier[covlinkage] . identifier[chain1] , identifier[str] ( identifier[covlinkage] . identifier[pos1] )])
identifier[f2] . identifier[text] = literal[string] . identifier[join] ([ identifier[covlinkage] . identifier[id2] , identifier[covlinkage] . identifier[chain2] , identifier[str] ( identifier[covlinkage] . identifier[pos2] )])
keyword[return] identifier[report] | def construct_xml_tree(self):
"""Construct the basic XML tree"""
report = et.Element('report')
plipversion = et.SubElement(report, 'plipversion')
plipversion.text = __version__
date_of_creation = et.SubElement(report, 'date_of_creation')
date_of_creation.text = time.strftime('%Y/%m/%d')
citation_information = et.SubElement(report, 'citation_information')
citation_information.text = 'Salentin,S. et al. PLIP: fully automated protein-ligand interaction profiler. Nucl. Acids Res. (1 July 2015) 43 (W1): W443-W447. doi: 10.1093/nar/gkv315'
mode = et.SubElement(report, 'mode')
if config.DNARECEPTOR:
mode.text = 'dna_receptor' # depends on [control=['if'], data=[]]
else:
mode.text = 'default'
pdbid = et.SubElement(report, 'pdbid')
pdbid.text = self.mol.pymol_name.upper()
filetype = et.SubElement(report, 'filetype')
filetype.text = self.mol.filetype.upper()
pdbfile = et.SubElement(report, 'pdbfile')
pdbfile.text = self.mol.sourcefiles['pdbcomplex']
pdbfixes = et.SubElement(report, 'pdbfixes')
pdbfixes.text = str(self.mol.information['pdbfixes'])
filename = et.SubElement(report, 'filename')
filename.text = str(self.mol.sourcefiles.get('filename') or None)
exligs = et.SubElement(report, 'excluded_ligands')
for (i, exlig) in enumerate(self.excluded):
e = et.SubElement(exligs, 'excluded_ligand', id=str(i + 1))
e.text = exlig # depends on [control=['for'], data=[]]
covalent = et.SubElement(report, 'covlinkages')
for (i, covlinkage) in enumerate(self.mol.covalent):
e = et.SubElement(covalent, 'covlinkage', id=str(i + 1))
f1 = et.SubElement(e, 'res1')
f2 = et.SubElement(e, 'res2')
f1.text = ':'.join([covlinkage.id1, covlinkage.chain1, str(covlinkage.pos1)])
f2.text = ':'.join([covlinkage.id2, covlinkage.chain2, str(covlinkage.pos2)]) # depends on [control=['for'], data=[]]
return report |
def send_payload(self, payload=(), payload_type=None, retry=True,
                 delay_xmit=None, needskeepalive=False, timeout=None):
    """Send payload over the IPMI Session

    Assembles the RMCP/IPMI wire packet for *payload* (either IPMI v1.5
    framing or RMCP+ v2.0 framing with optional confidentiality and
    integrity layers), stores it in self.netpacket and transmits it via
    _xmit_packet.

    :param payload: payload bytes to send; when falsy, the previously
                    sent payload is re-sent (retry path)
    :param payload_type: RMCP+ payload type; defaults to the type of the
                         last payload sent
    :param retry: when True, remember this payload so it can be
                  retransmitted later
    :param delay_xmit: optional transmit delay, passed to _xmit_packet
    :param needskeepalive: If the payload is expected not to count as
                           'active' by the BMC, set this to True
                           to avoid Session considering the
                           job done because of this payload.
                           Notably, 0-length SOL packets
                           are prone to confusion.
    :param timeout: Specify a custom timeout for long-running request
    """
    if payload and self.lastpayload:
        # we already have a packet outgoing, make this
        # a pending payload
        # this way a simplistic BMC won't get confused
        # and we also avoid having to do more complicated
        # retry mechanism where each payload is
        # retried separately
        self.pendingpayloads.append((payload, payload_type, retry))
        return
    if payload_type is None:
        payload_type = self.last_payload_type
    if not payload:
        # falsy payload means "resend the last one"
        payload = self.lastpayload
    message = bytearray(b'\x06\x00\xff\x07')  # constant IPMI RMCP header
    if retry:
        # remember what we sent for retransmission
        self.lastpayload = payload
        self.last_payload_type = payload_type
    if not isinstance(payload, bytearray):
        payload = bytearray(payload)
    message.append(self.authtype)
    # keep the bare payload type before OR-ing in the encryption and
    # authentication flag bits below
    baretype = payload_type
    if self.integrityalgo:
        payload_type |= 0b01000000
    if self.confalgo:
        payload_type |= 0b10000000
    if self.ipmiversion == 2.0:
        message.append(payload_type)
        if baretype == 2:
            # TODO(jbjohnso): OEM payload types
            raise NotImplementedError("OEM Payloads")
        elif baretype not in constants.payload_types.values():
            raise NotImplementedError(
                "Unrecognized payload type %d" % baretype)
        message += struct.pack("<I", self.sessionid)
    message += struct.pack("<I", self.sequencenumber)
    if self.ipmiversion == 1.5:
        message += struct.pack("<I", self.sessionid)
        if not self.authtype == 0:
            message += self._ipmi15authcode(payload)
        message.append(len(payload))
        message += payload
        # Guessing the ipmi spec means the whole packet,
        # and assume no tag in old 1.5 world
        totlen = 34 + len(message)
        if totlen in (56, 84, 112, 128, 156):
            message.append(0)  # Legacy pad as mandated by ipmi spec
    elif self.ipmiversion == 2.0:
        psize = len(payload)
        if self.confalgo:
            pad = (psize + 1) % 16  # pad has to cope with one byte
            # field like the _aespad function
            if pad:  # if no pad needed, then we take no more action
                pad = 16 - pad
            # new payload size grew according to pad
            newpsize = psize + pad + 17
            # size, plus pad length, plus 16 byte IV
            # (Table 13-20)
            message.append(newpsize & 0xff)
            message.append(newpsize >> 8)
            iv = os.urandom(16)
            message += iv
            payloadtocrypt = bytes(payload + _aespad(payload))
            crypter = Cipher(
                algorithm=algorithms.AES(self.aeskey),
                mode=modes.CBC(iv),
                backend=self._crypto_backend
            )
            encryptor = crypter.encryptor()
            message += encryptor.update(payloadtocrypt
                                        ) + encryptor.finalize()
        else:  # no confidentiality algorithm
            message.append(psize & 0xff)
            message.append(psize >> 8)
            message += payload
        if self.integrityalgo:  # see table 13-8,
            # RMCP+ packet format
            # TODO(jbjohnso): SHA256 which is now
            # allowed
            neededpad = (len(message) - 2) % 4
            if neededpad:
                neededpad = 4 - neededpad
            message += b'\xff' * neededpad
            message.append(neededpad)
            message.append(7)  # reserved, 7 is the required value for the
            # specification followed
            message += hmac.new(self.k1,
                                bytes(message[4:]),
                                hashlib.sha1).digest()[:12]  # SHA1-96
            # per RFC2404 truncates to 96 bits
    self.netpacket = message
    # advance idle timer since we don't need keepalive while sending
    # packets out naturally
    with util.protect(KEEPALIVE_SESSIONS):
        if (self in Session.keepalive_sessions and not needskeepalive and
                not self._customkeepalives):
            Session.keepalive_sessions[self]['timeout'] = \
                _monotonic_time() + MAX_IDLE - (random.random() * 4.9)
self._xmit_packet(retry, delay_xmit=delay_xmit, timeout=timeout) | def function[send_payload, parameter[self, payload, payload_type, retry, delay_xmit, needskeepalive, timeout]]:
constant[Send payload over the IPMI Session
:param needskeepalive: If the payload is expected not to count as
'active' by the BMC, set this to True
to avoid Session considering the
job done because of this payload.
Notably, 0-length SOL packets
are prone to confusion.
:param timeout: Specify a custom timeout for long-running request
]
if <ast.BoolOp object at 0x7da20e9618a0> begin[:]
call[name[self].pendingpayloads.append, parameter[tuple[[<ast.Name object at 0x7da20e960c70>, <ast.Name object at 0x7da20e9639a0>, <ast.Name object at 0x7da20e961660>]]]]
return[None]
if compare[name[payload_type] is constant[None]] begin[:]
variable[payload_type] assign[=] name[self].last_payload_type
if <ast.UnaryOp object at 0x7da18bccaa40> begin[:]
variable[payload] assign[=] name[self].lastpayload
variable[message] assign[=] call[name[bytearray], parameter[constant[b'\x06\x00\xff\x07']]]
if name[retry] begin[:]
name[self].lastpayload assign[=] name[payload]
name[self].last_payload_type assign[=] name[payload_type]
if <ast.UnaryOp object at 0x7da18bcca290> begin[:]
variable[payload] assign[=] call[name[bytearray], parameter[name[payload]]]
call[name[message].append, parameter[name[self].authtype]]
variable[baretype] assign[=] name[payload_type]
if name[self].integrityalgo begin[:]
<ast.AugAssign object at 0x7da18bcc9060>
if name[self].confalgo begin[:]
<ast.AugAssign object at 0x7da18bccaf20>
if compare[name[self].ipmiversion equal[==] constant[2.0]] begin[:]
call[name[message].append, parameter[name[payload_type]]]
if compare[name[baretype] equal[==] constant[2]] begin[:]
<ast.Raise object at 0x7da18bcc9900>
<ast.AugAssign object at 0x7da18bccb9a0>
<ast.AugAssign object at 0x7da18bcc8130>
if compare[name[self].ipmiversion equal[==] constant[1.5]] begin[:]
<ast.AugAssign object at 0x7da18bcc8610>
if <ast.UnaryOp object at 0x7da18bccadd0> begin[:]
<ast.AugAssign object at 0x7da18bcc86a0>
call[name[message].append, parameter[call[name[len], parameter[name[payload]]]]]
<ast.AugAssign object at 0x7da18bcc8100>
variable[totlen] assign[=] binary_operation[constant[34] + call[name[len], parameter[name[message]]]]
if compare[name[totlen] in tuple[[<ast.Constant object at 0x7da18bcc94b0>, <ast.Constant object at 0x7da18bcc9480>, <ast.Constant object at 0x7da18bcc8430>, <ast.Constant object at 0x7da18bcc9390>, <ast.Constant object at 0x7da18bccb070>]]] begin[:]
call[name[message].append, parameter[constant[0]]]
name[self].netpacket assign[=] name[message]
with call[name[util].protect, parameter[name[KEEPALIVE_SESSIONS]]] begin[:]
if <ast.BoolOp object at 0x7da204623bb0> begin[:]
call[call[name[Session].keepalive_sessions][name[self]]][constant[timeout]] assign[=] binary_operation[binary_operation[call[name[_monotonic_time], parameter[]] + name[MAX_IDLE]] - binary_operation[call[name[random].random, parameter[]] * constant[4.9]]]
call[name[self]._xmit_packet, parameter[name[retry]]] | keyword[def] identifier[send_payload] ( identifier[self] , identifier[payload] =(), identifier[payload_type] = keyword[None] , identifier[retry] = keyword[True] ,
identifier[delay_xmit] = keyword[None] , identifier[needskeepalive] = keyword[False] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[if] identifier[payload] keyword[and] identifier[self] . identifier[lastpayload] :
identifier[self] . identifier[pendingpayloads] . identifier[append] (( identifier[payload] , identifier[payload_type] , identifier[retry] ))
keyword[return]
keyword[if] identifier[payload_type] keyword[is] keyword[None] :
identifier[payload_type] = identifier[self] . identifier[last_payload_type]
keyword[if] keyword[not] identifier[payload] :
identifier[payload] = identifier[self] . identifier[lastpayload]
identifier[message] = identifier[bytearray] ( literal[string] )
keyword[if] identifier[retry] :
identifier[self] . identifier[lastpayload] = identifier[payload]
identifier[self] . identifier[last_payload_type] = identifier[payload_type]
keyword[if] keyword[not] identifier[isinstance] ( identifier[payload] , identifier[bytearray] ):
identifier[payload] = identifier[bytearray] ( identifier[payload] )
identifier[message] . identifier[append] ( identifier[self] . identifier[authtype] )
identifier[baretype] = identifier[payload_type]
keyword[if] identifier[self] . identifier[integrityalgo] :
identifier[payload_type] |= literal[int]
keyword[if] identifier[self] . identifier[confalgo] :
identifier[payload_type] |= literal[int]
keyword[if] identifier[self] . identifier[ipmiversion] == literal[int] :
identifier[message] . identifier[append] ( identifier[payload_type] )
keyword[if] identifier[baretype] == literal[int] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
keyword[elif] identifier[baretype] keyword[not] keyword[in] identifier[constants] . identifier[payload_types] . identifier[values] ():
keyword[raise] identifier[NotImplementedError] (
literal[string] % identifier[baretype] )
identifier[message] += identifier[struct] . identifier[pack] ( literal[string] , identifier[self] . identifier[sessionid] )
identifier[message] += identifier[struct] . identifier[pack] ( literal[string] , identifier[self] . identifier[sequencenumber] )
keyword[if] identifier[self] . identifier[ipmiversion] == literal[int] :
identifier[message] += identifier[struct] . identifier[pack] ( literal[string] , identifier[self] . identifier[sessionid] )
keyword[if] keyword[not] identifier[self] . identifier[authtype] == literal[int] :
identifier[message] += identifier[self] . identifier[_ipmi15authcode] ( identifier[payload] )
identifier[message] . identifier[append] ( identifier[len] ( identifier[payload] ))
identifier[message] += identifier[payload]
identifier[totlen] = literal[int] + identifier[len] ( identifier[message] )
keyword[if] identifier[totlen] keyword[in] ( literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ):
identifier[message] . identifier[append] ( literal[int] )
keyword[elif] identifier[self] . identifier[ipmiversion] == literal[int] :
identifier[psize] = identifier[len] ( identifier[payload] )
keyword[if] identifier[self] . identifier[confalgo] :
identifier[pad] =( identifier[psize] + literal[int] )% literal[int]
keyword[if] identifier[pad] :
identifier[pad] = literal[int] - identifier[pad]
identifier[newpsize] = identifier[psize] + identifier[pad] + literal[int]
identifier[message] . identifier[append] ( identifier[newpsize] & literal[int] )
identifier[message] . identifier[append] ( identifier[newpsize] >> literal[int] )
identifier[iv] = identifier[os] . identifier[urandom] ( literal[int] )
identifier[message] += identifier[iv]
identifier[payloadtocrypt] = identifier[bytes] ( identifier[payload] + identifier[_aespad] ( identifier[payload] ))
identifier[crypter] = identifier[Cipher] (
identifier[algorithm] = identifier[algorithms] . identifier[AES] ( identifier[self] . identifier[aeskey] ),
identifier[mode] = identifier[modes] . identifier[CBC] ( identifier[iv] ),
identifier[backend] = identifier[self] . identifier[_crypto_backend]
)
identifier[encryptor] = identifier[crypter] . identifier[encryptor] ()
identifier[message] += identifier[encryptor] . identifier[update] ( identifier[payloadtocrypt]
)+ identifier[encryptor] . identifier[finalize] ()
keyword[else] :
identifier[message] . identifier[append] ( identifier[psize] & literal[int] )
identifier[message] . identifier[append] ( identifier[psize] >> literal[int] )
identifier[message] += identifier[payload]
keyword[if] identifier[self] . identifier[integrityalgo] :
identifier[neededpad] =( identifier[len] ( identifier[message] )- literal[int] )% literal[int]
keyword[if] identifier[neededpad] :
identifier[neededpad] = literal[int] - identifier[neededpad]
identifier[message] += literal[string] * identifier[neededpad]
identifier[message] . identifier[append] ( identifier[neededpad] )
identifier[message] . identifier[append] ( literal[int] )
identifier[message] += identifier[hmac] . identifier[new] ( identifier[self] . identifier[k1] ,
identifier[bytes] ( identifier[message] [ literal[int] :]),
identifier[hashlib] . identifier[sha1] ). identifier[digest] ()[: literal[int] ]
identifier[self] . identifier[netpacket] = identifier[message]
keyword[with] identifier[util] . identifier[protect] ( identifier[KEEPALIVE_SESSIONS] ):
keyword[if] ( identifier[self] keyword[in] identifier[Session] . identifier[keepalive_sessions] keyword[and] keyword[not] identifier[needskeepalive] keyword[and]
keyword[not] identifier[self] . identifier[_customkeepalives] ):
identifier[Session] . identifier[keepalive_sessions] [ identifier[self] ][ literal[string] ]= identifier[_monotonic_time] ()+ identifier[MAX_IDLE] -( identifier[random] . identifier[random] ()* literal[int] )
identifier[self] . identifier[_xmit_packet] ( identifier[retry] , identifier[delay_xmit] = identifier[delay_xmit] , identifier[timeout] = identifier[timeout] ) | def send_payload(self, payload=(), payload_type=None, retry=True, delay_xmit=None, needskeepalive=False, timeout=None):
"""Send payload over the IPMI Session
:param needskeepalive: If the payload is expected not to count as
'active' by the BMC, set this to True
to avoid Session considering the
job done because of this payload.
Notably, 0-length SOL packets
are prone to confusion.
:param timeout: Specify a custom timeout for long-running request
"""
if payload and self.lastpayload:
# we already have a packet outgoing, make this
# a pending payload
# this way a simplistic BMC won't get confused
# and we also avoid having to do more complicated
# retry mechanism where each payload is
# retried separately
self.pendingpayloads.append((payload, payload_type, retry))
return # depends on [control=['if'], data=[]]
if payload_type is None:
payload_type = self.last_payload_type # depends on [control=['if'], data=['payload_type']]
if not payload:
payload = self.lastpayload # depends on [control=['if'], data=[]]
message = bytearray(b'\x06\x00\xff\x07') # constant IPMI RMCP header
if retry:
self.lastpayload = payload
self.last_payload_type = payload_type # depends on [control=['if'], data=[]]
if not isinstance(payload, bytearray):
payload = bytearray(payload) # depends on [control=['if'], data=[]]
message.append(self.authtype)
baretype = payload_type
if self.integrityalgo:
payload_type |= 64 # depends on [control=['if'], data=[]]
if self.confalgo:
payload_type |= 128 # depends on [control=['if'], data=[]]
if self.ipmiversion == 2.0:
message.append(payload_type)
if baretype == 2:
# TODO(jbjohnso): OEM payload types
raise NotImplementedError('OEM Payloads') # depends on [control=['if'], data=[]]
elif baretype not in constants.payload_types.values():
raise NotImplementedError('Unrecognized payload type %d' % baretype) # depends on [control=['if'], data=['baretype']]
message += struct.pack('<I', self.sessionid) # depends on [control=['if'], data=[]]
message += struct.pack('<I', self.sequencenumber)
if self.ipmiversion == 1.5:
message += struct.pack('<I', self.sessionid)
if not self.authtype == 0:
message += self._ipmi15authcode(payload) # depends on [control=['if'], data=[]]
message.append(len(payload))
message += payload
# Guessing the ipmi spec means the whole
totlen = 34 + len(message)
# packet and assume no tag in old 1.5 world
if totlen in (56, 84, 112, 128, 156):
message.append(0) # Legacy pad as mandated by ipmi spec # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif self.ipmiversion == 2.0:
psize = len(payload)
if self.confalgo:
pad = (psize + 1) % 16 # pad has to cope with one byte
# field like the _aespad function
if pad: # if no pad needed, then we take no more action
pad = 16 - pad # depends on [control=['if'], data=[]]
# new payload size grew according to pad
newpsize = psize + pad + 17
# size, plus pad length, plus 16 byte IV
# (Table 13-20)
message.append(newpsize & 255)
message.append(newpsize >> 8)
iv = os.urandom(16)
message += iv
payloadtocrypt = bytes(payload + _aespad(payload))
crypter = Cipher(algorithm=algorithms.AES(self.aeskey), mode=modes.CBC(iv), backend=self._crypto_backend)
encryptor = crypter.encryptor()
message += encryptor.update(payloadtocrypt) + encryptor.finalize() # depends on [control=['if'], data=[]]
else: # no confidetiality algorithm
message.append(psize & 255)
message.append(psize >> 8)
message += payload
if self.integrityalgo: # see table 13-8,
# RMCP+ packet format
# TODO(jbjohnso): SHA256 which is now
# allowed
neededpad = (len(message) - 2) % 4
if neededpad:
neededpad = 4 - neededpad # depends on [control=['if'], data=[]]
message += b'\xff' * neededpad
message.append(neededpad)
message.append(7) # reserved, 7 is the required value for the
# specification followed
message += hmac.new(self.k1, bytes(message[4:]), hashlib.sha1).digest()[:12] # SHA1-96 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# per RFC2404 truncates to 96 bits
self.netpacket = message
# advance idle timer since we don't need keepalive while sending
# packets out naturally
with util.protect(KEEPALIVE_SESSIONS):
if self in Session.keepalive_sessions and (not needskeepalive) and (not self._customkeepalives):
Session.keepalive_sessions[self]['timeout'] = _monotonic_time() + MAX_IDLE - random.random() * 4.9 # depends on [control=['if'], data=[]]
self._xmit_packet(retry, delay_xmit=delay_xmit, timeout=timeout) # depends on [control=['with'], data=[]] |
def _string_to_dictsql(self, part):
""" Do magic matching of single words or quoted string
"""
self._logger.debug("parsing string: " + unicode(part[0]) + " of type: " + part.getName())
if part.getName() == 'tag':
self._logger.debug("Query part '" + part[0] + "' interpreted as tag")
dictsql = {
'interpretation': {
'string': part[0],
'interpretation': '(inherited) tag',
'attribute': 'tag',
'operator': 'equals_any',
'error': False
},
'operator': 'or',
'val1': {
'operator': 'equals_any',
'val1': 'tags',
'val2': part[0][1:]
},
'val2': {
'operator': 'equals_any',
'val1': 'inherited_tags',
'val2': part[0][1:]
}
}
elif part.getName() == 'vrf_rt':
self._logger.debug("Query part '" + part.vrf_rt + "' interpreted as VRF RT")
dictsql = {
'interpretation': {
'attribute': 'VRF RT',
'interpretation': 'vrf_rt',
'operator': 'equals',
'string': part.vrf_rt,
'error': False
},
'operator': 'equals',
'val1': 'vrf_rt',
'val2': part.vrf_rt
}
elif part.getName() == 'ipv6_address':
self._logger.debug("Query part '" + part.ipv6_address + "' interpreted as IPv6 address")
dictsql = {
'interpretation': {
'string': part.ipv6_address,
'interpretation': 'IPv6 address',
'attribute': 'prefix',
'operator': 'contains_equals',
'error': False
},
'operator': 'contains_equals',
'val1': 'prefix',
'val2': part.ipv6_address
}
elif part.getName() == 'ipv6_prefix':
self._logger.debug("Query part '" + part.ipv6_prefix[0] + "' interpreted as IPv6 prefix")
strict_prefix = unicode(IPy.IP(part.ipv6_prefix[0], make_net=True))
interp = {
'string': part.ipv6_prefix[0],
'interpretation': 'IPv6 prefix',
'attribute': 'prefix',
'operator': 'contained_within_equals',
'error': False
}
if part.ipv6_prefix[0] != strict_prefix:
interp['strict_prefix'] = strict_prefix
dictsql = {
'interpretation': interp,
'operator': 'contained_within_equals',
'val1': 'prefix',
'val2': strict_prefix
}
else:
# since it's difficult to parse shortened IPv4 addresses (like 10/8)
# using pyparsing we do a bit of good ol parsing here
if self._get_afi(part[0]) == 4 and len(part[0].split('/')) == 2:
self._logger.debug("Query part '" + part[0] + "' interpreted as prefix")
address, prefix_length = part[0].split('/')
# complete a prefix to it's fully expanded form
# 10/8 will be expanded into 10.0.0.0/8 which PostgreSQL can
# parse correctly
while len(address.split('.')) < 4:
address += '.0'
prefix = address + '/' + prefix_length
strict_prefix = unicode(IPy.IP(part[0], make_net=True))
interp = {
'string': part[0],
'interpretation': 'IPv4 prefix',
'attribute': 'prefix',
'operator': 'contained_within_equals',
'error': False
}
if prefix != part[0]:
interp['expanded'] = prefix
if prefix != strict_prefix:
interp['strict_prefix'] = strict_prefix
dictsql = {
'interpretation': interp,
'operator': 'contained_within_equals',
'val1': 'prefix',
'val2': strict_prefix
}
# IPv4 address
# split on dot to make sure we have all four octets before we do a
# search
elif self._get_afi(part[0]) == 4 and len(part[0].split('.')) == 4:
self._logger.debug("Query part '" + part[0] + "' interpreted as prefix")
address = unicode(IPy.IP(part[0]))
dictsql = {
'interpretation': {
'string': address,
'interpretation': 'IPv4 address',
'attribute': 'prefix',
'operator': 'contains_equals',
'error': False
},
'operator': 'contains_equals',
'val1': 'prefix',
'val2': address
}
else:
# Description or comment
self._logger.debug("Query part '" + part[0] + "' interpreted as text")
dictsql = {
'interpretation': {
'string': part[0],
'interpretation': 'text',
'attribute': 'description or comment or node or order_id or customer_id',
'operator': 'regex',
'error': False
},
'operator': 'or',
'val1': {
'operator': 'or',
'val1': {
'operator': 'or',
'val1': {
'operator': 'or',
'val1': {
'operator': 'regex_match',
'val1': 'comment',
'val2': part[0]
},
'val2': {
'operator': 'regex_match',
'val1': 'description',
'val2': part[0]
}
},
'val2': {
'operator': 'regex_match',
'val1': 'node',
'val2': part[0]
}
},
'val2': {
'operator': 'regex_match',
'val1': 'order_id',
'val2': part[0]
},
},
'val2': {
'operator': 'regex_match',
'val1': 'customer_id',
'val2': part[0]
}
}
return dictsql | def function[_string_to_dictsql, parameter[self, part]]:
constant[ Do magic matching of single words or quoted string
]
call[name[self]._logger.debug, parameter[binary_operation[binary_operation[binary_operation[constant[parsing string: ] + call[name[unicode], parameter[call[name[part]][constant[0]]]]] + constant[ of type: ]] + call[name[part].getName, parameter[]]]]]
if compare[call[name[part].getName, parameter[]] equal[==] constant[tag]] begin[:]
call[name[self]._logger.debug, parameter[binary_operation[binary_operation[constant[Query part '] + call[name[part]][constant[0]]] + constant[' interpreted as tag]]]]
variable[dictsql] assign[=] dictionary[[<ast.Constant object at 0x7da2054a6f20>, <ast.Constant object at 0x7da2054a4310>, <ast.Constant object at 0x7da2054a7eb0>, <ast.Constant object at 0x7da2054a7520>], [<ast.Dict object at 0x7da2054a6c20>, <ast.Constant object at 0x7da20c6c4670>, <ast.Dict object at 0x7da20c6c7a60>, <ast.Dict object at 0x7da20c6c71f0>]]
return[name[dictsql]] | keyword[def] identifier[_string_to_dictsql] ( identifier[self] , identifier[part] ):
literal[string]
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] + identifier[unicode] ( identifier[part] [ literal[int] ])+ literal[string] + identifier[part] . identifier[getName] ())
keyword[if] identifier[part] . identifier[getName] ()== literal[string] :
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] + identifier[part] [ literal[int] ]+ literal[string] )
identifier[dictsql] ={
literal[string] :{
literal[string] : identifier[part] [ literal[int] ],
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : keyword[False]
},
literal[string] : literal[string] ,
literal[string] :{
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[part] [ literal[int] ][ literal[int] :]
},
literal[string] :{
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[part] [ literal[int] ][ literal[int] :]
}
}
keyword[elif] identifier[part] . identifier[getName] ()== literal[string] :
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] + identifier[part] . identifier[vrf_rt] + literal[string] )
identifier[dictsql] ={
literal[string] :{
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[part] . identifier[vrf_rt] ,
literal[string] : keyword[False]
},
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[part] . identifier[vrf_rt]
}
keyword[elif] identifier[part] . identifier[getName] ()== literal[string] :
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] + identifier[part] . identifier[ipv6_address] + literal[string] )
identifier[dictsql] ={
literal[string] :{
literal[string] : identifier[part] . identifier[ipv6_address] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : keyword[False]
},
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[part] . identifier[ipv6_address]
}
keyword[elif] identifier[part] . identifier[getName] ()== literal[string] :
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] + identifier[part] . identifier[ipv6_prefix] [ literal[int] ]+ literal[string] )
identifier[strict_prefix] = identifier[unicode] ( identifier[IPy] . identifier[IP] ( identifier[part] . identifier[ipv6_prefix] [ literal[int] ], identifier[make_net] = keyword[True] ))
identifier[interp] ={
literal[string] : identifier[part] . identifier[ipv6_prefix] [ literal[int] ],
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : keyword[False]
}
keyword[if] identifier[part] . identifier[ipv6_prefix] [ literal[int] ]!= identifier[strict_prefix] :
identifier[interp] [ literal[string] ]= identifier[strict_prefix]
identifier[dictsql] ={
literal[string] : identifier[interp] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[strict_prefix]
}
keyword[else] :
keyword[if] identifier[self] . identifier[_get_afi] ( identifier[part] [ literal[int] ])== literal[int] keyword[and] identifier[len] ( identifier[part] [ literal[int] ]. identifier[split] ( literal[string] ))== literal[int] :
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] + identifier[part] [ literal[int] ]+ literal[string] )
identifier[address] , identifier[prefix_length] = identifier[part] [ literal[int] ]. identifier[split] ( literal[string] )
keyword[while] identifier[len] ( identifier[address] . identifier[split] ( literal[string] ))< literal[int] :
identifier[address] += literal[string]
identifier[prefix] = identifier[address] + literal[string] + identifier[prefix_length]
identifier[strict_prefix] = identifier[unicode] ( identifier[IPy] . identifier[IP] ( identifier[part] [ literal[int] ], identifier[make_net] = keyword[True] ))
identifier[interp] ={
literal[string] : identifier[part] [ literal[int] ],
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : keyword[False]
}
keyword[if] identifier[prefix] != identifier[part] [ literal[int] ]:
identifier[interp] [ literal[string] ]= identifier[prefix]
keyword[if] identifier[prefix] != identifier[strict_prefix] :
identifier[interp] [ literal[string] ]= identifier[strict_prefix]
identifier[dictsql] ={
literal[string] : identifier[interp] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[strict_prefix]
}
keyword[elif] identifier[self] . identifier[_get_afi] ( identifier[part] [ literal[int] ])== literal[int] keyword[and] identifier[len] ( identifier[part] [ literal[int] ]. identifier[split] ( literal[string] ))== literal[int] :
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] + identifier[part] [ literal[int] ]+ literal[string] )
identifier[address] = identifier[unicode] ( identifier[IPy] . identifier[IP] ( identifier[part] [ literal[int] ]))
identifier[dictsql] ={
literal[string] :{
literal[string] : identifier[address] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : keyword[False]
},
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[address]
}
keyword[else] :
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] + identifier[part] [ literal[int] ]+ literal[string] )
identifier[dictsql] ={
literal[string] :{
literal[string] : identifier[part] [ literal[int] ],
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : keyword[False]
},
literal[string] : literal[string] ,
literal[string] :{
literal[string] : literal[string] ,
literal[string] :{
literal[string] : literal[string] ,
literal[string] :{
literal[string] : literal[string] ,
literal[string] :{
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[part] [ literal[int] ]
},
literal[string] :{
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[part] [ literal[int] ]
}
},
literal[string] :{
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[part] [ literal[int] ]
}
},
literal[string] :{
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[part] [ literal[int] ]
},
},
literal[string] :{
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[part] [ literal[int] ]
}
}
keyword[return] identifier[dictsql] | def _string_to_dictsql(self, part):
""" Do magic matching of single words or quoted string
"""
self._logger.debug('parsing string: ' + unicode(part[0]) + ' of type: ' + part.getName())
if part.getName() == 'tag':
self._logger.debug("Query part '" + part[0] + "' interpreted as tag")
dictsql = {'interpretation': {'string': part[0], 'interpretation': '(inherited) tag', 'attribute': 'tag', 'operator': 'equals_any', 'error': False}, 'operator': 'or', 'val1': {'operator': 'equals_any', 'val1': 'tags', 'val2': part[0][1:]}, 'val2': {'operator': 'equals_any', 'val1': 'inherited_tags', 'val2': part[0][1:]}} # depends on [control=['if'], data=[]]
elif part.getName() == 'vrf_rt':
self._logger.debug("Query part '" + part.vrf_rt + "' interpreted as VRF RT")
dictsql = {'interpretation': {'attribute': 'VRF RT', 'interpretation': 'vrf_rt', 'operator': 'equals', 'string': part.vrf_rt, 'error': False}, 'operator': 'equals', 'val1': 'vrf_rt', 'val2': part.vrf_rt} # depends on [control=['if'], data=[]]
elif part.getName() == 'ipv6_address':
self._logger.debug("Query part '" + part.ipv6_address + "' interpreted as IPv6 address")
dictsql = {'interpretation': {'string': part.ipv6_address, 'interpretation': 'IPv6 address', 'attribute': 'prefix', 'operator': 'contains_equals', 'error': False}, 'operator': 'contains_equals', 'val1': 'prefix', 'val2': part.ipv6_address} # depends on [control=['if'], data=[]]
elif part.getName() == 'ipv6_prefix':
self._logger.debug("Query part '" + part.ipv6_prefix[0] + "' interpreted as IPv6 prefix")
strict_prefix = unicode(IPy.IP(part.ipv6_prefix[0], make_net=True))
interp = {'string': part.ipv6_prefix[0], 'interpretation': 'IPv6 prefix', 'attribute': 'prefix', 'operator': 'contained_within_equals', 'error': False}
if part.ipv6_prefix[0] != strict_prefix:
interp['strict_prefix'] = strict_prefix # depends on [control=['if'], data=['strict_prefix']]
dictsql = {'interpretation': interp, 'operator': 'contained_within_equals', 'val1': 'prefix', 'val2': strict_prefix} # depends on [control=['if'], data=[]]
# since it's difficult to parse shortened IPv4 addresses (like 10/8)
# using pyparsing we do a bit of good ol parsing here
elif self._get_afi(part[0]) == 4 and len(part[0].split('/')) == 2:
self._logger.debug("Query part '" + part[0] + "' interpreted as prefix")
(address, prefix_length) = part[0].split('/')
# complete a prefix to it's fully expanded form
# 10/8 will be expanded into 10.0.0.0/8 which PostgreSQL can
# parse correctly
while len(address.split('.')) < 4:
address += '.0' # depends on [control=['while'], data=[]]
prefix = address + '/' + prefix_length
strict_prefix = unicode(IPy.IP(part[0], make_net=True))
interp = {'string': part[0], 'interpretation': 'IPv4 prefix', 'attribute': 'prefix', 'operator': 'contained_within_equals', 'error': False}
if prefix != part[0]:
interp['expanded'] = prefix # depends on [control=['if'], data=['prefix']]
if prefix != strict_prefix:
interp['strict_prefix'] = strict_prefix # depends on [control=['if'], data=['strict_prefix']]
dictsql = {'interpretation': interp, 'operator': 'contained_within_equals', 'val1': 'prefix', 'val2': strict_prefix} # depends on [control=['if'], data=[]]
# IPv4 address
# split on dot to make sure we have all four octets before we do a
# search
elif self._get_afi(part[0]) == 4 and len(part[0].split('.')) == 4:
self._logger.debug("Query part '" + part[0] + "' interpreted as prefix")
address = unicode(IPy.IP(part[0]))
dictsql = {'interpretation': {'string': address, 'interpretation': 'IPv4 address', 'attribute': 'prefix', 'operator': 'contains_equals', 'error': False}, 'operator': 'contains_equals', 'val1': 'prefix', 'val2': address} # depends on [control=['if'], data=[]]
else:
# Description or comment
self._logger.debug("Query part '" + part[0] + "' interpreted as text")
dictsql = {'interpretation': {'string': part[0], 'interpretation': 'text', 'attribute': 'description or comment or node or order_id or customer_id', 'operator': 'regex', 'error': False}, 'operator': 'or', 'val1': {'operator': 'or', 'val1': {'operator': 'or', 'val1': {'operator': 'or', 'val1': {'operator': 'regex_match', 'val1': 'comment', 'val2': part[0]}, 'val2': {'operator': 'regex_match', 'val1': 'description', 'val2': part[0]}}, 'val2': {'operator': 'regex_match', 'val1': 'node', 'val2': part[0]}}, 'val2': {'operator': 'regex_match', 'val1': 'order_id', 'val2': part[0]}}, 'val2': {'operator': 'regex_match', 'val1': 'customer_id', 'val2': part[0]}}
return dictsql |
def to_indexable(*args, **kwargs):
"""Ensure that all args are an indexable type.
Conversion runs lazily for dask objects, immediately otherwise.
Parameters
----------
args : array_like or scalar
allow_scalars : bool, optional
Whether to allow scalars in args. Default is False.
"""
if kwargs.get("allow_scalars", False):
indexable = _maybe_indexable
else:
indexable = _indexable
for x in args:
if x is None or isinstance(x, (da.Array, dd.DataFrame)):
yield x
elif is_dask_collection(x):
yield delayed(indexable, pure=True)(x)
else:
yield indexable(x) | def function[to_indexable, parameter[]]:
constant[Ensure that all args are an indexable type.
Conversion runs lazily for dask objects, immediately otherwise.
Parameters
----------
args : array_like or scalar
allow_scalars : bool, optional
Whether to allow scalars in args. Default is False.
]
if call[name[kwargs].get, parameter[constant[allow_scalars], constant[False]]] begin[:]
variable[indexable] assign[=] name[_maybe_indexable]
for taget[name[x]] in starred[name[args]] begin[:]
if <ast.BoolOp object at 0x7da1b1983fd0> begin[:]
<ast.Yield object at 0x7da1b19804f0> | keyword[def] identifier[to_indexable] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] ):
identifier[indexable] = identifier[_maybe_indexable]
keyword[else] :
identifier[indexable] = identifier[_indexable]
keyword[for] identifier[x] keyword[in] identifier[args] :
keyword[if] identifier[x] keyword[is] keyword[None] keyword[or] identifier[isinstance] ( identifier[x] ,( identifier[da] . identifier[Array] , identifier[dd] . identifier[DataFrame] )):
keyword[yield] identifier[x]
keyword[elif] identifier[is_dask_collection] ( identifier[x] ):
keyword[yield] identifier[delayed] ( identifier[indexable] , identifier[pure] = keyword[True] )( identifier[x] )
keyword[else] :
keyword[yield] identifier[indexable] ( identifier[x] ) | def to_indexable(*args, **kwargs):
"""Ensure that all args are an indexable type.
Conversion runs lazily for dask objects, immediately otherwise.
Parameters
----------
args : array_like or scalar
allow_scalars : bool, optional
Whether to allow scalars in args. Default is False.
"""
if kwargs.get('allow_scalars', False):
indexable = _maybe_indexable # depends on [control=['if'], data=[]]
else:
indexable = _indexable
for x in args:
if x is None or isinstance(x, (da.Array, dd.DataFrame)):
yield x # depends on [control=['if'], data=[]]
elif is_dask_collection(x):
yield delayed(indexable, pure=True)(x) # depends on [control=['if'], data=[]]
else:
yield indexable(x) # depends on [control=['for'], data=['x']] |
def perform_request_report(cmt_id, client_ip_address, uid=-1):
"""
Report a comment/review for inappropriate content.
Will send an email to the administrator if number of reports is a multiple
of CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN
:param cmt_id: comment id
:return: integer 1 if successful, integer 0 if not. -2 if comment does not exist
"""
cmt_id = wash_url_argument(cmt_id, 'int')
if cmt_id <= 0:
return 0
(query_res, nb_abuse_reports) = query_record_report_this(cmt_id)
if query_res == 0:
return 0
elif query_res == -2:
return -2
if not(check_user_can_report(cmt_id, client_ip_address, uid)):
return 0
action_date = convert_datestruct_to_datetext(time.localtime())
action_code = CFG_WEBCOMMENT_ACTION_CODE['REPORT_ABUSE']
query = """INSERT INTO "cmtACTIONHISTORY" ("id_cmtRECORDCOMMENT", id_bibrec,
id_user, client_host, action_time, action_code)
VALUES (%s, NULL, %s, inet_aton(%s), %s, %s)"""
params = (cmt_id, uid, client_ip_address, action_date, action_code)
run_sql(query, params)
if nb_abuse_reports % CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN == 0:
(cmt_id2,
id_bibrec,
id_user,
cmt_body,
cmt_date,
cmt_star,
cmt_vote, cmt_nb_votes_total,
cmt_title,
cmt_reported,
round_name,
restriction) = query_get_comment(cmt_id)
(user_nb_abuse_reports,
user_votes,
user_nb_votes_total) = query_get_user_reports_and_votes(int(id_user))
(nickname,
user_email,
last_login) = query_get_user_contact_info(id_user)
from_addr = '%s Alert Engine <%s>' % (
CFG_SITE_NAME, CFG_WEBALERT_ALERT_ENGINE_EMAIL)
comment_collection = get_comment_collection(cmt_id)
to_addrs = get_collection_moderators(comment_collection)
subject = "A comment has been reported as inappropriate by a user"
body = '''
The following comment has been reported a total of %(cmt_reported)s times.
Author: nickname = %(nickname)s
email = %(user_email)s
user_id = %(uid)s
This user has:
total number of reports = %(user_nb_abuse_reports)s
%(votes)s
Comment: comment_id = %(cmt_id)s
record_id = %(id_bibrec)s
date written = %(cmt_date)s
nb reports = %(cmt_reported)s
%(review_stuff)s
body =
---start body---
%(cmt_body)s
---end body---
Please go to the record page %(comment_admin_link)s to delete this message if necessary. A warning will be sent to the user in question.''' % \
{'cfg-report_max': CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN,
'nickname': nickname,
'user_email': user_email,
'uid': id_user,
'user_nb_abuse_reports': user_nb_abuse_reports,
'user_votes': user_votes,
'votes': CFG_WEBCOMMENT_ALLOW_REVIEWS and
"total number of positive votes\t= %s\n\t\ttotal number of negative votes\t= %s" %
(user_votes,
(user_nb_votes_total - user_votes)) or "\n",
'cmt_id': cmt_id,
'id_bibrec': id_bibrec,
'cmt_date': cmt_date,
'cmt_reported': cmt_reported,
'review_stuff': CFG_WEBCOMMENT_ALLOW_REVIEWS and
"star score\t= %s\n\treview title\t= %s" % (
cmt_star,
cmt_title) or "",
'cmt_body': cmt_body,
'comment_admin_link': CFG_SITE_URL + "/" + CFG_SITE_RECORD + "/" + str(id_bibrec) + '/comments#' + str(cmt_id),
'user_admin_link': "user_admin_link" # ! FIXME
}
# FIXME to be added to email when websession module is over:
# If you wish to ban the user, you can do so via the User Admin Panel
# %(user_admin_link)s.
send_email(from_addr, to_addrs, subject, body)
return 1 | def function[perform_request_report, parameter[cmt_id, client_ip_address, uid]]:
constant[
Report a comment/review for inappropriate content.
Will send an email to the administrator if number of reports is a multiple
of CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN
:param cmt_id: comment id
:return: integer 1 if successful, integer 0 if not. -2 if comment does not exist
]
variable[cmt_id] assign[=] call[name[wash_url_argument], parameter[name[cmt_id], constant[int]]]
if compare[name[cmt_id] less_or_equal[<=] constant[0]] begin[:]
return[constant[0]]
<ast.Tuple object at 0x7da207f02110> assign[=] call[name[query_record_report_this], parameter[name[cmt_id]]]
if compare[name[query_res] equal[==] constant[0]] begin[:]
return[constant[0]]
if <ast.UnaryOp object at 0x7da207f02680> begin[:]
return[constant[0]]
variable[action_date] assign[=] call[name[convert_datestruct_to_datetext], parameter[call[name[time].localtime, parameter[]]]]
variable[action_code] assign[=] call[name[CFG_WEBCOMMENT_ACTION_CODE]][constant[REPORT_ABUSE]]
variable[query] assign[=] constant[INSERT INTO "cmtACTIONHISTORY" ("id_cmtRECORDCOMMENT", id_bibrec,
id_user, client_host, action_time, action_code)
VALUES (%s, NULL, %s, inet_aton(%s), %s, %s)]
variable[params] assign[=] tuple[[<ast.Name object at 0x7da18f721900>, <ast.Name object at 0x7da18f723ca0>, <ast.Name object at 0x7da18f7239a0>, <ast.Name object at 0x7da18f720d00>, <ast.Name object at 0x7da18f720730>]]
call[name[run_sql], parameter[name[query], name[params]]]
if compare[binary_operation[name[nb_abuse_reports] <ast.Mod object at 0x7da2590d6920> name[CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN]] equal[==] constant[0]] begin[:]
<ast.Tuple object at 0x7da18f721ff0> assign[=] call[name[query_get_comment], parameter[name[cmt_id]]]
<ast.Tuple object at 0x7da18f7217e0> assign[=] call[name[query_get_user_reports_and_votes], parameter[call[name[int], parameter[name[id_user]]]]]
<ast.Tuple object at 0x7da18f722350> assign[=] call[name[query_get_user_contact_info], parameter[name[id_user]]]
variable[from_addr] assign[=] binary_operation[constant[%s Alert Engine <%s>] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f723fd0>, <ast.Name object at 0x7da18f723a60>]]]
variable[comment_collection] assign[=] call[name[get_comment_collection], parameter[name[cmt_id]]]
variable[to_addrs] assign[=] call[name[get_collection_moderators], parameter[name[comment_collection]]]
variable[subject] assign[=] constant[A comment has been reported as inappropriate by a user]
variable[body] assign[=] binary_operation[constant[
The following comment has been reported a total of %(cmt_reported)s times.
Author: nickname = %(nickname)s
email = %(user_email)s
user_id = %(uid)s
This user has:
total number of reports = %(user_nb_abuse_reports)s
%(votes)s
Comment: comment_id = %(cmt_id)s
record_id = %(id_bibrec)s
date written = %(cmt_date)s
nb reports = %(cmt_reported)s
%(review_stuff)s
body =
---start body---
%(cmt_body)s
---end body---
Please go to the record page %(comment_admin_link)s to delete this message if necessary. A warning will be sent to the user in question.] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da18f722da0>, <ast.Constant object at 0x7da18f7228f0>, <ast.Constant object at 0x7da18f722bf0>, <ast.Constant object at 0x7da18f723a00>, <ast.Constant object at 0x7da18f721690>, <ast.Constant object at 0x7da18f720640>, <ast.Constant object at 0x7da18f721330>, <ast.Constant object at 0x7da18f7231c0>, <ast.Constant object at 0x7da18f721d80>, <ast.Constant object at 0x7da18f7233d0>, <ast.Constant object at 0x7da18f7220b0>, <ast.Constant object at 0x7da18f720610>, <ast.Constant object at 0x7da18f7237c0>, <ast.Constant object at 0x7da18f722140>, <ast.Constant object at 0x7da18f722bc0>], [<ast.Name object at 0x7da18f722590>, <ast.Name object at 0x7da18f721450>, <ast.Name object at 0x7da18f720e50>, <ast.Name object at 0x7da18f7223e0>, <ast.Name object at 0x7da18f7207f0>, <ast.Name object at 0x7da18f721930>, <ast.BoolOp object at 0x7da18f720280>, <ast.Name object at 0x7da207f03040>, <ast.Name object at 0x7da207f03fd0>, <ast.Name object at 0x7da207f003a0>, <ast.Name object at 0x7da207f021a0>, <ast.BoolOp object at 0x7da207f03070>, <ast.Name object at 0x7da207f001c0>, <ast.BinOp object at 0x7da207f01bd0>, <ast.Constant object at 0x7da207f015d0>]]]
call[name[send_email], parameter[name[from_addr], name[to_addrs], name[subject], name[body]]]
return[constant[1]] | keyword[def] identifier[perform_request_report] ( identifier[cmt_id] , identifier[client_ip_address] , identifier[uid] =- literal[int] ):
literal[string]
identifier[cmt_id] = identifier[wash_url_argument] ( identifier[cmt_id] , literal[string] )
keyword[if] identifier[cmt_id] <= literal[int] :
keyword[return] literal[int]
( identifier[query_res] , identifier[nb_abuse_reports] )= identifier[query_record_report_this] ( identifier[cmt_id] )
keyword[if] identifier[query_res] == literal[int] :
keyword[return] literal[int]
keyword[elif] identifier[query_res] ==- literal[int] :
keyword[return] - literal[int]
keyword[if] keyword[not] ( identifier[check_user_can_report] ( identifier[cmt_id] , identifier[client_ip_address] , identifier[uid] )):
keyword[return] literal[int]
identifier[action_date] = identifier[convert_datestruct_to_datetext] ( identifier[time] . identifier[localtime] ())
identifier[action_code] = identifier[CFG_WEBCOMMENT_ACTION_CODE] [ literal[string] ]
identifier[query] = literal[string]
identifier[params] =( identifier[cmt_id] , identifier[uid] , identifier[client_ip_address] , identifier[action_date] , identifier[action_code] )
identifier[run_sql] ( identifier[query] , identifier[params] )
keyword[if] identifier[nb_abuse_reports] % identifier[CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN] == literal[int] :
( identifier[cmt_id2] ,
identifier[id_bibrec] ,
identifier[id_user] ,
identifier[cmt_body] ,
identifier[cmt_date] ,
identifier[cmt_star] ,
identifier[cmt_vote] , identifier[cmt_nb_votes_total] ,
identifier[cmt_title] ,
identifier[cmt_reported] ,
identifier[round_name] ,
identifier[restriction] )= identifier[query_get_comment] ( identifier[cmt_id] )
( identifier[user_nb_abuse_reports] ,
identifier[user_votes] ,
identifier[user_nb_votes_total] )= identifier[query_get_user_reports_and_votes] ( identifier[int] ( identifier[id_user] ))
( identifier[nickname] ,
identifier[user_email] ,
identifier[last_login] )= identifier[query_get_user_contact_info] ( identifier[id_user] )
identifier[from_addr] = literal[string] %(
identifier[CFG_SITE_NAME] , identifier[CFG_WEBALERT_ALERT_ENGINE_EMAIL] )
identifier[comment_collection] = identifier[get_comment_collection] ( identifier[cmt_id] )
identifier[to_addrs] = identifier[get_collection_moderators] ( identifier[comment_collection] )
identifier[subject] = literal[string]
identifier[body] = literal[string] %{ literal[string] : identifier[CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN] ,
literal[string] : identifier[nickname] ,
literal[string] : identifier[user_email] ,
literal[string] : identifier[id_user] ,
literal[string] : identifier[user_nb_abuse_reports] ,
literal[string] : identifier[user_votes] ,
literal[string] : identifier[CFG_WEBCOMMENT_ALLOW_REVIEWS] keyword[and]
literal[string] %
( identifier[user_votes] ,
( identifier[user_nb_votes_total] - identifier[user_votes] )) keyword[or] literal[string] ,
literal[string] : identifier[cmt_id] ,
literal[string] : identifier[id_bibrec] ,
literal[string] : identifier[cmt_date] ,
literal[string] : identifier[cmt_reported] ,
literal[string] : identifier[CFG_WEBCOMMENT_ALLOW_REVIEWS] keyword[and]
literal[string] %(
identifier[cmt_star] ,
identifier[cmt_title] ) keyword[or] literal[string] ,
literal[string] : identifier[cmt_body] ,
literal[string] : identifier[CFG_SITE_URL] + literal[string] + identifier[CFG_SITE_RECORD] + literal[string] + identifier[str] ( identifier[id_bibrec] )+ literal[string] + identifier[str] ( identifier[cmt_id] ),
literal[string] : literal[string]
}
identifier[send_email] ( identifier[from_addr] , identifier[to_addrs] , identifier[subject] , identifier[body] )
keyword[return] literal[int] | def perform_request_report(cmt_id, client_ip_address, uid=-1):
"""
Report a comment/review for inappropriate content.
Will send an email to the administrator if number of reports is a multiple
of CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN
:param cmt_id: comment id
:return: integer 1 if successful, integer 0 if not. -2 if comment does not exist
"""
cmt_id = wash_url_argument(cmt_id, 'int')
if cmt_id <= 0:
return 0 # depends on [control=['if'], data=[]]
(query_res, nb_abuse_reports) = query_record_report_this(cmt_id)
if query_res == 0:
return 0 # depends on [control=['if'], data=[]]
elif query_res == -2:
return -2 # depends on [control=['if'], data=[]]
if not check_user_can_report(cmt_id, client_ip_address, uid):
return 0 # depends on [control=['if'], data=[]]
action_date = convert_datestruct_to_datetext(time.localtime())
action_code = CFG_WEBCOMMENT_ACTION_CODE['REPORT_ABUSE']
query = 'INSERT INTO "cmtACTIONHISTORY" ("id_cmtRECORDCOMMENT", id_bibrec,\n id_user, client_host, action_time, action_code)\n VALUES (%s, NULL, %s, inet_aton(%s), %s, %s)'
params = (cmt_id, uid, client_ip_address, action_date, action_code)
run_sql(query, params)
if nb_abuse_reports % CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN == 0:
(cmt_id2, id_bibrec, id_user, cmt_body, cmt_date, cmt_star, cmt_vote, cmt_nb_votes_total, cmt_title, cmt_reported, round_name, restriction) = query_get_comment(cmt_id)
(user_nb_abuse_reports, user_votes, user_nb_votes_total) = query_get_user_reports_and_votes(int(id_user))
(nickname, user_email, last_login) = query_get_user_contact_info(id_user)
from_addr = '%s Alert Engine <%s>' % (CFG_SITE_NAME, CFG_WEBALERT_ALERT_ENGINE_EMAIL)
comment_collection = get_comment_collection(cmt_id)
to_addrs = get_collection_moderators(comment_collection)
subject = 'A comment has been reported as inappropriate by a user' # ! FIXME
body = '\nThe following comment has been reported a total of %(cmt_reported)s times.\n\nAuthor: nickname = %(nickname)s\n email = %(user_email)s\n user_id = %(uid)s\n This user has:\n total number of reports = %(user_nb_abuse_reports)s\n %(votes)s\nComment: comment_id = %(cmt_id)s\n record_id = %(id_bibrec)s\n date written = %(cmt_date)s\n nb reports = %(cmt_reported)s\n %(review_stuff)s\n body =\n---start body---\n%(cmt_body)s\n---end body---\n\nPlease go to the record page %(comment_admin_link)s to delete this message if necessary. A warning will be sent to the user in question.' % {'cfg-report_max': CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN, 'nickname': nickname, 'user_email': user_email, 'uid': id_user, 'user_nb_abuse_reports': user_nb_abuse_reports, 'user_votes': user_votes, 'votes': CFG_WEBCOMMENT_ALLOW_REVIEWS and 'total number of positive votes\t= %s\n\t\ttotal number of negative votes\t= %s' % (user_votes, user_nb_votes_total - user_votes) or '\n', 'cmt_id': cmt_id, 'id_bibrec': id_bibrec, 'cmt_date': cmt_date, 'cmt_reported': cmt_reported, 'review_stuff': CFG_WEBCOMMENT_ALLOW_REVIEWS and 'star score\t= %s\n\treview title\t= %s' % (cmt_star, cmt_title) or '', 'cmt_body': cmt_body, 'comment_admin_link': CFG_SITE_URL + '/' + CFG_SITE_RECORD + '/' + str(id_bibrec) + '/comments#' + str(cmt_id), 'user_admin_link': 'user_admin_link'}
# FIXME to be added to email when websession module is over:
# If you wish to ban the user, you can do so via the User Admin Panel
# %(user_admin_link)s.
send_email(from_addr, to_addrs, subject, body) # depends on [control=['if'], data=[]]
return 1 |
def get_value(self, spec, row):
    """Resolve the value described by *spec* from *row*.

    Looks up ``spec['column']`` in *row*; when the column is missing
    from the spec or the cell is empty (per ``is_empty``), falls back
    to ``spec['default']``.  Whatever value wins is passed through
    ``self.convert_type`` so the caller always receives a converted
    value (or a dict with a 'value' entry plus extra fields), or
    ``None`` when neither a usable cell value nor a default exists.
    """
    column = spec.get('column')
    default = spec.get('default')

    def _fallback():
        # Shared default handling for both the "no column configured"
        # and "cell is empty" cases; previously duplicated inline with
        # inconsistent bare-return vs. `return None`.
        if default is not None:
            return self.convert_type(default, spec)
        return None

    if column is None:
        return _fallback()
    value = row.get(column)
    if is_empty(value):
        return _fallback()
    return self.convert_type(value, spec)
constant[ Returns the value or a dict with a 'value' entry plus extra fields. ]
variable[column] assign[=] call[name[spec].get, parameter[constant[column]]]
variable[default] assign[=] call[name[spec].get, parameter[constant[default]]]
if compare[name[column] is constant[None]] begin[:]
if compare[name[default] is_not constant[None]] begin[:]
return[call[name[self].convert_type, parameter[name[default], name[spec]]]]
return[None]
variable[value] assign[=] call[name[row].get, parameter[name[column]]]
if call[name[is_empty], parameter[name[value]]] begin[:]
if compare[name[default] is_not constant[None]] begin[:]
return[call[name[self].convert_type, parameter[name[default], name[spec]]]]
return[constant[None]]
return[call[name[self].convert_type, parameter[name[value], name[spec]]]] | keyword[def] identifier[get_value] ( identifier[self] , identifier[spec] , identifier[row] ):
literal[string]
identifier[column] = identifier[spec] . identifier[get] ( literal[string] )
identifier[default] = identifier[spec] . identifier[get] ( literal[string] )
keyword[if] identifier[column] keyword[is] keyword[None] :
keyword[if] identifier[default] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[convert_type] ( identifier[default] , identifier[spec] )
keyword[return]
identifier[value] = identifier[row] . identifier[get] ( identifier[column] )
keyword[if] identifier[is_empty] ( identifier[value] ):
keyword[if] identifier[default] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[convert_type] ( identifier[default] , identifier[spec] )
keyword[return] keyword[None]
keyword[return] identifier[self] . identifier[convert_type] ( identifier[value] , identifier[spec] ) | def get_value(self, spec, row):
""" Returns the value or a dict with a 'value' entry plus extra fields. """
column = spec.get('column')
default = spec.get('default')
if column is None:
if default is not None:
return self.convert_type(default, spec) # depends on [control=['if'], data=['default']]
return # depends on [control=['if'], data=[]]
value = row.get(column)
if is_empty(value):
if default is not None:
return self.convert_type(default, spec) # depends on [control=['if'], data=['default']]
return None # depends on [control=['if'], data=[]]
return self.convert_type(value, spec) |
def template(filename, **context):
    """Render the template *filename* and return the object it binds to ``page``.

    The file under ``templates/`` is compiled and executed as Python
    code with a small set of helper globals (``t``, ``f``, ``e``,
    ``escape`` and ``ctime``) plus the caller-supplied keyword
    arguments as its local namespace.  The template code is expected
    to assign a local named ``page``; that value is returned.

    Note: executing template files runs arbitrary Python — only use
    with trusted template sources.
    """
    path = 'templates/{}'.format(filename)
    with open(path) as source:
        code = compile(source.read(), path, 'exec')
    # Helpers made visible to every template.
    global_vars = {'t': t,
                   'f': f,
                   'e': e,
                   'escape': html.escape,
                   'ctime': time.ctime}
    # `context` (formerly named `locals`, shadowing the builtin) doubles
    # as the execution locals and collects the template's assignments.
    exec(code, global_vars, context)
    return context['page']
constant[ Returns ]
variable[path] assign[=] call[constant[templates/{}].format, parameter[name[filename]]]
with call[name[open], parameter[name[path]]] begin[:]
variable[code] assign[=] call[name[compile], parameter[call[name[source].read, parameter[]], name[path], constant[exec]]]
variable[global_vars] assign[=] dictionary[[<ast.Constant object at 0x7da1b26a7bb0>, <ast.Constant object at 0x7da1b26a69b0>, <ast.Constant object at 0x7da1b26a58d0>, <ast.Constant object at 0x7da1b26a7520>, <ast.Constant object at 0x7da1b26a6d10>], [<ast.Name object at 0x7da1b26a60e0>, <ast.Name object at 0x7da1b26a7eb0>, <ast.Name object at 0x7da1b26a4fd0>, <ast.Attribute object at 0x7da1b26a72e0>, <ast.Attribute object at 0x7da1b26a6d40>]]
call[name[exec], parameter[name[code], name[global_vars], name[locals]]]
return[call[name[locals]][constant[page]]] | keyword[def] identifier[template] ( identifier[filename] ,** identifier[locals] ):
literal[string]
identifier[path] = literal[string] . identifier[format] ( identifier[filename] )
keyword[with] identifier[open] ( identifier[path] ) keyword[as] identifier[source] :
identifier[code] = identifier[compile] ( identifier[source] . identifier[read] (), identifier[path] , literal[string] )
identifier[global_vars] ={ literal[string] : identifier[t] ,
literal[string] : identifier[f] ,
literal[string] : identifier[e] ,
literal[string] : identifier[html] . identifier[escape] ,
literal[string] : identifier[time] . identifier[ctime] }
identifier[exec] ( identifier[code] , identifier[global_vars] , identifier[locals] )
keyword[return] identifier[locals] [ literal[string] ] | def template(filename, **locals):
""" Returns """
path = 'templates/{}'.format(filename)
with open(path) as source:
code = compile(source.read(), path, 'exec')
global_vars = {'t': t, 'f': f, 'e': e, 'escape': html.escape, 'ctime': time.ctime}
exec(code, global_vars, locals)
return locals['page'] # depends on [control=['with'], data=['source']] |
def re_execute_with_exception(frame, exception, traceback):
    """
    Dark magic. Causes ``frame`` to raise an exception at the current location
    with ``traceback`` appended to it.
    Note that since the line tracer is raising an exception, the interpreter
    disables the global trace, so it's not possible to restore the previous
    tracing conditions.
    """
    if sys.gettrace() == globaltrace:
        # If our trace handler is already installed, that means that this
        # function has been called twice before the line tracer had a chance to
        # run. That can happen if more than one exception was logged.
        return
    call_lineno = frame.f_lineno
    def intercept_next_line(f, why, *args):
        # Line-trace callback: fires on the next traced line executed in
        # ``frame`` (other frames are ignored).
        if f is not frame:
            return
        set_linetrace_on_frame(f)
        # Undo modifications to the callers code (ick ick ick)
        back_like_nothing_happened()
        # Raise exception in (almost) the perfect place (except for duplication)
        if sys.version_info[0] < 3:
            #raise exception.__class__, exception, traceback
            raise exception
        raise exception.with_traceback(traceback)
    set_linetrace_on_frame(frame, intercept_next_line)
    linestarts = list(dis.findlinestarts(frame.f_code))
    # Keep only bytecode offsets of lines at or after the current line.
    linestarts = [a for a, l in linestarts if l >= call_lineno]
    # Jump target
    dest = linestarts[0]
    oc = frame.f_code.co_code[frame.f_lasti]
    if sys.version_info[0] < 3:
        # Python 2 exposes co_code as a str, so the opcode byte must be
        # converted to an int explicitly.
        oc = ord(oc)
    opcode_size = 2 if oc >= opcode.HAVE_ARGUMENT else 0
    # Opcode to overwrite
    where = frame.f_lasti + 1 + opcode_size
    #dis.disco(frame.f_code)
    pc = PyCodeObject.from_address(id(frame.f_code))
    # Patch a jump instruction directly into the (normally immutable) code
    # object via ctypes; the returned callable restores the original bytes.
    back_like_nothing_happened = pc.co_code.contents.inject_jump(where, dest)
    #print("#"*100)
    #dis.disco(frame.f_code)
    sys.settrace(globaltrace)
constant[
Dark magic. Causes ``frame`` to raise an exception at the current location
with ``traceback`` appended to it.
Note that since the line tracer is raising an exception, the interpreter
disables the global trace, so it's not possible to restore the previous
tracing conditions.
]
if compare[call[name[sys].gettrace, parameter[]] equal[==] name[globaltrace]] begin[:]
return[None]
variable[call_lineno] assign[=] name[frame].f_lineno
def function[intercept_next_line, parameter[f, why]]:
if compare[name[f] is_not name[frame]] begin[:]
return[None]
call[name[set_linetrace_on_frame], parameter[name[f]]]
call[name[back_like_nothing_happened], parameter[]]
if compare[call[name[sys].version_info][constant[0]] less[<] constant[3]] begin[:]
<ast.Raise object at 0x7da1b1174070>
<ast.Raise object at 0x7da1b1175360>
call[name[set_linetrace_on_frame], parameter[name[frame], name[intercept_next_line]]]
variable[linestarts] assign[=] call[name[list], parameter[call[name[dis].findlinestarts, parameter[name[frame].f_code]]]]
variable[linestarts] assign[=] <ast.ListComp object at 0x7da1b1119ba0>
variable[dest] assign[=] call[name[linestarts]][constant[0]]
variable[oc] assign[=] call[name[frame].f_code.co_code][name[frame].f_lasti]
if compare[call[name[sys].version_info][constant[0]] less[<] constant[3]] begin[:]
variable[oc] assign[=] call[name[ord], parameter[name[oc]]]
variable[opcode_size] assign[=] <ast.IfExp object at 0x7da1b111aec0>
variable[where] assign[=] binary_operation[binary_operation[name[frame].f_lasti + constant[1]] + name[opcode_size]]
variable[pc] assign[=] call[name[PyCodeObject].from_address, parameter[call[name[id], parameter[name[frame].f_code]]]]
variable[back_like_nothing_happened] assign[=] call[name[pc].co_code.contents.inject_jump, parameter[name[where], name[dest]]]
call[name[sys].settrace, parameter[name[globaltrace]]] | keyword[def] identifier[re_execute_with_exception] ( identifier[frame] , identifier[exception] , identifier[traceback] ):
literal[string]
keyword[if] identifier[sys] . identifier[gettrace] ()== identifier[globaltrace] :
keyword[return]
identifier[call_lineno] = identifier[frame] . identifier[f_lineno]
keyword[def] identifier[intercept_next_line] ( identifier[f] , identifier[why] ,* identifier[args] ):
keyword[if] identifier[f] keyword[is] keyword[not] identifier[frame] :
keyword[return]
identifier[set_linetrace_on_frame] ( identifier[f] )
identifier[back_like_nothing_happened] ()
keyword[if] identifier[sys] . identifier[version_info] [ literal[int] ]< literal[int] :
keyword[raise] identifier[exception]
keyword[raise] identifier[exception] . identifier[with_traceback] ( identifier[traceback] )
identifier[set_linetrace_on_frame] ( identifier[frame] , identifier[intercept_next_line] )
identifier[linestarts] = identifier[list] ( identifier[dis] . identifier[findlinestarts] ( identifier[frame] . identifier[f_code] ))
identifier[linestarts] =[ identifier[a] keyword[for] identifier[a] , identifier[l] keyword[in] identifier[linestarts] keyword[if] identifier[l] >= identifier[call_lineno] ]
identifier[dest] = identifier[linestarts] [ literal[int] ]
identifier[oc] = identifier[frame] . identifier[f_code] . identifier[co_code] [ identifier[frame] . identifier[f_lasti] ]
keyword[if] identifier[sys] . identifier[version_info] [ literal[int] ]< literal[int] :
identifier[oc] = identifier[ord] ( identifier[oc] )
identifier[opcode_size] = literal[int] keyword[if] identifier[oc] >= identifier[opcode] . identifier[HAVE_ARGUMENT] keyword[else] literal[int]
identifier[where] = identifier[frame] . identifier[f_lasti] + literal[int] + identifier[opcode_size]
identifier[pc] = identifier[PyCodeObject] . identifier[from_address] ( identifier[id] ( identifier[frame] . identifier[f_code] ))
identifier[back_like_nothing_happened] = identifier[pc] . identifier[co_code] . identifier[contents] . identifier[inject_jump] ( identifier[where] , identifier[dest] )
identifier[sys] . identifier[settrace] ( identifier[globaltrace] ) | def re_execute_with_exception(frame, exception, traceback):
"""
Dark magic. Causes ``frame`` to raise an exception at the current location
with ``traceback`` appended to it.
Note that since the line tracer is raising an exception, the interpreter
disables the global trace, so it's not possible to restore the previous
tracing conditions.
"""
if sys.gettrace() == globaltrace:
# If our trace handler is already installed, that means that this
# function has been called twice before the line tracer had a chance to
# run. That can happen if more than one exception was logged.
return # depends on [control=['if'], data=[]]
call_lineno = frame.f_lineno
def intercept_next_line(f, why, *args):
if f is not frame:
return # depends on [control=['if'], data=[]]
set_linetrace_on_frame(f)
# Undo modifications to the callers code (ick ick ick)
back_like_nothing_happened()
# Raise exception in (almost) the perfect place (except for duplication)
if sys.version_info[0] < 3:
#raise exception.__class__, exception, traceback
raise exception # depends on [control=['if'], data=[]]
raise exception.with_traceback(traceback)
set_linetrace_on_frame(frame, intercept_next_line)
linestarts = list(dis.findlinestarts(frame.f_code))
linestarts = [a for (a, l) in linestarts if l >= call_lineno]
# Jump target
dest = linestarts[0]
oc = frame.f_code.co_code[frame.f_lasti]
if sys.version_info[0] < 3:
oc = ord(oc) # depends on [control=['if'], data=[]]
opcode_size = 2 if oc >= opcode.HAVE_ARGUMENT else 0
# Opcode to overwrite
where = frame.f_lasti + 1 + opcode_size
#dis.disco(frame.f_code)
pc = PyCodeObject.from_address(id(frame.f_code))
back_like_nothing_happened = pc.co_code.contents.inject_jump(where, dest)
#print("#"*100)
#dis.disco(frame.f_code)
sys.settrace(globaltrace) |
def init_app(self, app):
    """
    Bind this extension to *app* once its configuration is loaded.

    Reads the COMPASS_* settings, optionally compiles the stylesheets
    immediately, and installs an after-request hook when request-time
    checks are enabled.
    """
    self.app = app
    self.log = app.logger.getChild('compass')
    self.log.debug("Initializing compass integration")
    cfg = app.config
    self.compass_path = cfg.get('COMPASS_PATH', 'compass')
    self.config_files = cfg.get('COMPASS_CONFIGS', None)
    self.requestcheck_debug_only = cfg.get('COMPASS_REQUESTCHECK_DEBUG_ONLY',
                                           True)
    self.skip_mtime_check = cfg.get('COMPASS_SKIP_MTIME_CHECK', False)
    self.debug_only = cfg.get('COMPASS_DEBUG_ONLY', False)
    self.disabled = cfg.get('COMPASS_DISABLED', False)
    if not self.debug_only:
        self.compile()
    # Hook requests only outside debug-only mode, and only when either
    # request-time checks are always on or the app is in debug mode.
    if not self.debug_only and (not self.requestcheck_debug_only
                                or app.debug):
        app.after_request(self.after_request)
constant[
Initialize the application once the configuration has been loaded
there.
]
name[self].app assign[=] name[app]
name[self].log assign[=] call[name[app].logger.getChild, parameter[constant[compass]]]
call[name[self].log.debug, parameter[constant[Initializing compass integration]]]
name[self].compass_path assign[=] call[name[self].app.config.get, parameter[constant[COMPASS_PATH], constant[compass]]]
name[self].config_files assign[=] call[name[self].app.config.get, parameter[constant[COMPASS_CONFIGS], constant[None]]]
name[self].requestcheck_debug_only assign[=] call[name[self].app.config.get, parameter[constant[COMPASS_REQUESTCHECK_DEBUG_ONLY], constant[True]]]
name[self].skip_mtime_check assign[=] call[name[self].app.config.get, parameter[constant[COMPASS_SKIP_MTIME_CHECK], constant[False]]]
name[self].debug_only assign[=] call[name[self].app.config.get, parameter[constant[COMPASS_DEBUG_ONLY], constant[False]]]
name[self].disabled assign[=] call[name[self].app.config.get, parameter[constant[COMPASS_DISABLED], constant[False]]]
if <ast.UnaryOp object at 0x7da18f721c90> begin[:]
call[name[self].compile, parameter[]]
if <ast.BoolOp object at 0x7da20c6c5720> begin[:]
call[name[self].app.after_request, parameter[name[self].after_request]] | keyword[def] identifier[init_app] ( identifier[self] , identifier[app] ):
literal[string]
identifier[self] . identifier[app] = identifier[app]
identifier[self] . identifier[log] = identifier[app] . identifier[logger] . identifier[getChild] ( literal[string] )
identifier[self] . identifier[log] . identifier[debug] ( literal[string] )
identifier[self] . identifier[compass_path] = identifier[self] . identifier[app] . identifier[config] . identifier[get] ( literal[string] , literal[string] )
identifier[self] . identifier[config_files] = identifier[self] . identifier[app] . identifier[config] . identifier[get] ( literal[string] , keyword[None] )
identifier[self] . identifier[requestcheck_debug_only] = identifier[self] . identifier[app] . identifier[config] . identifier[get] (
literal[string] , keyword[True] )
identifier[self] . identifier[skip_mtime_check] = identifier[self] . identifier[app] . identifier[config] . identifier[get] (
literal[string] , keyword[False] )
identifier[self] . identifier[debug_only] = identifier[self] . identifier[app] . identifier[config] . identifier[get] (
literal[string] , keyword[False] )
identifier[self] . identifier[disabled] = identifier[self] . identifier[app] . identifier[config] . identifier[get] ( literal[string] , keyword[False] )
keyword[if] keyword[not] identifier[self] . identifier[debug_only] :
identifier[self] . identifier[compile] ()
keyword[if] ( keyword[not] identifier[self] . identifier[debug_only] ) keyword[and] ( keyword[not] identifier[self] . identifier[requestcheck_debug_only] keyword[or] identifier[self] . identifier[app] . identifier[debug] ):
identifier[self] . identifier[app] . identifier[after_request] ( identifier[self] . identifier[after_request] ) | def init_app(self, app):
"""
Initialize the application once the configuration has been loaded
there.
"""
self.app = app
self.log = app.logger.getChild('compass')
self.log.debug('Initializing compass integration')
self.compass_path = self.app.config.get('COMPASS_PATH', 'compass')
self.config_files = self.app.config.get('COMPASS_CONFIGS', None)
self.requestcheck_debug_only = self.app.config.get('COMPASS_REQUESTCHECK_DEBUG_ONLY', True)
self.skip_mtime_check = self.app.config.get('COMPASS_SKIP_MTIME_CHECK', False)
self.debug_only = self.app.config.get('COMPASS_DEBUG_ONLY', False)
self.disabled = self.app.config.get('COMPASS_DISABLED', False)
if not self.debug_only:
self.compile() # depends on [control=['if'], data=[]]
if not self.debug_only and (not self.requestcheck_debug_only or self.app.debug):
self.app.after_request(self.after_request) # depends on [control=['if'], data=[]] |
def libvlc_audio_equalizer_get_preset_name(u_index):
    '''Get the name of a particular equalizer preset.
    This name can be used, for example, to prepare a preset label or menu in a user
    interface.
    @param u_index: index of the preset, counting from zero.
    @return: preset name, or NULL if there is no such preset.
    @version: LibVLC 2.2.0 or later.
    '''
    # Reuse the cached ctypes binding when one exists; otherwise build a
    # wrapper taking one unsigned int and returning a C string.
    f = _Cfunctions.get('libvlc_audio_equalizer_get_preset_name', None)
    if not f:
        f = _Cfunction('libvlc_audio_equalizer_get_preset_name', ((1,),), None,
                       ctypes.c_char_p, ctypes.c_uint)
    return f(u_index)
constant[Get the name of a particular equalizer preset.
This name can be used, for example, to prepare a preset label or menu in a user
interface.
@param u_index: index of the preset, counting from zero.
@return: preset name, or NULL if there is no such preset.
@version: LibVLC 2.2.0 or later.
]
variable[f] assign[=] <ast.BoolOp object at 0x7da1b2346c20>
return[call[name[f], parameter[name[u_index]]]] | keyword[def] identifier[libvlc_audio_equalizer_get_preset_name] ( identifier[u_index] ):
literal[string]
identifier[f] = identifier[_Cfunctions] . identifier[get] ( literal[string] , keyword[None] ) keyword[or] identifier[_Cfunction] ( literal[string] ,(( literal[int] ,),), keyword[None] ,
identifier[ctypes] . identifier[c_char_p] , identifier[ctypes] . identifier[c_uint] )
keyword[return] identifier[f] ( identifier[u_index] ) | def libvlc_audio_equalizer_get_preset_name(u_index):
"""Get the name of a particular equalizer preset.
This name can be used, for example, to prepare a preset label or menu in a user
interface.
@param u_index: index of the preset, counting from zero.
@return: preset name, or NULL if there is no such preset.
@version: LibVLC 2.2.0 or later.
"""
f = _Cfunctions.get('libvlc_audio_equalizer_get_preset_name', None) or _Cfunction('libvlc_audio_equalizer_get_preset_name', ((1,),), None, ctypes.c_char_p, ctypes.c_uint)
return f(u_index) |
def listen(self, timeout=10):
    """
    Receive datagrams until the stop event is set, then close the socket.

    Each datagram is handed to ``receive_datagram`` on its own daemon
    thread so one slow request cannot block the others.

    :param timeout: socket timeout in seconds; bounds how long a single
        ``recvfrom`` blocks, so the stop event is re-checked regularly.
    """
    self._socket.settimeout(float(timeout))
    while not self.stopped.isSet():
        try:
            datagram = self._socket.recvfrom(4096)
        except socket.timeout:
            continue
        try:
            worker = threading.Thread(target=self.receive_datagram,
                                      args=(datagram,))
            worker.daemon = True
            worker.start()
        except RuntimeError:
            logging.exception("Exception with Executor")
    logging.debug("closing socket")
    self._socket.close()
constant[
Listen for incoming messages. Timeout is used to check if the server must be switched off.
:param timeout: Socket Timeout in seconds
]
call[name[self]._socket.settimeout, parameter[call[name[float], parameter[name[timeout]]]]]
while <ast.UnaryOp object at 0x7da204623f70> begin[:]
<ast.Try object at 0x7da1b23459c0>
<ast.Try object at 0x7da1b23477f0>
call[name[logging].debug, parameter[constant[closing socket]]]
call[name[self]._socket.close, parameter[]] | keyword[def] identifier[listen] ( identifier[self] , identifier[timeout] = literal[int] ):
literal[string]
identifier[self] . identifier[_socket] . identifier[settimeout] ( identifier[float] ( identifier[timeout] ))
keyword[while] keyword[not] identifier[self] . identifier[stopped] . identifier[isSet] ():
keyword[try] :
identifier[data] , identifier[client_address] = identifier[self] . identifier[_socket] . identifier[recvfrom] ( literal[int] )
keyword[except] identifier[socket] . identifier[timeout] :
keyword[continue]
keyword[try] :
identifier[args] =(( identifier[data] , identifier[client_address] ),)
identifier[t] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[self] . identifier[receive_datagram] , identifier[args] = identifier[args] )
identifier[t] . identifier[daemon] = keyword[True]
identifier[t] . identifier[start] ()
keyword[except] identifier[RuntimeError] :
identifier[logging] . identifier[exception] ( literal[string] )
identifier[logging] . identifier[debug] ( literal[string] )
identifier[self] . identifier[_socket] . identifier[close] () | def listen(self, timeout=10):
"""
Listen for incoming messages. Timeout is used to check if the server must be switched off.
:param timeout: Socket Timeout in seconds
"""
self._socket.settimeout(float(timeout))
while not self.stopped.isSet():
try:
(data, client_address) = self._socket.recvfrom(4096) # depends on [control=['try'], data=[]]
except socket.timeout:
continue # depends on [control=['except'], data=[]]
try:
#Start a new thread not to block other requests
args = ((data, client_address),)
t = threading.Thread(target=self.receive_datagram, args=args)
t.daemon = True
t.start() # depends on [control=['try'], data=[]]
except RuntimeError:
logging.exception('Exception with Executor') # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
logging.debug('closing socket')
self._socket.close() |
def gene_name(st, exclude=("ev",), sep="."):
    """
    Normalize a gene identifier by stripping an isoform suffix.

    Used in BLAST filtering to collapse alternative splicings.
    Annotation groups are inconsistent about splicing names: mostly
    the part after the last *sep* can be dropped, except for ids
    starting with a prefix in *exclude*.  Only a single-character
    suffix is treated as an isoform marker; anything longer is
    assumed to be part of the gene name itself.
    """
    if any(st.startswith(prefix) for prefix in exclude):
        sep = None
    st = st.split('|')[0]
    name, suffix = st, ""
    if sep and sep in st:
        name, suffix = st.rsplit(sep, 1)
    # Keep the full identifier unless exactly one character was split off.
    return name if len(suffix) == 1 else st
constant[
Helper functions in the BLAST filtering to get rid alternative splicings.
This is ugly, but different annotation groups are inconsistent with respect
to how the alternative splicings are named. Mostly it can be done by removing
the suffix, except for ones in the exclude list.
]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da18dc9ad70>]] begin[:]
variable[sep] assign[=] constant[None]
variable[st] assign[=] call[call[name[st].split, parameter[constant[|]]]][constant[0]]
if <ast.BoolOp object at 0x7da18dc991b0> begin[:]
<ast.Tuple object at 0x7da18dc99300> assign[=] call[name[st].rsplit, parameter[name[sep], constant[1]]]
if compare[call[name[len], parameter[name[suffix]]] not_equal[!=] constant[1]] begin[:]
variable[name] assign[=] name[st]
return[name[name]] | keyword[def] identifier[gene_name] ( identifier[st] , identifier[exclude] =( literal[string] ,), identifier[sep] = literal[string] ):
literal[string]
keyword[if] identifier[any] ( identifier[st] . identifier[startswith] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[exclude] ):
identifier[sep] = keyword[None]
identifier[st] = identifier[st] . identifier[split] ( literal[string] )[ literal[int] ]
keyword[if] identifier[sep] keyword[and] identifier[sep] keyword[in] identifier[st] :
identifier[name] , identifier[suffix] = identifier[st] . identifier[rsplit] ( identifier[sep] , literal[int] )
keyword[else] :
identifier[name] , identifier[suffix] = identifier[st] , literal[string]
keyword[if] identifier[len] ( identifier[suffix] )!= literal[int] :
identifier[name] = identifier[st]
keyword[return] identifier[name] | def gene_name(st, exclude=('ev',), sep='.'):
"""
Helper functions in the BLAST filtering to get rid alternative splicings.
This is ugly, but different annotation groups are inconsistent with respect
to how the alternative splicings are named. Mostly it can be done by removing
the suffix, except for ones in the exclude list.
"""
if any((st.startswith(x) for x in exclude)):
sep = None # depends on [control=['if'], data=[]]
st = st.split('|')[0]
if sep and sep in st:
(name, suffix) = st.rsplit(sep, 1) # depends on [control=['if'], data=[]]
else:
(name, suffix) = (st, '')
# We only want to remove suffix that are isoforms, longer suffix would
# suggest that it is part of the right gene name
if len(suffix) != 1:
name = st # depends on [control=['if'], data=[]]
return name |
def parse_segdict_key(self, key):
    """
    Split a segdict key of the form ``'ifo:name'``.

    :returns: ``(ifo, name)`` tuple.
    :raises ValueError: if *key* does not contain exactly one colon.
    """
    parts = key.split(':')
    if len(parts) != 2:
        raise ValueError(
            "Key should be of the format 'ifo:name', got %s." % (key,))
    return parts[0], parts[1]
constant[
Return ifo and name from the segdict key.
]
variable[splt] assign[=] call[name[key].split, parameter[constant[:]]]
if compare[call[name[len], parameter[name[splt]]] equal[==] constant[2]] begin[:]
return[tuple[[<ast.Subscript object at 0x7da2044c2b90>, <ast.Subscript object at 0x7da2044c3430>]]] | keyword[def] identifier[parse_segdict_key] ( identifier[self] , identifier[key] ):
literal[string]
identifier[splt] = identifier[key] . identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[splt] )== literal[int] :
keyword[return] identifier[splt] [ literal[int] ], identifier[splt] [ literal[int] ]
keyword[else] :
identifier[err_msg] = literal[string] %( identifier[key] ,)
keyword[raise] identifier[ValueError] ( identifier[err_msg] ) | def parse_segdict_key(self, key):
"""
Return ifo and name from the segdict key.
"""
splt = key.split(':')
if len(splt) == 2:
return (splt[0], splt[1]) # depends on [control=['if'], data=[]]
else:
err_msg = "Key should be of the format 'ifo:name', got %s." % (key,)
raise ValueError(err_msg) |
def status(self) -> str:
    """Return the workflow stage status."""
    # The status can be modified externally, so re-read the stored
    # configuration rather than trusting the cached copy.
    config = self._load_config()
    self._config = config
    return config.get('status')
constant[Return the workflow stage status.]
name[self]._config assign[=] call[name[self]._load_config, parameter[]]
return[call[name[self]._config.get, parameter[constant[status]]]] | keyword[def] identifier[status] ( identifier[self] )-> identifier[str] :
literal[string]
identifier[self] . identifier[_config] = identifier[self] . identifier[_load_config] ()
keyword[return] identifier[self] . identifier[_config] . identifier[get] ( literal[string] ) | def status(self) -> str:
"""Return the workflow stage status."""
# As status is a modifiable property, have to reload from the db.
self._config = self._load_config()
return self._config.get('status') |
def update_status(self, header, message):
    """
    Record an incoming status *message* and, when a curses message box
    is attached, render the header fields and message into it.

    The status-dictionary lock is held for the whole update; an entry
    is replaced only when the incoming timestamp is not older than the
    stored one.
    """
    with self._lock:
        box = self.message_box
        if box:
            box.erase()
            box.move(0, 0)
            label_attr = curses.color_pair(1)
            for position, field in enumerate(header):
                prefix = (field + ":") if position == 0 else (", " + field + ":")
                box.addstr(prefix, label_attr)
                box.addstr(header[field])
            box.addstr(": ", label_attr)
            box.addstr(str(message), curses.color_pair(2) + curses.A_BOLD)
            box.refresh()
        host = message["host"]
        # Only parse the timestamp when a comparison is actually needed.
        if (host not in self._node_status
                or int(header["timestamp"])
                >= self._node_status[host]["last_seen"]):
            self._node_status[host] = message
            # node_status[host] is `message` itself, so tagging the
            # message tags the stored entry.
            message["last_seen"] = int(header["timestamp"])
constant[Process incoming status message. Acquire lock for status dictionary before updating.]
with name[self]._lock begin[:]
if name[self].message_box begin[:]
call[name[self].message_box.erase, parameter[]]
call[name[self].message_box.move, parameter[constant[0], constant[0]]]
for taget[tuple[[<ast.Name object at 0x7da18c4cc2e0>, <ast.Name object at 0x7da18c4cf580>]]] in starred[call[name[enumerate], parameter[name[header]]]] begin[:]
if compare[name[n] equal[==] constant[0]] begin[:]
call[name[self].message_box.addstr, parameter[binary_operation[name[field] + constant[:]], call[name[curses].color_pair, parameter[constant[1]]]]]
call[name[self].message_box.addstr, parameter[call[name[header]][name[field]]]]
call[name[self].message_box.addstr, parameter[constant[: ], call[name[curses].color_pair, parameter[constant[1]]]]]
call[name[self].message_box.addstr, parameter[call[name[str], parameter[name[message]]], binary_operation[call[name[curses].color_pair, parameter[constant[2]]] + name[curses].A_BOLD]]]
call[name[self].message_box.refresh, parameter[]]
if <ast.BoolOp object at 0x7da18c4cd450> begin[:]
call[name[self]._node_status][call[name[message]][constant[host]]] assign[=] name[message]
call[call[name[self]._node_status][call[name[message]][constant[host]]]][constant[last_seen]] assign[=] call[name[int], parameter[call[name[header]][constant[timestamp]]]] | keyword[def] identifier[update_status] ( identifier[self] , identifier[header] , identifier[message] ):
literal[string]
keyword[with] identifier[self] . identifier[_lock] :
keyword[if] identifier[self] . identifier[message_box] :
identifier[self] . identifier[message_box] . identifier[erase] ()
identifier[self] . identifier[message_box] . identifier[move] ( literal[int] , literal[int] )
keyword[for] identifier[n] , identifier[field] keyword[in] identifier[enumerate] ( identifier[header] ):
keyword[if] identifier[n] == literal[int] :
identifier[self] . identifier[message_box] . identifier[addstr] ( identifier[field] + literal[string] , identifier[curses] . identifier[color_pair] ( literal[int] ))
keyword[else] :
identifier[self] . identifier[message_box] . identifier[addstr] (
literal[string] + identifier[field] + literal[string] , identifier[curses] . identifier[color_pair] ( literal[int] )
)
identifier[self] . identifier[message_box] . identifier[addstr] ( identifier[header] [ identifier[field] ])
identifier[self] . identifier[message_box] . identifier[addstr] ( literal[string] , identifier[curses] . identifier[color_pair] ( literal[int] ))
identifier[self] . identifier[message_box] . identifier[addstr] (
identifier[str] ( identifier[message] ), identifier[curses] . identifier[color_pair] ( literal[int] )+ identifier[curses] . identifier[A_BOLD]
)
identifier[self] . identifier[message_box] . identifier[refresh] ()
keyword[if] (
identifier[message] [ literal[string] ] keyword[not] keyword[in] identifier[self] . identifier[_node_status]
keyword[or] identifier[int] ( identifier[header] [ literal[string] ])
>= identifier[self] . identifier[_node_status] [ identifier[message] [ literal[string] ]][ literal[string] ]
):
identifier[self] . identifier[_node_status] [ identifier[message] [ literal[string] ]]= identifier[message]
identifier[self] . identifier[_node_status] [ identifier[message] [ literal[string] ]][ literal[string] ]= identifier[int] (
identifier[header] [ literal[string] ]
) | def update_status(self, header, message):
"""Process incoming status message. Acquire lock for status dictionary before updating."""
with self._lock:
if self.message_box:
self.message_box.erase()
self.message_box.move(0, 0)
for (n, field) in enumerate(header):
if n == 0:
self.message_box.addstr(field + ':', curses.color_pair(1)) # depends on [control=['if'], data=[]]
else:
self.message_box.addstr(', ' + field + ':', curses.color_pair(1))
self.message_box.addstr(header[field]) # depends on [control=['for'], data=[]]
self.message_box.addstr(': ', curses.color_pair(1))
self.message_box.addstr(str(message), curses.color_pair(2) + curses.A_BOLD)
self.message_box.refresh() # depends on [control=['if'], data=[]]
if message['host'] not in self._node_status or int(header['timestamp']) >= self._node_status[message['host']]['last_seen']:
self._node_status[message['host']] = message
self._node_status[message['host']]['last_seen'] = int(header['timestamp']) # depends on [control=['if'], data=[]] # depends on [control=['with'], data=[]] |
def write(self):
    """Serialize the in-memory log dictionary to ``self.log_path`` as JSON."""
    with open(self.log_path, "w") as log_file:
        # indent=1 keeps the file human-readable while staying compact
        json.dump(self.log_dict, log_file, indent=1)
constant[Dump JSON to file]
with call[name[open], parameter[name[self].log_path, constant[w]]] begin[:]
call[name[json].dump, parameter[name[self].log_dict, name[f]]] | keyword[def] identifier[write] ( identifier[self] ):
literal[string]
keyword[with] identifier[open] ( identifier[self] . identifier[log_path] , literal[string] ) keyword[as] identifier[f] :
identifier[json] . identifier[dump] ( identifier[self] . identifier[log_dict] , identifier[f] , identifier[indent] = literal[int] ) | def write(self):
"""Dump JSON to file"""
with open(self.log_path, 'w') as f:
json.dump(self.log_dict, f, indent=1) # depends on [control=['with'], data=['f']] |
def rpc(self, service, routing_id, method, args=None, kwargs=None,
        timeout=None, broadcast=False):
    '''Send an RPC request and block until the response arrives.

    :param service: the service name (the routing top level)
    :type service: anything hash-able
    :param routing_id:
        The id used for routing within the registered handlers of the
        service.
    :type routing_id: int
    :param method: the method name to call
    :type method: string
    :param args:
        The positional arguments to send along with the request. If the
        first argument is a generator, the request will be sent in chunks
        :ref:`(more info) <chunked-messages>`.
    :type args: tuple
    :param kwargs: keyword arguments to send along with the request
    :type kwargs: dict
    :param timeout:
        maximum time to wait for a response in seconds. with None, there is
        no timeout.
    :type timeout: float or None
    :param broadcast:
        if ``True``, send to every peer with a matching subscription
    :type broadcast: bool
    :returns:
        a list of the objects returned by the RPC's targets. these could be
        of any serializable type.
    :raises:
        - :class:`Unroutable <junction.errors.Unroutable>` if no peers are
          registered to receive the message
        - :class:`WaitTimeout <junction.errors.WaitTimeout>` if a timeout
          was provided and it expires
    '''
    # Fire the request, then block on the pending result handle.
    pending = self.send_rpc(
        service, routing_id, method, args or (), kwargs or {}, broadcast)
    return pending.get(timeout)
constant[Send an RPC request and return the corresponding response
This will block waiting until the response has been received.
:param service: the service name (the routing top level)
:type service: anything hash-able
:param routing_id:
The id used for routing within the registered handlers of the
service.
:type routing_id: int
:param method: the method name to call
:type method: string
:param args:
The positional arguments to send along with the request. If the
first argument is a generator, the request will be sent in chunks
:ref:`(more info) <chunked-messages>`.
:type args: tuple
:param kwargs: keyword arguments to send along with the request
:type kwargs: dict
:param timeout:
maximum time to wait for a response in seconds. with None, there is
no timeout.
:type timeout: float or None
:param broadcast:
if ``True``, send to every peer with a matching subscription
:type broadcast: bool
:returns:
a list of the objects returned by the RPC's targets. these could be
of any serializable type.
:raises:
- :class:`Unroutable <junction.errors.Unroutable>` if no peers are
registered to receive the message
- :class:`WaitTimeout <junction.errors.WaitTimeout>` if a timeout
was provided and it expires
]
variable[rpc] assign[=] call[name[self].send_rpc, parameter[name[service], name[routing_id], name[method], <ast.BoolOp object at 0x7da2044c0a00>, <ast.BoolOp object at 0x7da2044c3070>, name[broadcast]]]
return[call[name[rpc].get, parameter[name[timeout]]]] | keyword[def] identifier[rpc] ( identifier[self] , identifier[service] , identifier[routing_id] , identifier[method] , identifier[args] = keyword[None] , identifier[kwargs] = keyword[None] ,
identifier[timeout] = keyword[None] , identifier[broadcast] = keyword[False] ):
literal[string]
identifier[rpc] = identifier[self] . identifier[send_rpc] ( identifier[service] , identifier[routing_id] , identifier[method] ,
identifier[args] keyword[or] (), identifier[kwargs] keyword[or] {}, identifier[broadcast] )
keyword[return] identifier[rpc] . identifier[get] ( identifier[timeout] ) | def rpc(self, service, routing_id, method, args=None, kwargs=None, timeout=None, broadcast=False):
"""Send an RPC request and return the corresponding response
This will block waiting until the response has been received.
:param service: the service name (the routing top level)
:type service: anything hash-able
:param routing_id:
The id used for routing within the registered handlers of the
service.
:type routing_id: int
:param method: the method name to call
:type method: string
:param args:
The positional arguments to send along with the request. If the
first argument is a generator, the request will be sent in chunks
:ref:`(more info) <chunked-messages>`.
:type args: tuple
:param kwargs: keyword arguments to send along with the request
:type kwargs: dict
:param timeout:
maximum time to wait for a response in seconds. with None, there is
no timeout.
:type timeout: float or None
:param broadcast:
if ``True``, send to every peer with a matching subscription
:type broadcast: bool
:returns:
a list of the objects returned by the RPC's targets. these could be
of any serializable type.
:raises:
- :class:`Unroutable <junction.errors.Unroutable>` if no peers are
registered to receive the message
- :class:`WaitTimeout <junction.errors.WaitTimeout>` if a timeout
was provided and it expires
"""
rpc = self.send_rpc(service, routing_id, method, args or (), kwargs or {}, broadcast)
return rpc.get(timeout) |
def init_login(self, from_local=False):
    """Display login screen. May ask for local data loading if from_local is True.

    :param from_local: when True, offer to load previously saved local data
        instead of fetching from the server.
    """
    # Drop any toolbar left over from a previously displayed view.
    if self.toolbar:
        self.removeToolBar(self.toolbar)
    widget_login = login.Loading(self.statusBar(), self.theory_main)
    self.centralWidget().addWidget(widget_login)
    # Wire the widget's lifecycle signals to the main-window handlers:
    # successful load opens the tabs, cancel quits, update triggers the
    # at-launch update handler.
    widget_login.loaded.connect(self.init_tabs)
    widget_login.canceled.connect(self._quit)
    widget_login.updated.connect(self.on_update_at_launch)
    if from_local:
        widget_login.propose_load_local()
    else:
        # Data came from the server; show a transient (5 s) status message.
        self.statusBar().showMessage("Données chargées depuis le serveur.", 5000)
constant[Display login screen. May ask for local data loading if from_local is True.]
if name[self].toolbar begin[:]
call[name[self].removeToolBar, parameter[name[self].toolbar]]
variable[widget_login] assign[=] call[name[login].Loading, parameter[call[name[self].statusBar, parameter[]], name[self].theory_main]]
call[call[name[self].centralWidget, parameter[]].addWidget, parameter[name[widget_login]]]
call[name[widget_login].loaded.connect, parameter[name[self].init_tabs]]
call[name[widget_login].canceled.connect, parameter[name[self]._quit]]
call[name[widget_login].updated.connect, parameter[name[self].on_update_at_launch]]
if name[from_local] begin[:]
call[name[widget_login].propose_load_local, parameter[]] | keyword[def] identifier[init_login] ( identifier[self] , identifier[from_local] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[toolbar] :
identifier[self] . identifier[removeToolBar] ( identifier[self] . identifier[toolbar] )
identifier[widget_login] = identifier[login] . identifier[Loading] ( identifier[self] . identifier[statusBar] (), identifier[self] . identifier[theory_main] )
identifier[self] . identifier[centralWidget] (). identifier[addWidget] ( identifier[widget_login] )
identifier[widget_login] . identifier[loaded] . identifier[connect] ( identifier[self] . identifier[init_tabs] )
identifier[widget_login] . identifier[canceled] . identifier[connect] ( identifier[self] . identifier[_quit] )
identifier[widget_login] . identifier[updated] . identifier[connect] ( identifier[self] . identifier[on_update_at_launch] )
keyword[if] identifier[from_local] :
identifier[widget_login] . identifier[propose_load_local] ()
keyword[else] :
identifier[self] . identifier[statusBar] (). identifier[showMessage] ( literal[string] , literal[int] ) | def init_login(self, from_local=False):
"""Display login screen. May ask for local data loading if from_local is True."""
if self.toolbar:
self.removeToolBar(self.toolbar) # depends on [control=['if'], data=[]]
widget_login = login.Loading(self.statusBar(), self.theory_main)
self.centralWidget().addWidget(widget_login)
widget_login.loaded.connect(self.init_tabs)
widget_login.canceled.connect(self._quit)
widget_login.updated.connect(self.on_update_at_launch)
if from_local:
widget_login.propose_load_local() # depends on [control=['if'], data=[]]
else:
self.statusBar().showMessage('Données chargées depuis le serveur.', 5000) |
def participation_coef_sign(W, ci):
    '''
    Participation coefficient is a measure of diversity of intermodular
    connections of individual nodes, computed here separately over the
    positive and the negative weights of a signed network.

    Parameters
    ----------
    W : NxN np.ndarray
        undirected connection matrix with positive and negative weights
    ci : Nx1 np.ndarray
        community affiliation vector

    Returns
    -------
    Ppos : Nx1 np.ndarray
        participation coefficient from positive weights
    Pneg : Nx1 np.ndarray
        participation coefficient from negative weights
    '''
    # Relabel communities as consecutive integers starting at 1.
    _, ci = np.unique(ci, return_inverse=True)
    ci += 1
    n = len(W)  # number of vertices

    def _pcoef(weights):
        strength = np.sum(weights, axis=1)
        # community affiliation of each neighbor (0 where no edge)
        neighbor_ci = np.dot(weights != 0, np.diag(ci))
        within_sq = np.zeros((n,))
        for module in range(1, int(np.max(ci)) + 1):
            within_sq += np.square(
                np.sum(weights * (neighbor_ci == module), axis=1))
        coef = np.ones((n,)) - within_sq / np.square(strength)
        coef[np.isnan(coef)] = 0
        # coefficient is 0 for nodes without (out)neighbors
        coef[np.logical_not(coef)] = 0
        return coef

    # division by zero for disconnected nodes is handled explicitly above,
    # so silence the corresponding floating-point warning
    with np.errstate(invalid='ignore'):
        Ppos = _pcoef(W * (W > 0))
        Pneg = _pcoef(-W * (W < 0))
    return Ppos, Pneg
constant[
Participation coefficient is a measure of diversity of intermodular
connections of individual nodes.
Parameters
----------
W : NxN np.ndarray
undirected connection matrix with positive and negative weights
ci : Nx1 np.ndarray
community affiliation vector
Returns
-------
Ppos : Nx1 np.ndarray
participation coefficient from positive weights
Pneg : Nx1 np.ndarray
participation coefficient from negative weights
]
<ast.Tuple object at 0x7da1b07ce050> assign[=] call[name[np].unique, parameter[name[ci]]]
<ast.AugAssign object at 0x7da1b07cc850>
variable[n] assign[=] call[name[len], parameter[name[W]]]
def function[pcoef, parameter[W_]]:
variable[S] assign[=] call[name[np].sum, parameter[name[W_]]]
variable[Gc] assign[=] call[name[np].dot, parameter[call[name[np].logical_not, parameter[compare[name[W_] equal[==] constant[0]]]], call[name[np].diag, parameter[name[ci]]]]]
variable[Sc2] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b07ccf10>]]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], call[name[int], parameter[binary_operation[call[name[np].max, parameter[name[ci]]] + constant[1]]]]]]] begin[:]
<ast.AugAssign object at 0x7da1b07cfa60>
variable[P] assign[=] binary_operation[call[name[np].ones, parameter[tuple[[<ast.Name object at 0x7da1b07cc2b0>]]]] - binary_operation[name[Sc2] / call[name[np].square, parameter[name[S]]]]]
call[name[P]][call[name[np].where, parameter[call[name[np].isnan, parameter[name[P]]]]]] assign[=] constant[0]
call[name[P]][call[name[np].where, parameter[call[name[np].logical_not, parameter[name[P]]]]]] assign[=] constant[0]
return[name[P]]
with call[name[np].errstate, parameter[]] begin[:]
variable[Ppos] assign[=] call[name[pcoef], parameter[binary_operation[name[W] * compare[name[W] greater[>] constant[0]]]]]
variable[Pneg] assign[=] call[name[pcoef], parameter[binary_operation[<ast.UnaryOp object at 0x7da1b07cea70> * compare[name[W] less[<] constant[0]]]]]
return[tuple[[<ast.Name object at 0x7da1b07cea10>, <ast.Name object at 0x7da1b07ceb60>]]] | keyword[def] identifier[participation_coef_sign] ( identifier[W] , identifier[ci] ):
literal[string]
identifier[_] , identifier[ci] = identifier[np] . identifier[unique] ( identifier[ci] , identifier[return_inverse] = keyword[True] )
identifier[ci] += literal[int]
identifier[n] = identifier[len] ( identifier[W] )
keyword[def] identifier[pcoef] ( identifier[W_] ):
identifier[S] = identifier[np] . identifier[sum] ( identifier[W_] , identifier[axis] = literal[int] )
identifier[Gc] = identifier[np] . identifier[dot] ( identifier[np] . identifier[logical_not] ( identifier[W_] == literal[int] ), identifier[np] . identifier[diag] ( identifier[ci] ))
identifier[Sc2] = identifier[np] . identifier[zeros] (( identifier[n] ,))
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[int] ( identifier[np] . identifier[max] ( identifier[ci] )+ literal[int] )):
identifier[Sc2] += identifier[np] . identifier[square] ( identifier[np] . identifier[sum] ( identifier[W_] *( identifier[Gc] == identifier[i] ), identifier[axis] = literal[int] ))
identifier[P] = identifier[np] . identifier[ones] (( identifier[n] ,))- identifier[Sc2] / identifier[np] . identifier[square] ( identifier[S] )
identifier[P] [ identifier[np] . identifier[where] ( identifier[np] . identifier[isnan] ( identifier[P] ))]= literal[int]
identifier[P] [ identifier[np] . identifier[where] ( identifier[np] . identifier[logical_not] ( identifier[P] ))]= literal[int]
keyword[return] identifier[P]
keyword[with] identifier[np] . identifier[errstate] ( identifier[invalid] = literal[string] ):
identifier[Ppos] = identifier[pcoef] ( identifier[W] *( identifier[W] > literal[int] ))
identifier[Pneg] = identifier[pcoef] (- identifier[W] *( identifier[W] < literal[int] ))
keyword[return] identifier[Ppos] , identifier[Pneg] | def participation_coef_sign(W, ci):
"""
Participation coefficient is a measure of diversity of intermodular
connections of individual nodes.
Parameters
----------
W : NxN np.ndarray
undirected connection matrix with positive and negative weights
ci : Nx1 np.ndarray
community affiliation vector
Returns
-------
Ppos : Nx1 np.ndarray
participation coefficient from positive weights
Pneg : Nx1 np.ndarray
participation coefficient from negative weights
"""
(_, ci) = np.unique(ci, return_inverse=True)
ci += 1
n = len(W) # number of vertices
def pcoef(W_):
S = np.sum(W_, axis=1) # strength
# neighbor community affil.
Gc = np.dot(np.logical_not(W_ == 0), np.diag(ci))
Sc2 = np.zeros((n,))
for i in range(1, int(np.max(ci) + 1)):
Sc2 += np.square(np.sum(W_ * (Gc == i), axis=1)) # depends on [control=['for'], data=['i']]
P = np.ones((n,)) - Sc2 / np.square(S)
P[np.where(np.isnan(P))] = 0
P[np.where(np.logical_not(P))] = 0 # p_ind=0 if no (out)neighbors
return P
#explicitly ignore compiler warning for division by zero
with np.errstate(invalid='ignore'):
Ppos = pcoef(W * (W > 0))
Pneg = pcoef(-W * (W < 0)) # depends on [control=['with'], data=[]]
return (Ppos, Pneg) |
def setup(
        hosts,
        default_keyspace,
        consistency=ConsistencyLevel.ONE,
        lazy_connect=False,
        retry_connect=False,
        **kwargs):
    """
    Records the hosts and connects to one of them
    :param hosts: list of hosts, see http://datastax.github.io/python-driver/api/cassandra/cluster.html
    :type hosts: list
    :param default_keyspace: The default keyspace to use
    :type default_keyspace: str
    :param consistency: The global consistency level
    :type consistency: int
    :param lazy_connect: True if should not connect until first use
    :type lazy_connect: bool
    :param retry_connect: True if we should retry to connect even if there
        was a connection failure initially
    :type retry_connect: bool
    """
    global cluster, session, default_consistency_level, lazy_connect_args
    if 'username' in kwargs or 'password' in kwargs:
        raise CQLEngineException("Username & Password are now handled by using the native driver's auth_provider")
    if not default_keyspace:
        raise UndefinedKeyspaceException()
    from cqlengine import models
    models.DEFAULT_KEYSPACE = default_keyspace
    default_consistency_level = consistency

    def _deferred_args():
        # Record the full argument set so the connection can be attempted
        # later (lazy_connect=False forces the deferred call to connect).
        kwargs['default_keyspace'] = default_keyspace
        kwargs['consistency'] = consistency
        kwargs['lazy_connect'] = False
        kwargs['retry_connect'] = retry_connect
        return (hosts, kwargs)

    if lazy_connect:
        lazy_connect_args = _deferred_args()
        return
    cluster = Cluster(hosts, **kwargs)
    try:
        session = cluster.connect()
    except NoHostAvailable:
        if retry_connect:
            # Keep the args so a later call can retry the connection.
            lazy_connect_args = _deferred_args()
        raise
    session.row_factory = dict_factory
constant[
Records the hosts and connects to one of them
:param hosts: list of hosts, see http://datastax.github.io/python-driver/api/cassandra/cluster.html
:type hosts: list
:param default_keyspace: The default keyspace to use
:type default_keyspace: str
:param consistency: The global consistency level
:type consistency: int
:param lazy_connect: True if should not connect until first use
:type lazy_connect: bool
:param retry_connect: bool
:param retry_connect: True if we should retry to connect even if there was a connection failure initially
]
<ast.Global object at 0x7da18f812650>
if <ast.BoolOp object at 0x7da18f813fa0> begin[:]
<ast.Raise object at 0x7da18f811c30>
if <ast.UnaryOp object at 0x7da18f811780> begin[:]
<ast.Raise object at 0x7da18f810b50>
from relative_module[cqlengine] import module[models]
name[models].DEFAULT_KEYSPACE assign[=] name[default_keyspace]
variable[default_consistency_level] assign[=] name[consistency]
if name[lazy_connect] begin[:]
call[name[kwargs]][constant[default_keyspace]] assign[=] name[default_keyspace]
call[name[kwargs]][constant[consistency]] assign[=] name[consistency]
call[name[kwargs]][constant[lazy_connect]] assign[=] constant[False]
call[name[kwargs]][constant[retry_connect]] assign[=] name[retry_connect]
variable[lazy_connect_args] assign[=] tuple[[<ast.Name object at 0x7da18f813580>, <ast.Name object at 0x7da18f810a60>]]
return[None]
variable[cluster] assign[=] call[name[Cluster], parameter[name[hosts]]]
<ast.Try object at 0x7da18f810730>
name[session].row_factory assign[=] name[dict_factory] | keyword[def] identifier[setup] (
identifier[hosts] ,
identifier[default_keyspace] ,
identifier[consistency] = identifier[ConsistencyLevel] . identifier[ONE] ,
identifier[lazy_connect] = keyword[False] ,
identifier[retry_connect] = keyword[False] ,
** identifier[kwargs] ):
literal[string]
keyword[global] identifier[cluster] , identifier[session] , identifier[default_consistency_level] , identifier[lazy_connect_args]
keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[or] literal[string] keyword[in] identifier[kwargs] :
keyword[raise] identifier[CQLEngineException] ( literal[string] )
keyword[if] keyword[not] identifier[default_keyspace] :
keyword[raise] identifier[UndefinedKeyspaceException] ()
keyword[from] identifier[cqlengine] keyword[import] identifier[models]
identifier[models] . identifier[DEFAULT_KEYSPACE] = identifier[default_keyspace]
identifier[default_consistency_level] = identifier[consistency]
keyword[if] identifier[lazy_connect] :
identifier[kwargs] [ literal[string] ]= identifier[default_keyspace]
identifier[kwargs] [ literal[string] ]= identifier[consistency]
identifier[kwargs] [ literal[string] ]= keyword[False]
identifier[kwargs] [ literal[string] ]= identifier[retry_connect]
identifier[lazy_connect_args] =( identifier[hosts] , identifier[kwargs] )
keyword[return]
identifier[cluster] = identifier[Cluster] ( identifier[hosts] ,** identifier[kwargs] )
keyword[try] :
identifier[session] = identifier[cluster] . identifier[connect] ()
keyword[except] identifier[NoHostAvailable] :
keyword[if] identifier[retry_connect] :
identifier[kwargs] [ literal[string] ]= identifier[default_keyspace]
identifier[kwargs] [ literal[string] ]= identifier[consistency]
identifier[kwargs] [ literal[string] ]= keyword[False]
identifier[kwargs] [ literal[string] ]= identifier[retry_connect]
identifier[lazy_connect_args] =( identifier[hosts] , identifier[kwargs] )
keyword[raise]
identifier[session] . identifier[row_factory] = identifier[dict_factory] | def setup(hosts, default_keyspace, consistency=ConsistencyLevel.ONE, lazy_connect=False, retry_connect=False, **kwargs):
"""
Records the hosts and connects to one of them
:param hosts: list of hosts, see http://datastax.github.io/python-driver/api/cassandra/cluster.html
:type hosts: list
:param default_keyspace: The default keyspace to use
:type default_keyspace: str
:param consistency: The global consistency level
:type consistency: int
:param lazy_connect: True if should not connect until first use
:type lazy_connect: bool
:param retry_connect: bool
:param retry_connect: True if we should retry to connect even if there was a connection failure initially
"""
global cluster, session, default_consistency_level, lazy_connect_args
if 'username' in kwargs or 'password' in kwargs:
raise CQLEngineException("Username & Password are now handled by using the native driver's auth_provider") # depends on [control=['if'], data=[]]
if not default_keyspace:
raise UndefinedKeyspaceException() # depends on [control=['if'], data=[]]
from cqlengine import models
models.DEFAULT_KEYSPACE = default_keyspace
default_consistency_level = consistency
if lazy_connect:
kwargs['default_keyspace'] = default_keyspace
kwargs['consistency'] = consistency
kwargs['lazy_connect'] = False
kwargs['retry_connect'] = retry_connect
lazy_connect_args = (hosts, kwargs)
return # depends on [control=['if'], data=[]]
cluster = Cluster(hosts, **kwargs)
try:
session = cluster.connect() # depends on [control=['try'], data=[]]
except NoHostAvailable:
if retry_connect:
kwargs['default_keyspace'] = default_keyspace
kwargs['consistency'] = consistency
kwargs['lazy_connect'] = False
kwargs['retry_connect'] = retry_connect
lazy_connect_args = (hosts, kwargs) # depends on [control=['if'], data=[]]
raise # depends on [control=['except'], data=[]]
session.row_factory = dict_factory |
def read(self, n=None):
    """Read at most *n* characters from this stream.

    Processing-instruction sequences (``<?`` ... ``>``) are stripped from
    the output and do not count toward *n*. If *n* is ``None``, return all
    available characters.
    """
    response = b""
    while n is None or n > 0:
        c = self.stream.read(1)
        if c == b"":
            break  # end of stream
        elif c == b"<":
            c += self.stream.read(1)
            if c == b"<?":
                # Skip the processing instruction up to its closing ">".
                while True:
                    q = self.stream.read(1)
                    # Also stop on EOF (b""): an unterminated processing
                    # instruction must not spin forever on empty reads.
                    if q == b">" or q == b"":
                        break
            else:
                response += c
                if n is not None:
                    n -= len(c)
        else:
            response += c
            if n is not None:
                n -= 1
    return response
constant[Read at most *n* characters from this stream.
If *n* is ``None``, return all available characters.
]
variable[response] assign[=] constant[b'']
while <ast.BoolOp object at 0x7da1b178e530> begin[:]
variable[c] assign[=] call[name[self].stream.read, parameter[constant[1]]]
if compare[name[c] equal[==] constant[b'']] begin[:]
break
return[name[response]] | keyword[def] identifier[read] ( identifier[self] , identifier[n] = keyword[None] ):
literal[string]
identifier[response] = literal[string]
keyword[while] identifier[n] keyword[is] keyword[None] keyword[or] identifier[n] > literal[int] :
identifier[c] = identifier[self] . identifier[stream] . identifier[read] ( literal[int] )
keyword[if] identifier[c] == literal[string] :
keyword[break]
keyword[elif] identifier[c] == literal[string] :
identifier[c] += identifier[self] . identifier[stream] . identifier[read] ( literal[int] )
keyword[if] identifier[c] == literal[string] :
keyword[while] keyword[True] :
identifier[q] = identifier[self] . identifier[stream] . identifier[read] ( literal[int] )
keyword[if] identifier[q] == literal[string] :
keyword[break]
keyword[else] :
identifier[response] += identifier[c]
keyword[if] identifier[n] keyword[is] keyword[not] keyword[None] :
identifier[n] -= identifier[len] ( identifier[c] )
keyword[else] :
identifier[response] += identifier[c]
keyword[if] identifier[n] keyword[is] keyword[not] keyword[None] :
identifier[n] -= literal[int]
keyword[return] identifier[response] | def read(self, n=None):
"""Read at most *n* characters from this stream.
If *n* is ``None``, return all available characters.
"""
response = b''
while n is None or n > 0:
c = self.stream.read(1)
if c == b'':
break # depends on [control=['if'], data=[]]
elif c == b'<':
c += self.stream.read(1)
if c == b'<?':
while True:
q = self.stream.read(1)
if q == b'>':
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
else:
response += c
if n is not None:
n -= len(c) # depends on [control=['if'], data=['n']] # depends on [control=['if'], data=['c']]
else:
response += c
if n is not None:
n -= 1 # depends on [control=['if'], data=['n']] # depends on [control=['while'], data=[]]
return response |
def _calculate_hash(self, filename, **kwargs):
    """
    Calculates the hash of the file and the hash of the file + metadata
    (passed in ``kwargs``).
    Args:
        filename (str): Name of the file
        testnet (bool): testnet flag. Defaults to False
        **kwargs: Additional metadata to be encoded with the file. Only
            the values are used to compute the hash. Values are
            ordered using their keys, so that the computation of the
            hash is consistent. As an example, given::
                File('filename', title='piece title', artist='artist')
            the values ``('artist', 'piece title')`` would be used in that
            order for the computation of the hash.
    """
    with open(filename, 'rb') as fh:
        file_digest = hashlib.md5(fh.read()).hexdigest()
    # Mix the metadata values (key-ordered for determinism) into the payload.
    if kwargs:
        payload = str([urepr(kwargs[key]) for key in sorted(kwargs)]
                      + [file_digest])
    else:
        payload = file_digest
    address_piece_with_metadata = str(
        bin_to_b58check(bin_hash160(payload.encode()),
                        magicbyte=self._magicbyte))
    address_piece = str(
        bin_to_b58check(bin_hash160(file_digest.encode()),
                        magicbyte=self._magicbyte))
    return address_piece, address_piece_with_metadata
constant[
Calculates the hash of the file and the hash of the file + metadata
(passed in ``kwargs``).
Args:
filename (str): Name of the file
testnet (bool): testnet flag. Defaults to False
**kwargs: Additional metadata to be encoded with the file. Only
the values are used to compute the hash. Values are
ordered using their keys, so that the computation of the
hash is consistent. As an example, given::
File('filename', title='piece title', artist='artist')
the values ``('artist', 'piece title')`` would be used in that
order for the computation of the hash.
]
with call[name[open], parameter[name[filename], constant[rb]]] begin[:]
variable[file_hash] assign[=] call[call[name[hashlib].md5, parameter[call[name[f].read, parameter[]]]].hexdigest, parameter[]]
if name[kwargs] begin[:]
variable[data] assign[=] call[name[str], parameter[binary_operation[<ast.ListComp object at 0x7da1b09144f0> + list[[<ast.Name object at 0x7da1b0914160>]]]]]
variable[address_piece_with_metadata] assign[=] call[name[str], parameter[call[name[bin_to_b58check], parameter[call[name[bin_hash160], parameter[call[name[data].encode, parameter[]]]]]]]]
variable[address_piece] assign[=] call[name[str], parameter[call[name[bin_to_b58check], parameter[call[name[bin_hash160], parameter[call[name[file_hash].encode, parameter[]]]]]]]]
return[tuple[[<ast.Name object at 0x7da207f03190>, <ast.Name object at 0x7da207f03c10>]]] | keyword[def] identifier[_calculate_hash] ( identifier[self] , identifier[filename] ,** identifier[kwargs] ):
literal[string]
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] :
identifier[file_hash] = identifier[hashlib] . identifier[md5] ( identifier[f] . identifier[read] ()). identifier[hexdigest] ()
keyword[if] identifier[kwargs] :
identifier[data] = identifier[str] (
[ identifier[urepr] ( identifier[kwargs] [ identifier[k] ]) keyword[for] identifier[k] keyword[in] identifier[sorted] ( identifier[kwargs] )]+[ identifier[file_hash] ])
keyword[else] :
identifier[data] = identifier[file_hash]
identifier[address_piece_with_metadata] = identifier[str] (
identifier[bin_to_b58check] ( identifier[bin_hash160] ( identifier[data] . identifier[encode] ()),
identifier[magicbyte] = identifier[self] . identifier[_magicbyte] )
)
identifier[address_piece] = identifier[str] ( identifier[bin_to_b58check] ( identifier[bin_hash160] ( identifier[file_hash] . identifier[encode] ()),
identifier[magicbyte] = identifier[self] . identifier[_magicbyte] ))
keyword[return] identifier[address_piece] , identifier[address_piece_with_metadata] | def _calculate_hash(self, filename, **kwargs):
"""
Calculates the hash of the file and the hash of the file + metadata
(passed in ``kwargs``).
Args:
filename (str): Name of the file
testnet (bool): testnet flag. Defaults to False
**kwargs: Additional metadata to be encoded with the file. Only
the values are used to compute the hash. Values are
ordered using their keys, so that the computation of the
hash is consistent. As an example, given::
File('filename', title='piece title', artist='artist')
the values ``('artist', 'piece title')`` would be used in that
order for the computation of the hash.
"""
with open(filename, 'rb') as f:
file_hash = hashlib.md5(f.read()).hexdigest() # depends on [control=['with'], data=['f']]
if kwargs:
data = str([urepr(kwargs[k]) for k in sorted(kwargs)] + [file_hash]) # depends on [control=['if'], data=[]]
else:
data = file_hash
address_piece_with_metadata = str(bin_to_b58check(bin_hash160(data.encode()), magicbyte=self._magicbyte))
address_piece = str(bin_to_b58check(bin_hash160(file_hash.encode()), magicbyte=self._magicbyte))
return (address_piece, address_piece_with_metadata) |
def apply_splice(a, splice):
    """Apply *splice* (a diff.Delta) to list *a* in place and return it.

    ``splice.text`` isn't always text -- see diff comments.
    """
    start, stop = splice.a, splice.b
    a[start:stop] = splice.text
    return a
constant[mutate a *and* return it. a as list, splice as diff.Delta.]
call[name[a]][<ast.Slice object at 0x7da20c6a9810>] assign[=] name[splice].text
return[name[a]] | keyword[def] identifier[apply_splice] ( identifier[a] , identifier[splice] ):
literal[string]
identifier[a] [ identifier[splice] . identifier[a] : identifier[splice] . identifier[b] ]= identifier[splice] . identifier[text]
keyword[return] identifier[a] | def apply_splice(a, splice):
"""mutate a *and* return it. a as list, splice as diff.Delta."""
a[splice.a:splice.b] = splice.text # text isn't always text. See diff comments.
return a |
def cell_has_code(lines):
    """Return True if the cell contains at least one code line."""
    for index, raw in enumerate(lines):
        content = raw.strip()
        if content.startswith('#'):
            # comment lines are neither code nor blank separators
            continue
        if content:
            return True
        # Blank line: two consecutive blanks mean the cell holds no code.
        if index and not lines[index - 1].strip():
            return False
    return False
constant[Is there any code in this cell?]
for taget[tuple[[<ast.Name object at 0x7da18f58da20>, <ast.Name object at 0x7da18f58f490>]]] in starred[call[name[enumerate], parameter[name[lines]]]] begin[:]
variable[stripped_line] assign[=] call[name[line].strip, parameter[]]
if call[name[stripped_line].startswith, parameter[constant[#]]] begin[:]
continue
if <ast.UnaryOp object at 0x7da18dc07880> begin[:]
if <ast.BoolOp object at 0x7da18dc04df0> begin[:]
return[constant[False]]
continue
return[constant[True]]
return[constant[False]] | keyword[def] identifier[cell_has_code] ( identifier[lines] ):
literal[string]
keyword[for] identifier[i] , identifier[line] keyword[in] identifier[enumerate] ( identifier[lines] ):
identifier[stripped_line] = identifier[line] . identifier[strip] ()
keyword[if] identifier[stripped_line] . identifier[startswith] ( literal[string] ):
keyword[continue]
keyword[if] keyword[not] identifier[stripped_line] :
keyword[if] identifier[i] > literal[int] keyword[and] keyword[not] identifier[lines] [ identifier[i] - literal[int] ]. identifier[strip] ():
keyword[return] keyword[False]
keyword[continue]
keyword[return] keyword[True]
keyword[return] keyword[False] | def cell_has_code(lines):
"""Is there any code in this cell?"""
for (i, line) in enumerate(lines):
stripped_line = line.strip()
if stripped_line.startswith('#'):
continue # depends on [control=['if'], data=[]]
# Two consecutive blank lines?
if not stripped_line:
if i > 0 and (not lines[i - 1].strip()):
return False # depends on [control=['if'], data=[]]
continue # depends on [control=['if'], data=[]]
return True # depends on [control=['for'], data=[]]
return False |
def send_email_sns(sender, subject, message, topic_ARN, image_png):
    """
    Sends notification through AWS SNS. Takes Topic ARN from recipients.

    Does not handle access keys. Use either
    1/ configuration file
    2/ EC2 instance profile

    See also https://boto3.readthedocs.io/en/latest/guide/configuration.html.

    NOTE: ``sender`` and ``image_png`` are accepted for signature
    compatibility with the other notification backends but are not used
    by SNS — presumably intentional; confirm against the caller.
    """
    from boto3 import resource as boto3_resource
    sns = boto3_resource('sns')
    topic = sns.Topic(topic_ARN[0])

    # SNS subjects are limited to 100 characters: keep the head and the
    # tail of an over-long subject with an ellipsis between (48 + 3 + 49).
    if len(subject) > 100:
        subject = subject[0:48] + '...' + subject[-49:]

    response = topic.publish(Subject=subject, Message=message)

    # Lazy %-style arguments: the message is only formatted when DEBUG
    # logging is enabled.  The label is "HTTPStatusCode" to match the
    # actual response-metadata key (the old text said "HTTPSStatusCode").
    logger.debug("Message sent to SNS.\nMessageId: %s,\nRequestId: %s,\n"
                 "HTTPStatusCode: %s",
                 response['MessageId'],
                 response['ResponseMetadata']['RequestId'],
                 response['ResponseMetadata']['HTTPStatusCode'])
constant[
Sends notification through AWS SNS. Takes Topic ARN from recipients.
Does not handle access keys. Use either
1/ configuration file
2/ EC2 instance profile
See also https://boto3.readthedocs.io/en/latest/guide/configuration.html.
]
from relative_module[boto3] import module[resource]
variable[sns] assign[=] call[name[boto3_resource], parameter[constant[sns]]]
variable[topic] assign[=] call[name[sns].Topic, parameter[call[name[topic_ARN]][constant[0]]]]
if compare[call[name[len], parameter[name[subject]]] greater[>] constant[100]] begin[:]
variable[subject] assign[=] binary_operation[binary_operation[call[name[subject]][<ast.Slice object at 0x7da18dc05d20>] + constant[...]] + call[name[subject]][<ast.Slice object at 0x7da18dc07280>]]
variable[response] assign[=] call[name[topic].publish, parameter[]]
call[name[logger].debug, parameter[call[constant[Message sent to SNS.
MessageId: {},
RequestId: {},
HTTPSStatusCode: {}].format, parameter[call[name[response]][constant[MessageId]], call[call[name[response]][constant[ResponseMetadata]]][constant[RequestId]], call[call[name[response]][constant[ResponseMetadata]]][constant[HTTPStatusCode]]]]]] | keyword[def] identifier[send_email_sns] ( identifier[sender] , identifier[subject] , identifier[message] , identifier[topic_ARN] , identifier[image_png] ):
literal[string]
keyword[from] identifier[boto3] keyword[import] identifier[resource] keyword[as] identifier[boto3_resource]
identifier[sns] = identifier[boto3_resource] ( literal[string] )
identifier[topic] = identifier[sns] . identifier[Topic] ( identifier[topic_ARN] [ literal[int] ])
keyword[if] identifier[len] ( identifier[subject] )> literal[int] :
identifier[subject] = identifier[subject] [ literal[int] : literal[int] ]+ literal[string] + identifier[subject] [- literal[int] :]
identifier[response] = identifier[topic] . identifier[publish] ( identifier[Subject] = identifier[subject] , identifier[Message] = identifier[message] )
identifier[logger] . identifier[debug] (( literal[string]
literal[string] ). identifier[format] ( identifier[response] [ literal[string] ],
identifier[response] [ literal[string] ][ literal[string] ],
identifier[response] [ literal[string] ][ literal[string] ])) | def send_email_sns(sender, subject, message, topic_ARN, image_png):
"""
Sends notification through AWS SNS. Takes Topic ARN from recipients.
Does not handle access keys. Use either
1/ configuration file
2/ EC2 instance profile
See also https://boto3.readthedocs.io/en/latest/guide/configuration.html.
"""
from boto3 import resource as boto3_resource
sns = boto3_resource('sns')
topic = sns.Topic(topic_ARN[0])
# Subject is max 100 chars
if len(subject) > 100:
subject = subject[0:48] + '...' + subject[-49:] # depends on [control=['if'], data=[]]
response = topic.publish(Subject=subject, Message=message)
logger.debug('Message sent to SNS.\nMessageId: {},\nRequestId: {},\nHTTPSStatusCode: {}'.format(response['MessageId'], response['ResponseMetadata']['RequestId'], response['ResponseMetadata']['HTTPStatusCode'])) |
def len_cdc_tube(FlowPlant, ConcDoseMax, ConcStock,
                 DiamTubeAvail, HeadlossCDC, LenCDCTubeMax, temp,
                 en_chem, KMinor):
    """The length of tubing may be longer than the max specified if the stock
    concentration is too high to give a viable solution with the specified
    length of tubing."""
    # Candidate tube lengths for every available tube diameter.
    tube_lengths = _length_cdc_tube_array(FlowPlant, ConcDoseMax, ConcStock,
                                          DiamTubeAvail, HeadlossCDC, temp,
                                          en_chem, KMinor)
    # Index of the diameter selected by the CDC sizing routine.
    best_index = i_cdc(FlowPlant, ConcDoseMax, ConcStock,
                       DiamTubeAvail, HeadlossCDC, LenCDCTubeMax, temp,
                       en_chem, KMinor)
    # Strip the units wrapper and return the bare magnitude.
    return tube_lengths[best_index].magnitude
constant[The length of tubing may be longer than the max specified if the stock
concentration is too high to give a viable solution with the specified
length of tubing.]
variable[index] assign[=] call[name[i_cdc], parameter[name[FlowPlant], name[ConcDoseMax], name[ConcStock], name[DiamTubeAvail], name[HeadlossCDC], name[LenCDCTubeMax], name[temp], name[en_chem], name[KMinor]]]
variable[len_cdc_tube] assign[=] call[call[name[_length_cdc_tube_array], parameter[name[FlowPlant], name[ConcDoseMax], name[ConcStock], name[DiamTubeAvail], name[HeadlossCDC], name[temp], name[en_chem], name[KMinor]]]][name[index]].magnitude
return[name[len_cdc_tube]] | keyword[def] identifier[len_cdc_tube] ( identifier[FlowPlant] , identifier[ConcDoseMax] , identifier[ConcStock] ,
identifier[DiamTubeAvail] , identifier[HeadlossCDC] , identifier[LenCDCTubeMax] , identifier[temp] ,
identifier[en_chem] , identifier[KMinor] ):
literal[string]
identifier[index] = identifier[i_cdc] ( identifier[FlowPlant] , identifier[ConcDoseMax] , identifier[ConcStock] ,
identifier[DiamTubeAvail] , identifier[HeadlossCDC] , identifier[LenCDCTubeMax] , identifier[temp] ,
identifier[en_chem] , identifier[KMinor] )
identifier[len_cdc_tube] =( identifier[_length_cdc_tube_array] ( identifier[FlowPlant] , identifier[ConcDoseMax] , identifier[ConcStock] ,
identifier[DiamTubeAvail] , identifier[HeadlossCDC] , identifier[temp] , identifier[en_chem] ,
identifier[KMinor] ))[ identifier[index] ]. identifier[magnitude]
keyword[return] identifier[len_cdc_tube] | def len_cdc_tube(FlowPlant, ConcDoseMax, ConcStock, DiamTubeAvail, HeadlossCDC, LenCDCTubeMax, temp, en_chem, KMinor):
"""The length of tubing may be longer than the max specified if the stock
concentration is too high to give a viable solution with the specified
length of tubing."""
index = i_cdc(FlowPlant, ConcDoseMax, ConcStock, DiamTubeAvail, HeadlossCDC, LenCDCTubeMax, temp, en_chem, KMinor)
len_cdc_tube = _length_cdc_tube_array(FlowPlant, ConcDoseMax, ConcStock, DiamTubeAvail, HeadlossCDC, temp, en_chem, KMinor)[index].magnitude
return len_cdc_tube |
def old_indices(self, names, axis=None):
    """get the row and col indices of names. If axis is None, two ndarrays
    are returned, corresponding the indices of names for each axis

    Parameters
    ----------
    names : iterable
        column and/or row names
    axis : (int) (optional)
        the axis to search.

    Returns
    -------
    numpy.ndarray : numpy.ndarray
        indices of names.

    """
    warnings.warn("Matrix.old_indices() is deprecated - only here for testing. Use Matrix.indices()",PyemuWarning)
    row_idxs, col_idxs = [], []
    for name in names:
        # Row/col names are stored lowercase; normalize once so that the
        # membership tests and the .index() lookups agree.  (Previously
        # the check used name.lower() but .index() used the raw name, so
        # a mixed-case name passed the check then raised ValueError.)
        lname = name.lower()
        if lname not in self.col_names \
                and lname not in self.row_names:
            raise Exception('Matrix.indices(): name not found: ' + name)
        if lname in self.col_names:
            col_idxs.append(self.col_names.index(lname))
        if lname in self.row_names:
            row_idxs.append(self.row_names.index(lname))
    if axis is None:
        return np.array(row_idxs, dtype=np.int32),\
               np.array(col_idxs, dtype=np.int32)
    elif axis == 0:
        # Every requested name must resolve to a row on axis 0.
        if len(row_idxs) != len(names):
            raise Exception("Matrix.indices(): " +
                            "not all names found in row_names")
        return np.array(row_idxs, dtype=np.int32)
    elif axis == 1:
        # Every requested name must resolve to a column on axis 1.
        if len(col_idxs) != len(names):
            raise Exception("Matrix.indices(): " +
                            "not all names found in col_names")
        return np.array(col_idxs, dtype=np.int32)
    else:
        raise Exception("Matrix.indices(): " +
                        "axis argument must 0 or 1, not:" + str(axis))
constant[get the row and col indices of names. If axis is None, two ndarrays
are returned, corresponding the indices of names for each axis
Parameters
----------
names : iterable
column and/or row names
axis : (int) (optional)
the axis to search.
Returns
-------
numpy.ndarray : numpy.ndarray
indices of names.
]
call[name[warnings].warn, parameter[constant[Matrix.old_indices() is deprecated - only here for testing. Use Matrix.indices()], name[PyemuWarning]]]
<ast.Tuple object at 0x7da18eb54a30> assign[=] tuple[[<ast.List object at 0x7da18eb558d0>, <ast.List object at 0x7da18eb55900>]]
for taget[name[name]] in starred[name[names]] begin[:]
if <ast.BoolOp object at 0x7da18eb55d50> begin[:]
<ast.Raise object at 0x7da18eb54490>
if compare[call[name[name].lower, parameter[]] in name[self].col_names] begin[:]
call[name[col_idxs].append, parameter[call[name[self].col_names.index, parameter[name[name]]]]]
if compare[call[name[name].lower, parameter[]] in name[self].row_names] begin[:]
call[name[row_idxs].append, parameter[call[name[self].row_names.index, parameter[name[name]]]]]
if compare[name[axis] is constant[None]] begin[:]
return[tuple[[<ast.Call object at 0x7da2054a5960>, <ast.Call object at 0x7da2054a4f40>]]] | keyword[def] identifier[old_indices] ( identifier[self] , identifier[names] , identifier[axis] = keyword[None] ):
literal[string]
identifier[warnings] . identifier[warn] ( literal[string] , identifier[PyemuWarning] )
identifier[row_idxs] , identifier[col_idxs] =[],[]
keyword[for] identifier[name] keyword[in] identifier[names] :
keyword[if] identifier[name] . identifier[lower] () keyword[not] keyword[in] identifier[self] . identifier[col_names] keyword[and] identifier[name] . identifier[lower] () keyword[not] keyword[in] identifier[self] . identifier[row_names] :
keyword[raise] identifier[Exception] ( literal[string] + identifier[name] )
keyword[if] identifier[name] . identifier[lower] () keyword[in] identifier[self] . identifier[col_names] :
identifier[col_idxs] . identifier[append] ( identifier[self] . identifier[col_names] . identifier[index] ( identifier[name] ))
keyword[if] identifier[name] . identifier[lower] () keyword[in] identifier[self] . identifier[row_names] :
identifier[row_idxs] . identifier[append] ( identifier[self] . identifier[row_names] . identifier[index] ( identifier[name] ))
keyword[if] identifier[axis] keyword[is] keyword[None] :
keyword[return] identifier[np] . identifier[array] ( identifier[row_idxs] , identifier[dtype] = identifier[np] . identifier[int32] ), identifier[np] . identifier[array] ( identifier[col_idxs] , identifier[dtype] = identifier[np] . identifier[int32] )
keyword[elif] identifier[axis] == literal[int] :
keyword[if] identifier[len] ( identifier[row_idxs] )!= identifier[len] ( identifier[names] ):
keyword[raise] identifier[Exception] ( literal[string] +
literal[string] )
keyword[return] identifier[np] . identifier[array] ( identifier[row_idxs] , identifier[dtype] = identifier[np] . identifier[int32] )
keyword[elif] identifier[axis] == literal[int] :
keyword[if] identifier[len] ( identifier[col_idxs] )!= identifier[len] ( identifier[names] ):
keyword[raise] identifier[Exception] ( literal[string] +
literal[string] )
keyword[return] identifier[np] . identifier[array] ( identifier[col_idxs] , identifier[dtype] = identifier[np] . identifier[int32] )
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] +
literal[string] + identifier[str] ( identifier[axis] )) | def old_indices(self, names, axis=None):
"""get the row and col indices of names. If axis is None, two ndarrays
are returned, corresponding the indices of names for each axis
Parameters
----------
names : iterable
column and/or row names
axis : (int) (optional)
the axis to search.
Returns
-------
numpy.ndarray : numpy.ndarray
indices of names.
"""
warnings.warn('Matrix.old_indices() is deprecated - only here for testing. Use Matrix.indices()', PyemuWarning)
(row_idxs, col_idxs) = ([], [])
for name in names:
if name.lower() not in self.col_names and name.lower() not in self.row_names:
raise Exception('Matrix.indices(): name not found: ' + name) # depends on [control=['if'], data=[]]
if name.lower() in self.col_names:
col_idxs.append(self.col_names.index(name)) # depends on [control=['if'], data=[]]
if name.lower() in self.row_names:
row_idxs.append(self.row_names.index(name)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']]
if axis is None:
return (np.array(row_idxs, dtype=np.int32), np.array(col_idxs, dtype=np.int32)) # depends on [control=['if'], data=[]]
elif axis == 0:
if len(row_idxs) != len(names):
raise Exception('Matrix.indices(): ' + 'not all names found in row_names') # depends on [control=['if'], data=[]]
return np.array(row_idxs, dtype=np.int32) # depends on [control=['if'], data=[]]
elif axis == 1:
if len(col_idxs) != len(names):
raise Exception('Matrix.indices(): ' + 'not all names found in col_names') # depends on [control=['if'], data=[]]
return np.array(col_idxs, dtype=np.int32) # depends on [control=['if'], data=[]]
else:
raise Exception('Matrix.indices(): ' + 'axis argument must 0 or 1, not:' + str(axis)) |
def _import(self, record_key, record_data, overwrite=True, last_modified=0.0, **kwargs):
    '''
    a helper method for other storage clients to import into appdata

    Resolves (and creates, as needed) the folder hierarchy implied by
    the os.sep-separated segments of record_key, then uploads
    record_data as the leaf file on Google Drive.

    :param record_key: string with key for record
    :param record_data: byte data for body of record
    :param overwrite: [optional] boolean to overwrite existing records
    :param last_modified: [optional] float to record last modified date
    :param kwargs: [optional] keyword arguments from other import methods
    :return: boolean indicating whether record was imported
    '''

    title = '%s._import' % self.__class__.__name__

    # verify permissions
    if not self.permissions_write:
        raise Exception('%s requires an access_token with write permissions.' % title)

    # retrieve file id of any record already stored at this key
    file_id, parent_id = self._get_id(record_key)

    # check overwrite condition: delete the existing file, or bail out
    # without importing when overwrite is disabled
    if file_id:
        if overwrite:
            try:
                self.drive.delete(fileId=file_id).execute()
            except:
                # any Drive API failure is surfaced as a connection error
                raise DriveConnectionError(title)
        else:
            return False

    # # check size of file
    # import sys
    # record_optimal = self.fields.metadata['record_optimal_bytes']
    # record_size = sys.getsizeof(record_data)
    # error_prefix = '%s(record_key="%s", record_data=b"...")' % (title, record_key)
    # if record_size > record_optimal:
    #     print('[WARNING] %s exceeds optimal record data size of %s bytes.' % (error_prefix, record_optimal))

    # prepare file body (in-memory upload of the raw record bytes)
    from googleapiclient.http import MediaInMemoryUpload
    media_body = MediaInMemoryUpload(body=record_data, resumable=True)

    # determine path segments; the last segment is the file name, the
    # rest are folder names to resolve/create below
    path_segments = record_key.split(os.sep)

    # construct upload kwargs
    create_kwargs = {
        'body': {
            'name': path_segments.pop()
        },
        'media_body': media_body,
        'fields': 'id'
    }

    # walk through parent directories
    parent_id = ''
    if path_segments:

        # construct query and creation arguments
        walk_folders = True
        folder_kwargs = {
            'body': {
                'name': '',
                'mimeType' : 'application/vnd.google-apps.folder'
            },
            'fields': 'id'
        }
        query_kwargs = {
            'spaces': self.drive_space,
            'fields': 'files(id, parents)'
        }
        # resolve each folder segment in turn; once one segment has to be
        # created, all deeper segments must be created too (walk_folders
        # is switched off so the search is skipped)
        while path_segments:
            folder_name = path_segments.pop(0)
            folder_kwargs['body']['name'] = folder_name

            # search for folder id in existing hierarchy
            if walk_folders:
                walk_query = "name = '%s'" % folder_name
                if parent_id:
                    # NOTE(review): no space before "and" — this yields
                    # "name = 'x'and 'id' in parents"; verify Drive's
                    # query parser accepts the concatenated form
                    walk_query += "and '%s' in parents" % parent_id
                query_kwargs['q'] = walk_query
                try:
                    response = self.drive.list(**query_kwargs).execute()
                except:
                    raise DriveConnectionError(title)
                file_list = response.get('files', [])
            else:
                file_list = []
            if file_list:
                parent_id = file_list[0].get('id')

            # or create folder
            # https://developers.google.com/drive/v3/web/folder
            else:
                try:
                    if not parent_id:
                        # top-level folder: parent is the app-data space
                        # itself, or omitted entirely for normal Drive
                        if self.drive_space == 'appDataFolder':
                            folder_kwargs['body']['parents'] = [ self.drive_space ]
                        else:
                            del folder_kwargs['body']['parents']
                    else:
                        folder_kwargs['body']['parents'] = [parent_id]
                    response = self.drive.create(**folder_kwargs).execute()
                    parent_id = response.get('id')
                    walk_folders = False
                except:
                    raise DriveConnectionError(title)

    # add parent id to file creation kwargs
    if parent_id:
        create_kwargs['body']['parents'] = [parent_id]
    elif self.drive_space == 'appDataFolder':
        create_kwargs['body']['parents'] = [self.drive_space]

    # modify file time: .drep records are pinned to epoch second 1,
    # otherwise honor the caller-supplied last_modified timestamp
    import re
    if re.search('\\.drep$', create_kwargs['body']['name']):
        from labpack.records.time import labDT
        drep_time = labDT.fromEpoch(1).isoformat()
        create_kwargs['body']['modifiedTime'] = drep_time
    elif last_modified:
        from labpack.records.time import labDT
        mod_time = labDT.fromEpoch(last_modified).isoformat()
        create_kwargs['body']['modifiedTime'] = mod_time

    # send create request
    try:
        self.drive.create(**create_kwargs).execute()
    except:
        raise DriveConnectionError(title)

    return True
constant[
a helper method for other storage clients to import into appdata
:param record_key: string with key for record
:param record_data: byte data for body of record
:param overwrite: [optional] boolean to overwrite existing records
:param last_modified: [optional] float to record last modified date
:param kwargs: [optional] keyword arguments from other import methods
:return: boolean indicating whether record was imported
]
variable[title] assign[=] binary_operation[constant[%s._import] <ast.Mod object at 0x7da2590d6920> name[self].__class__.__name__]
if <ast.UnaryOp object at 0x7da18eb544f0> begin[:]
<ast.Raise object at 0x7da18eb54dc0>
<ast.Tuple object at 0x7da18eb55b10> assign[=] call[name[self]._get_id, parameter[name[record_key]]]
if name[file_id] begin[:]
if name[overwrite] begin[:]
<ast.Try object at 0x7da18eb557b0>
from relative_module[googleapiclient.http] import module[MediaInMemoryUpload]
variable[media_body] assign[=] call[name[MediaInMemoryUpload], parameter[]]
variable[path_segments] assign[=] call[name[record_key].split, parameter[name[os].sep]]
variable[create_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da18eb54550>, <ast.Constant object at 0x7da18eb56e30>, <ast.Constant object at 0x7da18eb55090>], [<ast.Dict object at 0x7da18eb54820>, <ast.Name object at 0x7da18eb566b0>, <ast.Constant object at 0x7da18eb567a0>]]
variable[parent_id] assign[=] constant[]
if name[path_segments] begin[:]
variable[walk_folders] assign[=] constant[True]
variable[folder_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da18eb54f70>, <ast.Constant object at 0x7da18eb54d00>], [<ast.Dict object at 0x7da18eb54b80>, <ast.Constant object at 0x7da18eb566e0>]]
variable[query_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da18eb56080>, <ast.Constant object at 0x7da18eb56f20>], [<ast.Attribute object at 0x7da18eb56ce0>, <ast.Constant object at 0x7da18eb54f10>]]
while name[path_segments] begin[:]
variable[folder_name] assign[=] call[name[path_segments].pop, parameter[constant[0]]]
call[call[name[folder_kwargs]][constant[body]]][constant[name]] assign[=] name[folder_name]
if name[walk_folders] begin[:]
variable[walk_query] assign[=] binary_operation[constant[name = '%s'] <ast.Mod object at 0x7da2590d6920> name[folder_name]]
if name[parent_id] begin[:]
<ast.AugAssign object at 0x7da18eb553f0>
call[name[query_kwargs]][constant[q]] assign[=] name[walk_query]
<ast.Try object at 0x7da18eb56620>
variable[file_list] assign[=] call[name[response].get, parameter[constant[files], list[[]]]]
if name[file_list] begin[:]
variable[parent_id] assign[=] call[call[name[file_list]][constant[0]].get, parameter[constant[id]]]
if name[parent_id] begin[:]
call[call[name[create_kwargs]][constant[body]]][constant[parents]] assign[=] list[[<ast.Name object at 0x7da18eb56c50>]]
import module[re]
if call[name[re].search, parameter[constant[\.drep$], call[call[name[create_kwargs]][constant[body]]][constant[name]]]] begin[:]
from relative_module[labpack.records.time] import module[labDT]
variable[drep_time] assign[=] call[call[name[labDT].fromEpoch, parameter[constant[1]]].isoformat, parameter[]]
call[call[name[create_kwargs]][constant[body]]][constant[modifiedTime]] assign[=] name[drep_time]
<ast.Try object at 0x7da18eb541f0>
return[constant[True]] | keyword[def] identifier[_import] ( identifier[self] , identifier[record_key] , identifier[record_data] , identifier[overwrite] = keyword[True] , identifier[last_modified] = literal[int] ,** identifier[kwargs] ):
literal[string]
identifier[title] = literal[string] % identifier[self] . identifier[__class__] . identifier[__name__]
keyword[if] keyword[not] identifier[self] . identifier[permissions_write] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[title] )
identifier[file_id] , identifier[parent_id] = identifier[self] . identifier[_get_id] ( identifier[record_key] )
keyword[if] identifier[file_id] :
keyword[if] identifier[overwrite] :
keyword[try] :
identifier[self] . identifier[drive] . identifier[delete] ( identifier[fileId] = identifier[file_id] ). identifier[execute] ()
keyword[except] :
keyword[raise] identifier[DriveConnectionError] ( identifier[title] )
keyword[else] :
keyword[return] keyword[False]
keyword[from] identifier[googleapiclient] . identifier[http] keyword[import] identifier[MediaInMemoryUpload]
identifier[media_body] = identifier[MediaInMemoryUpload] ( identifier[body] = identifier[record_data] , identifier[resumable] = keyword[True] )
identifier[path_segments] = identifier[record_key] . identifier[split] ( identifier[os] . identifier[sep] )
identifier[create_kwargs] ={
literal[string] :{
literal[string] : identifier[path_segments] . identifier[pop] ()
},
literal[string] : identifier[media_body] ,
literal[string] : literal[string]
}
identifier[parent_id] = literal[string]
keyword[if] identifier[path_segments] :
identifier[walk_folders] = keyword[True]
identifier[folder_kwargs] ={
literal[string] :{
literal[string] : literal[string] ,
literal[string] : literal[string]
},
literal[string] : literal[string]
}
identifier[query_kwargs] ={
literal[string] : identifier[self] . identifier[drive_space] ,
literal[string] : literal[string]
}
keyword[while] identifier[path_segments] :
identifier[folder_name] = identifier[path_segments] . identifier[pop] ( literal[int] )
identifier[folder_kwargs] [ literal[string] ][ literal[string] ]= identifier[folder_name]
keyword[if] identifier[walk_folders] :
identifier[walk_query] = literal[string] % identifier[folder_name]
keyword[if] identifier[parent_id] :
identifier[walk_query] += literal[string] % identifier[parent_id]
identifier[query_kwargs] [ literal[string] ]= identifier[walk_query]
keyword[try] :
identifier[response] = identifier[self] . identifier[drive] . identifier[list] (** identifier[query_kwargs] ). identifier[execute] ()
keyword[except] :
keyword[raise] identifier[DriveConnectionError] ( identifier[title] )
identifier[file_list] = identifier[response] . identifier[get] ( literal[string] ,[])
keyword[else] :
identifier[file_list] =[]
keyword[if] identifier[file_list] :
identifier[parent_id] = identifier[file_list] [ literal[int] ]. identifier[get] ( literal[string] )
keyword[else] :
keyword[try] :
keyword[if] keyword[not] identifier[parent_id] :
keyword[if] identifier[self] . identifier[drive_space] == literal[string] :
identifier[folder_kwargs] [ literal[string] ][ literal[string] ]=[ identifier[self] . identifier[drive_space] ]
keyword[else] :
keyword[del] identifier[folder_kwargs] [ literal[string] ][ literal[string] ]
keyword[else] :
identifier[folder_kwargs] [ literal[string] ][ literal[string] ]=[ identifier[parent_id] ]
identifier[response] = identifier[self] . identifier[drive] . identifier[create] (** identifier[folder_kwargs] ). identifier[execute] ()
identifier[parent_id] = identifier[response] . identifier[get] ( literal[string] )
identifier[walk_folders] = keyword[False]
keyword[except] :
keyword[raise] identifier[DriveConnectionError] ( identifier[title] )
keyword[if] identifier[parent_id] :
identifier[create_kwargs] [ literal[string] ][ literal[string] ]=[ identifier[parent_id] ]
keyword[elif] identifier[self] . identifier[drive_space] == literal[string] :
identifier[create_kwargs] [ literal[string] ][ literal[string] ]=[ identifier[self] . identifier[drive_space] ]
keyword[import] identifier[re]
keyword[if] identifier[re] . identifier[search] ( literal[string] , identifier[create_kwargs] [ literal[string] ][ literal[string] ]):
keyword[from] identifier[labpack] . identifier[records] . identifier[time] keyword[import] identifier[labDT]
identifier[drep_time] = identifier[labDT] . identifier[fromEpoch] ( literal[int] ). identifier[isoformat] ()
identifier[create_kwargs] [ literal[string] ][ literal[string] ]= identifier[drep_time]
keyword[elif] identifier[last_modified] :
keyword[from] identifier[labpack] . identifier[records] . identifier[time] keyword[import] identifier[labDT]
identifier[mod_time] = identifier[labDT] . identifier[fromEpoch] ( identifier[last_modified] ). identifier[isoformat] ()
identifier[create_kwargs] [ literal[string] ][ literal[string] ]= identifier[mod_time]
keyword[try] :
identifier[self] . identifier[drive] . identifier[create] (** identifier[create_kwargs] ). identifier[execute] ()
keyword[except] :
keyword[raise] identifier[DriveConnectionError] ( identifier[title] )
keyword[return] keyword[True] | def _import(self, record_key, record_data, overwrite=True, last_modified=0.0, **kwargs):
"""
a helper method for other storage clients to import into appdata
:param record_key: string with key for record
:param record_data: byte data for body of record
:param overwrite: [optional] boolean to overwrite existing records
:param last_modified: [optional] float to record last modified date
:param kwargs: [optional] keyword arguments from other import methods
:return: boolean indicating whether record was imported
"""
title = '%s._import' % self.__class__.__name__
# verify permissions
if not self.permissions_write:
raise Exception('%s requires an access_token with write permissions.' % title) # depends on [control=['if'], data=[]]
# retrieve file id
(file_id, parent_id) = self._get_id(record_key)
# check overwrite condition
if file_id:
if overwrite:
try:
self.drive.delete(fileId=file_id).execute() # depends on [control=['try'], data=[]]
except:
raise DriveConnectionError(title) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
return False # depends on [control=['if'], data=[]]
# # check size of file
# import sys
# record_optimal = self.fields.metadata['record_optimal_bytes']
# record_size = sys.getsizeof(record_data)
# error_prefix = '%s(record_key="%s", record_data=b"...")' % (title, record_key)
# if record_size > record_optimal:
# print('[WARNING] %s exceeds optimal record data size of %s bytes.' % (error_prefix, record_optimal))
# prepare file body
from googleapiclient.http import MediaInMemoryUpload
media_body = MediaInMemoryUpload(body=record_data, resumable=True)
# determine path segments
path_segments = record_key.split(os.sep)
# construct upload kwargs
create_kwargs = {'body': {'name': path_segments.pop()}, 'media_body': media_body, 'fields': 'id'}
# walk through parent directories
parent_id = ''
if path_segments:
# construct query and creation arguments
walk_folders = True
folder_kwargs = {'body': {'name': '', 'mimeType': 'application/vnd.google-apps.folder'}, 'fields': 'id'}
query_kwargs = {'spaces': self.drive_space, 'fields': 'files(id, parents)'}
while path_segments:
folder_name = path_segments.pop(0)
folder_kwargs['body']['name'] = folder_name
# search for folder id in existing hierarchy
if walk_folders:
walk_query = "name = '%s'" % folder_name
if parent_id:
walk_query += "and '%s' in parents" % parent_id # depends on [control=['if'], data=[]]
query_kwargs['q'] = walk_query
try:
response = self.drive.list(**query_kwargs).execute() # depends on [control=['try'], data=[]]
except:
raise DriveConnectionError(title) # depends on [control=['except'], data=[]]
file_list = response.get('files', []) # depends on [control=['if'], data=[]]
else:
file_list = []
if file_list:
parent_id = file_list[0].get('id') # depends on [control=['if'], data=[]]
else:
# or create folder
# https://developers.google.com/drive/v3/web/folder
try:
if not parent_id:
if self.drive_space == 'appDataFolder':
folder_kwargs['body']['parents'] = [self.drive_space] # depends on [control=['if'], data=[]]
else:
del folder_kwargs['body']['parents'] # depends on [control=['if'], data=[]]
else:
folder_kwargs['body']['parents'] = [parent_id]
response = self.drive.create(**folder_kwargs).execute()
parent_id = response.get('id')
walk_folders = False # depends on [control=['try'], data=[]]
except:
raise DriveConnectionError(title) # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
# add parent id to file creation kwargs
if parent_id:
create_kwargs['body']['parents'] = [parent_id] # depends on [control=['if'], data=[]]
elif self.drive_space == 'appDataFolder':
create_kwargs['body']['parents'] = [self.drive_space] # depends on [control=['if'], data=[]]
# modify file time
import re
if re.search('\\.drep$', create_kwargs['body']['name']):
from labpack.records.time import labDT
drep_time = labDT.fromEpoch(1).isoformat()
create_kwargs['body']['modifiedTime'] = drep_time # depends on [control=['if'], data=[]]
elif last_modified:
from labpack.records.time import labDT
mod_time = labDT.fromEpoch(last_modified).isoformat()
create_kwargs['body']['modifiedTime'] = mod_time # depends on [control=['if'], data=[]]
# send create request
try:
self.drive.create(**create_kwargs).execute() # depends on [control=['try'], data=[]]
except:
raise DriveConnectionError(title) # depends on [control=['except'], data=[]]
return True |
def create_cloudtrail(self, region):
    """Creates a new CloudTrail Trail

    Args:
        region (str): Name of the AWS region

    Returns:
        `None`
    """
    # The SNS topic must exist before the trail can reference it
    self.create_sns_topic(region)

    trail_args = {
        'Name': self.trail_name,
        'S3BucketName': self.bucket_name,
        'S3KeyPrefix': self.account.account_name,
        'IsMultiRegionTrail': True,
        'IncludeGlobalServiceEvents': True,
        'SnsTopicName': self.topic_name,
    }
    client = self.session.client('cloudtrail', region_name=region)
    client.create_trail(**trail_args)

    # Wire the new topic into the SQS queue for downstream processing
    self.subscribe_sns_topic_to_sqs(region)

    auditlog(
        event='cloudtrail.create_cloudtrail',
        actor=self.ns,
        data={
            'account': self.account.account_name,
            'region': region
        }
    )
    self.log.info('Created CloudTrail for {} in {} ({})'.format(self.account, region, self.bucket_name))
constant[Creates a new CloudTrail Trail
Args:
region (str): Name of the AWS region
Returns:
`None`
]
variable[ct] assign[=] call[name[self].session.client, parameter[constant[cloudtrail]]]
call[name[self].create_sns_topic, parameter[name[region]]]
call[name[ct].create_trail, parameter[]]
call[name[self].subscribe_sns_topic_to_sqs, parameter[name[region]]]
call[name[auditlog], parameter[]]
call[name[self].log.info, parameter[call[constant[Created CloudTrail for {} in {} ({})].format, parameter[name[self].account, name[region], name[self].bucket_name]]]] | keyword[def] identifier[create_cloudtrail] ( identifier[self] , identifier[region] ):
literal[string]
identifier[ct] = identifier[self] . identifier[session] . identifier[client] ( literal[string] , identifier[region_name] = identifier[region] )
identifier[self] . identifier[create_sns_topic] ( identifier[region] )
identifier[ct] . identifier[create_trail] (
identifier[Name] = identifier[self] . identifier[trail_name] ,
identifier[S3BucketName] = identifier[self] . identifier[bucket_name] ,
identifier[S3KeyPrefix] = identifier[self] . identifier[account] . identifier[account_name] ,
identifier[IsMultiRegionTrail] = keyword[True] ,
identifier[IncludeGlobalServiceEvents] = keyword[True] ,
identifier[SnsTopicName] = identifier[self] . identifier[topic_name]
)
identifier[self] . identifier[subscribe_sns_topic_to_sqs] ( identifier[region] )
identifier[auditlog] (
identifier[event] = literal[string] ,
identifier[actor] = identifier[self] . identifier[ns] ,
identifier[data] ={
literal[string] : identifier[self] . identifier[account] . identifier[account_name] ,
literal[string] : identifier[region]
}
)
identifier[self] . identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[self] . identifier[account] , identifier[region] , identifier[self] . identifier[bucket_name] )) | def create_cloudtrail(self, region):
"""Creates a new CloudTrail Trail
Args:
region (str): Name of the AWS region
Returns:
`None`
"""
ct = self.session.client('cloudtrail', region_name=region)
# Creating the sns topic for the trail prior to creation
self.create_sns_topic(region)
ct.create_trail(Name=self.trail_name, S3BucketName=self.bucket_name, S3KeyPrefix=self.account.account_name, IsMultiRegionTrail=True, IncludeGlobalServiceEvents=True, SnsTopicName=self.topic_name)
self.subscribe_sns_topic_to_sqs(region)
auditlog(event='cloudtrail.create_cloudtrail', actor=self.ns, data={'account': self.account.account_name, 'region': region})
self.log.info('Created CloudTrail for {} in {} ({})'.format(self.account, region, self.bucket_name)) |
def chunks(data, n):
    """Yield consecutive chunks of length ``n`` from a slice-able sequence.

    The final chunk is shorter when ``len(data)`` is not a multiple of ``n``.
    """
    size = len(data)
    for start in range(0, size, n):
        stop = start + n
        yield data[start:stop]
constant[Yield successive n-sized chunks from a slice-able iterable.]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[data]]], name[n]]]] begin[:]
<ast.Yield object at 0x7da1b0abb130> | keyword[def] identifier[chunks] ( identifier[data] , identifier[n] ):
literal[string]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[data] ), identifier[n] ):
keyword[yield] identifier[data] [ identifier[i] : identifier[i] + identifier[n] ] | def chunks(data, n):
"""Yield successive n-sized chunks from a slice-able iterable."""
for i in range(0, len(data), n):
yield data[i:i + n] # depends on [control=['for'], data=['i']] |
def get_vpnv4fs_table(self):
    """Return the global VPNv4 Flow Specification table.

    The table is built lazily: the first call constructs it and registers
    it in both the global-table map and the per-(route-dist, family) map.
    """
    table = self._global_tables.get(RF_VPNv4_FLOWSPEC)
    if not table:
        # First access: create the table and register it under both maps
        # so later lookups by either key find the same instance.
        table = VPNv4FlowSpecTable(self._core_service, self._signal_bus)
        self._global_tables[RF_VPNv4_FLOWSPEC] = table
        self._tables[(None, RF_VPNv4_FLOWSPEC)] = table
    return table
constant[Returns global VPNv4 Flow Specification table.
Creates the table if it does not exist.
]
variable[vpnv4fs_table] assign[=] call[name[self]._global_tables.get, parameter[name[RF_VPNv4_FLOWSPEC]]]
if <ast.UnaryOp object at 0x7da1b1b0e200> begin[:]
variable[vpnv4fs_table] assign[=] call[name[VPNv4FlowSpecTable], parameter[name[self]._core_service, name[self]._signal_bus]]
call[name[self]._global_tables][name[RF_VPNv4_FLOWSPEC]] assign[=] name[vpnv4fs_table]
call[name[self]._tables][tuple[[<ast.Constant object at 0x7da1b1b0ee00>, <ast.Name object at 0x7da1b1b0ead0>]]] assign[=] name[vpnv4fs_table]
return[name[vpnv4fs_table]] | keyword[def] identifier[get_vpnv4fs_table] ( identifier[self] ):
literal[string]
identifier[vpnv4fs_table] = identifier[self] . identifier[_global_tables] . identifier[get] ( identifier[RF_VPNv4_FLOWSPEC] )
keyword[if] keyword[not] identifier[vpnv4fs_table] :
identifier[vpnv4fs_table] = identifier[VPNv4FlowSpecTable] ( identifier[self] . identifier[_core_service] ,
identifier[self] . identifier[_signal_bus] )
identifier[self] . identifier[_global_tables] [ identifier[RF_VPNv4_FLOWSPEC] ]= identifier[vpnv4fs_table]
identifier[self] . identifier[_tables] [( keyword[None] , identifier[RF_VPNv4_FLOWSPEC] )]= identifier[vpnv4fs_table]
keyword[return] identifier[vpnv4fs_table] | def get_vpnv4fs_table(self):
"""Returns global VPNv4 Flow Specification table.
Creates the table if it does not exist.
"""
vpnv4fs_table = self._global_tables.get(RF_VPNv4_FLOWSPEC)
# Lazy initialization of the table.
if not vpnv4fs_table:
vpnv4fs_table = VPNv4FlowSpecTable(self._core_service, self._signal_bus)
self._global_tables[RF_VPNv4_FLOWSPEC] = vpnv4fs_table
self._tables[None, RF_VPNv4_FLOWSPEC] = vpnv4fs_table # depends on [control=['if'], data=[]]
return vpnv4fs_table |
def positionMaxError(G, vmini, extension=0.0):
    """
    Calculate the largest position errors over the sky for given G and (V-I).
    These correspond to the sky regions with the largest astrometric errors.

    NOTE! THE ERRORS ARE FOR SKY POSITIONS IN THE ICRS (I.E., RIGHT ASCENSION, DECLINATION). MAKE SURE YOUR
    SIMULATED ASTROMETRY IS ALSO ON THE ICRS.

    Parameters
    ----------
    G - Value(s) of G-band magnitude.
    vmini - Value(s) of (V-I) colour.

    Keywords
    --------
    extension - Add this amount of years to the mission lifetime and scale the errors accordingly.

    Returns
    -------
    The maximum error in alpha* and the error in delta, in that order, in micro-arcsecond.
    """
    # Both position errors scale with the sky-averaged parallax error.
    sigma_parallax = parallaxErrorSkyAvg(G, vmini, extension)
    max_alpha_star = _astrometricErrorFactors['alphaStar'].max() * sigma_parallax
    max_delta = _astrometricErrorFactors['delta'].max() * sigma_parallax
    return max_alpha_star, max_delta
constant[
Calculate the maximum position errors from G and (V-I). These correspond to the sky regions with the
largest astrometric errors.
NOTE! THE ERRORS ARE FOR SKY POSITIONS IN THE ICRS (I.E., RIGHT ASCENSION, DECLINATION). MAKE SURE YOUR
SIMULATED ASTROMETRY IS ALSO ON THE ICRS.
Parameters
----------
G - Value(s) of G-band magnitude.
vmini - Value(s) of (V-I) colour.
Keywords
--------
extension - Add this amount of years to the mission lifetime and scale the errors accordingly.
Returns
-------
The maximum error in alpha* and the error in delta, in that order, in micro-arcsecond.
]
variable[parallaxError] assign[=] call[name[parallaxErrorSkyAvg], parameter[name[G], name[vmini], name[extension]]]
return[tuple[[<ast.BinOp object at 0x7da18f812950>, <ast.BinOp object at 0x7da18f810700>]]] | keyword[def] identifier[positionMaxError] ( identifier[G] , identifier[vmini] , identifier[extension] = literal[int] ):
literal[string]
identifier[parallaxError] = identifier[parallaxErrorSkyAvg] ( identifier[G] , identifier[vmini] , identifier[extension] )
keyword[return] identifier[_astrometricErrorFactors] [ literal[string] ]. identifier[max] ()* identifier[parallaxError] , identifier[_astrometricErrorFactors] [ literal[string] ]. identifier[max] ()* identifier[parallaxError] | def positionMaxError(G, vmini, extension=0.0):
"""
Calculate the maximum position errors from G and (V-I). These correspond to the sky regions with the
largest astrometric errors.
NOTE! THE ERRORS ARE FOR SKY POSITIONS IN THE ICRS (I.E., RIGHT ASCENSION, DECLINATION). MAKE SURE YOUR
SIMULATED ASTROMETRY IS ALSO ON THE ICRS.
Parameters
----------
G - Value(s) of G-band magnitude.
vmini - Value(s) of (V-I) colour.
Keywords
--------
extension - Add this amount of years to the mission lifetime and scale the errors accordingly.
Returns
-------
The maximum error in alpha* and the error in delta, in that order, in micro-arcsecond.
"""
parallaxError = parallaxErrorSkyAvg(G, vmini, extension)
return (_astrometricErrorFactors['alphaStar'].max() * parallaxError, _astrometricErrorFactors['delta'].max() * parallaxError) |
def send(self, message, *args, **kwargs):
    '''
    Queue ``message`` for delivery to all listeners.

    The message is only enqueued here; actual dispatch happens on the
    next processing tick.

    :param Message message:
        Message to send.
    '''
    item = (message, args, kwargs)
    # Non-blocking put: the queue is drained by the tick processing loop.
    self._messages.put(item, False)
constant[
Sends provided message to all listeners. Message is only added to
queue and will be processed on next tick.
:param Message message:
Message to send.
]
call[name[self]._messages.put, parameter[tuple[[<ast.Name object at 0x7da18bcc9de0>, <ast.Name object at 0x7da18bcc8b80>, <ast.Name object at 0x7da18bcc8f10>]], constant[False]]] | keyword[def] identifier[send] ( identifier[self] , identifier[message] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[_messages] . identifier[put] (( identifier[message] , identifier[args] , identifier[kwargs] ), keyword[False] ) | def send(self, message, *args, **kwargs):
"""
Sends provided message to all listeners. Message is only added to
queue and will be processed on next tick.
:param Message message:
Message to send.
"""
self._messages.put((message, args, kwargs), False) |
def make_generic_validator(validator: AnyCallable) -> 'ValidatorCallable':
    """
    Wrap *validator* in a generic function that is always called with the
    full (cls, value, values, config, field) argument set.

    Building the call adapter once up front avoids the per-call overhead of
    other approaches (e.g. a partial that assembles arguments each time),
    and lets validators declare only the subset of "values", "config" and
    "field" that they actually need instead of taking **kwargs.
    """
    sig = signature(validator)
    param_names = list(sig.parameters.keys())
    first = param_names.pop(0)
    if first == 'cls':
        # The second parameter is the value; the remainder are the
        # optional extras the adapter must supply.
        return wraps(validator)(_generic_validator_cls(validator, sig, set(param_names[1:])))
    if first == 'self':
        raise ConfigError(
            f'Invalid signature for validator {validator}: {sig}, "self" not permitted as first argument, '
            f'should be: (cls, value, values, config, field), "values", "config" and "field" are all optional.'
        )
    # No cls: the first parameter was the value and has already been removed.
    return wraps(validator)(_generic_validator_basic(validator, sig, set(param_names)))
constant[
Make a generic function which calls a validator with the right arguments.
Unfortunately other approaches (eg. return a partial of a function that builds the arguments) is slow,
hence this laborious way of doing things.
It's done like this so validators don't all need **kwargs in their signature, eg. any combination of
the arguments "values", "fields" and/or "config" are permitted.
]
variable[sig] assign[=] call[name[signature], parameter[name[validator]]]
variable[args] assign[=] call[name[list], parameter[call[name[sig].parameters.keys, parameter[]]]]
variable[first_arg] assign[=] call[name[args].pop, parameter[constant[0]]]
if compare[name[first_arg] equal[==] constant[self]] begin[:]
<ast.Raise object at 0x7da1b21babc0> | keyword[def] identifier[make_generic_validator] ( identifier[validator] : identifier[AnyCallable] )-> literal[string] :
literal[string]
identifier[sig] = identifier[signature] ( identifier[validator] )
identifier[args] = identifier[list] ( identifier[sig] . identifier[parameters] . identifier[keys] ())
identifier[first_arg] = identifier[args] . identifier[pop] ( literal[int] )
keyword[if] identifier[first_arg] == literal[string] :
keyword[raise] identifier[ConfigError] (
literal[string]
literal[string]
)
keyword[elif] identifier[first_arg] == literal[string] :
keyword[return] identifier[wraps] ( identifier[validator] )( identifier[_generic_validator_cls] ( identifier[validator] , identifier[sig] , identifier[set] ( identifier[args] [ literal[int] :])))
keyword[else] :
keyword[return] identifier[wraps] ( identifier[validator] )( identifier[_generic_validator_basic] ( identifier[validator] , identifier[sig] , identifier[set] ( identifier[args] ))) | def make_generic_validator(validator: AnyCallable) -> 'ValidatorCallable':
"""
Make a generic function which calls a validator with the right arguments.
Unfortunately other approaches (eg. return a partial of a function that builds the arguments) is slow,
hence this laborious way of doing things.
It's done like this so validators don't all need **kwargs in their signature, eg. any combination of
the arguments "values", "fields" and/or "config" are permitted.
"""
sig = signature(validator)
args = list(sig.parameters.keys())
first_arg = args.pop(0)
if first_arg == 'self':
raise ConfigError(f'Invalid signature for validator {validator}: {sig}, "self" not permitted as first argument, should be: (cls, value, values, config, field), "values", "config" and "field" are all optional.') # depends on [control=['if'], data=[]]
elif first_arg == 'cls':
# assume the second argument is value
return wraps(validator)(_generic_validator_cls(validator, sig, set(args[1:]))) # depends on [control=['if'], data=[]]
else:
# assume the first argument was value which has already been removed
return wraps(validator)(_generic_validator_basic(validator, sig, set(args))) |
def is_special_orthogonal(
        matrix: np.ndarray,
        *,
        rtol: float = 1e-5,
        atol: float = 1e-8) -> bool:
    """Determines if a matrix is approximately special orthogonal.

    A special orthogonal matrix is a real square matrix whose transpose is
    its inverse and whose determinant equals one.

    Args:
        matrix: The matrix to check.
        rtol: The per-matrix-entry relative tolerance on equality.
        atol: The per-matrix-entry absolute tolerance on equality.

    Returns:
        Whether the matrix is special orthogonal within the given tolerance.
    """
    if not is_orthogonal(matrix, rtol=rtol, atol=atol):
        return False
    if matrix.shape[0] == 0:
        # The 0x0 matrix is vacuously special orthogonal.
        return True
    return np.allclose(np.linalg.det(matrix), 1, rtol=rtol, atol=atol)
constant[Determines if a matrix is approximately special orthogonal.
A matrix is special orthogonal if it is square and real and its transpose
is its inverse and its determinant is one.
Args:
matrix: The matrix to check.
rtol: The per-matrix-entry relative tolerance on equality.
atol: The per-matrix-entry absolute tolerance on equality.
Returns:
Whether the matrix is special orthogonal within the given tolerance.
]
return[<ast.BoolOp object at 0x7da1b1c9a890>] | keyword[def] identifier[is_special_orthogonal] (
identifier[matrix] : identifier[np] . identifier[ndarray] ,
*,
identifier[rtol] : identifier[float] = literal[int] ,
identifier[atol] : identifier[float] = literal[int] )-> identifier[bool] :
literal[string]
keyword[return] ( identifier[is_orthogonal] ( identifier[matrix] , identifier[rtol] = identifier[rtol] , identifier[atol] = identifier[atol] ) keyword[and]
( identifier[matrix] . identifier[shape] [ literal[int] ]== literal[int] keyword[or]
identifier[np] . identifier[allclose] ( identifier[np] . identifier[linalg] . identifier[det] ( identifier[matrix] ), literal[int] , identifier[rtol] = identifier[rtol] , identifier[atol] = identifier[atol] ))) | def is_special_orthogonal(matrix: np.ndarray, *, rtol: float=1e-05, atol: float=1e-08) -> bool:
"""Determines if a matrix is approximately special orthogonal.
A matrix is special orthogonal if it is square and real and its transpose
is its inverse and its determinant is one.
Args:
matrix: The matrix to check.
rtol: The per-matrix-entry relative tolerance on equality.
atol: The per-matrix-entry absolute tolerance on equality.
Returns:
Whether the matrix is special orthogonal within the given tolerance.
"""
return is_orthogonal(matrix, rtol=rtol, atol=atol) and (matrix.shape[0] == 0 or np.allclose(np.linalg.det(matrix), 1, rtol=rtol, atol=atol)) |
def is_parameter(self):
    """Whether this object is a parameter of its enclosing function."""
    owner = self.scope
    return isinstance(owner, CodeFunction) and self in owner.parameters
constant[Whether this is a function parameter.]
return[<ast.BoolOp object at 0x7da18dc04580>] | keyword[def] identifier[is_parameter] ( identifier[self] ):
literal[string]
keyword[return] ( identifier[isinstance] ( identifier[self] . identifier[scope] , identifier[CodeFunction] )
keyword[and] identifier[self] keyword[in] identifier[self] . identifier[scope] . identifier[parameters] ) | def is_parameter(self):
"""Whether this is a function parameter."""
return isinstance(self.scope, CodeFunction) and self in self.scope.parameters |
def translate(cls, val):
    """Translate each of the standard json/yaml types to appropriate objects."""
    if val is None:
        return cls.translate_none(val)
    if isinstance(val, string_types):
        return cls.translate_str(val)
    # bool must be tested before the integer types: bool is an int subclass.
    if isinstance(val, bool):
        return cls.translate_bool(val)
    if isinstance(val, integer_types):
        return cls.translate_int(val)
    if isinstance(val, float):
        return cls.translate_float(val)
    if isinstance(val, dict):
        return cls.translate_dict(val)
    if isinstance(val, list):
        return cls.translate_list(val)
    # Fallback for anything unrecognized: escape it as a string.
    return cls.translate_escaped_str(val)
constant[Translate each of the standard json/yaml types to appropiate objects.]
if compare[name[val] is constant[None]] begin[:]
return[call[name[cls].translate_none, parameter[name[val]]]]
return[call[name[cls].translate_escaped_str, parameter[name[val]]]] | keyword[def] identifier[translate] ( identifier[cls] , identifier[val] ):
literal[string]
keyword[if] identifier[val] keyword[is] keyword[None] :
keyword[return] identifier[cls] . identifier[translate_none] ( identifier[val] )
keyword[elif] identifier[isinstance] ( identifier[val] , identifier[string_types] ):
keyword[return] identifier[cls] . identifier[translate_str] ( identifier[val] )
keyword[elif] identifier[isinstance] ( identifier[val] , identifier[bool] ):
keyword[return] identifier[cls] . identifier[translate_bool] ( identifier[val] )
keyword[elif] identifier[isinstance] ( identifier[val] , identifier[integer_types] ):
keyword[return] identifier[cls] . identifier[translate_int] ( identifier[val] )
keyword[elif] identifier[isinstance] ( identifier[val] , identifier[float] ):
keyword[return] identifier[cls] . identifier[translate_float] ( identifier[val] )
keyword[elif] identifier[isinstance] ( identifier[val] , identifier[dict] ):
keyword[return] identifier[cls] . identifier[translate_dict] ( identifier[val] )
keyword[elif] identifier[isinstance] ( identifier[val] , identifier[list] ):
keyword[return] identifier[cls] . identifier[translate_list] ( identifier[val] )
keyword[return] identifier[cls] . identifier[translate_escaped_str] ( identifier[val] ) | def translate(cls, val):
"""Translate each of the standard json/yaml types to appropiate objects."""
if val is None:
return cls.translate_none(val) # depends on [control=['if'], data=['val']]
elif isinstance(val, string_types):
return cls.translate_str(val) # depends on [control=['if'], data=[]]
# Needs to be before integer checks
elif isinstance(val, bool):
return cls.translate_bool(val) # depends on [control=['if'], data=[]]
elif isinstance(val, integer_types):
return cls.translate_int(val) # depends on [control=['if'], data=[]]
elif isinstance(val, float):
return cls.translate_float(val) # depends on [control=['if'], data=[]]
elif isinstance(val, dict):
return cls.translate_dict(val) # depends on [control=['if'], data=[]]
elif isinstance(val, list):
return cls.translate_list(val) # depends on [control=['if'], data=[]]
# Use this generic translation as a last resort
return cls.translate_escaped_str(val) |
def lookup(self, dotted_path, lineno=None):
    """Resolve an obfuscation alias for a dotted path.

    The path is either ``class_name`` or ``class_name:method_name``.  For
    method lookups the line number must be supplied, otherwise the result
    is unreliable.
    """
    buf = None
    try:
        buf = rustcall(
            _lib.lsm_proguard_mapping_convert_dotted_path,
            self._get_ptr(),
            dotted_path.encode('utf-8'), lineno or 0)
        return _ffi.string(buf).decode('utf-8', 'replace')
    finally:
        # Always release the native buffer, even when decoding raises.
        if buf is not None:
            _lib.lsm_buffer_free(buf)
constant[Given a dotted path in the format ``class_name`` or
``class_name:method_name`` this performs an alias lookup. For
methods the line number must be supplied or the result is
unreliable.
]
variable[rv] assign[=] constant[None]
<ast.Try object at 0x7da18f00ff70> | keyword[def] identifier[lookup] ( identifier[self] , identifier[dotted_path] , identifier[lineno] = keyword[None] ):
literal[string]
identifier[rv] = keyword[None]
keyword[try] :
identifier[rv] = identifier[rustcall] (
identifier[_lib] . identifier[lsm_proguard_mapping_convert_dotted_path] ,
identifier[self] . identifier[_get_ptr] (),
identifier[dotted_path] . identifier[encode] ( literal[string] ), identifier[lineno] keyword[or] literal[int] )
keyword[return] identifier[_ffi] . identifier[string] ( identifier[rv] ). identifier[decode] ( literal[string] , literal[string] )
keyword[finally] :
keyword[if] identifier[rv] keyword[is] keyword[not] keyword[None] :
identifier[_lib] . identifier[lsm_buffer_free] ( identifier[rv] ) | def lookup(self, dotted_path, lineno=None):
"""Given a dotted path in the format ``class_name`` or
``class_name:method_name`` this performs an alias lookup. For
methods the line number must be supplied or the result is
unreliable.
"""
rv = None
try:
rv = rustcall(_lib.lsm_proguard_mapping_convert_dotted_path, self._get_ptr(), dotted_path.encode('utf-8'), lineno or 0)
return _ffi.string(rv).decode('utf-8', 'replace') # depends on [control=['try'], data=[]]
finally:
if rv is not None:
_lib.lsm_buffer_free(rv) # depends on [control=['if'], data=['rv']] |
def is_type_of(self, some_type):
    """Asserts that val is exactly of the given type."""
    arg_type = type(some_type)
    if arg_type is not type and not issubclass(arg_type, type):
        raise TypeError('given arg must be a type')
    if type(self.val) is not some_type:
        # Work out a printable name for the actual type of val.
        if hasattr(self.val, '__name__'):
            actual = self.val.__name__
        elif hasattr(self.val, '__class__'):
            actual = self.val.__class__.__name__
        else:
            actual = 'unknown'
        self._err('Expected <%s:%s> to be of type <%s>, but was not.' % (self.val, actual, some_type.__name__))
    return self
constant[Asserts that val is of the given type.]
if <ast.BoolOp object at 0x7da1b016cd30> begin[:]
<ast.Raise object at 0x7da1b016dde0>
if compare[call[name[type], parameter[name[self].val]] is_not name[some_type]] begin[:]
if call[name[hasattr], parameter[name[self].val, constant[__name__]]] begin[:]
variable[t] assign[=] name[self].val.__name__
call[name[self]._err, parameter[binary_operation[constant[Expected <%s:%s> to be of type <%s>, but was not.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b016e5f0>, <ast.Name object at 0x7da1b016cfa0>, <ast.Attribute object at 0x7da1b016e830>]]]]]
return[name[self]] | keyword[def] identifier[is_type_of] ( identifier[self] , identifier[some_type] ):
literal[string]
keyword[if] identifier[type] ( identifier[some_type] ) keyword[is] keyword[not] identifier[type] keyword[and] keyword[not] identifier[issubclass] ( identifier[type] ( identifier[some_type] ), identifier[type] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[type] ( identifier[self] . identifier[val] ) keyword[is] keyword[not] identifier[some_type] :
keyword[if] identifier[hasattr] ( identifier[self] . identifier[val] , literal[string] ):
identifier[t] = identifier[self] . identifier[val] . identifier[__name__]
keyword[elif] identifier[hasattr] ( identifier[self] . identifier[val] , literal[string] ):
identifier[t] = identifier[self] . identifier[val] . identifier[__class__] . identifier[__name__]
keyword[else] :
identifier[t] = literal[string]
identifier[self] . identifier[_err] ( literal[string] %( identifier[self] . identifier[val] , identifier[t] , identifier[some_type] . identifier[__name__] ))
keyword[return] identifier[self] | def is_type_of(self, some_type):
"""Asserts that val is of the given type."""
if type(some_type) is not type and (not issubclass(type(some_type), type)):
raise TypeError('given arg must be a type') # depends on [control=['if'], data=[]]
if type(self.val) is not some_type:
if hasattr(self.val, '__name__'):
t = self.val.__name__ # depends on [control=['if'], data=[]]
elif hasattr(self.val, '__class__'):
t = self.val.__class__.__name__ # depends on [control=['if'], data=[]]
else:
t = 'unknown'
self._err('Expected <%s:%s> to be of type <%s>, but was not.' % (self.val, t, some_type.__name__)) # depends on [control=['if'], data=['some_type']]
return self |
def _main(instaloader: Instaloader, targetlist: List[str],
          username: Optional[str] = None, password: Optional[str] = None,
          sessionfile: Optional[str] = None,
          download_profile_pic: bool = True, download_posts=True,
          download_stories: bool = False, download_highlights: bool = False, download_tagged: bool = False,
          fast_update: bool = False,
          max_count: Optional[int] = None, post_filter_str: Optional[str] = None,
          storyitem_filter_str: Optional[str] = None) -> None:
    """Download set of profiles, hashtags etc. and handle logging in and session files if desired.

    Targets are dispatched on their prefix/suffix: ``*.json``/``*.json.xz``
    files (saved Post/StoryItem structures), ``@name`` (followees of a
    profile), ``#tag`` (hashtag), ``-shortcode`` (single post), ``%id``
    (location), ``:feed``/``:stories``/``:saved`` (special feeds), and plain
    names (profiles).  Profile targets are collected first and downloaded
    together at the end; a KeyboardInterrupt still saves the session file.
    """
    # Parse and generate filter function
    post_filter = None
    if post_filter_str is not None:
        post_filter = filterstr_to_filterfunc(post_filter_str, Post)
        instaloader.context.log('Only download posts with property "{}".'.format(post_filter_str))
    storyitem_filter = None
    if storyitem_filter_str is not None:
        storyitem_filter = filterstr_to_filterfunc(storyitem_filter_str, StoryItem)
        instaloader.context.log('Only download storyitems with property "{}".'.format(storyitem_filter_str))
    # Login, if desired
    if username is not None:
        try:
            instaloader.load_session_from_file(username, sessionfile)
        except FileNotFoundError as err:
            # Only report the missing file if the user explicitly named one.
            if sessionfile is not None:
                print(err, file=sys.stderr)
            instaloader.context.log("Session file does not exist yet - Logging in.")
        # Re-login if the loaded session is absent, stale, or for another user.
        if not instaloader.context.is_logged_in or username != instaloader.test_login():
            if password is not None:
                try:
                    instaloader.login(username, password)
                except TwoFactorAuthRequiredException:
                    # Keep prompting until a 2FA code is accepted.
                    while True:
                        try:
                            code = input("Enter 2FA verification code: ")
                            instaloader.two_factor_login(code)
                            break
                        except BadCredentialsException:
                            pass
            else:
                instaloader.interactive_login(username)
        instaloader.context.log("Logged in as %s." % username)
    # Try block for KeyboardInterrupt (save session on ^C)
    profiles = set()
    anonymous_retry_profiles = set()
    try:
        # Generate set of profiles, already downloading non-profile targets
        for target in targetlist:
            if (target.endswith('.json') or target.endswith('.json.xz')) and os.path.isfile(target):
                # Target is a saved structure on disk: re-download it in place.
                with instaloader.context.error_catcher(target):
                    structure = load_structure_from_file(instaloader.context, target)
                    if isinstance(structure, Post):
                        if post_filter is not None and not post_filter(structure):
                            instaloader.context.log("<{} ({}) skipped>".format(structure, target), flush=True)
                            continue
                        instaloader.context.log("Downloading {} ({})".format(structure, target))
                        instaloader.download_post(structure, os.path.dirname(target))
                    elif isinstance(structure, StoryItem):
                        if storyitem_filter is not None and not storyitem_filter(structure):
                            instaloader.context.log("<{} ({}) skipped>".format(structure, target), flush=True)
                            continue
                        instaloader.context.log("Attempting to download {} ({})".format(structure, target))
                        instaloader.download_storyitem(structure, os.path.dirname(target))
                    elif isinstance(structure, Profile):
                        raise InvalidArgumentException("Profile JSON are ignored. Pass \"{}\" to download that profile"
                                                       .format(structure.username))
                    else:
                        raise InvalidArgumentException("{} JSON file not supported as target"
                                                       .format(structure.__class__.__name__))
                continue
            # strip '/' characters to be more shell-autocompletion-friendly
            target = target.rstrip('/')
            with instaloader.context.error_catcher(target):
                # Dispatch on the target's prefix (see docstring).
                if target[0] == '@':
                    instaloader.context.log("Retrieving followees of %s..." % target[1:])
                    profile = Profile.from_username(instaloader.context, target[1:])
                    for followee in profile.get_followees():
                        instaloader.save_profile_id(followee)
                        profiles.add(followee)
                elif target[0] == '#':
                    instaloader.download_hashtag(hashtag=target[1:], max_count=max_count, fast_update=fast_update,
                                                 post_filter=post_filter)
                elif target[0] == '-':
                    instaloader.download_post(Post.from_shortcode(instaloader.context, target[1:]), target)
                elif target[0] == "%":
                    instaloader.download_location(location=target[1:], max_count=max_count, fast_update=fast_update,
                                                  post_filter=post_filter)
                elif target == ":feed":
                    instaloader.download_feed_posts(fast_update=fast_update, max_count=max_count,
                                                    post_filter=post_filter)
                elif target == ":stories":
                    instaloader.download_stories(fast_update=fast_update, storyitem_filter=storyitem_filter)
                elif target == ":saved":
                    instaloader.download_saved_posts(fast_update=fast_update, max_count=max_count,
                                                     post_filter=post_filter)
                else:
                    # Plain profile name: collect it for the batched download below.
                    try:
                        profile = instaloader.check_profile_id(target)
                        if instaloader.context.is_logged_in and profile.has_blocked_viewer:
                            if download_profile_pic or ((download_posts or download_tagged) and not profile.is_private):
                                # Raising here routes the target into the
                                # anonymous-retry handling just below.
                                raise ProfileNotExistsException("{} blocked you; But we download her anonymously."
                                                                .format(target))
                            else:
                                instaloader.context.error("{} blocked you.".format(target))
                        else:
                            profiles.add(profile)
                    except ProfileNotExistsException as err:
                        # Not only our profile.has_blocked_viewer condition raises ProfileNotExistsException,
                        # check_profile_id() also does, since access to blocked profile may be responded with 404.
                        if instaloader.context.is_logged_in and (download_profile_pic or download_posts or
                                                                 download_tagged):
                            instaloader.context.log(err)
                            instaloader.context.log("Trying again anonymously, helps in case you are just blocked.")
                            with instaloader.anonymous_copy() as anonymous_loader:
                                with instaloader.context.error_catcher():
                                    anonymous_retry_profiles.add(anonymous_loader.check_profile_id(target))
                                    instaloader.context.error("Warning: {} will be downloaded anonymously (\"{}\")."
                                                              .format(target, err))
                        else:
                            raise
        if len(profiles) > 1:
            instaloader.context.log("Downloading {} profiles: {}".format(len(profiles),
                                                                         ' '.join([p.username for p in profiles])))
        if profiles and download_profile_pic and not instaloader.context.is_logged_in:
            instaloader.context.error("Warning: Use --login to download HD version of profile pictures.")
        # Batched download of all collected profile targets.
        instaloader.download_profiles(profiles,
                                      download_profile_pic, download_posts, download_tagged, download_highlights,
                                      download_stories, fast_update, post_filter, storyitem_filter)
        if anonymous_retry_profiles:
            # Profiles that blocked the logged-in user are fetched with a
            # separate, anonymous loader instance.
            instaloader.context.log("Downloading anonymously: {}"
                                    .format(' '.join([p.username for p in anonymous_retry_profiles])))
            with instaloader.anonymous_copy() as anonymous_loader:
                anonymous_loader.download_profiles(anonymous_retry_profiles,
                                                   download_profile_pic, download_posts, download_tagged,
                                                   fast_update=fast_update, post_filter=post_filter)
    except KeyboardInterrupt:
        print("\nInterrupted by user.", file=sys.stderr)
    # Save session if it is useful
    if instaloader.context.is_logged_in:
        instaloader.save_session_to_file(sessionfile)
    # User might be confused if Instaloader does nothing
    if not targetlist:
        if instaloader.context.is_logged_in:
            # Instaloader did at least save a session file
            instaloader.context.log("No targets were specified, thus nothing has been downloaded.")
        else:
            # Instaloader did not do anything
            instaloader.context.log("usage:" + usage_string())
constant[Download set of profiles, hashtags etc. and handle logging in and session files if desired.]
variable[post_filter] assign[=] constant[None]
if compare[name[post_filter_str] is_not constant[None]] begin[:]
variable[post_filter] assign[=] call[name[filterstr_to_filterfunc], parameter[name[post_filter_str], name[Post]]]
call[name[instaloader].context.log, parameter[call[constant[Only download posts with property "{}".].format, parameter[name[post_filter_str]]]]]
variable[storyitem_filter] assign[=] constant[None]
if compare[name[storyitem_filter_str] is_not constant[None]] begin[:]
variable[storyitem_filter] assign[=] call[name[filterstr_to_filterfunc], parameter[name[storyitem_filter_str], name[StoryItem]]]
call[name[instaloader].context.log, parameter[call[constant[Only download storyitems with property "{}".].format, parameter[name[storyitem_filter_str]]]]]
if compare[name[username] is_not constant[None]] begin[:]
<ast.Try object at 0x7da2044c1300>
if <ast.BoolOp object at 0x7da2044c3d60> begin[:]
if compare[name[password] is_not constant[None]] begin[:]
<ast.Try object at 0x7da20c7c9270>
call[name[instaloader].context.log, parameter[binary_operation[constant[Logged in as %s.] <ast.Mod object at 0x7da2590d6920> name[username]]]]
variable[profiles] assign[=] call[name[set], parameter[]]
variable[anonymous_retry_profiles] assign[=] call[name[set], parameter[]]
<ast.Try object at 0x7da2044c2bc0>
if name[instaloader].context.is_logged_in begin[:]
call[name[instaloader].save_session_to_file, parameter[name[sessionfile]]]
if <ast.UnaryOp object at 0x7da20cabf790> begin[:]
if name[instaloader].context.is_logged_in begin[:]
call[name[instaloader].context.log, parameter[constant[No targets were specified, thus nothing has been downloaded.]]] | keyword[def] identifier[_main] ( identifier[instaloader] : identifier[Instaloader] , identifier[targetlist] : identifier[List] [ identifier[str] ],
identifier[username] : identifier[Optional] [ identifier[str] ]= keyword[None] , identifier[password] : identifier[Optional] [ identifier[str] ]= keyword[None] ,
identifier[sessionfile] : identifier[Optional] [ identifier[str] ]= keyword[None] ,
identifier[download_profile_pic] : identifier[bool] = keyword[True] , identifier[download_posts] = keyword[True] ,
identifier[download_stories] : identifier[bool] = keyword[False] , identifier[download_highlights] : identifier[bool] = keyword[False] , identifier[download_tagged] : identifier[bool] = keyword[False] ,
identifier[fast_update] : identifier[bool] = keyword[False] ,
identifier[max_count] : identifier[Optional] [ identifier[int] ]= keyword[None] , identifier[post_filter_str] : identifier[Optional] [ identifier[str] ]= keyword[None] ,
identifier[storyitem_filter_str] : identifier[Optional] [ identifier[str] ]= keyword[None] )-> keyword[None] :
literal[string]
identifier[post_filter] = keyword[None]
keyword[if] identifier[post_filter_str] keyword[is] keyword[not] keyword[None] :
identifier[post_filter] = identifier[filterstr_to_filterfunc] ( identifier[post_filter_str] , identifier[Post] )
identifier[instaloader] . identifier[context] . identifier[log] ( literal[string] . identifier[format] ( identifier[post_filter_str] ))
identifier[storyitem_filter] = keyword[None]
keyword[if] identifier[storyitem_filter_str] keyword[is] keyword[not] keyword[None] :
identifier[storyitem_filter] = identifier[filterstr_to_filterfunc] ( identifier[storyitem_filter_str] , identifier[StoryItem] )
identifier[instaloader] . identifier[context] . identifier[log] ( literal[string] . identifier[format] ( identifier[storyitem_filter_str] ))
keyword[if] identifier[username] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[instaloader] . identifier[load_session_from_file] ( identifier[username] , identifier[sessionfile] )
keyword[except] identifier[FileNotFoundError] keyword[as] identifier[err] :
keyword[if] identifier[sessionfile] keyword[is] keyword[not] keyword[None] :
identifier[print] ( identifier[err] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[instaloader] . identifier[context] . identifier[log] ( literal[string] )
keyword[if] keyword[not] identifier[instaloader] . identifier[context] . identifier[is_logged_in] keyword[or] identifier[username] != identifier[instaloader] . identifier[test_login] ():
keyword[if] identifier[password] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[instaloader] . identifier[login] ( identifier[username] , identifier[password] )
keyword[except] identifier[TwoFactorAuthRequiredException] :
keyword[while] keyword[True] :
keyword[try] :
identifier[code] = identifier[input] ( literal[string] )
identifier[instaloader] . identifier[two_factor_login] ( identifier[code] )
keyword[break]
keyword[except] identifier[BadCredentialsException] :
keyword[pass]
keyword[else] :
identifier[instaloader] . identifier[interactive_login] ( identifier[username] )
identifier[instaloader] . identifier[context] . identifier[log] ( literal[string] % identifier[username] )
identifier[profiles] = identifier[set] ()
identifier[anonymous_retry_profiles] = identifier[set] ()
keyword[try] :
keyword[for] identifier[target] keyword[in] identifier[targetlist] :
keyword[if] ( identifier[target] . identifier[endswith] ( literal[string] ) keyword[or] identifier[target] . identifier[endswith] ( literal[string] )) keyword[and] identifier[os] . identifier[path] . identifier[isfile] ( identifier[target] ):
keyword[with] identifier[instaloader] . identifier[context] . identifier[error_catcher] ( identifier[target] ):
identifier[structure] = identifier[load_structure_from_file] ( identifier[instaloader] . identifier[context] , identifier[target] )
keyword[if] identifier[isinstance] ( identifier[structure] , identifier[Post] ):
keyword[if] identifier[post_filter] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[post_filter] ( identifier[structure] ):
identifier[instaloader] . identifier[context] . identifier[log] ( literal[string] . identifier[format] ( identifier[structure] , identifier[target] ), identifier[flush] = keyword[True] )
keyword[continue]
identifier[instaloader] . identifier[context] . identifier[log] ( literal[string] . identifier[format] ( identifier[structure] , identifier[target] ))
identifier[instaloader] . identifier[download_post] ( identifier[structure] , identifier[os] . identifier[path] . identifier[dirname] ( identifier[target] ))
keyword[elif] identifier[isinstance] ( identifier[structure] , identifier[StoryItem] ):
keyword[if] identifier[storyitem_filter] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[storyitem_filter] ( identifier[structure] ):
identifier[instaloader] . identifier[context] . identifier[log] ( literal[string] . identifier[format] ( identifier[structure] , identifier[target] ), identifier[flush] = keyword[True] )
keyword[continue]
identifier[instaloader] . identifier[context] . identifier[log] ( literal[string] . identifier[format] ( identifier[structure] , identifier[target] ))
identifier[instaloader] . identifier[download_storyitem] ( identifier[structure] , identifier[os] . identifier[path] . identifier[dirname] ( identifier[target] ))
keyword[elif] identifier[isinstance] ( identifier[structure] , identifier[Profile] ):
keyword[raise] identifier[InvalidArgumentException] ( literal[string]
. identifier[format] ( identifier[structure] . identifier[username] ))
keyword[else] :
keyword[raise] identifier[InvalidArgumentException] ( literal[string]
. identifier[format] ( identifier[structure] . identifier[__class__] . identifier[__name__] ))
keyword[continue]
identifier[target] = identifier[target] . identifier[rstrip] ( literal[string] )
keyword[with] identifier[instaloader] . identifier[context] . identifier[error_catcher] ( identifier[target] ):
keyword[if] identifier[target] [ literal[int] ]== literal[string] :
identifier[instaloader] . identifier[context] . identifier[log] ( literal[string] % identifier[target] [ literal[int] :])
identifier[profile] = identifier[Profile] . identifier[from_username] ( identifier[instaloader] . identifier[context] , identifier[target] [ literal[int] :])
keyword[for] identifier[followee] keyword[in] identifier[profile] . identifier[get_followees] ():
identifier[instaloader] . identifier[save_profile_id] ( identifier[followee] )
identifier[profiles] . identifier[add] ( identifier[followee] )
keyword[elif] identifier[target] [ literal[int] ]== literal[string] :
identifier[instaloader] . identifier[download_hashtag] ( identifier[hashtag] = identifier[target] [ literal[int] :], identifier[max_count] = identifier[max_count] , identifier[fast_update] = identifier[fast_update] ,
identifier[post_filter] = identifier[post_filter] )
keyword[elif] identifier[target] [ literal[int] ]== literal[string] :
identifier[instaloader] . identifier[download_post] ( identifier[Post] . identifier[from_shortcode] ( identifier[instaloader] . identifier[context] , identifier[target] [ literal[int] :]), identifier[target] )
keyword[elif] identifier[target] [ literal[int] ]== literal[string] :
identifier[instaloader] . identifier[download_location] ( identifier[location] = identifier[target] [ literal[int] :], identifier[max_count] = identifier[max_count] , identifier[fast_update] = identifier[fast_update] ,
identifier[post_filter] = identifier[post_filter] )
keyword[elif] identifier[target] == literal[string] :
identifier[instaloader] . identifier[download_feed_posts] ( identifier[fast_update] = identifier[fast_update] , identifier[max_count] = identifier[max_count] ,
identifier[post_filter] = identifier[post_filter] )
keyword[elif] identifier[target] == literal[string] :
identifier[instaloader] . identifier[download_stories] ( identifier[fast_update] = identifier[fast_update] , identifier[storyitem_filter] = identifier[storyitem_filter] )
keyword[elif] identifier[target] == literal[string] :
identifier[instaloader] . identifier[download_saved_posts] ( identifier[fast_update] = identifier[fast_update] , identifier[max_count] = identifier[max_count] ,
identifier[post_filter] = identifier[post_filter] )
keyword[else] :
keyword[try] :
identifier[profile] = identifier[instaloader] . identifier[check_profile_id] ( identifier[target] )
keyword[if] identifier[instaloader] . identifier[context] . identifier[is_logged_in] keyword[and] identifier[profile] . identifier[has_blocked_viewer] :
keyword[if] identifier[download_profile_pic] keyword[or] (( identifier[download_posts] keyword[or] identifier[download_tagged] ) keyword[and] keyword[not] identifier[profile] . identifier[is_private] ):
keyword[raise] identifier[ProfileNotExistsException] ( literal[string]
. identifier[format] ( identifier[target] ))
keyword[else] :
identifier[instaloader] . identifier[context] . identifier[error] ( literal[string] . identifier[format] ( identifier[target] ))
keyword[else] :
identifier[profiles] . identifier[add] ( identifier[profile] )
keyword[except] identifier[ProfileNotExistsException] keyword[as] identifier[err] :
keyword[if] identifier[instaloader] . identifier[context] . identifier[is_logged_in] keyword[and] ( identifier[download_profile_pic] keyword[or] identifier[download_posts] keyword[or]
identifier[download_tagged] ):
identifier[instaloader] . identifier[context] . identifier[log] ( identifier[err] )
identifier[instaloader] . identifier[context] . identifier[log] ( literal[string] )
keyword[with] identifier[instaloader] . identifier[anonymous_copy] () keyword[as] identifier[anonymous_loader] :
keyword[with] identifier[instaloader] . identifier[context] . identifier[error_catcher] ():
identifier[anonymous_retry_profiles] . identifier[add] ( identifier[anonymous_loader] . identifier[check_profile_id] ( identifier[target] ))
identifier[instaloader] . identifier[context] . identifier[error] ( literal[string]
. identifier[format] ( identifier[target] , identifier[err] ))
keyword[else] :
keyword[raise]
keyword[if] identifier[len] ( identifier[profiles] )> literal[int] :
identifier[instaloader] . identifier[context] . identifier[log] ( literal[string] . identifier[format] ( identifier[len] ( identifier[profiles] ),
literal[string] . identifier[join] ([ identifier[p] . identifier[username] keyword[for] identifier[p] keyword[in] identifier[profiles] ])))
keyword[if] identifier[profiles] keyword[and] identifier[download_profile_pic] keyword[and] keyword[not] identifier[instaloader] . identifier[context] . identifier[is_logged_in] :
identifier[instaloader] . identifier[context] . identifier[error] ( literal[string] )
identifier[instaloader] . identifier[download_profiles] ( identifier[profiles] ,
identifier[download_profile_pic] , identifier[download_posts] , identifier[download_tagged] , identifier[download_highlights] ,
identifier[download_stories] , identifier[fast_update] , identifier[post_filter] , identifier[storyitem_filter] )
keyword[if] identifier[anonymous_retry_profiles] :
identifier[instaloader] . identifier[context] . identifier[log] ( literal[string]
. identifier[format] ( literal[string] . identifier[join] ([ identifier[p] . identifier[username] keyword[for] identifier[p] keyword[in] identifier[anonymous_retry_profiles] ])))
keyword[with] identifier[instaloader] . identifier[anonymous_copy] () keyword[as] identifier[anonymous_loader] :
identifier[anonymous_loader] . identifier[download_profiles] ( identifier[anonymous_retry_profiles] ,
identifier[download_profile_pic] , identifier[download_posts] , identifier[download_tagged] ,
identifier[fast_update] = identifier[fast_update] , identifier[post_filter] = identifier[post_filter] )
keyword[except] identifier[KeyboardInterrupt] :
identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] )
keyword[if] identifier[instaloader] . identifier[context] . identifier[is_logged_in] :
identifier[instaloader] . identifier[save_session_to_file] ( identifier[sessionfile] )
keyword[if] keyword[not] identifier[targetlist] :
keyword[if] identifier[instaloader] . identifier[context] . identifier[is_logged_in] :
identifier[instaloader] . identifier[context] . identifier[log] ( literal[string] )
keyword[else] :
identifier[instaloader] . identifier[context] . identifier[log] ( literal[string] + identifier[usage_string] ()) | def _main(instaloader: Instaloader, targetlist: List[str], username: Optional[str]=None, password: Optional[str]=None, sessionfile: Optional[str]=None, download_profile_pic: bool=True, download_posts=True, download_stories: bool=False, download_highlights: bool=False, download_tagged: bool=False, fast_update: bool=False, max_count: Optional[int]=None, post_filter_str: Optional[str]=None, storyitem_filter_str: Optional[str]=None) -> None:
"""Download set of profiles, hashtags etc. and handle logging in and session files if desired."""
# Parse and generate filter function
post_filter = None
if post_filter_str is not None:
post_filter = filterstr_to_filterfunc(post_filter_str, Post)
instaloader.context.log('Only download posts with property "{}".'.format(post_filter_str)) # depends on [control=['if'], data=['post_filter_str']]
storyitem_filter = None
if storyitem_filter_str is not None:
storyitem_filter = filterstr_to_filterfunc(storyitem_filter_str, StoryItem)
instaloader.context.log('Only download storyitems with property "{}".'.format(storyitem_filter_str)) # depends on [control=['if'], data=['storyitem_filter_str']]
# Login, if desired
if username is not None:
try:
instaloader.load_session_from_file(username, sessionfile) # depends on [control=['try'], data=[]]
except FileNotFoundError as err:
if sessionfile is not None:
print(err, file=sys.stderr) # depends on [control=['if'], data=[]]
instaloader.context.log('Session file does not exist yet - Logging in.') # depends on [control=['except'], data=['err']]
if not instaloader.context.is_logged_in or username != instaloader.test_login():
if password is not None:
try:
instaloader.login(username, password) # depends on [control=['try'], data=[]]
except TwoFactorAuthRequiredException:
while True:
try:
code = input('Enter 2FA verification code: ')
instaloader.two_factor_login(code)
break # depends on [control=['try'], data=[]]
except BadCredentialsException:
pass # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['password']]
else:
instaloader.interactive_login(username) # depends on [control=['if'], data=[]]
instaloader.context.log('Logged in as %s.' % username) # depends on [control=['if'], data=['username']]
# Try block for KeyboardInterrupt (save session on ^C)
profiles = set()
anonymous_retry_profiles = set()
try:
# Generate set of profiles, already downloading non-profile targets
for target in targetlist:
if (target.endswith('.json') or target.endswith('.json.xz')) and os.path.isfile(target):
with instaloader.context.error_catcher(target):
structure = load_structure_from_file(instaloader.context, target)
if isinstance(structure, Post):
if post_filter is not None and (not post_filter(structure)):
instaloader.context.log('<{} ({}) skipped>'.format(structure, target), flush=True)
continue # depends on [control=['if'], data=[]]
instaloader.context.log('Downloading {} ({})'.format(structure, target))
instaloader.download_post(structure, os.path.dirname(target)) # depends on [control=['if'], data=[]]
elif isinstance(structure, StoryItem):
if storyitem_filter is not None and (not storyitem_filter(structure)):
instaloader.context.log('<{} ({}) skipped>'.format(structure, target), flush=True)
continue # depends on [control=['if'], data=[]]
instaloader.context.log('Attempting to download {} ({})'.format(structure, target))
instaloader.download_storyitem(structure, os.path.dirname(target)) # depends on [control=['if'], data=[]]
elif isinstance(structure, Profile):
raise InvalidArgumentException('Profile JSON are ignored. Pass "{}" to download that profile'.format(structure.username)) # depends on [control=['if'], data=[]]
else:
raise InvalidArgumentException('{} JSON file not supported as target'.format(structure.__class__.__name__)) # depends on [control=['with'], data=[]]
continue # depends on [control=['if'], data=[]]
# strip '/' characters to be more shell-autocompletion-friendly
target = target.rstrip('/')
with instaloader.context.error_catcher(target):
if target[0] == '@':
instaloader.context.log('Retrieving followees of %s...' % target[1:])
profile = Profile.from_username(instaloader.context, target[1:])
for followee in profile.get_followees():
instaloader.save_profile_id(followee)
profiles.add(followee) # depends on [control=['for'], data=['followee']] # depends on [control=['if'], data=[]]
elif target[0] == '#':
instaloader.download_hashtag(hashtag=target[1:], max_count=max_count, fast_update=fast_update, post_filter=post_filter) # depends on [control=['if'], data=[]]
elif target[0] == '-':
instaloader.download_post(Post.from_shortcode(instaloader.context, target[1:]), target) # depends on [control=['if'], data=[]]
elif target[0] == '%':
instaloader.download_location(location=target[1:], max_count=max_count, fast_update=fast_update, post_filter=post_filter) # depends on [control=['if'], data=[]]
elif target == ':feed':
instaloader.download_feed_posts(fast_update=fast_update, max_count=max_count, post_filter=post_filter) # depends on [control=['if'], data=[]]
elif target == ':stories':
instaloader.download_stories(fast_update=fast_update, storyitem_filter=storyitem_filter) # depends on [control=['if'], data=[]]
elif target == ':saved':
instaloader.download_saved_posts(fast_update=fast_update, max_count=max_count, post_filter=post_filter) # depends on [control=['if'], data=[]]
else:
try:
profile = instaloader.check_profile_id(target)
if instaloader.context.is_logged_in and profile.has_blocked_viewer:
if download_profile_pic or ((download_posts or download_tagged) and (not profile.is_private)):
raise ProfileNotExistsException('{} blocked you; But we download her anonymously.'.format(target)) # depends on [control=['if'], data=[]]
else:
instaloader.context.error('{} blocked you.'.format(target)) # depends on [control=['if'], data=[]]
else:
profiles.add(profile) # depends on [control=['try'], data=[]]
except ProfileNotExistsException as err:
# Not only our profile.has_blocked_viewer condition raises ProfileNotExistsException,
# check_profile_id() also does, since access to blocked profile may be responded with 404.
if instaloader.context.is_logged_in and (download_profile_pic or download_posts or download_tagged):
instaloader.context.log(err)
instaloader.context.log('Trying again anonymously, helps in case you are just blocked.')
with instaloader.anonymous_copy() as anonymous_loader:
with instaloader.context.error_catcher():
anonymous_retry_profiles.add(anonymous_loader.check_profile_id(target))
instaloader.context.error('Warning: {} will be downloaded anonymously ("{}").'.format(target, err)) # depends on [control=['with'], data=[]] # depends on [control=['with'], data=['anonymous_loader']] # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=['err']] # depends on [control=['with'], data=[]] # depends on [control=['for'], data=['target']]
if len(profiles) > 1:
instaloader.context.log('Downloading {} profiles: {}'.format(len(profiles), ' '.join([p.username for p in profiles]))) # depends on [control=['if'], data=[]]
if profiles and download_profile_pic and (not instaloader.context.is_logged_in):
instaloader.context.error('Warning: Use --login to download HD version of profile pictures.') # depends on [control=['if'], data=[]]
instaloader.download_profiles(profiles, download_profile_pic, download_posts, download_tagged, download_highlights, download_stories, fast_update, post_filter, storyitem_filter)
if anonymous_retry_profiles:
instaloader.context.log('Downloading anonymously: {}'.format(' '.join([p.username for p in anonymous_retry_profiles])))
with instaloader.anonymous_copy() as anonymous_loader:
anonymous_loader.download_profiles(anonymous_retry_profiles, download_profile_pic, download_posts, download_tagged, fast_update=fast_update, post_filter=post_filter) # depends on [control=['with'], data=['anonymous_loader']] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
print('\nInterrupted by user.', file=sys.stderr) # depends on [control=['except'], data=[]]
# Save session if it is useful
if instaloader.context.is_logged_in:
instaloader.save_session_to_file(sessionfile) # depends on [control=['if'], data=[]]
# User might be confused if Instaloader does nothing
if not targetlist:
if instaloader.context.is_logged_in:
# Instaloader did at least save a session file
instaloader.context.log('No targets were specified, thus nothing has been downloaded.') # depends on [control=['if'], data=[]]
else:
# Instloader did not do anything
instaloader.context.log('usage:' + usage_string()) # depends on [control=['if'], data=[]] |
def figure_protocol(self):
"""plot the current sweep protocol."""
self.log.debug("creating overlayed protocols plot")
self.figure()
plt.plot(self.abf.protoX,self.abf.protoY,color='r')
self.marginX=0
self.decorate(protocol=True) | def function[figure_protocol, parameter[self]]:
constant[plot the current sweep protocol.]
call[name[self].log.debug, parameter[constant[creating overlayed protocols plot]]]
call[name[self].figure, parameter[]]
call[name[plt].plot, parameter[name[self].abf.protoX, name[self].abf.protoY]]
name[self].marginX assign[=] constant[0]
call[name[self].decorate, parameter[]] | keyword[def] identifier[figure_protocol] ( identifier[self] ):
literal[string]
identifier[self] . identifier[log] . identifier[debug] ( literal[string] )
identifier[self] . identifier[figure] ()
identifier[plt] . identifier[plot] ( identifier[self] . identifier[abf] . identifier[protoX] , identifier[self] . identifier[abf] . identifier[protoY] , identifier[color] = literal[string] )
identifier[self] . identifier[marginX] = literal[int]
identifier[self] . identifier[decorate] ( identifier[protocol] = keyword[True] ) | def figure_protocol(self):
"""plot the current sweep protocol."""
self.log.debug('creating overlayed protocols plot')
self.figure()
plt.plot(self.abf.protoX, self.abf.protoY, color='r')
self.marginX = 0
self.decorate(protocol=True) |
def toggle_pac(self):
"""Enable and disable PAC options."""
if Pac is not None:
pac_on = self.pac['pac_on'].get_value()
self.pac['prep'].setEnabled(pac_on)
self.pac['box_metric'].setEnabled(pac_on)
self.pac['box_complex'].setEnabled(pac_on)
self.pac['box_surro'].setEnabled(pac_on)
self.pac['box_opts'].setEnabled(pac_on)
if not pac_on:
self.pac['prep'].set_value(False)
if Pac is not None and pac_on:
pac = self.pac
hilb_on = pac['hilbert_on'].isChecked()
wav_on = pac['wavelet_on'].isChecked()
for button in pac['hilbert'].values():
button[0].setEnabled(hilb_on)
if button[1] is not None:
button[1].setEnabled(hilb_on)
pac['wav_width'][0].setEnabled(wav_on)
pac['wav_width'][1].setEnabled(wav_on)
if pac['metric'].get_value() in [
'Kullback-Leibler Distance',
'Heights ratio']:
pac['nbin'][0].setEnabled(True)
pac['nbin'][1].setEnabled(True)
else:
pac['nbin'][0].setEnabled(False)
pac['nbin'][1].setEnabled(False)
if pac['metric'] == 'ndPac':
for button in pac['surro'].values():
button[0].setEnabled(False)
if button[1] is not None:
button[1].setEnabled(False)
pac['surro']['pval'][0].setEnabled(True)
ndpac_on = pac['metric'].get_value() == 'ndPac'
surro_on = logical_and(pac['surro_method'].get_value() != ''
'No surrogates', not ndpac_on)
norm_on = pac['surro_norm'].get_value() != 'No normalization'
blocks_on = 'across time' in pac['surro_method'].get_value()
pac['surro_method'].setEnabled(not ndpac_on)
for button in pac['surro'].values():
button[0].setEnabled(surro_on and norm_on)
if button[1] is not None:
button[1].setEnabled(surro_on and norm_on)
pac['surro']['nblocks'][0].setEnabled(blocks_on)
pac['surro']['nblocks'][1].setEnabled(blocks_on)
if ndpac_on:
pac['surro_method'].set_value('No surrogates')
pac['surro']['pval'][0].setEnabled(True) | def function[toggle_pac, parameter[self]]:
constant[Enable and disable PAC options.]
if compare[name[Pac] is_not constant[None]] begin[:]
variable[pac_on] assign[=] call[call[name[self].pac][constant[pac_on]].get_value, parameter[]]
call[call[name[self].pac][constant[prep]].setEnabled, parameter[name[pac_on]]]
call[call[name[self].pac][constant[box_metric]].setEnabled, parameter[name[pac_on]]]
call[call[name[self].pac][constant[box_complex]].setEnabled, parameter[name[pac_on]]]
call[call[name[self].pac][constant[box_surro]].setEnabled, parameter[name[pac_on]]]
call[call[name[self].pac][constant[box_opts]].setEnabled, parameter[name[pac_on]]]
if <ast.UnaryOp object at 0x7da1b0c95e70> begin[:]
call[call[name[self].pac][constant[prep]].set_value, parameter[constant[False]]]
if <ast.BoolOp object at 0x7da1b0c95d20> begin[:]
variable[pac] assign[=] name[self].pac
variable[hilb_on] assign[=] call[call[name[pac]][constant[hilbert_on]].isChecked, parameter[]]
variable[wav_on] assign[=] call[call[name[pac]][constant[wavelet_on]].isChecked, parameter[]]
for taget[name[button]] in starred[call[call[name[pac]][constant[hilbert]].values, parameter[]]] begin[:]
call[call[name[button]][constant[0]].setEnabled, parameter[name[hilb_on]]]
if compare[call[name[button]][constant[1]] is_not constant[None]] begin[:]
call[call[name[button]][constant[1]].setEnabled, parameter[name[hilb_on]]]
call[call[call[name[pac]][constant[wav_width]]][constant[0]].setEnabled, parameter[name[wav_on]]]
call[call[call[name[pac]][constant[wav_width]]][constant[1]].setEnabled, parameter[name[wav_on]]]
if compare[call[call[name[pac]][constant[metric]].get_value, parameter[]] in list[[<ast.Constant object at 0x7da1b0c96e00>, <ast.Constant object at 0x7da1b0c96da0>]]] begin[:]
call[call[call[name[pac]][constant[nbin]]][constant[0]].setEnabled, parameter[constant[True]]]
call[call[call[name[pac]][constant[nbin]]][constant[1]].setEnabled, parameter[constant[True]]]
if compare[call[name[pac]][constant[metric]] equal[==] constant[ndPac]] begin[:]
for taget[name[button]] in starred[call[call[name[pac]][constant[surro]].values, parameter[]]] begin[:]
call[call[name[button]][constant[0]].setEnabled, parameter[constant[False]]]
if compare[call[name[button]][constant[1]] is_not constant[None]] begin[:]
call[call[name[button]][constant[1]].setEnabled, parameter[constant[False]]]
call[call[call[call[name[pac]][constant[surro]]][constant[pval]]][constant[0]].setEnabled, parameter[constant[True]]]
variable[ndpac_on] assign[=] compare[call[call[name[pac]][constant[metric]].get_value, parameter[]] equal[==] constant[ndPac]]
variable[surro_on] assign[=] call[name[logical_and], parameter[compare[call[call[name[pac]][constant[surro_method]].get_value, parameter[]] not_equal[!=] constant[No surrogates]], <ast.UnaryOp object at 0x7da1b0c276a0>]]
variable[norm_on] assign[=] compare[call[call[name[pac]][constant[surro_norm]].get_value, parameter[]] not_equal[!=] constant[No normalization]]
variable[blocks_on] assign[=] compare[constant[across time] in call[call[name[pac]][constant[surro_method]].get_value, parameter[]]]
call[call[name[pac]][constant[surro_method]].setEnabled, parameter[<ast.UnaryOp object at 0x7da1b0c26a70>]]
for taget[name[button]] in starred[call[call[name[pac]][constant[surro]].values, parameter[]]] begin[:]
call[call[name[button]][constant[0]].setEnabled, parameter[<ast.BoolOp object at 0x7da1b0c26770>]]
if compare[call[name[button]][constant[1]] is_not constant[None]] begin[:]
call[call[name[button]][constant[1]].setEnabled, parameter[<ast.BoolOp object at 0x7da1b0c26470>]]
call[call[call[call[name[pac]][constant[surro]]][constant[nblocks]]][constant[0]].setEnabled, parameter[name[blocks_on]]]
call[call[call[call[name[pac]][constant[surro]]][constant[nblocks]]][constant[1]].setEnabled, parameter[name[blocks_on]]]
if name[ndpac_on] begin[:]
call[call[name[pac]][constant[surro_method]].set_value, parameter[constant[No surrogates]]]
call[call[call[call[name[pac]][constant[surro]]][constant[pval]]][constant[0]].setEnabled, parameter[constant[True]]] | keyword[def] identifier[toggle_pac] ( identifier[self] ):
literal[string]
keyword[if] identifier[Pac] keyword[is] keyword[not] keyword[None] :
identifier[pac_on] = identifier[self] . identifier[pac] [ literal[string] ]. identifier[get_value] ()
identifier[self] . identifier[pac] [ literal[string] ]. identifier[setEnabled] ( identifier[pac_on] )
identifier[self] . identifier[pac] [ literal[string] ]. identifier[setEnabled] ( identifier[pac_on] )
identifier[self] . identifier[pac] [ literal[string] ]. identifier[setEnabled] ( identifier[pac_on] )
identifier[self] . identifier[pac] [ literal[string] ]. identifier[setEnabled] ( identifier[pac_on] )
identifier[self] . identifier[pac] [ literal[string] ]. identifier[setEnabled] ( identifier[pac_on] )
keyword[if] keyword[not] identifier[pac_on] :
identifier[self] . identifier[pac] [ literal[string] ]. identifier[set_value] ( keyword[False] )
keyword[if] identifier[Pac] keyword[is] keyword[not] keyword[None] keyword[and] identifier[pac_on] :
identifier[pac] = identifier[self] . identifier[pac]
identifier[hilb_on] = identifier[pac] [ literal[string] ]. identifier[isChecked] ()
identifier[wav_on] = identifier[pac] [ literal[string] ]. identifier[isChecked] ()
keyword[for] identifier[button] keyword[in] identifier[pac] [ literal[string] ]. identifier[values] ():
identifier[button] [ literal[int] ]. identifier[setEnabled] ( identifier[hilb_on] )
keyword[if] identifier[button] [ literal[int] ] keyword[is] keyword[not] keyword[None] :
identifier[button] [ literal[int] ]. identifier[setEnabled] ( identifier[hilb_on] )
identifier[pac] [ literal[string] ][ literal[int] ]. identifier[setEnabled] ( identifier[wav_on] )
identifier[pac] [ literal[string] ][ literal[int] ]. identifier[setEnabled] ( identifier[wav_on] )
keyword[if] identifier[pac] [ literal[string] ]. identifier[get_value] () keyword[in] [
literal[string] ,
literal[string] ]:
identifier[pac] [ literal[string] ][ literal[int] ]. identifier[setEnabled] ( keyword[True] )
identifier[pac] [ literal[string] ][ literal[int] ]. identifier[setEnabled] ( keyword[True] )
keyword[else] :
identifier[pac] [ literal[string] ][ literal[int] ]. identifier[setEnabled] ( keyword[False] )
identifier[pac] [ literal[string] ][ literal[int] ]. identifier[setEnabled] ( keyword[False] )
keyword[if] identifier[pac] [ literal[string] ]== literal[string] :
keyword[for] identifier[button] keyword[in] identifier[pac] [ literal[string] ]. identifier[values] ():
identifier[button] [ literal[int] ]. identifier[setEnabled] ( keyword[False] )
keyword[if] identifier[button] [ literal[int] ] keyword[is] keyword[not] keyword[None] :
identifier[button] [ literal[int] ]. identifier[setEnabled] ( keyword[False] )
identifier[pac] [ literal[string] ][ literal[string] ][ literal[int] ]. identifier[setEnabled] ( keyword[True] )
identifier[ndpac_on] = identifier[pac] [ literal[string] ]. identifier[get_value] ()== literal[string]
identifier[surro_on] = identifier[logical_and] ( identifier[pac] [ literal[string] ]. identifier[get_value] ()!= literal[string]
literal[string] , keyword[not] identifier[ndpac_on] )
identifier[norm_on] = identifier[pac] [ literal[string] ]. identifier[get_value] ()!= literal[string]
identifier[blocks_on] = literal[string] keyword[in] identifier[pac] [ literal[string] ]. identifier[get_value] ()
identifier[pac] [ literal[string] ]. identifier[setEnabled] ( keyword[not] identifier[ndpac_on] )
keyword[for] identifier[button] keyword[in] identifier[pac] [ literal[string] ]. identifier[values] ():
identifier[button] [ literal[int] ]. identifier[setEnabled] ( identifier[surro_on] keyword[and] identifier[norm_on] )
keyword[if] identifier[button] [ literal[int] ] keyword[is] keyword[not] keyword[None] :
identifier[button] [ literal[int] ]. identifier[setEnabled] ( identifier[surro_on] keyword[and] identifier[norm_on] )
identifier[pac] [ literal[string] ][ literal[string] ][ literal[int] ]. identifier[setEnabled] ( identifier[blocks_on] )
identifier[pac] [ literal[string] ][ literal[string] ][ literal[int] ]. identifier[setEnabled] ( identifier[blocks_on] )
keyword[if] identifier[ndpac_on] :
identifier[pac] [ literal[string] ]. identifier[set_value] ( literal[string] )
identifier[pac] [ literal[string] ][ literal[string] ][ literal[int] ]. identifier[setEnabled] ( keyword[True] ) | def toggle_pac(self):
"""Enable and disable PAC options."""
if Pac is not None:
pac_on = self.pac['pac_on'].get_value()
self.pac['prep'].setEnabled(pac_on)
self.pac['box_metric'].setEnabled(pac_on)
self.pac['box_complex'].setEnabled(pac_on)
self.pac['box_surro'].setEnabled(pac_on)
self.pac['box_opts'].setEnabled(pac_on)
if not pac_on:
self.pac['prep'].set_value(False) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if Pac is not None and pac_on:
pac = self.pac
hilb_on = pac['hilbert_on'].isChecked()
wav_on = pac['wavelet_on'].isChecked()
for button in pac['hilbert'].values():
button[0].setEnabled(hilb_on)
if button[1] is not None:
button[1].setEnabled(hilb_on) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['button']]
pac['wav_width'][0].setEnabled(wav_on)
pac['wav_width'][1].setEnabled(wav_on)
if pac['metric'].get_value() in ['Kullback-Leibler Distance', 'Heights ratio']:
pac['nbin'][0].setEnabled(True)
pac['nbin'][1].setEnabled(True) # depends on [control=['if'], data=[]]
else:
pac['nbin'][0].setEnabled(False)
pac['nbin'][1].setEnabled(False)
if pac['metric'] == 'ndPac':
for button in pac['surro'].values():
button[0].setEnabled(False)
if button[1] is not None:
button[1].setEnabled(False) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['button']]
pac['surro']['pval'][0].setEnabled(True) # depends on [control=['if'], data=[]]
ndpac_on = pac['metric'].get_value() == 'ndPac'
surro_on = logical_and(pac['surro_method'].get_value() != 'No surrogates', not ndpac_on)
norm_on = pac['surro_norm'].get_value() != 'No normalization'
blocks_on = 'across time' in pac['surro_method'].get_value()
pac['surro_method'].setEnabled(not ndpac_on)
for button in pac['surro'].values():
button[0].setEnabled(surro_on and norm_on)
if button[1] is not None:
button[1].setEnabled(surro_on and norm_on) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['button']]
pac['surro']['nblocks'][0].setEnabled(blocks_on)
pac['surro']['nblocks'][1].setEnabled(blocks_on)
if ndpac_on:
pac['surro_method'].set_value('No surrogates')
pac['surro']['pval'][0].setEnabled(True) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def vpn_sites(self):
"""Instance depends on the API version:
* 2018-04-01: :class:`VpnSitesOperations<azure.mgmt.network.v2018_04_01.operations.VpnSitesOperations>`
"""
api_version = self._get_api_version('vpn_sites')
if api_version == '2018-04-01':
from .v2018_04_01.operations import VpnSitesOperations as OperationClass
else:
raise NotImplementedError("APIVersion {} is not available".format(api_version))
return OperationClass(self._client, self.config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) | def function[vpn_sites, parameter[self]]:
constant[Instance depends on the API version:
* 2018-04-01: :class:`VpnSitesOperations<azure.mgmt.network.v2018_04_01.operations.VpnSitesOperations>`
]
variable[api_version] assign[=] call[name[self]._get_api_version, parameter[constant[vpn_sites]]]
if compare[name[api_version] equal[==] constant[2018-04-01]] begin[:]
from relative_module[v2018_04_01.operations] import module[VpnSitesOperations]
return[call[name[OperationClass], parameter[name[self]._client, name[self].config, call[name[Serializer], parameter[call[name[self]._models_dict, parameter[name[api_version]]]]], call[name[Deserializer], parameter[call[name[self]._models_dict, parameter[name[api_version]]]]]]]] | keyword[def] identifier[vpn_sites] ( identifier[self] ):
literal[string]
identifier[api_version] = identifier[self] . identifier[_get_api_version] ( literal[string] )
keyword[if] identifier[api_version] == literal[string] :
keyword[from] . identifier[v2018_04_01] . identifier[operations] keyword[import] identifier[VpnSitesOperations] keyword[as] identifier[OperationClass]
keyword[else] :
keyword[raise] identifier[NotImplementedError] ( literal[string] . identifier[format] ( identifier[api_version] ))
keyword[return] identifier[OperationClass] ( identifier[self] . identifier[_client] , identifier[self] . identifier[config] , identifier[Serializer] ( identifier[self] . identifier[_models_dict] ( identifier[api_version] )), identifier[Deserializer] ( identifier[self] . identifier[_models_dict] ( identifier[api_version] ))) | def vpn_sites(self):
"""Instance depends on the API version:
* 2018-04-01: :class:`VpnSitesOperations<azure.mgmt.network.v2018_04_01.operations.VpnSitesOperations>`
"""
api_version = self._get_api_version('vpn_sites')
if api_version == '2018-04-01':
from .v2018_04_01.operations import VpnSitesOperations as OperationClass # depends on [control=['if'], data=[]]
else:
raise NotImplementedError('APIVersion {} is not available'.format(api_version))
return OperationClass(self._client, self.config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) |
def istft(stft_matrix, hop_length=None, win_length=None, window='hann',
center=True, dtype=np.float32, length=None):
"""
Inverse short-time Fourier transform (ISTFT).
Converts a complex-valued spectrogram `stft_matrix` to time-series `y`
by minimizing the mean squared error between `stft_matrix` and STFT of
`y` as described in [1]_ up to Section 2 (reconstruction from MSTFT).
In general, window function, hop length and other parameters should be same
as in stft, which mostly leads to perfect reconstruction of a signal from
unmodified `stft_matrix`.
.. [1] D. W. Griffin and J. S. Lim,
"Signal estimation from modified short-time Fourier transform,"
IEEE Trans. ASSP, vol.32, no.2, pp.236–243, Apr. 1984.
Parameters
----------
stft_matrix : np.ndarray [shape=(1 + n_fft/2, t)]
STFT matrix from `stft`
hop_length : int > 0 [scalar]
Number of frames between STFT columns.
If unspecified, defaults to `win_length / 4`.
win_length : int <= n_fft = 2 * (stft_matrix.shape[0] - 1)
When reconstructing the time series, each frame is windowed
and each sample is normalized by the sum of squared window
according to the `window` function (see below).
If unspecified, defaults to `n_fft`.
window : string, tuple, number, function, np.ndarray [shape=(n_fft,)]
- a window specification (string, tuple, or number);
see `scipy.signal.get_window`
- a window function, such as `scipy.signal.hanning`
- a user-specified window vector of length `n_fft`
.. see also:: `filters.get_window`
center : boolean
- If `True`, `D` is assumed to have centered frames.
- If `False`, `D` is assumed to have left-aligned frames.
dtype : numeric type
Real numeric type for `y`. Default is 32-bit float.
length : int > 0, optional
If provided, the output `y` is zero-padded or clipped to exactly
`length` samples.
Returns
-------
y : np.ndarray [shape=(n,)]
time domain signal reconstructed from `stft_matrix`
See Also
--------
stft : Short-time Fourier Transform
Notes
-----
This function caches at level 30.
Examples
--------
>>> y, sr = librosa.load(librosa.util.example_audio_file())
>>> D = librosa.stft(y)
>>> y_hat = librosa.istft(D)
>>> y_hat
array([ -4.812e-06, -4.267e-06, ..., 6.271e-06, 2.827e-07], dtype=float32)
Exactly preserving length of the input signal requires explicit padding.
Otherwise, a partial frame at the end of `y` will not be represented.
>>> n = len(y)
>>> n_fft = 2048
>>> y_pad = librosa.util.fix_length(y, n + n_fft // 2)
>>> D = librosa.stft(y_pad, n_fft=n_fft)
>>> y_out = librosa.istft(D, length=n)
>>> np.max(np.abs(y - y_out))
1.4901161e-07
"""
n_fft = 2 * (stft_matrix.shape[0] - 1)
# By default, use the entire frame
if win_length is None:
win_length = n_fft
# Set the default hop, if it's not already specified
if hop_length is None:
hop_length = int(win_length // 4)
ifft_window = get_window(window, win_length, fftbins=True)
# Pad out to match n_fft, and add a broadcasting axis
ifft_window = util.pad_center(ifft_window, n_fft)[:, np.newaxis]
n_frames = stft_matrix.shape[1]
expected_signal_len = n_fft + hop_length * (n_frames - 1)
y = np.zeros(expected_signal_len, dtype=dtype)
n_columns = int(util.MAX_MEM_BLOCK // (stft_matrix.shape[0] *
stft_matrix.itemsize))
fft = get_fftlib()
frame = 0
for bl_s in range(0, n_frames, n_columns):
bl_t = min(bl_s + n_columns, n_frames)
# invert the block and apply the window function
ytmp = ifft_window * fft.irfft(stft_matrix[:, bl_s:bl_t], axis=0)
# Overlap-add the istft block starting at the i'th frame
__overlap_add(y[frame * hop_length:], ytmp, hop_length)
frame += (bl_t - bl_s)
# Normalize by sum of squared window
ifft_window_sum = window_sumsquare(window,
n_frames,
win_length=win_length,
n_fft=n_fft,
hop_length=hop_length,
dtype=dtype)
approx_nonzero_indices = ifft_window_sum > util.tiny(ifft_window_sum)
y[approx_nonzero_indices] /= ifft_window_sum[approx_nonzero_indices]
if length is None:
# If we don't need to control length, just do the usual center trimming
# to eliminate padded data
if center:
y = y[int(n_fft // 2):-int(n_fft // 2)]
else:
if center:
# If we're centering, crop off the first n_fft//2 samples
# and then trim/pad to the target length.
# We don't trim the end here, so that if the signal is zero-padded
# to a longer duration, the decay is smooth by windowing
start = int(n_fft // 2)
else:
# If we're not centering, start at 0 and trim/pad as necessary
start = 0
y = util.fix_length(y[start:], length)
return y | def function[istft, parameter[stft_matrix, hop_length, win_length, window, center, dtype, length]]:
constant[
Inverse short-time Fourier transform (ISTFT).
Converts a complex-valued spectrogram `stft_matrix` to time-series `y`
by minimizing the mean squared error between `stft_matrix` and STFT of
`y` as described in [1]_ up to Section 2 (reconstruction from MSTFT).
In general, window function, hop length and other parameters should be same
as in stft, which mostly leads to perfect reconstruction of a signal from
unmodified `stft_matrix`.
.. [1] D. W. Griffin and J. S. Lim,
"Signal estimation from modified short-time Fourier transform,"
IEEE Trans. ASSP, vol.32, no.2, pp.236–243, Apr. 1984.
Parameters
----------
stft_matrix : np.ndarray [shape=(1 + n_fft/2, t)]
STFT matrix from `stft`
hop_length : int > 0 [scalar]
Number of frames between STFT columns.
If unspecified, defaults to `win_length / 4`.
win_length : int <= n_fft = 2 * (stft_matrix.shape[0] - 1)
When reconstructing the time series, each frame is windowed
and each sample is normalized by the sum of squared window
according to the `window` function (see below).
If unspecified, defaults to `n_fft`.
window : string, tuple, number, function, np.ndarray [shape=(n_fft,)]
- a window specification (string, tuple, or number);
see `scipy.signal.get_window`
- a window function, such as `scipy.signal.hanning`
- a user-specified window vector of length `n_fft`
.. see also:: `filters.get_window`
center : boolean
- If `True`, `D` is assumed to have centered frames.
- If `False`, `D` is assumed to have left-aligned frames.
dtype : numeric type
Real numeric type for `y`. Default is 32-bit float.
length : int > 0, optional
If provided, the output `y` is zero-padded or clipped to exactly
`length` samples.
Returns
-------
y : np.ndarray [shape=(n,)]
time domain signal reconstructed from `stft_matrix`
See Also
--------
stft : Short-time Fourier Transform
Notes
-----
This function caches at level 30.
Examples
--------
>>> y, sr = librosa.load(librosa.util.example_audio_file())
>>> D = librosa.stft(y)
>>> y_hat = librosa.istft(D)
>>> y_hat
array([ -4.812e-06, -4.267e-06, ..., 6.271e-06, 2.827e-07], dtype=float32)
Exactly preserving length of the input signal requires explicit padding.
Otherwise, a partial frame at the end of `y` will not be represented.
>>> n = len(y)
>>> n_fft = 2048
>>> y_pad = librosa.util.fix_length(y, n + n_fft // 2)
>>> D = librosa.stft(y_pad, n_fft=n_fft)
>>> y_out = librosa.istft(D, length=n)
>>> np.max(np.abs(y - y_out))
1.4901161e-07
]
variable[n_fft] assign[=] binary_operation[constant[2] * binary_operation[call[name[stft_matrix].shape][constant[0]] - constant[1]]]
if compare[name[win_length] is constant[None]] begin[:]
variable[win_length] assign[=] name[n_fft]
if compare[name[hop_length] is constant[None]] begin[:]
variable[hop_length] assign[=] call[name[int], parameter[binary_operation[name[win_length] <ast.FloorDiv object at 0x7da2590d6bc0> constant[4]]]]
variable[ifft_window] assign[=] call[name[get_window], parameter[name[window], name[win_length]]]
variable[ifft_window] assign[=] call[call[name[util].pad_center, parameter[name[ifft_window], name[n_fft]]]][tuple[[<ast.Slice object at 0x7da1b055ea10>, <ast.Attribute object at 0x7da1b055c8e0>]]]
variable[n_frames] assign[=] call[name[stft_matrix].shape][constant[1]]
variable[expected_signal_len] assign[=] binary_operation[name[n_fft] + binary_operation[name[hop_length] * binary_operation[name[n_frames] - constant[1]]]]
variable[y] assign[=] call[name[np].zeros, parameter[name[expected_signal_len]]]
variable[n_columns] assign[=] call[name[int], parameter[binary_operation[name[util].MAX_MEM_BLOCK <ast.FloorDiv object at 0x7da2590d6bc0> binary_operation[call[name[stft_matrix].shape][constant[0]] * name[stft_matrix].itemsize]]]]
variable[fft] assign[=] call[name[get_fftlib], parameter[]]
variable[frame] assign[=] constant[0]
for taget[name[bl_s]] in starred[call[name[range], parameter[constant[0], name[n_frames], name[n_columns]]]] begin[:]
variable[bl_t] assign[=] call[name[min], parameter[binary_operation[name[bl_s] + name[n_columns]], name[n_frames]]]
variable[ytmp] assign[=] binary_operation[name[ifft_window] * call[name[fft].irfft, parameter[call[name[stft_matrix]][tuple[[<ast.Slice object at 0x7da1b055cc10>, <ast.Slice object at 0x7da1b055c8b0>]]]]]]
call[name[__overlap_add], parameter[call[name[y]][<ast.Slice object at 0x7da1b055e140>], name[ytmp], name[hop_length]]]
<ast.AugAssign object at 0x7da1b055d630>
variable[ifft_window_sum] assign[=] call[name[window_sumsquare], parameter[name[window], name[n_frames]]]
variable[approx_nonzero_indices] assign[=] compare[name[ifft_window_sum] greater[>] call[name[util].tiny, parameter[name[ifft_window_sum]]]]
<ast.AugAssign object at 0x7da207f98fa0>
if compare[name[length] is constant[None]] begin[:]
if name[center] begin[:]
variable[y] assign[=] call[name[y]][<ast.Slice object at 0x7da207f99de0>]
return[name[y]] | keyword[def] identifier[istft] ( identifier[stft_matrix] , identifier[hop_length] = keyword[None] , identifier[win_length] = keyword[None] , identifier[window] = literal[string] ,
identifier[center] = keyword[True] , identifier[dtype] = identifier[np] . identifier[float32] , identifier[length] = keyword[None] ):
literal[string]
identifier[n_fft] = literal[int] *( identifier[stft_matrix] . identifier[shape] [ literal[int] ]- literal[int] )
keyword[if] identifier[win_length] keyword[is] keyword[None] :
identifier[win_length] = identifier[n_fft]
keyword[if] identifier[hop_length] keyword[is] keyword[None] :
identifier[hop_length] = identifier[int] ( identifier[win_length] // literal[int] )
identifier[ifft_window] = identifier[get_window] ( identifier[window] , identifier[win_length] , identifier[fftbins] = keyword[True] )
identifier[ifft_window] = identifier[util] . identifier[pad_center] ( identifier[ifft_window] , identifier[n_fft] )[:, identifier[np] . identifier[newaxis] ]
identifier[n_frames] = identifier[stft_matrix] . identifier[shape] [ literal[int] ]
identifier[expected_signal_len] = identifier[n_fft] + identifier[hop_length] *( identifier[n_frames] - literal[int] )
identifier[y] = identifier[np] . identifier[zeros] ( identifier[expected_signal_len] , identifier[dtype] = identifier[dtype] )
identifier[n_columns] = identifier[int] ( identifier[util] . identifier[MAX_MEM_BLOCK] //( identifier[stft_matrix] . identifier[shape] [ literal[int] ]*
identifier[stft_matrix] . identifier[itemsize] ))
identifier[fft] = identifier[get_fftlib] ()
identifier[frame] = literal[int]
keyword[for] identifier[bl_s] keyword[in] identifier[range] ( literal[int] , identifier[n_frames] , identifier[n_columns] ):
identifier[bl_t] = identifier[min] ( identifier[bl_s] + identifier[n_columns] , identifier[n_frames] )
identifier[ytmp] = identifier[ifft_window] * identifier[fft] . identifier[irfft] ( identifier[stft_matrix] [:, identifier[bl_s] : identifier[bl_t] ], identifier[axis] = literal[int] )
identifier[__overlap_add] ( identifier[y] [ identifier[frame] * identifier[hop_length] :], identifier[ytmp] , identifier[hop_length] )
identifier[frame] +=( identifier[bl_t] - identifier[bl_s] )
identifier[ifft_window_sum] = identifier[window_sumsquare] ( identifier[window] ,
identifier[n_frames] ,
identifier[win_length] = identifier[win_length] ,
identifier[n_fft] = identifier[n_fft] ,
identifier[hop_length] = identifier[hop_length] ,
identifier[dtype] = identifier[dtype] )
identifier[approx_nonzero_indices] = identifier[ifft_window_sum] > identifier[util] . identifier[tiny] ( identifier[ifft_window_sum] )
identifier[y] [ identifier[approx_nonzero_indices] ]/= identifier[ifft_window_sum] [ identifier[approx_nonzero_indices] ]
keyword[if] identifier[length] keyword[is] keyword[None] :
keyword[if] identifier[center] :
identifier[y] = identifier[y] [ identifier[int] ( identifier[n_fft] // literal[int] ):- identifier[int] ( identifier[n_fft] // literal[int] )]
keyword[else] :
keyword[if] identifier[center] :
identifier[start] = identifier[int] ( identifier[n_fft] // literal[int] )
keyword[else] :
identifier[start] = literal[int]
identifier[y] = identifier[util] . identifier[fix_length] ( identifier[y] [ identifier[start] :], identifier[length] )
keyword[return] identifier[y] | def istft(stft_matrix, hop_length=None, win_length=None, window='hann', center=True, dtype=np.float32, length=None):
"""
Inverse short-time Fourier transform (ISTFT).
Converts a complex-valued spectrogram `stft_matrix` to time-series `y`
by minimizing the mean squared error between `stft_matrix` and STFT of
`y` as described in [1]_ up to Section 2 (reconstruction from MSTFT).
In general, window function, hop length and other parameters should be same
as in stft, which mostly leads to perfect reconstruction of a signal from
unmodified `stft_matrix`.
.. [1] D. W. Griffin and J. S. Lim,
"Signal estimation from modified short-time Fourier transform,"
IEEE Trans. ASSP, vol.32, no.2, pp.236–243, Apr. 1984.
Parameters
----------
stft_matrix : np.ndarray [shape=(1 + n_fft/2, t)]
STFT matrix from `stft`
hop_length : int > 0 [scalar]
Number of frames between STFT columns.
If unspecified, defaults to `win_length / 4`.
win_length : int <= n_fft = 2 * (stft_matrix.shape[0] - 1)
When reconstructing the time series, each frame is windowed
and each sample is normalized by the sum of squared window
according to the `window` function (see below).
If unspecified, defaults to `n_fft`.
window : string, tuple, number, function, np.ndarray [shape=(n_fft,)]
- a window specification (string, tuple, or number);
see `scipy.signal.get_window`
- a window function, such as `scipy.signal.hanning`
- a user-specified window vector of length `n_fft`
.. see also:: `filters.get_window`
center : boolean
- If `True`, `D` is assumed to have centered frames.
- If `False`, `D` is assumed to have left-aligned frames.
dtype : numeric type
Real numeric type for `y`. Default is 32-bit float.
length : int > 0, optional
If provided, the output `y` is zero-padded or clipped to exactly
`length` samples.
Returns
-------
y : np.ndarray [shape=(n,)]
time domain signal reconstructed from `stft_matrix`
See Also
--------
stft : Short-time Fourier Transform
Notes
-----
This function caches at level 30.
Examples
--------
>>> y, sr = librosa.load(librosa.util.example_audio_file())
>>> D = librosa.stft(y)
>>> y_hat = librosa.istft(D)
>>> y_hat
array([ -4.812e-06, -4.267e-06, ..., 6.271e-06, 2.827e-07], dtype=float32)
Exactly preserving length of the input signal requires explicit padding.
Otherwise, a partial frame at the end of `y` will not be represented.
>>> n = len(y)
>>> n_fft = 2048
>>> y_pad = librosa.util.fix_length(y, n + n_fft // 2)
>>> D = librosa.stft(y_pad, n_fft=n_fft)
>>> y_out = librosa.istft(D, length=n)
>>> np.max(np.abs(y - y_out))
1.4901161e-07
"""
n_fft = 2 * (stft_matrix.shape[0] - 1)
# By default, use the entire frame
if win_length is None:
win_length = n_fft # depends on [control=['if'], data=['win_length']]
# Set the default hop, if it's not already specified
if hop_length is None:
hop_length = int(win_length // 4) # depends on [control=['if'], data=['hop_length']]
ifft_window = get_window(window, win_length, fftbins=True)
# Pad out to match n_fft, and add a broadcasting axis
ifft_window = util.pad_center(ifft_window, n_fft)[:, np.newaxis]
n_frames = stft_matrix.shape[1]
expected_signal_len = n_fft + hop_length * (n_frames - 1)
y = np.zeros(expected_signal_len, dtype=dtype)
n_columns = int(util.MAX_MEM_BLOCK // (stft_matrix.shape[0] * stft_matrix.itemsize))
fft = get_fftlib()
frame = 0
for bl_s in range(0, n_frames, n_columns):
bl_t = min(bl_s + n_columns, n_frames)
# invert the block and apply the window function
ytmp = ifft_window * fft.irfft(stft_matrix[:, bl_s:bl_t], axis=0)
# Overlap-add the istft block starting at the i'th frame
__overlap_add(y[frame * hop_length:], ytmp, hop_length)
frame += bl_t - bl_s # depends on [control=['for'], data=['bl_s']]
# Normalize by sum of squared window
ifft_window_sum = window_sumsquare(window, n_frames, win_length=win_length, n_fft=n_fft, hop_length=hop_length, dtype=dtype)
approx_nonzero_indices = ifft_window_sum > util.tiny(ifft_window_sum)
y[approx_nonzero_indices] /= ifft_window_sum[approx_nonzero_indices]
if length is None:
# If we don't need to control length, just do the usual center trimming
# to eliminate padded data
if center:
y = y[int(n_fft // 2):-int(n_fft // 2)] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
if center:
# If we're centering, crop off the first n_fft//2 samples
# and then trim/pad to the target length.
# We don't trim the end here, so that if the signal is zero-padded
# to a longer duration, the decay is smooth by windowing
start = int(n_fft // 2) # depends on [control=['if'], data=[]]
else:
# If we're not centering, start at 0 and trim/pad as necessary
start = 0
y = util.fix_length(y[start:], length)
return y |
def uniquified_mesh(self):
"""This function returns a copy of the mesh in which vertices are copied such that
each vertex appears in only one face, and hence has only one texture"""
import numpy as np
from lace.mesh import Mesh
new_mesh = Mesh(v=self.v[self.f.flatten()], f=np.array(range(len(self.f.flatten()))).reshape(-1, 3))
if self.vn is None:
self.reset_normals()
new_mesh.vn = self.vn[self.f.flatten()]
if self.vt is not None:
new_mesh.vt = self.vt[self.ft.flatten()]
new_mesh.ft = new_mesh.f.copy()
return new_mesh | def function[uniquified_mesh, parameter[self]]:
constant[This function returns a copy of the mesh in which vertices are copied such that
each vertex appears in only one face, and hence has only one texture]
import module[numpy] as alias[np]
from relative_module[lace.mesh] import module[Mesh]
variable[new_mesh] assign[=] call[name[Mesh], parameter[]]
if compare[name[self].vn is constant[None]] begin[:]
call[name[self].reset_normals, parameter[]]
name[new_mesh].vn assign[=] call[name[self].vn][call[name[self].f.flatten, parameter[]]]
if compare[name[self].vt is_not constant[None]] begin[:]
name[new_mesh].vt assign[=] call[name[self].vt][call[name[self].ft.flatten, parameter[]]]
name[new_mesh].ft assign[=] call[name[new_mesh].f.copy, parameter[]]
return[name[new_mesh]] | keyword[def] identifier[uniquified_mesh] ( identifier[self] ):
literal[string]
keyword[import] identifier[numpy] keyword[as] identifier[np]
keyword[from] identifier[lace] . identifier[mesh] keyword[import] identifier[Mesh]
identifier[new_mesh] = identifier[Mesh] ( identifier[v] = identifier[self] . identifier[v] [ identifier[self] . identifier[f] . identifier[flatten] ()], identifier[f] = identifier[np] . identifier[array] ( identifier[range] ( identifier[len] ( identifier[self] . identifier[f] . identifier[flatten] ()))). identifier[reshape] (- literal[int] , literal[int] ))
keyword[if] identifier[self] . identifier[vn] keyword[is] keyword[None] :
identifier[self] . identifier[reset_normals] ()
identifier[new_mesh] . identifier[vn] = identifier[self] . identifier[vn] [ identifier[self] . identifier[f] . identifier[flatten] ()]
keyword[if] identifier[self] . identifier[vt] keyword[is] keyword[not] keyword[None] :
identifier[new_mesh] . identifier[vt] = identifier[self] . identifier[vt] [ identifier[self] . identifier[ft] . identifier[flatten] ()]
identifier[new_mesh] . identifier[ft] = identifier[new_mesh] . identifier[f] . identifier[copy] ()
keyword[return] identifier[new_mesh] | def uniquified_mesh(self):
"""This function returns a copy of the mesh in which vertices are copied such that
each vertex appears in only one face, and hence has only one texture"""
import numpy as np
from lace.mesh import Mesh
new_mesh = Mesh(v=self.v[self.f.flatten()], f=np.array(range(len(self.f.flatten()))).reshape(-1, 3))
if self.vn is None:
self.reset_normals() # depends on [control=['if'], data=[]]
new_mesh.vn = self.vn[self.f.flatten()]
if self.vt is not None:
new_mesh.vt = self.vt[self.ft.flatten()]
new_mesh.ft = new_mesh.f.copy() # depends on [control=['if'], data=[]]
return new_mesh |
def create_roles(apps, schema_editor):
"""Create the enterprise roles if they do not already exist."""
EnterpriseFeatureRole = apps.get_model('enterprise', 'EnterpriseFeatureRole')
EnterpriseFeatureRole.objects.update_or_create(name=ENTERPRISE_CATALOG_ADMIN_ROLE)
EnterpriseFeatureRole.objects.update_or_create(name=ENTERPRISE_DASHBOARD_ADMIN_ROLE)
EnterpriseFeatureRole.objects.update_or_create(name=ENTERPRISE_ENROLLMENT_API_ADMIN_ROLE) | def function[create_roles, parameter[apps, schema_editor]]:
constant[Create the enterprise roles if they do not already exist.]
variable[EnterpriseFeatureRole] assign[=] call[name[apps].get_model, parameter[constant[enterprise], constant[EnterpriseFeatureRole]]]
call[name[EnterpriseFeatureRole].objects.update_or_create, parameter[]]
call[name[EnterpriseFeatureRole].objects.update_or_create, parameter[]]
call[name[EnterpriseFeatureRole].objects.update_or_create, parameter[]] | keyword[def] identifier[create_roles] ( identifier[apps] , identifier[schema_editor] ):
literal[string]
identifier[EnterpriseFeatureRole] = identifier[apps] . identifier[get_model] ( literal[string] , literal[string] )
identifier[EnterpriseFeatureRole] . identifier[objects] . identifier[update_or_create] ( identifier[name] = identifier[ENTERPRISE_CATALOG_ADMIN_ROLE] )
identifier[EnterpriseFeatureRole] . identifier[objects] . identifier[update_or_create] ( identifier[name] = identifier[ENTERPRISE_DASHBOARD_ADMIN_ROLE] )
identifier[EnterpriseFeatureRole] . identifier[objects] . identifier[update_or_create] ( identifier[name] = identifier[ENTERPRISE_ENROLLMENT_API_ADMIN_ROLE] ) | def create_roles(apps, schema_editor):
"""Create the enterprise roles if they do not already exist."""
EnterpriseFeatureRole = apps.get_model('enterprise', 'EnterpriseFeatureRole')
EnterpriseFeatureRole.objects.update_or_create(name=ENTERPRISE_CATALOG_ADMIN_ROLE)
EnterpriseFeatureRole.objects.update_or_create(name=ENTERPRISE_DASHBOARD_ADMIN_ROLE)
EnterpriseFeatureRole.objects.update_or_create(name=ENTERPRISE_ENROLLMENT_API_ADMIN_ROLE) |
def execute(self, query, args=None):
"""
:return: Future[Cursor]
:rtype: Future
"""
self._ensure_conn()
cur = self._conn.cursor()
yield cur.execute(query, args)
raise Return(cur) | def function[execute, parameter[self, query, args]]:
constant[
:return: Future[Cursor]
:rtype: Future
]
call[name[self]._ensure_conn, parameter[]]
variable[cur] assign[=] call[name[self]._conn.cursor, parameter[]]
<ast.Yield object at 0x7da20e9b1ba0>
<ast.Raise object at 0x7da20e9b0370> | keyword[def] identifier[execute] ( identifier[self] , identifier[query] , identifier[args] = keyword[None] ):
literal[string]
identifier[self] . identifier[_ensure_conn] ()
identifier[cur] = identifier[self] . identifier[_conn] . identifier[cursor] ()
keyword[yield] identifier[cur] . identifier[execute] ( identifier[query] , identifier[args] )
keyword[raise] identifier[Return] ( identifier[cur] ) | def execute(self, query, args=None):
"""
:return: Future[Cursor]
:rtype: Future
"""
self._ensure_conn()
cur = self._conn.cursor()
yield cur.execute(query, args)
raise Return(cur) |
def peep_hash(argv):
"""Return the peep hash of one or more files, returning a shell status code
or raising a PipException.
:arg argv: The commandline args, starting after the subcommand
"""
parser = OptionParser(
usage='usage: %prog hash file [file ...]',
description='Print a peep hash line for one or more files: for '
'example, "# sha256: '
'oz42dZy6Gowxw8AelDtO4gRgTW_xPdooH484k7I5EOY".')
_, paths = parser.parse_args(args=argv)
if paths:
for path in paths:
print('# sha256:', hash_of_file(path))
return ITS_FINE_ITS_FINE
else:
parser.print_usage()
return COMMAND_LINE_ERROR | def function[peep_hash, parameter[argv]]:
constant[Return the peep hash of one or more files, returning a shell status code
or raising a PipException.
:arg argv: The commandline args, starting after the subcommand
]
variable[parser] assign[=] call[name[OptionParser], parameter[]]
<ast.Tuple object at 0x7da18f09e4a0> assign[=] call[name[parser].parse_args, parameter[]]
if name[paths] begin[:]
for taget[name[path]] in starred[name[paths]] begin[:]
call[name[print], parameter[constant[# sha256:], call[name[hash_of_file], parameter[name[path]]]]]
return[name[ITS_FINE_ITS_FINE]] | keyword[def] identifier[peep_hash] ( identifier[argv] ):
literal[string]
identifier[parser] = identifier[OptionParser] (
identifier[usage] = literal[string] ,
identifier[description] = literal[string]
literal[string]
literal[string] )
identifier[_] , identifier[paths] = identifier[parser] . identifier[parse_args] ( identifier[args] = identifier[argv] )
keyword[if] identifier[paths] :
keyword[for] identifier[path] keyword[in] identifier[paths] :
identifier[print] ( literal[string] , identifier[hash_of_file] ( identifier[path] ))
keyword[return] identifier[ITS_FINE_ITS_FINE]
keyword[else] :
identifier[parser] . identifier[print_usage] ()
keyword[return] identifier[COMMAND_LINE_ERROR] | def peep_hash(argv):
"""Return the peep hash of one or more files, returning a shell status code
or raising a PipException.
:arg argv: The commandline args, starting after the subcommand
"""
parser = OptionParser(usage='usage: %prog hash file [file ...]', description='Print a peep hash line for one or more files: for example, "# sha256: oz42dZy6Gowxw8AelDtO4gRgTW_xPdooH484k7I5EOY".')
(_, paths) = parser.parse_args(args=argv)
if paths:
for path in paths:
print('# sha256:', hash_of_file(path)) # depends on [control=['for'], data=['path']]
return ITS_FINE_ITS_FINE # depends on [control=['if'], data=[]]
else:
parser.print_usage()
return COMMAND_LINE_ERROR |
def error(self, amplexception):
"""
Receives notification of an error.
"""
msg = '\t'+str(amplexception).replace('\n', '\n\t')
print('Error:\n{:s}'.format(msg))
raise amplexception | def function[error, parameter[self, amplexception]]:
constant[
Receives notification of an error.
]
variable[msg] assign[=] binary_operation[constant[ ] + call[call[name[str], parameter[name[amplexception]]].replace, parameter[constant[
], constant[
]]]]
call[name[print], parameter[call[constant[Error:
{:s}].format, parameter[name[msg]]]]]
<ast.Raise object at 0x7da18f813f40> | keyword[def] identifier[error] ( identifier[self] , identifier[amplexception] ):
literal[string]
identifier[msg] = literal[string] + identifier[str] ( identifier[amplexception] ). identifier[replace] ( literal[string] , literal[string] )
identifier[print] ( literal[string] . identifier[format] ( identifier[msg] ))
keyword[raise] identifier[amplexception] | def error(self, amplexception):
"""
Receives notification of an error.
"""
msg = '\t' + str(amplexception).replace('\n', '\n\t')
print('Error:\n{:s}'.format(msg))
raise amplexception |
def ipv6_acl_ipv6_access_list_standard_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ipv6_acl = ET.SubElement(config, "ipv6-acl", xmlns="urn:brocade.com:mgmt:brocade-ipv6-access-list")
ipv6 = ET.SubElement(ipv6_acl, "ipv6")
access_list = ET.SubElement(ipv6, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
seq = ET.SubElement(standard, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[ipv6_acl_ipv6_access_list_standard_seq_action, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[ipv6_acl] assign[=] call[name[ET].SubElement, parameter[name[config], constant[ipv6-acl]]]
variable[ipv6] assign[=] call[name[ET].SubElement, parameter[name[ipv6_acl], constant[ipv6]]]
variable[access_list] assign[=] call[name[ET].SubElement, parameter[name[ipv6], constant[access-list]]]
variable[standard] assign[=] call[name[ET].SubElement, parameter[name[access_list], constant[standard]]]
variable[name_key] assign[=] call[name[ET].SubElement, parameter[name[standard], constant[name]]]
name[name_key].text assign[=] call[name[kwargs].pop, parameter[constant[name]]]
variable[seq] assign[=] call[name[ET].SubElement, parameter[name[standard], constant[seq]]]
variable[seq_id_key] assign[=] call[name[ET].SubElement, parameter[name[seq], constant[seq-id]]]
name[seq_id_key].text assign[=] call[name[kwargs].pop, parameter[constant[seq_id]]]
variable[action] assign[=] call[name[ET].SubElement, parameter[name[seq], constant[action]]]
name[action].text assign[=] call[name[kwargs].pop, parameter[constant[action]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[ipv6_acl_ipv6_access_list_standard_seq_action] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[ipv6_acl] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[ipv6] = identifier[ET] . identifier[SubElement] ( identifier[ipv6_acl] , literal[string] )
identifier[access_list] = identifier[ET] . identifier[SubElement] ( identifier[ipv6] , literal[string] )
identifier[standard] = identifier[ET] . identifier[SubElement] ( identifier[access_list] , literal[string] )
identifier[name_key] = identifier[ET] . identifier[SubElement] ( identifier[standard] , literal[string] )
identifier[name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[seq] = identifier[ET] . identifier[SubElement] ( identifier[standard] , literal[string] )
identifier[seq_id_key] = identifier[ET] . identifier[SubElement] ( identifier[seq] , literal[string] )
identifier[seq_id_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[action] = identifier[ET] . identifier[SubElement] ( identifier[seq] , literal[string] )
identifier[action] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def ipv6_acl_ipv6_access_list_standard_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
ipv6_acl = ET.SubElement(config, 'ipv6-acl', xmlns='urn:brocade.com:mgmt:brocade-ipv6-access-list')
ipv6 = ET.SubElement(ipv6_acl, 'ipv6')
access_list = ET.SubElement(ipv6, 'access-list')
standard = ET.SubElement(access_list, 'standard')
name_key = ET.SubElement(standard, 'name')
name_key.text = kwargs.pop('name')
seq = ET.SubElement(standard, 'seq')
seq_id_key = ET.SubElement(seq, 'seq-id')
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, 'action')
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def _getOpenChoices(self):
    """ Collect every .cfg file applicable to this task and return them
    as a sorted list, with an "Other ..." entry appended at the end
    (later interpreted as a request to open a file dialog). """
    task = self._taskParsObj.getName()
    found = set()
    scanned = []  # directories already globbed; avoids redundant scans

    def _scan(directory):
        # Glob each candidate directory at most once.
        if directory not in scanned:
            scanned.append(directory)
            found.update(cfgpars.getCfgFilesInDirForTask(directory, task))

    # 1) directory of the file last loaded (fall back to cwd marker)
    _scan(os.path.dirname(self._taskParsObj.filename) or os.curdir)
    # 2) current working directory
    _scan(os.getcwd())
    # 3) the task's python package dir (when the task name is a package name)
    try:
        x, pkgFile = cfgpars.findCfgFileForPkg(
            task, '.cfg', taskName=task,
            pkgObj=self._taskParsObj.getAssocPkg())
        found.update((pkgFile,))
    except cfgpars.NoCfgFileError:
        pass  # no big deal - maybe there is no python package
    # 4) the user's own resource directory
    _scan(self._rcDir)
    # 5) extra location named by the app-specific environment variable
    envVarName = APP_NAME.upper() + '_CFG'
    if envVarName in os.environ:
        _scan(os.environ[envVarName])
    # Sorted results, then the special trailing entry.
    choices = sorted(found)
    choices.append("Other ...")
    return choices
constant[ Go through all possible sites to find applicable .cfg files.
Return as an iterable. ]
variable[tsk] assign[=] call[name[self]._taskParsObj.getName, parameter[]]
variable[taskFiles] assign[=] call[name[set], parameter[]]
variable[dirsSoFar] assign[=] list[[]]
variable[aDir] assign[=] call[name[os].path.dirname, parameter[name[self]._taskParsObj.filename]]
if compare[call[name[len], parameter[name[aDir]]] less[<] constant[1]] begin[:]
variable[aDir] assign[=] name[os].curdir
call[name[dirsSoFar].append, parameter[name[aDir]]]
call[name[taskFiles].update, parameter[call[name[cfgpars].getCfgFilesInDirForTask, parameter[name[aDir], name[tsk]]]]]
variable[aDir] assign[=] call[name[os].getcwd, parameter[]]
if compare[name[aDir] <ast.NotIn object at 0x7da2590d7190> name[dirsSoFar]] begin[:]
call[name[dirsSoFar].append, parameter[name[aDir]]]
call[name[taskFiles].update, parameter[call[name[cfgpars].getCfgFilesInDirForTask, parameter[name[aDir], name[tsk]]]]]
<ast.Try object at 0x7da1b0e80f40>
variable[aDir] assign[=] name[self]._rcDir
if compare[name[aDir] <ast.NotIn object at 0x7da2590d7190> name[dirsSoFar]] begin[:]
call[name[dirsSoFar].append, parameter[name[aDir]]]
call[name[taskFiles].update, parameter[call[name[cfgpars].getCfgFilesInDirForTask, parameter[name[aDir], name[tsk]]]]]
variable[aDir] assign[=] call[name[dirsSoFar]][constant[0]]
variable[envVarName] assign[=] binary_operation[call[name[APP_NAME].upper, parameter[]] + constant[_CFG]]
if compare[name[envVarName] in name[os].environ] begin[:]
variable[aDir] assign[=] call[name[os].environ][name[envVarName]]
if compare[name[aDir] <ast.NotIn object at 0x7da2590d7190> name[dirsSoFar]] begin[:]
call[name[dirsSoFar].append, parameter[name[aDir]]]
call[name[taskFiles].update, parameter[call[name[cfgpars].getCfgFilesInDirForTask, parameter[name[aDir], name[tsk]]]]]
variable[taskFiles] assign[=] call[name[list], parameter[name[taskFiles]]]
call[name[taskFiles].sort, parameter[]]
call[name[taskFiles].append, parameter[constant[Other ...]]]
return[name[taskFiles]] | keyword[def] identifier[_getOpenChoices] ( identifier[self] ):
literal[string]
identifier[tsk] = identifier[self] . identifier[_taskParsObj] . identifier[getName] ()
identifier[taskFiles] = identifier[set] ()
identifier[dirsSoFar] =[]
identifier[aDir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[self] . identifier[_taskParsObj] . identifier[filename] )
keyword[if] identifier[len] ( identifier[aDir] )< literal[int] : identifier[aDir] = identifier[os] . identifier[curdir]
identifier[dirsSoFar] . identifier[append] ( identifier[aDir] )
identifier[taskFiles] . identifier[update] ( identifier[cfgpars] . identifier[getCfgFilesInDirForTask] ( identifier[aDir] , identifier[tsk] ))
identifier[aDir] = identifier[os] . identifier[getcwd] ()
keyword[if] identifier[aDir] keyword[not] keyword[in] identifier[dirsSoFar] :
identifier[dirsSoFar] . identifier[append] ( identifier[aDir] )
identifier[taskFiles] . identifier[update] ( identifier[cfgpars] . identifier[getCfgFilesInDirForTask] ( identifier[aDir] , identifier[tsk] ))
keyword[try] :
identifier[x] , identifier[pkgf] = identifier[cfgpars] . identifier[findCfgFileForPkg] ( identifier[tsk] , literal[string] , identifier[taskName] = identifier[tsk] ,
identifier[pkgObj] = identifier[self] . identifier[_taskParsObj] . identifier[getAssocPkg] ())
identifier[taskFiles] . identifier[update] (( identifier[pkgf] ,))
keyword[except] identifier[cfgpars] . identifier[NoCfgFileError] :
keyword[pass]
identifier[aDir] = identifier[self] . identifier[_rcDir]
keyword[if] identifier[aDir] keyword[not] keyword[in] identifier[dirsSoFar] :
identifier[dirsSoFar] . identifier[append] ( identifier[aDir] )
identifier[taskFiles] . identifier[update] ( identifier[cfgpars] . identifier[getCfgFilesInDirForTask] ( identifier[aDir] , identifier[tsk] ))
identifier[aDir] = identifier[dirsSoFar] [ literal[int] ]
identifier[envVarName] = identifier[APP_NAME] . identifier[upper] ()+ literal[string]
keyword[if] identifier[envVarName] keyword[in] identifier[os] . identifier[environ] : identifier[aDir] = identifier[os] . identifier[environ] [ identifier[envVarName] ]
keyword[if] identifier[aDir] keyword[not] keyword[in] identifier[dirsSoFar] :
identifier[dirsSoFar] . identifier[append] ( identifier[aDir] )
identifier[taskFiles] . identifier[update] ( identifier[cfgpars] . identifier[getCfgFilesInDirForTask] ( identifier[aDir] , identifier[tsk] ))
identifier[taskFiles] = identifier[list] ( identifier[taskFiles] )
identifier[taskFiles] . identifier[sort] ()
identifier[taskFiles] . identifier[append] ( literal[string] )
keyword[return] identifier[taskFiles] | def _getOpenChoices(self):
""" Go through all possible sites to find applicable .cfg files.
Return as an iterable. """
tsk = self._taskParsObj.getName()
taskFiles = set()
dirsSoFar = [] # this helps speed this up (skip unneeded globs)
# last dir
aDir = os.path.dirname(self._taskParsObj.filename)
if len(aDir) < 1:
aDir = os.curdir # depends on [control=['if'], data=[]]
dirsSoFar.append(aDir)
taskFiles.update(cfgpars.getCfgFilesInDirForTask(aDir, tsk))
# current dir
aDir = os.getcwd()
if aDir not in dirsSoFar:
dirsSoFar.append(aDir)
taskFiles.update(cfgpars.getCfgFilesInDirForTask(aDir, tsk)) # depends on [control=['if'], data=['aDir', 'dirsSoFar']]
# task's python pkg dir (if tsk == python pkg name)
try:
(x, pkgf) = cfgpars.findCfgFileForPkg(tsk, '.cfg', taskName=tsk, pkgObj=self._taskParsObj.getAssocPkg())
taskFiles.update((pkgf,)) # depends on [control=['try'], data=[]]
except cfgpars.NoCfgFileError:
pass # no big deal - maybe there is no python package # depends on [control=['except'], data=[]]
# user's own resourceDir
aDir = self._rcDir
if aDir not in dirsSoFar:
dirsSoFar.append(aDir)
taskFiles.update(cfgpars.getCfgFilesInDirForTask(aDir, tsk)) # depends on [control=['if'], data=['aDir', 'dirsSoFar']]
# extra loc - see if they used the app's env. var
aDir = dirsSoFar[0] # flag to skip this if no env var found
envVarName = APP_NAME.upper() + '_CFG'
if envVarName in os.environ:
aDir = os.environ[envVarName] # depends on [control=['if'], data=['envVarName']]
if aDir not in dirsSoFar:
dirsSoFar.append(aDir)
taskFiles.update(cfgpars.getCfgFilesInDirForTask(aDir, tsk)) # depends on [control=['if'], data=['aDir', 'dirsSoFar']]
# At the very end, add an option which we will later interpret to mean
# to open the file dialog.
taskFiles = list(taskFiles) # so as to keep next item at end of seq
taskFiles.sort()
taskFiles.append('Other ...')
return taskFiles |
def _get_param_iterator(self):
    """Return a ParameterSampler over the configured distributions.

    Uses ``self.param_distributions``, ``self.n_iter`` and
    ``self.random_state`` to build the sampler.
    """
    sampler = model_selection.ParameterSampler(
        self.param_distributions,
        self.n_iter,
        random_state=self.random_state,
    )
    return sampler
constant[Return ParameterSampler instance for the given distributions]
return[call[name[model_selection].ParameterSampler, parameter[name[self].param_distributions, name[self].n_iter]]] | keyword[def] identifier[_get_param_iterator] ( identifier[self] ):
literal[string]
keyword[return] identifier[model_selection] . identifier[ParameterSampler] (
identifier[self] . identifier[param_distributions] , identifier[self] . identifier[n_iter] , identifier[random_state] = identifier[self] . identifier[random_state]
) | def _get_param_iterator(self):
"""Return ParameterSampler instance for the given distributions"""
return model_selection.ParameterSampler(self.param_distributions, self.n_iter, random_state=self.random_state) |
def save(self, **kwargs):
    """
    Pop the translated fields out of the validated data, let the parent
    serializer persist the main object, then persist the translations.

    By default all translations are saved whether the object is being
    created or updated. Users with more complex needs can override this
    method and handle translation saving themselves.
    """
    translations = self._pop_translated_data()
    instance = super(TranslatableModelSerializer, self).save(**kwargs)
    self.save_translations(instance, translations)
    return instance
constant[
Extract the translations and save them after main object save.
By default all translations will be saved no matter if creating
or updating an object. Users with more complex needs might define
their own save and handle translation saving themselves.
]
variable[translated_data] assign[=] call[name[self]._pop_translated_data, parameter[]]
variable[instance] assign[=] call[call[name[super], parameter[name[TranslatableModelSerializer], name[self]]].save, parameter[]]
call[name[self].save_translations, parameter[name[instance], name[translated_data]]]
return[name[instance]] | keyword[def] identifier[save] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[translated_data] = identifier[self] . identifier[_pop_translated_data] ()
identifier[instance] = identifier[super] ( identifier[TranslatableModelSerializer] , identifier[self] ). identifier[save] (** identifier[kwargs] )
identifier[self] . identifier[save_translations] ( identifier[instance] , identifier[translated_data] )
keyword[return] identifier[instance] | def save(self, **kwargs):
"""
Extract the translations and save them after main object save.
By default all translations will be saved no matter if creating
or updating an object. Users with more complex needs might define
their own save and handle translation saving themselves.
"""
translated_data = self._pop_translated_data()
instance = super(TranslatableModelSerializer, self).save(**kwargs)
self.save_translations(instance, translated_data)
return instance |
def search_info(self, search_index):
    """
    Retrieves information about a specified search index within the design
    document, returns dictionary
    GET databasename/_design/{ddoc}/_search_info/{search_index}
    """
    url = '/'.join([self.document_url, '_search_info', search_index])
    resp = self.r_session.get(url)
    # Surface HTTP errors (4xx/5xx) instead of returning a bad payload.
    resp.raise_for_status()
    return response_to_json_dict(resp)
constant[
Retrieves information about a specified search index within the design
document, returns dictionary
GET databasename/_design/{ddoc}/_search_info/{search_index}
]
variable[ddoc_search_info] assign[=] call[name[self].r_session.get, parameter[call[constant[/].join, parameter[list[[<ast.Attribute object at 0x7da20e9568c0>, <ast.Constant object at 0x7da20e957f70>, <ast.Name object at 0x7da20e957010>]]]]]]
call[name[ddoc_search_info].raise_for_status, parameter[]]
return[call[name[response_to_json_dict], parameter[name[ddoc_search_info]]]] | keyword[def] identifier[search_info] ( identifier[self] , identifier[search_index] ):
literal[string]
identifier[ddoc_search_info] = identifier[self] . identifier[r_session] . identifier[get] (
literal[string] . identifier[join] ([ identifier[self] . identifier[document_url] , literal[string] , identifier[search_index] ]))
identifier[ddoc_search_info] . identifier[raise_for_status] ()
keyword[return] identifier[response_to_json_dict] ( identifier[ddoc_search_info] ) | def search_info(self, search_index):
"""
Retrieves information about a specified search index within the design
document, returns dictionary
GET databasename/_design/{ddoc}/_search_info/{search_index}
"""
ddoc_search_info = self.r_session.get('/'.join([self.document_url, '_search_info', search_index]))
ddoc_search_info.raise_for_status()
return response_to_json_dict(ddoc_search_info) |
def _svd_step(self, X, shrinkage_value, max_rank=None):
"""
Returns reconstructed X from low-rank thresholded SVD and
the rank achieved.
"""
if max_rank:
# if we have a max rank then perform the faster randomized SVD
(U, s, V) = randomized_svd(
X,
max_rank,
n_iter=self.n_power_iterations)
else:
# perform a full rank SVD using ARPACK
(U, s, V) = np.linalg.svd(
X,
full_matrices=False,
compute_uv=True)
s_thresh = np.maximum(s - shrinkage_value, 0)
rank = (s_thresh > 0).sum()
s_thresh = s_thresh[:rank]
U_thresh = U[:, :rank]
V_thresh = V[:rank, :]
S_thresh = np.diag(s_thresh)
X_reconstruction = np.dot(U_thresh, np.dot(S_thresh, V_thresh))
return X_reconstruction, rank | def function[_svd_step, parameter[self, X, shrinkage_value, max_rank]]:
constant[
Returns reconstructed X from low-rank thresholded SVD and
the rank achieved.
]
if name[max_rank] begin[:]
<ast.Tuple object at 0x7da18bc71150> assign[=] call[name[randomized_svd], parameter[name[X], name[max_rank]]]
variable[s_thresh] assign[=] call[name[np].maximum, parameter[binary_operation[name[s] - name[shrinkage_value]], constant[0]]]
variable[rank] assign[=] call[compare[name[s_thresh] greater[>] constant[0]].sum, parameter[]]
variable[s_thresh] assign[=] call[name[s_thresh]][<ast.Slice object at 0x7da18bc72ad0>]
variable[U_thresh] assign[=] call[name[U]][tuple[[<ast.Slice object at 0x7da18bc736d0>, <ast.Slice object at 0x7da18bc72950>]]]
variable[V_thresh] assign[=] call[name[V]][tuple[[<ast.Slice object at 0x7da18bc730d0>, <ast.Slice object at 0x7da18bc71120>]]]
variable[S_thresh] assign[=] call[name[np].diag, parameter[name[s_thresh]]]
variable[X_reconstruction] assign[=] call[name[np].dot, parameter[name[U_thresh], call[name[np].dot, parameter[name[S_thresh], name[V_thresh]]]]]
return[tuple[[<ast.Name object at 0x7da18bc70550>, <ast.Name object at 0x7da18bc73850>]]] | keyword[def] identifier[_svd_step] ( identifier[self] , identifier[X] , identifier[shrinkage_value] , identifier[max_rank] = keyword[None] ):
literal[string]
keyword[if] identifier[max_rank] :
( identifier[U] , identifier[s] , identifier[V] )= identifier[randomized_svd] (
identifier[X] ,
identifier[max_rank] ,
identifier[n_iter] = identifier[self] . identifier[n_power_iterations] )
keyword[else] :
( identifier[U] , identifier[s] , identifier[V] )= identifier[np] . identifier[linalg] . identifier[svd] (
identifier[X] ,
identifier[full_matrices] = keyword[False] ,
identifier[compute_uv] = keyword[True] )
identifier[s_thresh] = identifier[np] . identifier[maximum] ( identifier[s] - identifier[shrinkage_value] , literal[int] )
identifier[rank] =( identifier[s_thresh] > literal[int] ). identifier[sum] ()
identifier[s_thresh] = identifier[s_thresh] [: identifier[rank] ]
identifier[U_thresh] = identifier[U] [:,: identifier[rank] ]
identifier[V_thresh] = identifier[V] [: identifier[rank] ,:]
identifier[S_thresh] = identifier[np] . identifier[diag] ( identifier[s_thresh] )
identifier[X_reconstruction] = identifier[np] . identifier[dot] ( identifier[U_thresh] , identifier[np] . identifier[dot] ( identifier[S_thresh] , identifier[V_thresh] ))
keyword[return] identifier[X_reconstruction] , identifier[rank] | def _svd_step(self, X, shrinkage_value, max_rank=None):
"""
Returns reconstructed X from low-rank thresholded SVD and
the rank achieved.
"""
if max_rank:
# if we have a max rank then perform the faster randomized SVD
(U, s, V) = randomized_svd(X, max_rank, n_iter=self.n_power_iterations) # depends on [control=['if'], data=[]]
else:
# perform a full rank SVD using ARPACK
(U, s, V) = np.linalg.svd(X, full_matrices=False, compute_uv=True)
s_thresh = np.maximum(s - shrinkage_value, 0)
rank = (s_thresh > 0).sum()
s_thresh = s_thresh[:rank]
U_thresh = U[:, :rank]
V_thresh = V[:rank, :]
S_thresh = np.diag(s_thresh)
X_reconstruction = np.dot(U_thresh, np.dot(S_thresh, V_thresh))
return (X_reconstruction, rank) |
def stmts_to_json(stmts_in, use_sbo=False):
    """Return the JSON-serialized form of one or more INDRA Statements.

    Parameters
    ----------
    stmts_in : Statement or list[Statement]
        A Statement or list of Statement objects to serialize into JSON.
    use_sbo : Optional[bool]
        If True, SBO annotations are added to each applicable element of the
        JSON. Default: False

    Returns
    -------
    json_dict : dict or list[dict]
        JSON-serialized INDRA Statements: a single dict for a single
        Statement input, a list of dicts for a list input.
    """
    if isinstance(stmts_in, list):
        return [stmt.to_json(use_sbo=use_sbo) for stmt in stmts_in]
    return stmts_in.to_json(use_sbo=use_sbo)
constant[Return the JSON-serialized form of one or more INDRA Statements.
Parameters
----------
stmts_in : Statement or list[Statement]
A Statement or list of Statement objects to serialize into JSON.
use_sbo : Optional[bool]
If True, SBO annotations are added to each applicable element of the
JSON. Default: False
Returns
-------
json_dict : dict
JSON-serialized INDRA Statements.
]
if <ast.UnaryOp object at 0x7da204566980> begin[:]
variable[json_dict] assign[=] call[name[stmts_in].to_json, parameter[]]
return[name[json_dict]]
return[name[json_dict]] | keyword[def] identifier[stmts_to_json] ( identifier[stmts_in] , identifier[use_sbo] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[stmts_in] , identifier[list] ):
identifier[json_dict] = identifier[stmts_in] . identifier[to_json] ( identifier[use_sbo] = identifier[use_sbo] )
keyword[return] identifier[json_dict]
keyword[else] :
identifier[json_dict] =[ identifier[st] . identifier[to_json] ( identifier[use_sbo] = identifier[use_sbo] ) keyword[for] identifier[st] keyword[in] identifier[stmts_in] ]
keyword[return] identifier[json_dict] | def stmts_to_json(stmts_in, use_sbo=False):
"""Return the JSON-serialized form of one or more INDRA Statements.
Parameters
----------
stmts_in : Statement or list[Statement]
A Statement or list of Statement objects to serialize into JSON.
use_sbo : Optional[bool]
If True, SBO annotations are added to each applicable element of the
JSON. Default: False
Returns
-------
json_dict : dict
JSON-serialized INDRA Statements.
"""
if not isinstance(stmts_in, list):
json_dict = stmts_in.to_json(use_sbo=use_sbo)
return json_dict # depends on [control=['if'], data=[]]
else:
json_dict = [st.to_json(use_sbo=use_sbo) for st in stmts_in]
return json_dict |
def get_completion_args(self, is_completion=False, comp_line=None):  # pylint: disable=no-self-use
    """ Get the args that will be used to tab completion if completion is active. """
    active = is_completion or os.environ.get(ARGCOMPLETE_ENV_NAME)
    line = comp_line or os.environ.get('COMP_LINE')
    if active and line:
        # Drop the first token: it is the executable name, not an argument.
        return line.split()[1:]
    return None
constant[ Get the args that will be used to tab completion if completion is active. ]
variable[is_completion] assign[=] <ast.BoolOp object at 0x7da20c6aba30>
variable[comp_line] assign[=] <ast.BoolOp object at 0x7da20c6a9690>
return[<ast.IfExp object at 0x7da20c6aafe0>] | keyword[def] identifier[get_completion_args] ( identifier[self] , identifier[is_completion] = keyword[False] , identifier[comp_line] = keyword[None] ):
literal[string]
identifier[is_completion] = identifier[is_completion] keyword[or] identifier[os] . identifier[environ] . identifier[get] ( identifier[ARGCOMPLETE_ENV_NAME] )
identifier[comp_line] = identifier[comp_line] keyword[or] identifier[os] . identifier[environ] . identifier[get] ( literal[string] )
keyword[return] identifier[comp_line] . identifier[split] ()[ literal[int] :] keyword[if] identifier[is_completion] keyword[and] identifier[comp_line] keyword[else] keyword[None] | def get_completion_args(self, is_completion=False, comp_line=None): # pylint: disable=no-self-use
' Get the args that will be used to tab completion if completion is active. '
is_completion = is_completion or os.environ.get(ARGCOMPLETE_ENV_NAME)
comp_line = comp_line or os.environ.get('COMP_LINE')
# The first item is the exe name so ignore that.
return comp_line.split()[1:] if is_completion and comp_line else None |
def analyze_eigenvalues(path, name, Ne):
    r'''Summarize the characteristic times saved by ``run_diagonalization``
    (requires ``save_eigenvalues=True``).

    Returns the tuple
    ``(shortest_oscillation, longest_oscillation, shortest_half_life,
    longest_half_life)`` taken over all Ne**2 - 1 components.

    The shortest oscillation period can be interpreted as a suggested size
    for the time step needed for a fine-grained picture of time evolution
    (an order of magnitude smaller is likely to be optimal). The longest
    half life can be interpreted as a suggested time for the stationary
    state to be reached (an order of magnitude larger is likely optimal).
    '''
    times = characteristic_times(path, name)
    n = Ne ** 2 - 1
    # times[0] holds the detunings; then n oscillation-period lists
    # followed by n half-life lists.
    oscillations = times[1:1 + n]
    half_lives = times[1 + n:1 + 2 * n]
    min_osci = min(min(t) for t in oscillations)
    max_osci = max(max(t) for t in oscillations)
    min_half = min(min(t) for t in half_lives)
    max_half = max(max(t) for t in half_lives)
    return min_osci, max_osci, min_half, max_half
constant[This function can be called after calling ``run_diagonalization`` if the option ``save_eigenvalues``
is set to ``True``. It will return the shortest and longest oscillation period, and the shortest and
longest half life. The results are lists ordered as:
``[detunings, shortest_oscillations, longest_oscillations, shortest_half_lifes, longest_half_lifes]``.
The shortest oscillation period can be interpreted as a suggested size for the time step needed for a
fine-grained picture of time evolution (an order of magnitude smaller is likely to be optimal).
The longest half life can be interpreted as a suggested time for the stationary state to be reached
(an order of magnitude larger is likely to be optimal).
]
variable[times] assign[=] call[name[characteristic_times], parameter[name[path], name[name]]]
variable[min_osci] assign[=] call[name[min], parameter[<ast.ListComp object at 0x7da18ede7e80>]]
variable[max_osci] assign[=] call[name[max], parameter[<ast.ListComp object at 0x7da18ede7130>]]
variable[min_half] assign[=] call[name[min], parameter[<ast.ListComp object at 0x7da18f58cbe0>]]
variable[max_half] assign[=] call[name[max], parameter[<ast.ListComp object at 0x7da18f58d420>]]
return[tuple[[<ast.Name object at 0x7da20c6a96c0>, <ast.Name object at 0x7da20c6a9180>, <ast.Name object at 0x7da20c6a83d0>, <ast.Name object at 0x7da20c6a9ae0>]]] | keyword[def] identifier[analyze_eigenvalues] ( identifier[path] , identifier[name] , identifier[Ne] ):
literal[string]
identifier[times] = identifier[characteristic_times] ( identifier[path] , identifier[name] )
identifier[min_osci] = identifier[min] ([ identifier[min] ( identifier[times] [ literal[int] + identifier[i] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[Ne] ** literal[int] - literal[int] )])
identifier[max_osci] = identifier[max] ([ identifier[max] ( identifier[times] [ literal[int] + identifier[i] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[Ne] ** literal[int] - literal[int] )])
identifier[min_half] = identifier[min] ([ identifier[min] ( identifier[times] [ literal[int] + identifier[Ne] ** literal[int] - literal[int] + identifier[i] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[Ne] ** literal[int] - literal[int] )])
identifier[max_half] = identifier[max] ([ identifier[max] ( identifier[times] [ literal[int] + identifier[Ne] ** literal[int] - literal[int] + identifier[i] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[Ne] ** literal[int] - literal[int] )])
keyword[return] identifier[min_osci] , identifier[max_osci] , identifier[min_half] , identifier[max_half] | def analyze_eigenvalues(path, name, Ne):
"""This function can be called after calling ``run_diagonalization`` if the option ``save_eigenvalues``
is set to ``True``. It will return the shortest and longest oscillation period, and the shortest and
longest half life. The results are lists ordered as:
``[detunings, shortest_oscillations, longest_oscillations, shortest_half_lifes, longest_half_lifes]``.
The shortest oscillation period can be interpreted as a suggested size for the time step needed for a
fine-grained picture of time evolution (an order of magnitude smaller is likely to be optimal).
The longest half life can be interpreted as a suggested time for the stationary state to be reached
(an order of magnitude larger is likely to be optimal).
"""
times = characteristic_times(path, name)
min_osci = min([min(times[1 + i]) for i in range(Ne ** 2 - 1)])
max_osci = max([max(times[1 + i]) for i in range(Ne ** 2 - 1)])
min_half = min([min(times[1 + Ne ** 2 - 1 + i]) for i in range(Ne ** 2 - 1)])
max_half = max([max(times[1 + Ne ** 2 - 1 + i]) for i in range(Ne ** 2 - 1)])
return (min_osci, max_osci, min_half, max_half) |
async def pipeline(self, transaction=None, shard_hint=None, watches=None):
    """
    Cluster impl:
    Pipelines do not work in cluster mode the same way they do in normal mode.
    Create a clone of this object so that simulating pipelines will work correctly.
    Each command will be called directly when used and when calling execute() will only return the result stack.
    cluster transaction can only be run with commands in the same node, otherwise error will be raised.
    """
    await self.connection_pool.initialize()
    if shard_hint:
        raise RedisClusterException("shard_hint is deprecated in cluster mode")
    # Imported lazily to avoid a circular import at module load time.
    from aredis.pipeline import StrictClusterPipeline
    pool = self.connection_pool
    return StrictClusterPipeline(
        connection_pool=pool,
        startup_nodes=pool.nodes.startup_nodes,
        result_callbacks=self.result_callbacks,
        response_callbacks=self.response_callbacks,
        transaction=transaction,
        watches=watches,
    )
literal[string]
keyword[await] identifier[self] . identifier[connection_pool] . identifier[initialize] ()
keyword[if] identifier[shard_hint] :
keyword[raise] identifier[RedisClusterException] ( literal[string] )
keyword[from] identifier[aredis] . identifier[pipeline] keyword[import] identifier[StrictClusterPipeline]
keyword[return] identifier[StrictClusterPipeline] (
identifier[connection_pool] = identifier[self] . identifier[connection_pool] ,
identifier[startup_nodes] = identifier[self] . identifier[connection_pool] . identifier[nodes] . identifier[startup_nodes] ,
identifier[result_callbacks] = identifier[self] . identifier[result_callbacks] ,
identifier[response_callbacks] = identifier[self] . identifier[response_callbacks] ,
identifier[transaction] = identifier[transaction] ,
identifier[watches] = identifier[watches]
) | async def pipeline(self, transaction=None, shard_hint=None, watches=None):
"""
Cluster impl:
Pipelines do not work in cluster mode the same way they do in normal mode.
Create a clone of this object so that simulating pipelines will work correctly.
Each command will be called directly when used and when calling execute() will only return the result stack.
cluster transaction can only be run with commands in the same node, otherwise error will be raised.
"""
await self.connection_pool.initialize()
if shard_hint:
raise RedisClusterException('shard_hint is deprecated in cluster mode') # depends on [control=['if'], data=[]]
from aredis.pipeline import StrictClusterPipeline
return StrictClusterPipeline(connection_pool=self.connection_pool, startup_nodes=self.connection_pool.nodes.startup_nodes, result_callbacks=self.result_callbacks, response_callbacks=self.response_callbacks, transaction=transaction, watches=watches) |
def _GetTimestamps(self, olecf_item):
"""Retrieves the timestamps from an OLECF item.
Args:
olecf_item (pyolecf.item): OLECF item.
Returns:
tuple[int, int]: creation and modification FILETIME timestamp.
"""
if not olecf_item:
return None, None
try:
creation_time = olecf_item.get_creation_time_as_integer()
except OverflowError as exception:
logger.warning(
'Unable to read the creation time with error: {0!s}'.format(
exception))
creation_time = 0
try:
modification_time = olecf_item.get_modification_time_as_integer()
except OverflowError as exception:
logger.warning(
'Unable to read the modification time with error: {0!s}'.format(
exception))
modification_time = 0
# If no useful events, return early.
if not creation_time and not modification_time:
return None, None
# Office template documents sometimes contain a creation time
# of -1 (0xffffffffffffffff).
if creation_time == 0xffffffffffffffff:
creation_time = 0
return creation_time, modification_time | def function[_GetTimestamps, parameter[self, olecf_item]]:
constant[Retrieves the timestamps from an OLECF item.
Args:
olecf_item (pyolecf.item): OLECF item.
Returns:
tuple[int, int]: creation and modification FILETIME timestamp.
]
if <ast.UnaryOp object at 0x7da207f01360> begin[:]
return[tuple[[<ast.Constant object at 0x7da207f001c0>, <ast.Constant object at 0x7da207f02ce0>]]]
<ast.Try object at 0x7da207f02e60>
<ast.Try object at 0x7da207f02ad0>
if <ast.BoolOp object at 0x7da207f02290> begin[:]
return[tuple[[<ast.Constant object at 0x7da207f010c0>, <ast.Constant object at 0x7da207f039d0>]]]
if compare[name[creation_time] equal[==] constant[18446744073709551615]] begin[:]
variable[creation_time] assign[=] constant[0]
return[tuple[[<ast.Name object at 0x7da207f01780>, <ast.Name object at 0x7da207f00ca0>]]] | keyword[def] identifier[_GetTimestamps] ( identifier[self] , identifier[olecf_item] ):
literal[string]
keyword[if] keyword[not] identifier[olecf_item] :
keyword[return] keyword[None] , keyword[None]
keyword[try] :
identifier[creation_time] = identifier[olecf_item] . identifier[get_creation_time_as_integer] ()
keyword[except] identifier[OverflowError] keyword[as] identifier[exception] :
identifier[logger] . identifier[warning] (
literal[string] . identifier[format] (
identifier[exception] ))
identifier[creation_time] = literal[int]
keyword[try] :
identifier[modification_time] = identifier[olecf_item] . identifier[get_modification_time_as_integer] ()
keyword[except] identifier[OverflowError] keyword[as] identifier[exception] :
identifier[logger] . identifier[warning] (
literal[string] . identifier[format] (
identifier[exception] ))
identifier[modification_time] = literal[int]
keyword[if] keyword[not] identifier[creation_time] keyword[and] keyword[not] identifier[modification_time] :
keyword[return] keyword[None] , keyword[None]
keyword[if] identifier[creation_time] == literal[int] :
identifier[creation_time] = literal[int]
keyword[return] identifier[creation_time] , identifier[modification_time] | def _GetTimestamps(self, olecf_item):
"""Retrieves the timestamps from an OLECF item.
Args:
olecf_item (pyolecf.item): OLECF item.
Returns:
tuple[int, int]: creation and modification FILETIME timestamp.
"""
if not olecf_item:
return (None, None) # depends on [control=['if'], data=[]]
try:
creation_time = olecf_item.get_creation_time_as_integer() # depends on [control=['try'], data=[]]
except OverflowError as exception:
logger.warning('Unable to read the creation time with error: {0!s}'.format(exception))
creation_time = 0 # depends on [control=['except'], data=['exception']]
try:
modification_time = olecf_item.get_modification_time_as_integer() # depends on [control=['try'], data=[]]
except OverflowError as exception:
logger.warning('Unable to read the modification time with error: {0!s}'.format(exception))
modification_time = 0 # depends on [control=['except'], data=['exception']]
# If no useful events, return early.
if not creation_time and (not modification_time):
return (None, None) # depends on [control=['if'], data=[]]
# Office template documents sometimes contain a creation time
# of -1 (0xffffffffffffffff).
if creation_time == 18446744073709551615:
creation_time = 0 # depends on [control=['if'], data=['creation_time']]
return (creation_time, modification_time) |
def get_help_width():
"""Returns the integer width of help lines that is used in TextWrap."""
if not sys.stdout.isatty() or termios is None or fcntl is None:
return _DEFAULT_HELP_WIDTH
try:
data = fcntl.ioctl(sys.stdout, termios.TIOCGWINSZ, '1234')
columns = struct.unpack('hh', data)[1]
# Emacs mode returns 0.
# Here we assume that any value below 40 is unreasonable.
if columns >= _MIN_HELP_WIDTH:
return columns
# Returning an int as default is fine, int(int) just return the int.
return int(os.getenv('COLUMNS', _DEFAULT_HELP_WIDTH))
except (TypeError, IOError, struct.error):
return _DEFAULT_HELP_WIDTH | def function[get_help_width, parameter[]]:
constant[Returns the integer width of help lines that is used in TextWrap.]
if <ast.BoolOp object at 0x7da1b18bd8a0> begin[:]
return[name[_DEFAULT_HELP_WIDTH]]
<ast.Try object at 0x7da1b18bf640> | keyword[def] identifier[get_help_width] ():
literal[string]
keyword[if] keyword[not] identifier[sys] . identifier[stdout] . identifier[isatty] () keyword[or] identifier[termios] keyword[is] keyword[None] keyword[or] identifier[fcntl] keyword[is] keyword[None] :
keyword[return] identifier[_DEFAULT_HELP_WIDTH]
keyword[try] :
identifier[data] = identifier[fcntl] . identifier[ioctl] ( identifier[sys] . identifier[stdout] , identifier[termios] . identifier[TIOCGWINSZ] , literal[string] )
identifier[columns] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[data] )[ literal[int] ]
keyword[if] identifier[columns] >= identifier[_MIN_HELP_WIDTH] :
keyword[return] identifier[columns]
keyword[return] identifier[int] ( identifier[os] . identifier[getenv] ( literal[string] , identifier[_DEFAULT_HELP_WIDTH] ))
keyword[except] ( identifier[TypeError] , identifier[IOError] , identifier[struct] . identifier[error] ):
keyword[return] identifier[_DEFAULT_HELP_WIDTH] | def get_help_width():
"""Returns the integer width of help lines that is used in TextWrap."""
if not sys.stdout.isatty() or termios is None or fcntl is None:
return _DEFAULT_HELP_WIDTH # depends on [control=['if'], data=[]]
try:
data = fcntl.ioctl(sys.stdout, termios.TIOCGWINSZ, '1234')
columns = struct.unpack('hh', data)[1]
# Emacs mode returns 0.
# Here we assume that any value below 40 is unreasonable.
if columns >= _MIN_HELP_WIDTH:
return columns # depends on [control=['if'], data=['columns']]
# Returning an int as default is fine, int(int) just return the int.
return int(os.getenv('COLUMNS', _DEFAULT_HELP_WIDTH)) # depends on [control=['try'], data=[]]
except (TypeError, IOError, struct.error):
return _DEFAULT_HELP_WIDTH # depends on [control=['except'], data=[]] |
def make_update(model, docs, optimizer, drop=0.0, objective="L2"):
"""Perform an update over a single batch of documents.
docs (iterable): A batch of `Doc` objects.
drop (float): The droput rate.
optimizer (callable): An optimizer.
RETURNS loss: A float for the loss.
"""
predictions, backprop = model.begin_update(docs, drop=drop)
loss, gradients = get_vectors_loss(model.ops, docs, predictions, objective)
backprop(gradients, sgd=optimizer)
# Don't want to return a cupy object here
# The gradients are modified in-place by the BERT MLM,
# so we get an accurate loss
return float(loss) | def function[make_update, parameter[model, docs, optimizer, drop, objective]]:
constant[Perform an update over a single batch of documents.
docs (iterable): A batch of `Doc` objects.
drop (float): The droput rate.
optimizer (callable): An optimizer.
RETURNS loss: A float for the loss.
]
<ast.Tuple object at 0x7da1b20321d0> assign[=] call[name[model].begin_update, parameter[name[docs]]]
<ast.Tuple object at 0x7da1b2031d50> assign[=] call[name[get_vectors_loss], parameter[name[model].ops, name[docs], name[predictions], name[objective]]]
call[name[backprop], parameter[name[gradients]]]
return[call[name[float], parameter[name[loss]]]] | keyword[def] identifier[make_update] ( identifier[model] , identifier[docs] , identifier[optimizer] , identifier[drop] = literal[int] , identifier[objective] = literal[string] ):
literal[string]
identifier[predictions] , identifier[backprop] = identifier[model] . identifier[begin_update] ( identifier[docs] , identifier[drop] = identifier[drop] )
identifier[loss] , identifier[gradients] = identifier[get_vectors_loss] ( identifier[model] . identifier[ops] , identifier[docs] , identifier[predictions] , identifier[objective] )
identifier[backprop] ( identifier[gradients] , identifier[sgd] = identifier[optimizer] )
keyword[return] identifier[float] ( identifier[loss] ) | def make_update(model, docs, optimizer, drop=0.0, objective='L2'):
"""Perform an update over a single batch of documents.
docs (iterable): A batch of `Doc` objects.
drop (float): The droput rate.
optimizer (callable): An optimizer.
RETURNS loss: A float for the loss.
"""
(predictions, backprop) = model.begin_update(docs, drop=drop)
(loss, gradients) = get_vectors_loss(model.ops, docs, predictions, objective)
backprop(gradients, sgd=optimizer)
# Don't want to return a cupy object here
# The gradients are modified in-place by the BERT MLM,
# so we get an accurate loss
return float(loss) |
def _make_postfixes_1( analysis ):
''' Provides some post-fixes. '''
assert FORM in analysis, '(!) The input analysis does not contain "'+FORM+'" key.'
if 'neg' in analysis[FORM]:
analysis[FORM] = re.sub( '^\s*neg ([^,]*)$', '\\1 Neg', analysis[FORM] )
analysis[FORM] = re.sub( ' Neg Neg$', ' Neg', analysis[FORM] )
analysis[FORM] = re.sub( ' Aff Neg$', ' Neg', analysis[FORM] )
analysis[FORM] = re.sub( 'neg', 'Neg', analysis[FORM] )
analysis[FORM] = analysis[FORM].rstrip().lstrip()
assert 'neg' not in analysis[FORM], \
'(!) The label "neg" should be removed by now.'
assert 'Neg' not in analysis[FORM] or ('Neg' in analysis[FORM] and analysis[FORM].endswith('Neg')), \
'(!) The label "Neg" should end the analysis line: '+str(analysis[FORM])
return analysis | def function[_make_postfixes_1, parameter[analysis]]:
constant[ Provides some post-fixes. ]
assert[compare[name[FORM] in name[analysis]]]
if compare[constant[neg] in call[name[analysis]][name[FORM]]] begin[:]
call[name[analysis]][name[FORM]] assign[=] call[name[re].sub, parameter[constant[^\s*neg ([^,]*)$], constant[\1 Neg], call[name[analysis]][name[FORM]]]]
call[name[analysis]][name[FORM]] assign[=] call[name[re].sub, parameter[constant[ Neg Neg$], constant[ Neg], call[name[analysis]][name[FORM]]]]
call[name[analysis]][name[FORM]] assign[=] call[name[re].sub, parameter[constant[ Aff Neg$], constant[ Neg], call[name[analysis]][name[FORM]]]]
call[name[analysis]][name[FORM]] assign[=] call[name[re].sub, parameter[constant[neg], constant[Neg], call[name[analysis]][name[FORM]]]]
call[name[analysis]][name[FORM]] assign[=] call[call[call[name[analysis]][name[FORM]].rstrip, parameter[]].lstrip, parameter[]]
assert[compare[constant[neg] <ast.NotIn object at 0x7da2590d7190> call[name[analysis]][name[FORM]]]]
assert[<ast.BoolOp object at 0x7da18f09e4a0>]
return[name[analysis]] | keyword[def] identifier[_make_postfixes_1] ( identifier[analysis] ):
literal[string]
keyword[assert] identifier[FORM] keyword[in] identifier[analysis] , literal[string] + identifier[FORM] + literal[string]
keyword[if] literal[string] keyword[in] identifier[analysis] [ identifier[FORM] ]:
identifier[analysis] [ identifier[FORM] ]= identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[analysis] [ identifier[FORM] ])
identifier[analysis] [ identifier[FORM] ]= identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[analysis] [ identifier[FORM] ])
identifier[analysis] [ identifier[FORM] ]= identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[analysis] [ identifier[FORM] ])
identifier[analysis] [ identifier[FORM] ]= identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[analysis] [ identifier[FORM] ])
identifier[analysis] [ identifier[FORM] ]= identifier[analysis] [ identifier[FORM] ]. identifier[rstrip] (). identifier[lstrip] ()
keyword[assert] literal[string] keyword[not] keyword[in] identifier[analysis] [ identifier[FORM] ], literal[string]
keyword[assert] literal[string] keyword[not] keyword[in] identifier[analysis] [ identifier[FORM] ] keyword[or] ( literal[string] keyword[in] identifier[analysis] [ identifier[FORM] ] keyword[and] identifier[analysis] [ identifier[FORM] ]. identifier[endswith] ( literal[string] )), literal[string] + identifier[str] ( identifier[analysis] [ identifier[FORM] ])
keyword[return] identifier[analysis] | def _make_postfixes_1(analysis):
""" Provides some post-fixes. """
assert FORM in analysis, '(!) The input analysis does not contain "' + FORM + '" key.'
if 'neg' in analysis[FORM]:
analysis[FORM] = re.sub('^\\s*neg ([^,]*)$', '\\1 Neg', analysis[FORM]) # depends on [control=['if'], data=[]]
analysis[FORM] = re.sub(' Neg Neg$', ' Neg', analysis[FORM])
analysis[FORM] = re.sub(' Aff Neg$', ' Neg', analysis[FORM])
analysis[FORM] = re.sub('neg', 'Neg', analysis[FORM])
analysis[FORM] = analysis[FORM].rstrip().lstrip()
assert 'neg' not in analysis[FORM], '(!) The label "neg" should be removed by now.'
assert 'Neg' not in analysis[FORM] or ('Neg' in analysis[FORM] and analysis[FORM].endswith('Neg')), '(!) The label "Neg" should end the analysis line: ' + str(analysis[FORM])
return analysis |
def get_log_filehandle(context):
"""Open the log and error filehandles.
Args:
context (scriptworker.context.Context): the scriptworker context.
Yields:
log filehandle
"""
log_file_name = get_log_filename(context)
makedirs(context.config['task_log_dir'])
with open(log_file_name, "w", encoding="utf-8") as filehandle:
yield filehandle | def function[get_log_filehandle, parameter[context]]:
constant[Open the log and error filehandles.
Args:
context (scriptworker.context.Context): the scriptworker context.
Yields:
log filehandle
]
variable[log_file_name] assign[=] call[name[get_log_filename], parameter[name[context]]]
call[name[makedirs], parameter[call[name[context].config][constant[task_log_dir]]]]
with call[name[open], parameter[name[log_file_name], constant[w]]] begin[:]
<ast.Yield object at 0x7da1b0ea7c10> | keyword[def] identifier[get_log_filehandle] ( identifier[context] ):
literal[string]
identifier[log_file_name] = identifier[get_log_filename] ( identifier[context] )
identifier[makedirs] ( identifier[context] . identifier[config] [ literal[string] ])
keyword[with] identifier[open] ( identifier[log_file_name] , literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[filehandle] :
keyword[yield] identifier[filehandle] | def get_log_filehandle(context):
"""Open the log and error filehandles.
Args:
context (scriptworker.context.Context): the scriptworker context.
Yields:
log filehandle
"""
log_file_name = get_log_filename(context)
makedirs(context.config['task_log_dir'])
with open(log_file_name, 'w', encoding='utf-8') as filehandle:
yield filehandle # depends on [control=['with'], data=['filehandle']] |
def parse_data_shape(data_shape_str):
"""Parse string to tuple or int"""
ds = data_shape_str.strip().split(',')
if len(ds) == 1:
data_shape = (int(ds[0]), int(ds[0]))
elif len(ds) == 2:
data_shape = (int(ds[0]), int(ds[1]))
else:
raise ValueError("Unexpected data_shape: %s", data_shape_str)
return data_shape | def function[parse_data_shape, parameter[data_shape_str]]:
constant[Parse string to tuple or int]
variable[ds] assign[=] call[call[name[data_shape_str].strip, parameter[]].split, parameter[constant[,]]]
if compare[call[name[len], parameter[name[ds]]] equal[==] constant[1]] begin[:]
variable[data_shape] assign[=] tuple[[<ast.Call object at 0x7da1b2017dc0>, <ast.Call object at 0x7da1b2014580>]]
return[name[data_shape]] | keyword[def] identifier[parse_data_shape] ( identifier[data_shape_str] ):
literal[string]
identifier[ds] = identifier[data_shape_str] . identifier[strip] (). identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[ds] )== literal[int] :
identifier[data_shape] =( identifier[int] ( identifier[ds] [ literal[int] ]), identifier[int] ( identifier[ds] [ literal[int] ]))
keyword[elif] identifier[len] ( identifier[ds] )== literal[int] :
identifier[data_shape] =( identifier[int] ( identifier[ds] [ literal[int] ]), identifier[int] ( identifier[ds] [ literal[int] ]))
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] , identifier[data_shape_str] )
keyword[return] identifier[data_shape] | def parse_data_shape(data_shape_str):
"""Parse string to tuple or int"""
ds = data_shape_str.strip().split(',')
if len(ds) == 1:
data_shape = (int(ds[0]), int(ds[0])) # depends on [control=['if'], data=[]]
elif len(ds) == 2:
data_shape = (int(ds[0]), int(ds[1])) # depends on [control=['if'], data=[]]
else:
raise ValueError('Unexpected data_shape: %s', data_shape_str)
return data_shape |
def GetCacheValueByObject(self, vfs_object):
"""Retrieves the cache value for the cached object.
Args:
vfs_object (object): VFS object that was cached.
Returns:
tuple[str, ObjectsCacheValue]: identifier and cache value object or
(None, None) if not cached.
Raises:
RuntimeError: if the cache value is missing.
"""
for identifier, cache_value in iter(self._values.items()):
if not cache_value:
raise RuntimeError('Missing cache value.')
if cache_value.vfs_object == vfs_object:
return identifier, cache_value
return None, None | def function[GetCacheValueByObject, parameter[self, vfs_object]]:
constant[Retrieves the cache value for the cached object.
Args:
vfs_object (object): VFS object that was cached.
Returns:
tuple[str, ObjectsCacheValue]: identifier and cache value object or
(None, None) if not cached.
Raises:
RuntimeError: if the cache value is missing.
]
for taget[tuple[[<ast.Name object at 0x7da1b07a2200>, <ast.Name object at 0x7da1b07a3250>]]] in starred[call[name[iter], parameter[call[name[self]._values.items, parameter[]]]]] begin[:]
if <ast.UnaryOp object at 0x7da1b07a3220> begin[:]
<ast.Raise object at 0x7da1b07a1540>
if compare[name[cache_value].vfs_object equal[==] name[vfs_object]] begin[:]
return[tuple[[<ast.Name object at 0x7da1b07a0100>, <ast.Name object at 0x7da1b07a0250>]]]
return[tuple[[<ast.Constant object at 0x7da1b07a2230>, <ast.Constant object at 0x7da1b07a06a0>]]] | keyword[def] identifier[GetCacheValueByObject] ( identifier[self] , identifier[vfs_object] ):
literal[string]
keyword[for] identifier[identifier] , identifier[cache_value] keyword[in] identifier[iter] ( identifier[self] . identifier[_values] . identifier[items] ()):
keyword[if] keyword[not] identifier[cache_value] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[if] identifier[cache_value] . identifier[vfs_object] == identifier[vfs_object] :
keyword[return] identifier[identifier] , identifier[cache_value]
keyword[return] keyword[None] , keyword[None] | def GetCacheValueByObject(self, vfs_object):
"""Retrieves the cache value for the cached object.
Args:
vfs_object (object): VFS object that was cached.
Returns:
tuple[str, ObjectsCacheValue]: identifier and cache value object or
(None, None) if not cached.
Raises:
RuntimeError: if the cache value is missing.
"""
for (identifier, cache_value) in iter(self._values.items()):
if not cache_value:
raise RuntimeError('Missing cache value.') # depends on [control=['if'], data=[]]
if cache_value.vfs_object == vfs_object:
return (identifier, cache_value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return (None, None) |
def attend(x, source, hparams, name):
"""Self-attention layer with source as memory antecedent."""
with tf.variable_scope(name):
x = tf.squeeze(x, axis=2)
if len(source.get_shape()) > 3:
source = tf.squeeze(source, axis=2)
source = common_attention.add_timing_signal_1d(source)
y = common_attention.multihead_attention(
common_layers.layer_preprocess(x, hparams), source, None,
hparams.attention_key_channels or hparams.hidden_size,
hparams.attention_value_channels or hparams.hidden_size,
hparams.hidden_size, hparams.num_heads,
hparams.attention_dropout)
res = common_layers.layer_postprocess(x, y, hparams)
return tf.expand_dims(res, axis=2) | def function[attend, parameter[x, source, hparams, name]]:
constant[Self-attention layer with source as memory antecedent.]
with call[name[tf].variable_scope, parameter[name[name]]] begin[:]
variable[x] assign[=] call[name[tf].squeeze, parameter[name[x]]]
if compare[call[name[len], parameter[call[name[source].get_shape, parameter[]]]] greater[>] constant[3]] begin[:]
variable[source] assign[=] call[name[tf].squeeze, parameter[name[source]]]
variable[source] assign[=] call[name[common_attention].add_timing_signal_1d, parameter[name[source]]]
variable[y] assign[=] call[name[common_attention].multihead_attention, parameter[call[name[common_layers].layer_preprocess, parameter[name[x], name[hparams]]], name[source], constant[None], <ast.BoolOp object at 0x7da1b201d630>, <ast.BoolOp object at 0x7da1b201e740>, name[hparams].hidden_size, name[hparams].num_heads, name[hparams].attention_dropout]]
variable[res] assign[=] call[name[common_layers].layer_postprocess, parameter[name[x], name[y], name[hparams]]]
return[call[name[tf].expand_dims, parameter[name[res]]]] | keyword[def] identifier[attend] ( identifier[x] , identifier[source] , identifier[hparams] , identifier[name] ):
literal[string]
keyword[with] identifier[tf] . identifier[variable_scope] ( identifier[name] ):
identifier[x] = identifier[tf] . identifier[squeeze] ( identifier[x] , identifier[axis] = literal[int] )
keyword[if] identifier[len] ( identifier[source] . identifier[get_shape] ())> literal[int] :
identifier[source] = identifier[tf] . identifier[squeeze] ( identifier[source] , identifier[axis] = literal[int] )
identifier[source] = identifier[common_attention] . identifier[add_timing_signal_1d] ( identifier[source] )
identifier[y] = identifier[common_attention] . identifier[multihead_attention] (
identifier[common_layers] . identifier[layer_preprocess] ( identifier[x] , identifier[hparams] ), identifier[source] , keyword[None] ,
identifier[hparams] . identifier[attention_key_channels] keyword[or] identifier[hparams] . identifier[hidden_size] ,
identifier[hparams] . identifier[attention_value_channels] keyword[or] identifier[hparams] . identifier[hidden_size] ,
identifier[hparams] . identifier[hidden_size] , identifier[hparams] . identifier[num_heads] ,
identifier[hparams] . identifier[attention_dropout] )
identifier[res] = identifier[common_layers] . identifier[layer_postprocess] ( identifier[x] , identifier[y] , identifier[hparams] )
keyword[return] identifier[tf] . identifier[expand_dims] ( identifier[res] , identifier[axis] = literal[int] ) | def attend(x, source, hparams, name):
"""Self-attention layer with source as memory antecedent."""
with tf.variable_scope(name):
x = tf.squeeze(x, axis=2)
if len(source.get_shape()) > 3:
source = tf.squeeze(source, axis=2) # depends on [control=['if'], data=[]]
source = common_attention.add_timing_signal_1d(source)
y = common_attention.multihead_attention(common_layers.layer_preprocess(x, hparams), source, None, hparams.attention_key_channels or hparams.hidden_size, hparams.attention_value_channels or hparams.hidden_size, hparams.hidden_size, hparams.num_heads, hparams.attention_dropout)
res = common_layers.layer_postprocess(x, y, hparams)
return tf.expand_dims(res, axis=2) # depends on [control=['with'], data=[]] |
def makelink(self, tarinfo, targetpath):
"""Make a (symbolic) link called targetpath. If it cannot be created
(platform limitation), we try to make a copy of the referenced file
instead of a link.
"""
try:
# For systems that support symbolic and hard links.
if tarinfo.issym():
os.symlink(tarinfo.linkname, targetpath)
else:
# See extract().
if os.path.exists(tarinfo._link_target):
os.link(tarinfo._link_target, targetpath)
else:
self._extract_member(self._find_link_target(tarinfo),
targetpath)
except symlink_exception:
if tarinfo.issym():
linkpath = os.path.join(os.path.dirname(tarinfo.name),
tarinfo.linkname)
else:
linkpath = tarinfo.linkname
else:
try:
self._extract_member(self._find_link_target(tarinfo),
targetpath)
except KeyError:
raise ExtractError("unable to resolve link inside archive") | def function[makelink, parameter[self, tarinfo, targetpath]]:
constant[Make a (symbolic) link called targetpath. If it cannot be created
(platform limitation), we try to make a copy of the referenced file
instead of a link.
]
<ast.Try object at 0x7da1b20647c0> | keyword[def] identifier[makelink] ( identifier[self] , identifier[tarinfo] , identifier[targetpath] ):
literal[string]
keyword[try] :
keyword[if] identifier[tarinfo] . identifier[issym] ():
identifier[os] . identifier[symlink] ( identifier[tarinfo] . identifier[linkname] , identifier[targetpath] )
keyword[else] :
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[tarinfo] . identifier[_link_target] ):
identifier[os] . identifier[link] ( identifier[tarinfo] . identifier[_link_target] , identifier[targetpath] )
keyword[else] :
identifier[self] . identifier[_extract_member] ( identifier[self] . identifier[_find_link_target] ( identifier[tarinfo] ),
identifier[targetpath] )
keyword[except] identifier[symlink_exception] :
keyword[if] identifier[tarinfo] . identifier[issym] ():
identifier[linkpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[tarinfo] . identifier[name] ),
identifier[tarinfo] . identifier[linkname] )
keyword[else] :
identifier[linkpath] = identifier[tarinfo] . identifier[linkname]
keyword[else] :
keyword[try] :
identifier[self] . identifier[_extract_member] ( identifier[self] . identifier[_find_link_target] ( identifier[tarinfo] ),
identifier[targetpath] )
keyword[except] identifier[KeyError] :
keyword[raise] identifier[ExtractError] ( literal[string] ) | def makelink(self, tarinfo, targetpath):
"""Make a (symbolic) link called targetpath. If it cannot be created
(platform limitation), we try to make a copy of the referenced file
instead of a link.
"""
try:
# For systems that support symbolic and hard links.
if tarinfo.issym():
os.symlink(tarinfo.linkname, targetpath) # depends on [control=['if'], data=[]]
# See extract().
elif os.path.exists(tarinfo._link_target):
os.link(tarinfo._link_target, targetpath) # depends on [control=['if'], data=[]]
else:
self._extract_member(self._find_link_target(tarinfo), targetpath) # depends on [control=['try'], data=[]]
except symlink_exception:
if tarinfo.issym():
linkpath = os.path.join(os.path.dirname(tarinfo.name), tarinfo.linkname) # depends on [control=['if'], data=[]]
else:
linkpath = tarinfo.linkname # depends on [control=['except'], data=[]]
else:
try:
self._extract_member(self._find_link_target(tarinfo), targetpath) # depends on [control=['try'], data=[]]
except KeyError:
raise ExtractError('unable to resolve link inside archive') # depends on [control=['except'], data=[]] |
def competence(stochastic):
"""
The competence function for Binary One-At-A-Time Metropolis
"""
if stochastic.dtype in bool_dtypes:
return 2
elif isinstance(stochastic, distributions.Bernoulli):
return 2
elif (isinstance(stochastic, distributions.Categorical) and
(len(stochastic.parents['p'])==2)):
return 2
else:
return 0 | def function[competence, parameter[stochastic]]:
constant[
The competence function for Binary One-At-A-Time Metropolis
]
if compare[name[stochastic].dtype in name[bool_dtypes]] begin[:]
return[constant[2]] | keyword[def] identifier[competence] ( identifier[stochastic] ):
literal[string]
keyword[if] identifier[stochastic] . identifier[dtype] keyword[in] identifier[bool_dtypes] :
keyword[return] literal[int]
keyword[elif] identifier[isinstance] ( identifier[stochastic] , identifier[distributions] . identifier[Bernoulli] ):
keyword[return] literal[int]
keyword[elif] ( identifier[isinstance] ( identifier[stochastic] , identifier[distributions] . identifier[Categorical] ) keyword[and]
( identifier[len] ( identifier[stochastic] . identifier[parents] [ literal[string] ])== literal[int] )):
keyword[return] literal[int]
keyword[else] :
keyword[return] literal[int] | def competence(stochastic):
"""
The competence function for Binary One-At-A-Time Metropolis
"""
if stochastic.dtype in bool_dtypes:
return 2 # depends on [control=['if'], data=[]]
elif isinstance(stochastic, distributions.Bernoulli):
return 2 # depends on [control=['if'], data=[]]
elif isinstance(stochastic, distributions.Categorical) and len(stochastic.parents['p']) == 2:
return 2 # depends on [control=['if'], data=[]]
else:
return 0 |
def productForm(self, request, tag):
"""
Render a L{liveform.LiveForm} -- the main purpose of this fragment --
which will allow the administrator to endow or deprive existing users
using Products.
"""
def makeRemover(i):
def remover(s3lected):
if s3lected:
return self.products[i]
return None
return remover
f = liveform.LiveForm(
self._endow,
[liveform.Parameter(
'products' + str(i),
liveform.FORM_INPUT,
liveform.LiveForm(
makeRemover(i),
[liveform.Parameter(
's3lected',
liveform.RADIO_INPUT,
bool,
repr(p),
)],
'',
),
)
for (i, p)
in enumerate(self.products)],
self.which.capitalize() + u' ' + self.username)
f.setFragmentParent(self)
return f | def function[productForm, parameter[self, request, tag]]:
constant[
Render a L{liveform.LiveForm} -- the main purpose of this fragment --
which will allow the administrator to endow or deprive existing users
using Products.
]
def function[makeRemover, parameter[i]]:
def function[remover, parameter[s3lected]]:
if name[s3lected] begin[:]
return[call[name[self].products][name[i]]]
return[constant[None]]
return[name[remover]]
variable[f] assign[=] call[name[liveform].LiveForm, parameter[name[self]._endow, <ast.ListComp object at 0x7da1b0b82440>, binary_operation[binary_operation[call[name[self].which.capitalize, parameter[]] + constant[ ]] + name[self].username]]]
call[name[f].setFragmentParent, parameter[name[self]]]
return[name[f]] | keyword[def] identifier[productForm] ( identifier[self] , identifier[request] , identifier[tag] ):
literal[string]
keyword[def] identifier[makeRemover] ( identifier[i] ):
keyword[def] identifier[remover] ( identifier[s3lected] ):
keyword[if] identifier[s3lected] :
keyword[return] identifier[self] . identifier[products] [ identifier[i] ]
keyword[return] keyword[None]
keyword[return] identifier[remover]
identifier[f] = identifier[liveform] . identifier[LiveForm] (
identifier[self] . identifier[_endow] ,
[ identifier[liveform] . identifier[Parameter] (
literal[string] + identifier[str] ( identifier[i] ),
identifier[liveform] . identifier[FORM_INPUT] ,
identifier[liveform] . identifier[LiveForm] (
identifier[makeRemover] ( identifier[i] ),
[ identifier[liveform] . identifier[Parameter] (
literal[string] ,
identifier[liveform] . identifier[RADIO_INPUT] ,
identifier[bool] ,
identifier[repr] ( identifier[p] ),
)],
literal[string] ,
),
)
keyword[for] ( identifier[i] , identifier[p] )
keyword[in] identifier[enumerate] ( identifier[self] . identifier[products] )],
identifier[self] . identifier[which] . identifier[capitalize] ()+ literal[string] + identifier[self] . identifier[username] )
identifier[f] . identifier[setFragmentParent] ( identifier[self] )
keyword[return] identifier[f] | def productForm(self, request, tag):
"""
Render a L{liveform.LiveForm} -- the main purpose of this fragment --
which will allow the administrator to endow or deprive existing users
using Products.
"""
def makeRemover(i):
def remover(s3lected):
if s3lected:
return self.products[i] # depends on [control=['if'], data=[]]
return None
return remover
f = liveform.LiveForm(self._endow, [liveform.Parameter('products' + str(i), liveform.FORM_INPUT, liveform.LiveForm(makeRemover(i), [liveform.Parameter('s3lected', liveform.RADIO_INPUT, bool, repr(p))], '')) for (i, p) in enumerate(self.products)], self.which.capitalize() + u' ' + self.username)
f.setFragmentParent(self)
return f |
def message(self, text):
"""Write text to display. Note that text can include newlines."""
line = 0
# Iterate through each character.
for char in text:
# Advance to next line if character is a new line.
if char == '\n':
line += 1
# Move to left or right side depending on text direction.
col = 0 if self.displaymode & LCD_ENTRYLEFT > 0 else self._cols-1
self.set_cursor(col, line)
# Write the character to the display.
else:
self.write8(ord(char), True) | def function[message, parameter[self, text]]:
constant[Write text to display. Note that text can include newlines.]
variable[line] assign[=] constant[0]
for taget[name[char]] in starred[name[text]] begin[:]
if compare[name[char] equal[==] constant[
]] begin[:]
<ast.AugAssign object at 0x7da1b113f1f0>
variable[col] assign[=] <ast.IfExp object at 0x7da1b113ffd0>
call[name[self].set_cursor, parameter[name[col], name[line]]] | keyword[def] identifier[message] ( identifier[self] , identifier[text] ):
literal[string]
identifier[line] = literal[int]
keyword[for] identifier[char] keyword[in] identifier[text] :
keyword[if] identifier[char] == literal[string] :
identifier[line] += literal[int]
identifier[col] = literal[int] keyword[if] identifier[self] . identifier[displaymode] & identifier[LCD_ENTRYLEFT] > literal[int] keyword[else] identifier[self] . identifier[_cols] - literal[int]
identifier[self] . identifier[set_cursor] ( identifier[col] , identifier[line] )
keyword[else] :
identifier[self] . identifier[write8] ( identifier[ord] ( identifier[char] ), keyword[True] ) | def message(self, text):
"""Write text to display. Note that text can include newlines."""
line = 0
# Iterate through each character.
for char in text:
# Advance to next line if character is a new line.
if char == '\n':
line += 1
# Move to left or right side depending on text direction.
col = 0 if self.displaymode & LCD_ENTRYLEFT > 0 else self._cols - 1
self.set_cursor(col, line) # depends on [control=['if'], data=[]]
else:
# Write the character to the display.
self.write8(ord(char), True) # depends on [control=['for'], data=['char']] |
def lsbranch(self, astr_path=""):
"""
Print/return the set of nodes in current branch
"""
self.sCore.reset()
str_cwd = self.cwd()
if len(astr_path): self.cdnode(astr_path)
self.sCore.write('%s' % self.sbranch_current.dict_branch.keys())
str_ls = self.sCore.strget()
if len(astr_path): self.cdnode(str_cwd)
return str_ls | def function[lsbranch, parameter[self, astr_path]]:
constant[
Print/return the set of nodes in current branch
]
call[name[self].sCore.reset, parameter[]]
variable[str_cwd] assign[=] call[name[self].cwd, parameter[]]
if call[name[len], parameter[name[astr_path]]] begin[:]
call[name[self].cdnode, parameter[name[astr_path]]]
call[name[self].sCore.write, parameter[binary_operation[constant[%s] <ast.Mod object at 0x7da2590d6920> call[name[self].sbranch_current.dict_branch.keys, parameter[]]]]]
variable[str_ls] assign[=] call[name[self].sCore.strget, parameter[]]
if call[name[len], parameter[name[astr_path]]] begin[:]
call[name[self].cdnode, parameter[name[str_cwd]]]
return[name[str_ls]] | keyword[def] identifier[lsbranch] ( identifier[self] , identifier[astr_path] = literal[string] ):
literal[string]
identifier[self] . identifier[sCore] . identifier[reset] ()
identifier[str_cwd] = identifier[self] . identifier[cwd] ()
keyword[if] identifier[len] ( identifier[astr_path] ): identifier[self] . identifier[cdnode] ( identifier[astr_path] )
identifier[self] . identifier[sCore] . identifier[write] ( literal[string] % identifier[self] . identifier[sbranch_current] . identifier[dict_branch] . identifier[keys] ())
identifier[str_ls] = identifier[self] . identifier[sCore] . identifier[strget] ()
keyword[if] identifier[len] ( identifier[astr_path] ): identifier[self] . identifier[cdnode] ( identifier[str_cwd] )
keyword[return] identifier[str_ls] | def lsbranch(self, astr_path=''):
"""
Print/return the set of nodes in current branch
"""
self.sCore.reset()
str_cwd = self.cwd()
if len(astr_path):
self.cdnode(astr_path) # depends on [control=['if'], data=[]]
self.sCore.write('%s' % self.sbranch_current.dict_branch.keys())
str_ls = self.sCore.strget()
if len(astr_path):
self.cdnode(str_cwd) # depends on [control=['if'], data=[]]
return str_ls |
def _set_icmpv6(self, v, load=False):
  """
  Setter method for icmpv6, mapped from YANG variable /interface/management/ipv6/icmpv6 (container)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_icmpv6 is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_icmpv6() directly.
  YANG Description: The ICMPv6 control for this management interface.
  """
  # NOTE: *load* is part of the generated setter signature; this body
  # does not consult it.
  # If the incoming value is already YANG-typed, let it coerce itself
  # through its own unified-type constructor first.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Re-wrap the value so it carries this node's full YANG schema
    # metadata (container type, yang/rest names, namespace, extensions).
    t = YANGDynClass(v,base=icmpv6.icmpv6, is_container='container', presence=False, yang_name="icmpv6", rest_name="icmpv6", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The ICMPv6 control for this management interface.'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
  except (TypeError, ValueError):
    # Report the expected YANG type in a structured error payload.
    raise ValueError({
        'error-string': """icmpv6 must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=icmpv6.icmpv6, is_container='container', presence=False, yang_name="icmpv6", rest_name="icmpv6", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The ICMPv6 control for this management interface.'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)""",
      })
  self.__icmpv6 = t
  # Notify the containing object, when it tracks modifications, that a
  # child value was just set.
  if hasattr(self, '_set'):
    self._set()
constant[
Setter method for icmpv6, mapped from YANG variable /interface/management/ipv6/icmpv6 (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_icmpv6 is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_icmpv6() directly.
YANG Description: The ICMPv6 control for this management interface.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da1b2596350>
name[self].__icmpv6 assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_icmpv6] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[icmpv6] . identifier[icmpv6] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__icmpv6] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_icmpv6(self, v, load=False):
"""
Setter method for icmpv6, mapped from YANG variable /interface/management/ipv6/icmpv6 (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_icmpv6 is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_icmpv6() directly.
YANG Description: The ICMPv6 control for this management interface.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=icmpv6.icmpv6, is_container='container', presence=False, yang_name='icmpv6', rest_name='icmpv6', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The ICMPv6 control for this management interface.'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'icmpv6 must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=icmpv6.icmpv6, is_container=\'container\', presence=False, yang_name="icmpv6", rest_name="icmpv6", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'The ICMPv6 control for this management interface.\'}}, namespace=\'urn:brocade.com:mgmt:brocade-interface\', defining_module=\'brocade-interface\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__icmpv6 = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def recombine(self, other, d=0.7):
    """
    Genetic recombination of two themes using cut and splice technique.

    The crossover ratio *d* (clamped to [0, 1]) decides how much of the
    leading part of ``self`` and the trailing part of ``other`` each
    sequence-like attribute contributes to the child theme.
    """
    a, b = self, other
    # Clamp the crossover point; the same cut ratio is applied to both
    # parents.
    d1 = max(0, min(d, 1))
    d2 = d1

    def _head(seq, ratio):
        # Leading slice of *seq*, proportional to *ratio*.
        return seq[:int(len(seq) * ratio)]

    def _tail(seq, ratio):
        # Trailing slice of *seq*, starting at the proportional cut.
        return seq[int(len(seq) * ratio):]

    child = ColorTheme(
        name=_head(a.name, d1) + _tail(b.name, d2),
        ranges=_head(a.ranges, d1) + _tail(b.ranges, d2),
        top=a.top,
        cache=os.path.join(DEFAULT_CACHE, "recombined"),
        blue=a.blue,
        length=a.length * d1 + b.length * d2
    )
    child.tags = _head(a.tags, d1) + _tail(b.tags, d2)
    return child
constant[
Genetic recombination of two themes using cut and splice technique.
]
<ast.Tuple object at 0x7da2041dba30> assign[=] tuple[[<ast.Name object at 0x7da2041d8a60>, <ast.Name object at 0x7da2041d8dc0>]]
variable[d1] assign[=] call[name[max], parameter[constant[0], call[name[min], parameter[name[d], constant[1]]]]]
variable[d2] assign[=] name[d1]
variable[c] assign[=] call[name[ColorTheme], parameter[]]
name[c].tags assign[=] call[name[a].tags][<ast.Slice object at 0x7da1b2346650>]
<ast.AugAssign object at 0x7da1b2344220>
return[name[c]] | keyword[def] identifier[recombine] ( identifier[self] , identifier[other] , identifier[d] = literal[int] ):
literal[string]
identifier[a] , identifier[b] = identifier[self] , identifier[other]
identifier[d1] = identifier[max] ( literal[int] , identifier[min] ( identifier[d] , literal[int] ))
identifier[d2] = identifier[d1]
identifier[c] = identifier[ColorTheme] (
identifier[name] = identifier[a] . identifier[name] [: identifier[int] ( identifier[len] ( identifier[a] . identifier[name] )* identifier[d1] )]+
identifier[b] . identifier[name] [ identifier[int] ( identifier[len] ( identifier[b] . identifier[name] )* identifier[d2] ):],
identifier[ranges] = identifier[a] . identifier[ranges] [: identifier[int] ( identifier[len] ( identifier[a] . identifier[ranges] )* identifier[d1] )]+
identifier[b] . identifier[ranges] [ identifier[int] ( identifier[len] ( identifier[b] . identifier[ranges] )* identifier[d2] ):],
identifier[top] = identifier[a] . identifier[top] ,
identifier[cache] = identifier[os] . identifier[path] . identifier[join] ( identifier[DEFAULT_CACHE] , literal[string] ),
identifier[blue] = identifier[a] . identifier[blue] ,
identifier[length] = identifier[a] . identifier[length] * identifier[d1] + identifier[b] . identifier[length] * identifier[d2]
)
identifier[c] . identifier[tags] = identifier[a] . identifier[tags] [: identifier[int] ( identifier[len] ( identifier[a] . identifier[tags] )* identifier[d1] )]
identifier[c] . identifier[tags] += identifier[b] . identifier[tags] [ identifier[int] ( identifier[len] ( identifier[b] . identifier[tags] )* identifier[d2] ):]
keyword[return] identifier[c] | def recombine(self, other, d=0.7):
"""
Genetic recombination of two themes using cut and splice technique.
"""
(a, b) = (self, other)
d1 = max(0, min(d, 1))
d2 = d1
c = ColorTheme(name=a.name[:int(len(a.name) * d1)] + b.name[int(len(b.name) * d2):], ranges=a.ranges[:int(len(a.ranges) * d1)] + b.ranges[int(len(b.ranges) * d2):], top=a.top, cache=os.path.join(DEFAULT_CACHE, 'recombined'), blue=a.blue, length=a.length * d1 + b.length * d2)
c.tags = a.tags[:int(len(a.tags) * d1)]
c.tags += b.tags[int(len(b.tags) * d2):]
return c |
def filter(self):
    """
    Apply the filter criteria to keep only the metrics of interest.

    Metrics whose ``'name'`` matches ``self.filter_expression`` are
    retained; when no expression is configured, every metric passes
    through.  The surviving metrics are re-packed via
    ``self.extract_dictionary`` and stored back on ``self.metrics``.
    """
    candidates = self.metrics['result']
    expression = self.filter_expression
    if expression is None:
        selected = candidates
    else:
        # Keep only the metrics whose name matches the expression.
        selected = [metric for metric in candidates
                    if expression.search(metric['name'])]
    self.metrics = self.extract_dictionary(selected)
constant[
Apply the criteria to filter out on the metrics required
]
if compare[name[self].filter_expression is_not constant[None]] begin[:]
variable[new_metrics] assign[=] list[[]]
variable[metrics] assign[=] call[name[self].metrics][constant[result]]
for taget[name[m]] in starred[name[metrics]] begin[:]
if call[name[self].filter_expression.search, parameter[call[name[m]][constant[name]]]] begin[:]
call[name[new_metrics].append, parameter[name[m]]]
name[self].metrics assign[=] call[name[self].extract_dictionary, parameter[name[new_metrics]]] | keyword[def] identifier[filter] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[filter_expression] keyword[is] keyword[not] keyword[None] :
identifier[new_metrics] =[]
identifier[metrics] = identifier[self] . identifier[metrics] [ literal[string] ]
keyword[for] identifier[m] keyword[in] identifier[metrics] :
keyword[if] identifier[self] . identifier[filter_expression] . identifier[search] ( identifier[m] [ literal[string] ]):
identifier[new_metrics] . identifier[append] ( identifier[m] )
keyword[else] :
identifier[new_metrics] = identifier[self] . identifier[metrics] [ literal[string] ]
identifier[self] . identifier[metrics] = identifier[self] . identifier[extract_dictionary] ( identifier[new_metrics] ) | def filter(self):
"""
Apply the criteria to filter out on the metrics required
"""
if self.filter_expression is not None:
new_metrics = []
metrics = self.metrics['result']
for m in metrics:
if self.filter_expression.search(m['name']):
new_metrics.append(m) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['m']] # depends on [control=['if'], data=[]]
else:
new_metrics = self.metrics['result']
self.metrics = self.extract_dictionary(new_metrics) |
def remove_elements_with_source(source, field):
    """Remove all elements matching ``source`` in ``field``.

    An element is dropped when its lower-cased ``'source'`` value (empty
    string when the key is absent) equals *source*; the survivors are
    returned as a frozen sequence.
    """
    kept = []
    for element in field:
        if element.get('source', '').lower() != source:
            kept.append(element)
    return freeze(kept)
constant[Remove all elements matching ``source`` in ``field``.]
return[call[name[freeze], parameter[<ast.ListComp object at 0x7da1b09ba500>]]] | keyword[def] identifier[remove_elements_with_source] ( identifier[source] , identifier[field] ):
literal[string]
keyword[return] identifier[freeze] (
[ identifier[element] keyword[for] identifier[element] keyword[in] identifier[field] keyword[if] identifier[element] . identifier[get] ( literal[string] , literal[string] ). identifier[lower] ()!= identifier[source] ]
) | def remove_elements_with_source(source, field):
"""Remove all elements matching ``source`` in ``field``."""
return freeze([element for element in field if element.get('source', '').lower() != source]) |
def cp_cropduster_image(self, the_image_path, del_after_upload=False, overwrite=False, invalidate=False):
    """
    Deal with saving cropduster images to S3. Cropduster is a Django library for resizing editorial images.
    S3utils was originally written to put cropduster images on S3 bucket.
    Extra Items in your Django Settings
    -----------------------------------
    MEDIA_ROOT : string
        Django media root.
        Currently it is ONLY used in cp_cropduster_image method.
    S3_ROOT_BASE : string
        S3 media root base. This will be the root folder in S3.
        Currently it is ONLY used in cp_cropduster_image method.
    """
    local_file = os.path.join(settings.MEDIA_ROOT, the_image_path)
    # Nothing to do unless the origin cropduster file is still on disk
    # (i.e. it has not already been uploaded to the CDN).
    if not os.path.exists(local_file):
        return
    crops_rel_path = os.path.splitext(the_image_path)[0]
    crops_abs_path = os.path.join(settings.MEDIA_ROOT, crops_rel_path)
    common = dict(
        del_after_upload=del_after_upload,
        overwrite=overwrite,
        invalidate=invalidate,
    )
    # Upload the original image, then every generated crop under its
    # companion directory.
    self.cp(local_path=local_file,
            target_path=os.path.join(settings.S3_ROOT_BASE, the_image_path),
            **common)
    self.cp(local_path=crops_abs_path + "/*",
            target_path=os.path.join(settings.S3_ROOT_BASE, crops_rel_path),
            **common)
constant[
Deal with saving cropduster images to S3. Cropduster is a Django library for resizing editorial images.
S3utils was originally written to put cropduster images on S3 bucket.
Extra Items in your Django Settings
-----------------------------------
MEDIA_ROOT : string
Django media root.
Currently it is ONLY used in cp_cropduster_image method.
NOT any other method as this library was originally made to put Django cropduster images on s3 bucket.
S3_ROOT_BASE : string
S3 media root base. This will be the root folder in S3.
Currently it is ONLY used in cp_cropduster_image method.
NOT any other method as this library was originally made to put Django cropduster images on s3 bucket.
]
variable[local_file] assign[=] call[name[os].path.join, parameter[name[settings].MEDIA_ROOT, name[the_image_path]]]
if call[name[os].path.exists, parameter[name[local_file]]] begin[:]
variable[the_image_crops_path] assign[=] call[call[name[os].path.splitext, parameter[name[the_image_path]]]][constant[0]]
variable[the_image_crops_path_full_path] assign[=] call[name[os].path.join, parameter[name[settings].MEDIA_ROOT, name[the_image_crops_path]]]
call[name[self].cp, parameter[]]
call[name[self].cp, parameter[]] | keyword[def] identifier[cp_cropduster_image] ( identifier[self] , identifier[the_image_path] , identifier[del_after_upload] = keyword[False] , identifier[overwrite] = keyword[False] , identifier[invalidate] = keyword[False] ):
literal[string]
identifier[local_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[settings] . identifier[MEDIA_ROOT] , identifier[the_image_path] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[local_file] ):
identifier[the_image_crops_path] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[the_image_path] )[ literal[int] ]
identifier[the_image_crops_path_full_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[settings] . identifier[MEDIA_ROOT] , identifier[the_image_crops_path] )
identifier[self] . identifier[cp] ( identifier[local_path] = identifier[local_file] ,
identifier[target_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[settings] . identifier[S3_ROOT_BASE] , identifier[the_image_path] ),
identifier[del_after_upload] = identifier[del_after_upload] ,
identifier[overwrite] = identifier[overwrite] ,
identifier[invalidate] = identifier[invalidate] ,
)
identifier[self] . identifier[cp] ( identifier[local_path] = identifier[the_image_crops_path_full_path] + literal[string] ,
identifier[target_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[settings] . identifier[S3_ROOT_BASE] , identifier[the_image_crops_path] ),
identifier[del_after_upload] = identifier[del_after_upload] ,
identifier[overwrite] = identifier[overwrite] ,
identifier[invalidate] = identifier[invalidate] ,
) | def cp_cropduster_image(self, the_image_path, del_after_upload=False, overwrite=False, invalidate=False):
"""
Deal with saving cropduster images to S3. Cropduster is a Django library for resizing editorial images.
S3utils was originally written to put cropduster images on S3 bucket.
Extra Items in your Django Settings
-----------------------------------
MEDIA_ROOT : string
Django media root.
Currently it is ONLY used in cp_cropduster_image method.
NOT any other method as this library was originally made to put Django cropduster images on s3 bucket.
S3_ROOT_BASE : string
S3 media root base. This will be the root folder in S3.
Currently it is ONLY used in cp_cropduster_image method.
NOT any other method as this library was originally made to put Django cropduster images on s3 bucket.
"""
local_file = os.path.join(settings.MEDIA_ROOT, the_image_path)
# only try to upload things if the origin cropduster file exists (so it is not already uploaded to the CDN)
if os.path.exists(local_file):
the_image_crops_path = os.path.splitext(the_image_path)[0]
the_image_crops_path_full_path = os.path.join(settings.MEDIA_ROOT, the_image_crops_path)
self.cp(local_path=local_file, target_path=os.path.join(settings.S3_ROOT_BASE, the_image_path), del_after_upload=del_after_upload, overwrite=overwrite, invalidate=invalidate)
self.cp(local_path=the_image_crops_path_full_path + '/*', target_path=os.path.join(settings.S3_ROOT_BASE, the_image_crops_path), del_after_upload=del_after_upload, overwrite=overwrite, invalidate=invalidate) # depends on [control=['if'], data=[]] |
def _download_chunk(self, chunk_offset, chunk_size):
    """Reads or downloads the received blob from the system.

    Fetches ``chunk_size`` bytes starting at ``chunk_offset`` by
    issuing a ranged Get Blob request against the configured container
    and blob.
    """
    first_byte = chunk_offset
    last_byte = chunk_offset + chunk_size - 1
    # HTTP byte ranges are inclusive at both ends.
    byte_range = 'bytes={0}-{1}'.format(first_byte, last_byte)
    return self._blob_service.get_blob(
        container_name=self._container_name,
        blob_name=self._blob_name,
        x_ms_range=byte_range)
constant[Reads or downloads the received blob from the system.]
variable[range_id] assign[=] call[constant[bytes={0}-{1}].format, parameter[name[chunk_offset], binary_operation[binary_operation[name[chunk_offset] + name[chunk_size]] - constant[1]]]]
return[call[name[self]._blob_service.get_blob, parameter[]]] | keyword[def] identifier[_download_chunk] ( identifier[self] , identifier[chunk_offset] , identifier[chunk_size] ):
literal[string]
identifier[range_id] = literal[string] . identifier[format] (
identifier[chunk_offset] , identifier[chunk_offset] + identifier[chunk_size] - literal[int] )
keyword[return] identifier[self] . identifier[_blob_service] . identifier[get_blob] (
identifier[container_name] = identifier[self] . identifier[_container_name] ,
identifier[blob_name] = identifier[self] . identifier[_blob_name] ,
identifier[x_ms_range] = identifier[range_id] ) | def _download_chunk(self, chunk_offset, chunk_size):
"""Reads or downloads the received blob from the system."""
range_id = 'bytes={0}-{1}'.format(chunk_offset, chunk_offset + chunk_size - 1)
return self._blob_service.get_blob(container_name=self._container_name, blob_name=self._blob_name, x_ms_range=range_id) |
def return_an_error(*args):
    '''Wrap a collection of errors in a JSON API ``errors`` document.

    ref: http://jsonapi.org/format/#errors

    Args:
        *args: Error objects to report, in order.

    Returns:
        dict: ``{'errors': [...]}`` containing every given error.
    '''
    # ``list(args)`` already yields a fresh list of the arguments; the
    # previous empty-list-plus-extend dance was redundant.
    return {'errors': list(args)}
constant[List of errors
Put all errors into a list of errors
ref: http://jsonapi.org/format/#errors
Args:
*args: A tuple contain errors
Returns:
A dictionary contains a list of errors
]
variable[list_errors] assign[=] list[[]]
call[name[list_errors].extend, parameter[call[name[list], parameter[name[args]]]]]
variable[errors] assign[=] dictionary[[<ast.Constant object at 0x7da1b0b805b0>], [<ast.Name object at 0x7da1b0b83b50>]]
return[name[errors]] | keyword[def] identifier[return_an_error] (* identifier[args] ):
literal[string]
identifier[list_errors] =[]
identifier[list_errors] . identifier[extend] ( identifier[list] ( identifier[args] ))
identifier[errors] ={ literal[string] : identifier[list_errors] }
keyword[return] identifier[errors] | def return_an_error(*args):
"""List of errors
Put all errors into a list of errors
ref: http://jsonapi.org/format/#errors
Args:
*args: A tuple contain errors
Returns:
A dictionary contains a list of errors
"""
list_errors = []
list_errors.extend(list(args))
errors = {'errors': list_errors}
return errors |
def context_export(zap_helper, name, file_path):
    """Export a given context to a file."""
    with zap_error_handler():
        status = zap_helper.zap.context.export_context(name, file_path)
        # Anything other than the literal 'OK' reply means the export
        # failed on the ZAP side.
        if status != 'OK':
            message = 'Exporting context to file failed: {}'.format(status)
            raise ZAPError(message)
    console.info('Exported context {0} to {1}'.format(name, file_path))
constant[Export a given context to a file.]
with call[name[zap_error_handler], parameter[]] begin[:]
variable[result] assign[=] call[name[zap_helper].zap.context.export_context, parameter[name[name], name[file_path]]]
if compare[name[result] not_equal[!=] constant[OK]] begin[:]
<ast.Raise object at 0x7da20e9b1570>
call[name[console].info, parameter[call[constant[Exported context {0} to {1}].format, parameter[name[name], name[file_path]]]]] | keyword[def] identifier[context_export] ( identifier[zap_helper] , identifier[name] , identifier[file_path] ):
literal[string]
keyword[with] identifier[zap_error_handler] ():
identifier[result] = identifier[zap_helper] . identifier[zap] . identifier[context] . identifier[export_context] ( identifier[name] , identifier[file_path] )
keyword[if] identifier[result] != literal[string] :
keyword[raise] identifier[ZAPError] ( literal[string] . identifier[format] ( identifier[result] ))
identifier[console] . identifier[info] ( literal[string] . identifier[format] ( identifier[name] , identifier[file_path] )) | def context_export(zap_helper, name, file_path):
"""Export a given context to a file."""
with zap_error_handler():
result = zap_helper.zap.context.export_context(name, file_path)
if result != 'OK':
raise ZAPError('Exporting context to file failed: {}'.format(result)) # depends on [control=['if'], data=['result']] # depends on [control=['with'], data=[]]
console.info('Exported context {0} to {1}'.format(name, file_path)) |
def point_line_distance(p, l_p, l_v):
    '''Calculate the distance between a point and a line defined
    by a point and a direction vector.
    '''
    direction = normalize(l_v)
    offset = p - l_p
    # Remove the component of *offset* along the line; what remains is
    # perpendicular to the line, and its length is the distance.
    projection = np.dot(offset, direction) * direction
    return np.linalg.norm(offset - projection)
constant[Calculate the distance between a point and a line defined
by a point and a direction vector.
]
variable[l_v] assign[=] call[name[normalize], parameter[name[l_v]]]
variable[u] assign[=] binary_operation[name[p] - name[l_p]]
return[call[name[np].linalg.norm, parameter[binary_operation[name[u] - binary_operation[call[name[np].dot, parameter[name[u], name[l_v]]] * name[l_v]]]]]] | keyword[def] identifier[point_line_distance] ( identifier[p] , identifier[l_p] , identifier[l_v] ):
literal[string]
identifier[l_v] = identifier[normalize] ( identifier[l_v] )
identifier[u] = identifier[p] - identifier[l_p]
keyword[return] identifier[np] . identifier[linalg] . identifier[norm] ( identifier[u] - identifier[np] . identifier[dot] ( identifier[u] , identifier[l_v] )* identifier[l_v] ) | def point_line_distance(p, l_p, l_v):
"""Calculate the distance between a point and a line defined
by a point and a direction vector.
"""
l_v = normalize(l_v)
u = p - l_p
return np.linalg.norm(u - np.dot(u, l_v) * l_v) |
def rrCellChangeOrder():
    """RR-CELL CHANGE ORDER Section 9.1.21e"""
    # Build the mandatory information elements in protocol order and
    # stack them into a single packet.
    header = TpPd(pd=0x6)
    msg_type = MessageType(mesType=0x8)  # 00001000
    cell = CellDescription()
    nc_mode = NcModeAndSpareHalfOctets()
    return header / msg_type / cell / nc_mode
constant[RR-CELL CHANGE ORDER Section 9.1.21e]
variable[a] assign[=] call[name[TpPd], parameter[]]
variable[b] assign[=] call[name[MessageType], parameter[]]
variable[c] assign[=] call[name[CellDescription], parameter[]]
variable[d] assign[=] call[name[NcModeAndSpareHalfOctets], parameter[]]
variable[packet] assign[=] binary_operation[binary_operation[binary_operation[name[a] / name[b]] / name[c]] / name[d]]
return[name[packet]] | keyword[def] identifier[rrCellChangeOrder] ():
literal[string]
identifier[a] = identifier[TpPd] ( identifier[pd] = literal[int] )
identifier[b] = identifier[MessageType] ( identifier[mesType] = literal[int] )
identifier[c] = identifier[CellDescription] ()
identifier[d] = identifier[NcModeAndSpareHalfOctets] ()
identifier[packet] = identifier[a] / identifier[b] / identifier[c] / identifier[d]
keyword[return] identifier[packet] | def rrCellChangeOrder():
"""RR-CELL CHANGE ORDER Section 9.1.21e"""
a = TpPd(pd=6)
b = MessageType(mesType=8) # 00001000
c = CellDescription()
d = NcModeAndSpareHalfOctets()
packet = a / b / c / d
return packet |
def all(self):
    """Get all collaborators.

    Returns:
        List[str]: Collaborators whose role/request grants access
        (owner, user, or a pending add request).
    """
    permitted = (RoleValue.Owner, RoleValue.User, ShareRequestValue.Add)
    emails = []
    for email, action in self._collaborators.items():
        if action in permitted:
            emails.append(email)
    return emails
constant[Get all collaborators.
Returns:
List[str]: Collaborators.
]
return[<ast.ListComp object at 0x7da1b23440a0>] | keyword[def] identifier[all] ( identifier[self] ):
literal[string]
keyword[return] [ identifier[email] keyword[for] identifier[email] , identifier[action] keyword[in] identifier[self] . identifier[_collaborators] . identifier[items] () keyword[if] identifier[action] keyword[in] [ identifier[RoleValue] . identifier[Owner] , identifier[RoleValue] . identifier[User] , identifier[ShareRequestValue] . identifier[Add] ]] | def all(self):
"""Get all collaborators.
Returns:
List[str]: Collaborators.
"""
return [email for (email, action) in self._collaborators.items() if action in [RoleValue.Owner, RoleValue.User, ShareRequestValue.Add]] |
def pkg_tracking(self):
    """Tracking package dependencies.

    Expects ``self.args`` of the form::

        -t|--tracking <repository> <package> [--check-deps] [--graph=<file>] [--case-ins]

    A valid invocation runs ``TrackingDeps``; anything else prints the
    usage message (and exits for an unknown ``--`` option).
    """
    flag = []
    options = [
        "-t",
        "--tracking"
    ]
    additional_options = [
        "--check-deps",
        "--graph=",
        "--case-ins"
    ]
    # Collect optional switches from the trailing arguments.
    # '--graph=<file>' carries a value, so it is matched by prefix and
    # stripped from args immediately.  elif prevents a bare '--graph='
    # from being appended twice (the old if/if pair double-counted it).
    for arg in self.args[2:]:
        if arg.startswith(additional_options[1]):
            flag.append(arg)
            self.args.remove(arg)
        elif arg in additional_options:
            flag.append(arg)
    # clean additional options from args
    for f in flag:
        if f in self.args:
            self.args.remove(f)
    # print usage message if wrong additional option
    for arg in self.args:
        if arg.startswith("--"):
            if arg not in additional_options:
                usage("")
                raise SystemExit()
    # Exactly three args must remain: option, repository, package.
    # ('len >= 3 and len <= 3' was just an equality test.)
    if (len(self.args) == 3 and self.args[0] in options and
            self.args[1] in self.meta.repositories):
        TrackingDeps(self.args[2], self.args[1], flag).run()
    elif (len(self.args) >= 2 and
          self.args[1] not in self.meta.repositories):
        usage(self.args[1])
    else:
        usage("")
constant[Tracking package dependencies
]
variable[flag] assign[=] list[[]]
variable[options] assign[=] list[[<ast.Constant object at 0x7da20c6c4a60>, <ast.Constant object at 0x7da20c6c6830>]]
variable[additional_options] assign[=] list[[<ast.Constant object at 0x7da20c6c4940>, <ast.Constant object at 0x7da20c6c5780>, <ast.Constant object at 0x7da20c6c5060>]]
for taget[name[arg]] in starred[call[name[self].args][<ast.Slice object at 0x7da20c6c6ad0>]] begin[:]
if call[name[arg].startswith, parameter[call[name[additional_options]][constant[1]]]] begin[:]
call[name[flag].append, parameter[name[arg]]]
call[name[self].args.remove, parameter[name[arg]]]
if compare[name[arg] in name[additional_options]] begin[:]
call[name[flag].append, parameter[name[arg]]]
for taget[name[f]] in starred[name[flag]] begin[:]
if compare[name[f] in name[self].args] begin[:]
call[name[self].args.remove, parameter[name[f]]]
for taget[name[arg]] in starred[name[self].args] begin[:]
if call[name[arg].startswith, parameter[constant[--]]] begin[:]
if compare[name[arg] <ast.NotIn object at 0x7da2590d7190> name[additional_options]] begin[:]
call[name[usage], parameter[constant[]]]
<ast.Raise object at 0x7da20c6c6e00>
if <ast.BoolOp object at 0x7da20c6c6b30> begin[:]
call[call[name[TrackingDeps], parameter[call[name[self].args][constant[2]], call[name[self].args][constant[1]], name[flag]]].run, parameter[]] | keyword[def] identifier[pkg_tracking] ( identifier[self] ):
literal[string]
identifier[flag] =[]
identifier[options] =[
literal[string] ,
literal[string]
]
identifier[additional_options] =[
literal[string] ,
literal[string] ,
literal[string]
]
keyword[for] identifier[arg] keyword[in] identifier[self] . identifier[args] [ literal[int] :]:
keyword[if] identifier[arg] . identifier[startswith] ( identifier[additional_options] [ literal[int] ]):
identifier[flag] . identifier[append] ( identifier[arg] )
identifier[self] . identifier[args] . identifier[remove] ( identifier[arg] )
keyword[if] identifier[arg] keyword[in] identifier[additional_options] :
identifier[flag] . identifier[append] ( identifier[arg] )
keyword[for] identifier[f] keyword[in] identifier[flag] :
keyword[if] identifier[f] keyword[in] identifier[self] . identifier[args] :
identifier[self] . identifier[args] . identifier[remove] ( identifier[f] )
keyword[for] identifier[arg] keyword[in] identifier[self] . identifier[args] :
keyword[if] identifier[arg] . identifier[startswith] ( literal[string] ):
keyword[if] identifier[arg] keyword[not] keyword[in] identifier[additional_options] :
identifier[usage] ( literal[string] )
keyword[raise] identifier[SystemExit] ()
keyword[if] ( identifier[len] ( identifier[self] . identifier[args] )>= literal[int] keyword[and] identifier[len] ( identifier[self] . identifier[args] )<= literal[int] keyword[and]
identifier[self] . identifier[args] [ literal[int] ] keyword[in] identifier[options] keyword[and]
identifier[self] . identifier[args] [ literal[int] ] keyword[in] identifier[self] . identifier[meta] . identifier[repositories] ):
identifier[TrackingDeps] ( identifier[self] . identifier[args] [ literal[int] ], identifier[self] . identifier[args] [ literal[int] ], identifier[flag] ). identifier[run] ()
keyword[elif] ( identifier[len] ( identifier[self] . identifier[args] )>= literal[int] keyword[and]
identifier[self] . identifier[args] [ literal[int] ] keyword[not] keyword[in] identifier[self] . identifier[meta] . identifier[repositories] ):
identifier[usage] ( identifier[self] . identifier[args] [ literal[int] ])
keyword[else] :
identifier[usage] ( literal[string] ) | def pkg_tracking(self):
"""Tracking package dependencies
"""
flag = []
options = ['-t', '--tracking']
additional_options = ['--check-deps', '--graph=', '--case-ins']
for arg in self.args[2:]:
if arg.startswith(additional_options[1]):
flag.append(arg)
self.args.remove(arg) # depends on [control=['if'], data=[]]
if arg in additional_options:
flag.append(arg) # depends on [control=['if'], data=['arg']] # depends on [control=['for'], data=['arg']]
# clean additional options from args
for f in flag:
if f in self.args:
self.args.remove(f) # depends on [control=['if'], data=['f']] # depends on [control=['for'], data=['f']]
# print usage message if wrong additional option
for arg in self.args:
if arg.startswith('--'):
if arg not in additional_options:
usage('')
raise SystemExit() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['arg']]
if len(self.args) >= 3 and len(self.args) <= 3 and (self.args[0] in options) and (self.args[1] in self.meta.repositories):
TrackingDeps(self.args[2], self.args[1], flag).run() # depends on [control=['if'], data=[]]
elif len(self.args) >= 2 and self.args[1] not in self.meta.repositories:
usage(self.args[1]) # depends on [control=['if'], data=[]]
else:
usage('') |
def _write_to_filepath(content, output):
    """Write ``content`` to the file at ``output``, creating parent dirs.

    :param content: Content string to write to
    :param output: Output file path
    """
    outdir = os.path.dirname(output)
    if outdir:
        # exist_ok=True avoids the TOCTOU race the previous
        # `if not os.path.exists(outdir)` check had: another process
        # could create the directory between the check and makedirs().
        os.makedirs(outdir, exist_ok=True)
    with anytemplate.compat.copen(output, 'w') as out:
        out.write(content)
constant[
:param content: Content string to write to
:param output: Output file path
]
variable[outdir] assign[=] call[name[os].path.dirname, parameter[name[output]]]
if <ast.BoolOp object at 0x7da204564af0> begin[:]
call[name[os].makedirs, parameter[name[outdir]]]
with call[name[anytemplate].compat.copen, parameter[name[output], constant[w]]] begin[:]
call[name[out].write, parameter[name[content]]] | keyword[def] identifier[_write_to_filepath] ( identifier[content] , identifier[output] ):
literal[string]
identifier[outdir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[output] )
keyword[if] identifier[outdir] keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[outdir] ):
identifier[os] . identifier[makedirs] ( identifier[outdir] )
keyword[with] identifier[anytemplate] . identifier[compat] . identifier[copen] ( identifier[output] , literal[string] ) keyword[as] identifier[out] :
identifier[out] . identifier[write] ( identifier[content] ) | def _write_to_filepath(content, output):
"""
:param content: Content string to write to
:param output: Output file path
"""
outdir = os.path.dirname(output)
if outdir and (not os.path.exists(outdir)):
os.makedirs(outdir) # depends on [control=['if'], data=[]]
with anytemplate.compat.copen(output, 'w') as out:
out.write(content) # depends on [control=['with'], data=['out']] |
def __Script_Editor_Output_plainTextEdit_set_default_view_state(self):
    """
    Sets the **Script_Editor_Output_plainTextEdit** Widget default View state.
    """
    # Jump to the end of the output buffer and make sure it is on screen.
    output_widget = self.Script_Editor_Output_plainTextEdit
    output_widget.moveCursor(QTextCursor.End)
    output_widget.ensureCursorVisible()
constant[
Sets the **Script_Editor_Output_plainTextEdit** Widget default View state.
]
call[name[self].Script_Editor_Output_plainTextEdit.moveCursor, parameter[name[QTextCursor].End]]
call[name[self].Script_Editor_Output_plainTextEdit.ensureCursorVisible, parameter[]] | keyword[def] identifier[__Script_Editor_Output_plainTextEdit_set_default_view_state] ( identifier[self] ):
literal[string]
identifier[self] . identifier[Script_Editor_Output_plainTextEdit] . identifier[moveCursor] ( identifier[QTextCursor] . identifier[End] )
identifier[self] . identifier[Script_Editor_Output_plainTextEdit] . identifier[ensureCursorVisible] () | def __Script_Editor_Output_plainTextEdit_set_default_view_state(self):
"""
Sets the **Script_Editor_Output_plainTextEdit** Widget default View state.
"""
self.Script_Editor_Output_plainTextEdit.moveCursor(QTextCursor.End)
self.Script_Editor_Output_plainTextEdit.ensureCursorVisible() |
def setup(self):
    """Provision network isolation for the instance.

    Creates an isolation security group and, when the resources do not
    already exist, a network ACL plus its entries and the security group
    rule/attachment.  Some operations cannot be performed as a dry run
    and are issued again when ``dry_run`` is set.
    """
    self.client = self._get_client()
    sg = self._create_isolation_security_group()
    # NOTE(review): `is not True` treats any non-True value (None,
    # truthy strings, ...) as "does not exist" — confirm intended.
    if self.exists is not True:
        acl = self._create_network_acl()
        self._add_network_acl_entries(acl)
        self._add_security_group_rule(sg)
        self._add_security_group_to_instance(sg)
    """Conditions that can not be dry_run"""
    # NOTE(review): when dry_run is truthy these two calls may repeat
    # the ones issued above — presumably because they cannot be
    # dry-run; verify this does not double-apply the rule.
    if self.dry_run is not False:
        self._add_security_group_rule(sg)
        self._add_security_group_to_instance(sg)
name[self].client assign[=] call[name[self]._get_client, parameter[]]
variable[sg] assign[=] call[name[self]._create_isolation_security_group, parameter[]]
if compare[name[self].exists is_not constant[True]] begin[:]
variable[acl] assign[=] call[name[self]._create_network_acl, parameter[]]
call[name[self]._add_network_acl_entries, parameter[name[acl]]]
call[name[self]._add_security_group_rule, parameter[name[sg]]]
call[name[self]._add_security_group_to_instance, parameter[name[sg]]]
constant[Conditions that can not be dry_run]
if compare[name[self].dry_run is_not constant[False]] begin[:]
call[name[self]._add_security_group_rule, parameter[name[sg]]]
call[name[self]._add_security_group_to_instance, parameter[name[sg]]] | keyword[def] identifier[setup] ( identifier[self] ):
identifier[self] . identifier[client] = identifier[self] . identifier[_get_client] ()
identifier[sg] = identifier[self] . identifier[_create_isolation_security_group] ()
keyword[if] identifier[self] . identifier[exists] keyword[is] keyword[not] keyword[True] :
identifier[acl] = identifier[self] . identifier[_create_network_acl] ()
identifier[self] . identifier[_add_network_acl_entries] ( identifier[acl] )
identifier[self] . identifier[_add_security_group_rule] ( identifier[sg] )
identifier[self] . identifier[_add_security_group_to_instance] ( identifier[sg] )
literal[string]
keyword[if] identifier[self] . identifier[dry_run] keyword[is] keyword[not] keyword[False] :
identifier[self] . identifier[_add_security_group_rule] ( identifier[sg] )
identifier[self] . identifier[_add_security_group_to_instance] ( identifier[sg] ) | def setup(self):
self.client = self._get_client()
sg = self._create_isolation_security_group()
if self.exists is not True:
acl = self._create_network_acl()
self._add_network_acl_entries(acl)
self._add_security_group_rule(sg) # depends on [control=['if'], data=[]]
self._add_security_group_to_instance(sg)
'Conditions that can not be dry_run'
if self.dry_run is not False:
self._add_security_group_rule(sg)
self._add_security_group_to_instance(sg) # depends on [control=['if'], data=[]] |
def disaggregate_precipitation(self, method='equal', zerodiv='uniform', shift=0, master_precip=None):
    """
    Disaggregate precipitation.

    Parameters
    ----------
    method : str, optional
        Disaggregation method.

        ``equal``
            Daily precipitation is distributed equally over the 24 hours of the day. (Default)

        ``cascade``
            Hourly precipitation values are obtained using a cascade model set up using
            hourly observations.

        ``masterstation``
            Hourly precipitation is derived from the records of a representative
            master station (see ``master_precip``).
    zerodiv : str, optional
        Method to deal with zero division, relevant for ``method='masterstation'``.

        ``uniform``
            Use uniform distribution. (Default)
    shift : int, optional
        Temporal shift (in hours) passed through to the disaggregation.
    master_precip : Series, optional
        Hourly precipitation records from a representative station
        (required for ``method='masterstation'``).

    Raises
    ------
    ValueError
        If ``method`` is not one of the supported values.
    """
    if method == 'equal':
        precip_disagg = melodist.disagg_prec(self.data_daily, method=method, shift=shift)
    elif method == 'cascade':
        precip_disagg = pd.Series(index=self.data_disagg.index)
        # Each seasonal subset of months has its own fitted cascade statistics.
        for months, stats in zip(self.statistics.precip.months, self.statistics.precip.stats):
            precip_daily = melodist.seasonal_subset(self.data_daily.precip, months=months)
            # A single record is not enough to drive the cascade model.
            if len(precip_daily) > 1:
                data = melodist.disagg_prec(precip_daily, method=method, cascade_options=stats,
                                            shift=shift, zerodiv=zerodiv)
                precip_disagg.loc[data.index] = data
    elif method == 'masterstation':
        precip_disagg = melodist.precip_master_station(self.data_daily.precip, master_precip, zerodiv)
    else:
        # Previously an unknown method fell through to an opaque
        # NameError on precip_disagg; fail loudly instead.
        raise ValueError('unsupported disaggregation method: %r' % method)

    self.data_disagg.precip = precip_disagg
constant[
Disaggregate precipitation.
Parameters
----------
method : str, optional
Disaggregation method.
``equal``
Daily precipitation is distributed equally over the 24 hours of the day. (Default)
``cascade``
Hourly precipitation values are obtained using a cascade model set up using
hourly observations.
zerodiv : str, optional
Method to deal with zero division, relevant for ``method='masterstation'``.
``uniform``
Use uniform distribution. (Default)
master_precip : Series, optional
Hourly precipitation records from a representative station
(required for ``method='masterstation'``).
]
if compare[name[method] equal[==] constant[equal]] begin[:]
variable[precip_disagg] assign[=] call[name[melodist].disagg_prec, parameter[name[self].data_daily]]
name[self].data_disagg.precip assign[=] name[precip_disagg] | keyword[def] identifier[disaggregate_precipitation] ( identifier[self] , identifier[method] = literal[string] , identifier[zerodiv] = literal[string] , identifier[shift] = literal[int] , identifier[master_precip] = keyword[None] ):
literal[string]
keyword[if] identifier[method] == literal[string] :
identifier[precip_disagg] = identifier[melodist] . identifier[disagg_prec] ( identifier[self] . identifier[data_daily] , identifier[method] = identifier[method] , identifier[shift] = identifier[shift] )
keyword[elif] identifier[method] == literal[string] :
identifier[precip_disagg] = identifier[pd] . identifier[Series] ( identifier[index] = identifier[self] . identifier[data_disagg] . identifier[index] )
keyword[for] identifier[months] , identifier[stats] keyword[in] identifier[zip] ( identifier[self] . identifier[statistics] . identifier[precip] . identifier[months] , identifier[self] . identifier[statistics] . identifier[precip] . identifier[stats] ):
identifier[precip_daily] = identifier[melodist] . identifier[seasonal_subset] ( identifier[self] . identifier[data_daily] . identifier[precip] , identifier[months] = identifier[months] )
keyword[if] identifier[len] ( identifier[precip_daily] )> literal[int] :
identifier[data] = identifier[melodist] . identifier[disagg_prec] ( identifier[precip_daily] , identifier[method] = identifier[method] , identifier[cascade_options] = identifier[stats] ,
identifier[shift] = identifier[shift] , identifier[zerodiv] = identifier[zerodiv] )
identifier[precip_disagg] . identifier[loc] [ identifier[data] . identifier[index] ]= identifier[data]
keyword[elif] identifier[method] == literal[string] :
identifier[precip_disagg] = identifier[melodist] . identifier[precip_master_station] ( identifier[self] . identifier[data_daily] . identifier[precip] , identifier[master_precip] , identifier[zerodiv] )
identifier[self] . identifier[data_disagg] . identifier[precip] = identifier[precip_disagg] | def disaggregate_precipitation(self, method='equal', zerodiv='uniform', shift=0, master_precip=None):
"""
Disaggregate precipitation.
Parameters
----------
method : str, optional
Disaggregation method.
``equal``
Daily precipitation is distributed equally over the 24 hours of the day. (Default)
``cascade``
Hourly precipitation values are obtained using a cascade model set up using
hourly observations.
zerodiv : str, optional
Method to deal with zero division, relevant for ``method='masterstation'``.
``uniform``
Use uniform distribution. (Default)
master_precip : Series, optional
Hourly precipitation records from a representative station
(required for ``method='masterstation'``).
"""
if method == 'equal':
precip_disagg = melodist.disagg_prec(self.data_daily, method=method, shift=shift) # depends on [control=['if'], data=['method']]
elif method == 'cascade':
precip_disagg = pd.Series(index=self.data_disagg.index)
for (months, stats) in zip(self.statistics.precip.months, self.statistics.precip.stats):
precip_daily = melodist.seasonal_subset(self.data_daily.precip, months=months)
if len(precip_daily) > 1:
data = melodist.disagg_prec(precip_daily, method=method, cascade_options=stats, shift=shift, zerodiv=zerodiv)
precip_disagg.loc[data.index] = data # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['method']]
elif method == 'masterstation':
precip_disagg = melodist.precip_master_station(self.data_daily.precip, master_precip, zerodiv) # depends on [control=['if'], data=[]]
self.data_disagg.precip = precip_disagg |
def fit(self, X):
    """Gradient Descent optimization process

    Tunes the embeddings (Y) so that their pairwise distance distribution
    matches the input high-dimensional data (X) pairwise distance distribution.
    In other words, minimizes the KL divergence cost.

    :param X: data matrix of shape (n_samples, n_features) — assumed 2-D
    :return: self, with the final embeddings stored in ``self.embeddings``
    """
    # compute the std. deviation with mean vector x in X
    self._compute_std_dev(X)
    # Kullback–Leibler divergence cost function
    kl_cost = KL_Divergence()
    # compute high-dimensional affinities (Gaussian Distribution)
    high_dim_dist = self._get_high_dim_dist(X)
    # sample initial solutions
    Y = np.random.randn(X.shape[0], self.n_components)
    # Keep *snapshots* of the two previous solutions for the momentum term.
    # BUG FIX: the original stored [Y, Y] — three aliases of one array —
    # so prev_Ys[1][i] - prev_Ys[0][i] was always zero and the momentum
    # term never contributed.  Copies make the history real.
    prev_Ys = [Y.copy(), Y.copy()]
    for iteration in range(1, self.n_iter + 1):
        # compute low-dimensional affinities (Student t-Distribution)
        low_dim_dist = self._get_low_dim_dist(Y)
        for i in range(Y.shape[0]):
            # compute gradient for point i
            grad = kl_cost.gradient(high_dim_dist, low_dim_dist, Y, i)
            # gradient step from the previous snapshot plus momentum
            Y[i] = prev_Ys[1][i] + self.learning_rate * grad + self.momentum * (prev_Ys[1][i] - prev_Ys[0][i])
        prev_Ys = [prev_Ys[1], Y.copy()]
        if iteration % 100 == 0 and self.verbose:
            low_dim_dist = self._get_low_dim_dist(Y)
            print(f"ITERATION: {iteration}{3*' '}|||{3*' '}KL divergence: {kl_cost(high_dim_dist, low_dim_dist)}")
    self.embeddings = Y
    return self
constant[Gradient Descent optimization process
Tunes the embeddings (Y) so that their pairwise distance distribution
matches the input high-dimensional data (X) pairwise distance distribution.
In other words, minimizes the KL divergence cost.
]
call[name[self]._compute_std_dev, parameter[name[X]]]
variable[kl_cost] assign[=] call[name[KL_Divergence], parameter[]]
variable[high_dim_dist] assign[=] call[name[self]._get_high_dim_dist, parameter[name[X]]]
variable[Y] assign[=] call[name[np].random.randn, parameter[call[name[X].shape][constant[0]], name[self].n_components]]
variable[prev_Ys] assign[=] list[[<ast.Name object at 0x7da1b1f290c0>, <ast.Name object at 0x7da1b1f29090>]]
for taget[name[iteration]] in starred[call[name[range], parameter[constant[1], binary_operation[name[self].n_iter + constant[1]]]]] begin[:]
variable[low_dim_dist] assign[=] call[name[self]._get_low_dim_dist, parameter[name[Y]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[Y].shape][constant[0]]]]] begin[:]
variable[grad] assign[=] call[name[kl_cost].gradient, parameter[name[high_dim_dist], name[low_dim_dist], name[Y], name[i]]]
call[name[Y]][name[i]] assign[=] binary_operation[binary_operation[call[call[name[prev_Ys]][constant[1]]][name[i]] + binary_operation[name[self].learning_rate * name[grad]]] + binary_operation[name[self].momentum * binary_operation[call[call[name[prev_Ys]][constant[1]]][name[i]] - call[call[name[prev_Ys]][constant[0]]][name[i]]]]]
variable[prev_Ys] assign[=] list[[<ast.Subscript object at 0x7da1b1f20850>, <ast.Name object at 0x7da1b1f21330>]]
if <ast.BoolOp object at 0x7da1b1f21630> begin[:]
variable[low_dim_dist] assign[=] call[name[self]._get_low_dim_dist, parameter[name[Y]]]
call[name[print], parameter[<ast.JoinedStr object at 0x7da1b1f215a0>]]
name[self].embeddings assign[=] name[Y]
return[name[self]] | keyword[def] identifier[fit] ( identifier[self] , identifier[X] ):
literal[string]
identifier[self] . identifier[_compute_std_dev] ( identifier[X] )
identifier[kl_cost] = identifier[KL_Divergence] ()
identifier[high_dim_dist] = identifier[self] . identifier[_get_high_dim_dist] ( identifier[X] )
identifier[Y] = identifier[np] . identifier[random] . identifier[randn] ( identifier[X] . identifier[shape] [ literal[int] ], identifier[self] . identifier[n_components] )
identifier[prev_Ys] =[ identifier[Y] , identifier[Y] ]
keyword[for] identifier[iteration] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[n_iter] + literal[int] ):
identifier[low_dim_dist] = identifier[self] . identifier[_get_low_dim_dist] ( identifier[Y] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[Y] . identifier[shape] [ literal[int] ]):
identifier[grad] = identifier[kl_cost] . identifier[gradient] ( identifier[high_dim_dist] , identifier[low_dim_dist] , identifier[Y] , identifier[i] )
identifier[Y] [ identifier[i] ]= identifier[prev_Ys] [ literal[int] ][ identifier[i] ]+ identifier[self] . identifier[learning_rate] * identifier[grad] + identifier[self] . identifier[momentum] *( identifier[prev_Ys] [ literal[int] ][ identifier[i] ]- identifier[prev_Ys] [ literal[int] ][ identifier[i] ])
identifier[prev_Ys] =[ identifier[prev_Ys] [ literal[int] ], identifier[Y] ]
keyword[if] identifier[iteration] % literal[int] == literal[int] keyword[and] identifier[self] . identifier[verbose] :
identifier[low_dim_dist] = identifier[self] . identifier[_get_low_dim_dist] ( identifier[Y] )
identifier[print] ( literal[string] )
identifier[self] . identifier[embeddings] = identifier[Y]
keyword[return] identifier[self] | def fit(self, X):
"""Gradient Descent optimization process
Tunes the embeddings (Y) so that their pairwise distance distribution
matches the input high-dimensional data (X) pairwise distance distribution.
In other words, minimizes the KL divergence cost.
"""
# compute the std. deviation with mean vector x in X
self._compute_std_dev(X)
# Kullback–Leibler divergence
kl_cost = KL_Divergence()
# compute high-dimensional affinities (Gaussian Distribution)
high_dim_dist = self._get_high_dim_dist(X)
# sample initial solutions
Y = np.random.randn(X.shape[0], self.n_components)
prev_Ys = [Y, Y]
for iteration in range(1, self.n_iter + 1):
# compute low-dimensional affinities (Student t-Distribution)
low_dim_dist = self._get_low_dim_dist(Y)
for i in range(Y.shape[0]):
# compute gradient
grad = kl_cost.gradient(high_dim_dist, low_dim_dist, Y, i)
# set new Y[i]
Y[i] = prev_Ys[1][i] + self.learning_rate * grad + self.momentum * (prev_Ys[1][i] - prev_Ys[0][i]) # depends on [control=['for'], data=['i']]
prev_Ys = [prev_Ys[1], Y]
if iteration % 100 == 0 and self.verbose:
low_dim_dist = self._get_low_dim_dist(Y)
print(f"ITERATION: {iteration}{3 * ' '}|||{3 * ' '}KL divergence: {kl_cost(high_dim_dist, low_dim_dist)}") # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['iteration']]
self.embeddings = Y
return self |
def recurrence(self, recurrence):
    """See `recurrence`."""
    # Only accept values the validator recognizes; anything else is
    # rejected with the same KeyError callers already handle.
    if is_valid_recurrence(recurrence):
        self._recurrence = recurrence
        return
    raise KeyError("'%s' is not a valid recurrence value" % recurrence)
constant[See `recurrence`.]
if <ast.UnaryOp object at 0x7da2045641f0> begin[:]
<ast.Raise object at 0x7da204565d20>
name[self]._recurrence assign[=] name[recurrence] | keyword[def] identifier[recurrence] ( identifier[self] , identifier[recurrence] ):
literal[string]
keyword[if] keyword[not] identifier[is_valid_recurrence] ( identifier[recurrence] ):
keyword[raise] identifier[KeyError] ( literal[string] % identifier[recurrence] )
identifier[self] . identifier[_recurrence] = identifier[recurrence] | def recurrence(self, recurrence):
"""See `recurrence`."""
if not is_valid_recurrence(recurrence):
raise KeyError("'%s' is not a valid recurrence value" % recurrence) # depends on [control=['if'], data=[]]
self._recurrence = recurrence |
def selfcheck(tools):
    """Audit the system for issues.

    :param tools: Tools description. Use elevation.TOOLS to test elevation.
    """
    problems = []
    for tool_name, check_cli in collections.OrderedDict(tools).items():
        try:
            # Probe the tool by running its check command; a non-zero
            # exit status means the tool is missing or broken.
            subprocess.check_output(check_cli, shell=True, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError:
            problems.append('%r not found or not usable.' % tool_name)
    if problems:
        return '\n'.join(problems)
    return 'Your system is ready.'
constant[Audit the system for issues.
:param tools: Tools description. Use elevation.TOOLS to test elevation.
]
variable[msg] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c6e5840>, <ast.Name object at 0x7da20c6e5570>]]] in starred[call[call[name[collections].OrderedDict, parameter[name[tools]]].items, parameter[]]] begin[:]
<ast.Try object at 0x7da20c6e61d0>
return[<ast.IfExp object at 0x7da20c7ca350>] | keyword[def] identifier[selfcheck] ( identifier[tools] ):
literal[string]
identifier[msg] =[]
keyword[for] identifier[tool_name] , identifier[check_cli] keyword[in] identifier[collections] . identifier[OrderedDict] ( identifier[tools] ). identifier[items] ():
keyword[try] :
identifier[subprocess] . identifier[check_output] ( identifier[check_cli] , identifier[shell] = keyword[True] , identifier[stderr] = identifier[subprocess] . identifier[STDOUT] )
keyword[except] identifier[subprocess] . identifier[CalledProcessError] :
identifier[msg] . identifier[append] ( literal[string] % identifier[tool_name] )
keyword[return] literal[string] . identifier[join] ( identifier[msg] ) keyword[if] identifier[msg] keyword[else] literal[string] | def selfcheck(tools):
"""Audit the system for issues.
:param tools: Tools description. Use elevation.TOOLS to test elevation.
"""
msg = []
for (tool_name, check_cli) in collections.OrderedDict(tools).items():
try:
subprocess.check_output(check_cli, shell=True, stderr=subprocess.STDOUT) # depends on [control=['try'], data=[]]
except subprocess.CalledProcessError:
msg.append('%r not found or not usable.' % tool_name) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]]
return '\n'.join(msg) if msg else 'Your system is ready.' |
def dispatch_op(self, op_name, args_dict):
    """Dispatches an operation requested.

    Resolves ``op_name`` to an ``op_<name>`` method on self and calls it
    with ``args_dict`` as keyword arguments.

    :param str op_name:
    :param dict args_dict:
    :raises DjangoDevException: if no handler exists for ``op_name``
    """
    self.logger.debug('Requested `%s` command with `%s` args.' % (op_name, args_dict))
    handler = getattr(self, 'op_%s' % op_name, None)
    if handler is not None:
        handler(**args_dict)
        self.logger.info('Done.')
    else:
        error_str = '`%s` command is not supported.' % op_name
        self.logger.error(error_str)
        raise DjangoDevException(error_str)
constant[Dispatches an operation requested.
:param str op_name:
:param dict args_dict:
]
call[name[self].logger.debug, parameter[binary_operation[constant[Requested `%s` command with `%s` args.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c991600>, <ast.Name object at 0x7da20c991420>]]]]]
variable[method] assign[=] call[name[getattr], parameter[name[self], binary_operation[constant[op_%s] <ast.Mod object at 0x7da2590d6920> name[op_name]], constant[None]]]
if compare[name[method] is constant[None]] begin[:]
variable[error_str] assign[=] binary_operation[constant[`%s` command is not supported.] <ast.Mod object at 0x7da2590d6920> name[op_name]]
call[name[self].logger.error, parameter[name[error_str]]]
<ast.Raise object at 0x7da18fe90d60>
call[name[method], parameter[]]
call[name[self].logger.info, parameter[constant[Done.]]] | keyword[def] identifier[dispatch_op] ( identifier[self] , identifier[op_name] , identifier[args_dict] ):
literal[string]
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] %( identifier[op_name] , identifier[args_dict] ))
identifier[method] = identifier[getattr] ( identifier[self] , literal[string] % identifier[op_name] , keyword[None] )
keyword[if] identifier[method] keyword[is] keyword[None] :
identifier[error_str] = literal[string] % identifier[op_name]
identifier[self] . identifier[logger] . identifier[error] ( identifier[error_str] )
keyword[raise] identifier[DjangoDevException] ( identifier[error_str] )
identifier[method] (** identifier[args_dict] )
identifier[self] . identifier[logger] . identifier[info] ( literal[string] ) | def dispatch_op(self, op_name, args_dict):
"""Dispatches an operation requested.
:param str op_name:
:param dict args_dict:
"""
self.logger.debug('Requested `%s` command with `%s` args.' % (op_name, args_dict))
method = getattr(self, 'op_%s' % op_name, None)
if method is None:
error_str = '`%s` command is not supported.' % op_name
self.logger.error(error_str)
raise DjangoDevException(error_str) # depends on [control=['if'], data=[]]
method(**args_dict)
self.logger.info('Done.') |
def f_iter_leaves(self, with_links=True):
    """Iterates (recursively) over all leaves hanging below the current group.

    :param with_links:
        If links should be ignored, leaves hanging below linked nodes are not listed.

    :returns:
        Iterator over all leaf nodes
    """
    # Lazily filter the full node iteration down to leaf nodes only.
    leaves = (child for child in self.f_iter_nodes(with_links=with_links)
              if child.v_is_leaf)
    yield from leaves
constant[Iterates (recursively) over all leaves hanging below the current group.
:param with_links:
If links should be ignored, leaves hanging below linked nodes are not listed.
:returns:
Iterator over all leaf nodes
]
for taget[name[node]] in starred[call[name[self].f_iter_nodes, parameter[]]] begin[:]
if name[node].v_is_leaf begin[:]
<ast.Yield object at 0x7da18f723c40> | keyword[def] identifier[f_iter_leaves] ( identifier[self] , identifier[with_links] = keyword[True] ):
literal[string]
keyword[for] identifier[node] keyword[in] identifier[self] . identifier[f_iter_nodes] ( identifier[with_links] = identifier[with_links] ):
keyword[if] identifier[node] . identifier[v_is_leaf] :
keyword[yield] identifier[node] | def f_iter_leaves(self, with_links=True):
"""Iterates (recursively) over all leaves hanging below the current group.
:param with_links:
If links should be ignored, leaves hanging below linked nodes are not listed.
:returns:
Iterator over all leaf nodes
"""
for node in self.f_iter_nodes(with_links=with_links):
if node.v_is_leaf:
yield node # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']] |
def parse_transaction_bytes(tx_bytes):
    """Parses base64data into a map with the following keys:
    tx - a StellarSignTx describing the transaction header
    operations - an array of protobuf message objects for each operation

    :param tx_bytes: raw XDR-encoded transaction bytes
    :raises ValueError: if a timebound does not fit in an unsigned 32-bit int
    """
    tx = messages.StellarSignTx()
    unpacker = xdrlib.Unpacker(tx_bytes)

    tx.source_account = _xdr_read_address(unpacker)
    tx.fee = unpacker.unpack_uint()
    tx.sequence_number = unpacker.unpack_uhyper()

    # Timebounds is an optional field
    if unpacker.unpack_bool():
        max_timebound = 2 ** 32 - 1  # max unsigned 32-bit int
        # (trezor does not support the full 64-bit time value)
        tx.timebounds_start = unpacker.unpack_uhyper()
        tx.timebounds_end = unpacker.unpack_uhyper()

        # BUG FIX: the original concatenated str + int here, which raised
        # TypeError instead of the intended ValueError; str() fixes that
        # (and the message now closes its parenthesis).
        if tx.timebounds_start > max_timebound or tx.timebounds_start < 0:
            raise ValueError(
                "Starting timebound out of range (must be between 0 and "
                + str(max_timebound) + ")"
            )
        if tx.timebounds_end > max_timebound or tx.timebounds_end < 0:
            raise ValueError(
                "Ending timebound out of range (must be between 0 and "
                + str(max_timebound) + ")"
            )

    # memo type determines what optional fields are set
    tx.memo_type = unpacker.unpack_uint()

    # text
    if tx.memo_type == MEMO_TYPE_TEXT:
        tx.memo_text = unpacker.unpack_string()
    # id (64-bit uint)
    if tx.memo_type == MEMO_TYPE_ID:
        tx.memo_id = unpacker.unpack_uhyper()
    # hash / return are the same structure (32 bytes representing a hash)
    if tx.memo_type == MEMO_TYPE_HASH or tx.memo_type == MEMO_TYPE_RETURN:
        tx.memo_hash = unpacker.unpack_fopaque(32)

    tx.num_operations = unpacker.unpack_uint()

    operations = []
    for _ in range(tx.num_operations):
        operations.append(_parse_operation_bytes(unpacker))

    return tx, operations
constant[Parses base64data into a map with the following keys:
tx - a StellarSignTx describing the transaction header
operations - an array of protobuf message objects for each operation
]
variable[tx] assign[=] call[name[messages].StellarSignTx, parameter[]]
variable[unpacker] assign[=] call[name[xdrlib].Unpacker, parameter[name[tx_bytes]]]
name[tx].source_account assign[=] call[name[_xdr_read_address], parameter[name[unpacker]]]
name[tx].fee assign[=] call[name[unpacker].unpack_uint, parameter[]]
name[tx].sequence_number assign[=] call[name[unpacker].unpack_uhyper, parameter[]]
if call[name[unpacker].unpack_bool, parameter[]] begin[:]
variable[max_timebound] assign[=] binary_operation[binary_operation[constant[2] ** constant[32]] - constant[1]]
name[tx].timebounds_start assign[=] call[name[unpacker].unpack_uhyper, parameter[]]
name[tx].timebounds_end assign[=] call[name[unpacker].unpack_uhyper, parameter[]]
if <ast.BoolOp object at 0x7da204565570> begin[:]
<ast.Raise object at 0x7da204564580>
if <ast.BoolOp object at 0x7da204566c50> begin[:]
<ast.Raise object at 0x7da204565540>
name[tx].memo_type assign[=] call[name[unpacker].unpack_uint, parameter[]]
if compare[name[tx].memo_type equal[==] name[MEMO_TYPE_TEXT]] begin[:]
name[tx].memo_text assign[=] call[name[unpacker].unpack_string, parameter[]]
if compare[name[tx].memo_type equal[==] name[MEMO_TYPE_ID]] begin[:]
name[tx].memo_id assign[=] call[name[unpacker].unpack_uhyper, parameter[]]
if <ast.BoolOp object at 0x7da18fe922f0> begin[:]
name[tx].memo_hash assign[=] call[name[unpacker].unpack_fopaque, parameter[constant[32]]]
name[tx].num_operations assign[=] call[name[unpacker].unpack_uint, parameter[]]
variable[operations] assign[=] list[[]]
for taget[name[_]] in starred[call[name[range], parameter[name[tx].num_operations]]] begin[:]
call[name[operations].append, parameter[call[name[_parse_operation_bytes], parameter[name[unpacker]]]]]
return[tuple[[<ast.Name object at 0x7da1b07e9b40>, <ast.Name object at 0x7da1b07e8790>]]] | keyword[def] identifier[parse_transaction_bytes] ( identifier[tx_bytes] ):
literal[string]
identifier[tx] = identifier[messages] . identifier[StellarSignTx] ()
identifier[unpacker] = identifier[xdrlib] . identifier[Unpacker] ( identifier[tx_bytes] )
identifier[tx] . identifier[source_account] = identifier[_xdr_read_address] ( identifier[unpacker] )
identifier[tx] . identifier[fee] = identifier[unpacker] . identifier[unpack_uint] ()
identifier[tx] . identifier[sequence_number] = identifier[unpacker] . identifier[unpack_uhyper] ()
keyword[if] identifier[unpacker] . identifier[unpack_bool] ():
identifier[max_timebound] = literal[int] ** literal[int] - literal[int]
identifier[tx] . identifier[timebounds_start] = identifier[unpacker] . identifier[unpack_uhyper] ()
identifier[tx] . identifier[timebounds_end] = identifier[unpacker] . identifier[unpack_uhyper] ()
keyword[if] identifier[tx] . identifier[timebounds_start] > identifier[max_timebound] keyword[or] identifier[tx] . identifier[timebounds_start] < literal[int] :
keyword[raise] identifier[ValueError] (
literal[string]
+ identifier[max_timebound]
)
keyword[if] identifier[tx] . identifier[timebounds_end] > identifier[max_timebound] keyword[or] identifier[tx] . identifier[timebounds_end] < literal[int] :
keyword[raise] identifier[ValueError] (
literal[string] + identifier[max_timebound]
)
identifier[tx] . identifier[memo_type] = identifier[unpacker] . identifier[unpack_uint] ()
keyword[if] identifier[tx] . identifier[memo_type] == identifier[MEMO_TYPE_TEXT] :
identifier[tx] . identifier[memo_text] = identifier[unpacker] . identifier[unpack_string] ()
keyword[if] identifier[tx] . identifier[memo_type] == identifier[MEMO_TYPE_ID] :
identifier[tx] . identifier[memo_id] = identifier[unpacker] . identifier[unpack_uhyper] ()
keyword[if] identifier[tx] . identifier[memo_type] == identifier[MEMO_TYPE_HASH] keyword[or] identifier[tx] . identifier[memo_type] == identifier[MEMO_TYPE_RETURN] :
identifier[tx] . identifier[memo_hash] = identifier[unpacker] . identifier[unpack_fopaque] ( literal[int] )
identifier[tx] . identifier[num_operations] = identifier[unpacker] . identifier[unpack_uint] ()
identifier[operations] =[]
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[tx] . identifier[num_operations] ):
identifier[operations] . identifier[append] ( identifier[_parse_operation_bytes] ( identifier[unpacker] ))
keyword[return] identifier[tx] , identifier[operations] | def parse_transaction_bytes(tx_bytes):
"""Parses base64data into a map with the following keys:
tx - a StellarSignTx describing the transaction header
operations - an array of protobuf message objects for each operation
"""
tx = messages.StellarSignTx()
unpacker = xdrlib.Unpacker(tx_bytes)
tx.source_account = _xdr_read_address(unpacker)
tx.fee = unpacker.unpack_uint()
tx.sequence_number = unpacker.unpack_uhyper()
# Timebounds is an optional field
if unpacker.unpack_bool():
max_timebound = 2 ** 32 - 1 # max unsigned 32-bit int
# (trezor does not support the full 64-bit time value)
tx.timebounds_start = unpacker.unpack_uhyper()
tx.timebounds_end = unpacker.unpack_uhyper()
if tx.timebounds_start > max_timebound or tx.timebounds_start < 0:
raise ValueError('Starting timebound out of range (must be between 0 and ' + max_timebound) # depends on [control=['if'], data=[]]
if tx.timebounds_end > max_timebound or tx.timebounds_end < 0:
raise ValueError('Ending timebound out of range (must be between 0 and ' + max_timebound) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# memo type determines what optional fields are set
tx.memo_type = unpacker.unpack_uint()
# text
if tx.memo_type == MEMO_TYPE_TEXT:
tx.memo_text = unpacker.unpack_string() # depends on [control=['if'], data=[]]
# id (64-bit uint)
if tx.memo_type == MEMO_TYPE_ID:
tx.memo_id = unpacker.unpack_uhyper() # depends on [control=['if'], data=[]]
# hash / return are the same structure (32 bytes representing a hash)
if tx.memo_type == MEMO_TYPE_HASH or tx.memo_type == MEMO_TYPE_RETURN:
tx.memo_hash = unpacker.unpack_fopaque(32) # depends on [control=['if'], data=[]]
tx.num_operations = unpacker.unpack_uint()
operations = []
for _ in range(tx.num_operations):
operations.append(_parse_operation_bytes(unpacker)) # depends on [control=['for'], data=[]]
return (tx, operations) |
def jsonify_r(obj):  # pragma: no cover, not for unit tests...
    # pylint: disable=too-many-branches
    """Convert an object into a JSON-serialisable dict, recursing on attributes.

    Objects whose class declares no ``properties`` mapping are probed with
    ``json.dumps``: they are returned unchanged when serialisable, ``None``
    otherwise. For declared-properties objects, each ``properties`` /
    ``running_properties`` attribute is serialised; values that are not
    directly serialisable are reduced to a name string (see below).

    :param obj: obj to jsonify
    :type obj: object
    :return: json representation of obj
    :rtype: dict
    """
    res = {}
    cls = obj.__class__
    # No declared properties: fall back to a plain serialisability probe.
    if not hasattr(cls, 'properties'):
        try:
            json.dumps(obj)
            return obj
        except TypeError:
            return None
    # Collect the declared attribute names (configuration + runtime state).
    properties = list(cls.properties.keys())
    if hasattr(cls, 'running_properties'):
        properties += list(cls.running_properties.keys())
    for prop in properties:
        if not hasattr(obj, prop):
            continue
        val = getattr(obj, prop)
        # Maybe the property is not jsonable
        try:
            # Sets are not JSON types; lists are sorted for stable output.
            if isinstance(val, set):
                val = list(val)
            if isinstance(val, list):
                val = sorted(val)
            json.dumps(val)
            res[prop] = val
        except TypeError:
            # Value is not directly serialisable: reduce each object to a
            # printable name based on its class-level ``my_type`` marker.
            if isinstance(val, list):
                lst = []
                for subval in val:
                    o_type = getattr(subval.__class__, 'my_type', '')
                    if o_type == 'CommandCall':
                        # Command calls are represented by their command line.
                        try:
                            lst.append(subval.call)
                        except AttributeError:  # pragma: no cover, should not happen...
                            pass
                        continue
                    # Typed objects expose a '<my_type>_name' attribute.
                    if o_type and hasattr(subval, o_type + '_name'):
                        lst.append(getattr(subval, o_type + '_name'))
                    else:
                        # Unnamed / untyped value: silently dropped.
                        pass
                res[prop] = lst
            else:
                o_type = getattr(val.__class__, 'my_type', '')
                if o_type == 'CommandCall':
                    # Command calls are represented by their command line.
                    try:
                        res[prop] = val.call
                    except AttributeError:  # pragma: no cover, should not happen...
                        pass
                    continue
                # Typed objects expose a '<my_type>_name' attribute.
                if o_type and hasattr(val, o_type + '_name'):
                    res[prop] = getattr(val, o_type + '_name')
return res | def function[jsonify_r, parameter[obj]]:
constant[Convert an object into json (recursively on attribute)
:param obj: obj to jsonify
:type obj: object
:return: json representation of obj
:rtype: dict
]
variable[res] assign[=] dictionary[[], []]
variable[cls] assign[=] name[obj].__class__
if <ast.UnaryOp object at 0x7da18f58fa90> begin[:]
<ast.Try object at 0x7da18f58f910>
variable[properties] assign[=] call[name[list], parameter[call[name[cls].properties.keys, parameter[]]]]
if call[name[hasattr], parameter[name[cls], constant[running_properties]]] begin[:]
<ast.AugAssign object at 0x7da18f58f1f0>
for taget[name[prop]] in starred[name[properties]] begin[:]
if <ast.UnaryOp object at 0x7da18f58fe50> begin[:]
continue
variable[val] assign[=] call[name[getattr], parameter[name[obj], name[prop]]]
<ast.Try object at 0x7da18f58d7e0>
return[name[res]] | keyword[def] identifier[jsonify_r] ( identifier[obj] ):
literal[string]
identifier[res] ={}
identifier[cls] = identifier[obj] . identifier[__class__]
keyword[if] keyword[not] identifier[hasattr] ( identifier[cls] , literal[string] ):
keyword[try] :
identifier[json] . identifier[dumps] ( identifier[obj] )
keyword[return] identifier[obj]
keyword[except] identifier[TypeError] :
keyword[return] keyword[None]
identifier[properties] = identifier[list] ( identifier[cls] . identifier[properties] . identifier[keys] ())
keyword[if] identifier[hasattr] ( identifier[cls] , literal[string] ):
identifier[properties] += identifier[list] ( identifier[cls] . identifier[running_properties] . identifier[keys] ())
keyword[for] identifier[prop] keyword[in] identifier[properties] :
keyword[if] keyword[not] identifier[hasattr] ( identifier[obj] , identifier[prop] ):
keyword[continue]
identifier[val] = identifier[getattr] ( identifier[obj] , identifier[prop] )
keyword[try] :
keyword[if] identifier[isinstance] ( identifier[val] , identifier[set] ):
identifier[val] = identifier[list] ( identifier[val] )
keyword[if] identifier[isinstance] ( identifier[val] , identifier[list] ):
identifier[val] = identifier[sorted] ( identifier[val] )
identifier[json] . identifier[dumps] ( identifier[val] )
identifier[res] [ identifier[prop] ]= identifier[val]
keyword[except] identifier[TypeError] :
keyword[if] identifier[isinstance] ( identifier[val] , identifier[list] ):
identifier[lst] =[]
keyword[for] identifier[subval] keyword[in] identifier[val] :
identifier[o_type] = identifier[getattr] ( identifier[subval] . identifier[__class__] , literal[string] , literal[string] )
keyword[if] identifier[o_type] == literal[string] :
keyword[try] :
identifier[lst] . identifier[append] ( identifier[subval] . identifier[call] )
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[continue]
keyword[if] identifier[o_type] keyword[and] identifier[hasattr] ( identifier[subval] , identifier[o_type] + literal[string] ):
identifier[lst] . identifier[append] ( identifier[getattr] ( identifier[subval] , identifier[o_type] + literal[string] ))
keyword[else] :
keyword[pass]
identifier[res] [ identifier[prop] ]= identifier[lst]
keyword[else] :
identifier[o_type] = identifier[getattr] ( identifier[val] . identifier[__class__] , literal[string] , literal[string] )
keyword[if] identifier[o_type] == literal[string] :
keyword[try] :
identifier[res] [ identifier[prop] ]= identifier[val] . identifier[call]
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[continue]
keyword[if] identifier[o_type] keyword[and] identifier[hasattr] ( identifier[val] , identifier[o_type] + literal[string] ):
identifier[res] [ identifier[prop] ]= identifier[getattr] ( identifier[val] , identifier[o_type] + literal[string] )
keyword[return] identifier[res] | def jsonify_r(obj): # pragma: no cover, not for unit tests...
# pylint: disable=too-many-branches
'Convert an object into json (recursively on attribute)\n\n :param obj: obj to jsonify\n :type obj: object\n :return: json representation of obj\n :rtype: dict\n '
res = {}
cls = obj.__class__
if not hasattr(cls, 'properties'):
try:
json.dumps(obj)
return obj # depends on [control=['try'], data=[]]
except TypeError:
return None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
properties = list(cls.properties.keys())
if hasattr(cls, 'running_properties'):
properties += list(cls.running_properties.keys()) # depends on [control=['if'], data=[]]
for prop in properties:
if not hasattr(obj, prop):
continue # depends on [control=['if'], data=[]]
val = getattr(obj, prop)
# Maybe the property is not jsonable
try:
if isinstance(val, set):
val = list(val) # depends on [control=['if'], data=[]]
if isinstance(val, list):
val = sorted(val) # depends on [control=['if'], data=[]]
json.dumps(val)
res[prop] = val # depends on [control=['try'], data=[]]
except TypeError:
if isinstance(val, list):
lst = []
for subval in val:
o_type = getattr(subval.__class__, 'my_type', '')
if o_type == 'CommandCall':
try:
lst.append(subval.call) # depends on [control=['try'], data=[]]
except AttributeError: # pragma: no cover, should not happen...
pass # depends on [control=['except'], data=[]]
continue # depends on [control=['if'], data=[]]
if o_type and hasattr(subval, o_type + '_name'):
lst.append(getattr(subval, o_type + '_name')) # depends on [control=['if'], data=[]]
else:
pass # depends on [control=['for'], data=['subval']]
res[prop] = lst # depends on [control=['if'], data=[]]
else:
o_type = getattr(val.__class__, 'my_type', '')
if o_type == 'CommandCall':
try:
res[prop] = val.call # depends on [control=['try'], data=[]]
except AttributeError: # pragma: no cover, should not happen...
pass # depends on [control=['except'], data=[]]
continue # depends on [control=['if'], data=[]]
if o_type and hasattr(val, o_type + '_name'):
res[prop] = getattr(val, o_type + '_name') # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['prop']]
return res |
def plotting_class(cls, obj):
    """
    Given an object or Element class, return the suitable plotting
    class needed to render it with the current renderer.

    ``obj`` may be an instance, an Element type, or a HoloMap (in which
    case the plotting class of its contained type is looked up).

    Raises SkipRendering when no plotting class is registered for the
    resolved element type on the current backend.
    """
    # AdjointLayout (class or instance) is rendered via the Layout machinery.
    if isinstance(obj, AdjointLayout) or obj is AdjointLayout:
        obj = Layout
    # Resolve the element type: a type is used as-is; a HoloMap contributes
    # its contained element type; any other instance contributes its class.
    if isinstance(obj, type):
        element_type = obj
    else:
        element_type = obj.type if isinstance(obj, HoloMap) else type(obj)
    # Look up the registered plotting class for the active backend.
    try:
        plotclass = Store.registry[cls.backend][element_type]
    except KeyError:
        raise SkipRendering("No plotting class for {0} "
                            "found".format(element_type.__name__))
return plotclass | def function[plotting_class, parameter[cls, obj]]:
constant[
Given an object or Element class, return the suitable plotting
class needed to render it with the current renderer.
]
if <ast.BoolOp object at 0x7da20e9602e0> begin[:]
variable[obj] assign[=] name[Layout]
if call[name[isinstance], parameter[name[obj], name[type]]] begin[:]
variable[element_type] assign[=] name[obj]
<ast.Try object at 0x7da20c6a9d80>
return[name[plotclass]] | keyword[def] identifier[plotting_class] ( identifier[cls] , identifier[obj] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[AdjointLayout] ) keyword[or] identifier[obj] keyword[is] identifier[AdjointLayout] :
identifier[obj] = identifier[Layout]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[type] ):
identifier[element_type] = identifier[obj]
keyword[else] :
identifier[element_type] = identifier[obj] . identifier[type] keyword[if] identifier[isinstance] ( identifier[obj] , identifier[HoloMap] ) keyword[else] identifier[type] ( identifier[obj] )
keyword[try] :
identifier[plotclass] = identifier[Store] . identifier[registry] [ identifier[cls] . identifier[backend] ][ identifier[element_type] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[SkipRendering] ( literal[string]
literal[string] . identifier[format] ( identifier[element_type] . identifier[__name__] ))
keyword[return] identifier[plotclass] | def plotting_class(cls, obj):
"""
Given an object or Element class, return the suitable plotting
class needed to render it with the current renderer.
"""
if isinstance(obj, AdjointLayout) or obj is AdjointLayout:
obj = Layout # depends on [control=['if'], data=[]]
if isinstance(obj, type):
element_type = obj # depends on [control=['if'], data=[]]
else:
element_type = obj.type if isinstance(obj, HoloMap) else type(obj)
try:
plotclass = Store.registry[cls.backend][element_type] # depends on [control=['try'], data=[]]
except KeyError:
raise SkipRendering('No plotting class for {0} found'.format(element_type.__name__)) # depends on [control=['except'], data=[]]
return plotclass |
def get_cpu_props(cls, family, arch='x86'):
    """
    Get CPU info XML for a given family.

    Args:
        family(str): CPU family (matched against the ``name`` attribute of
            the ``model`` elements for the given arch)
        arch(str): CPU arch

    Returns:
        lxml.etree.Element: CPU xml (the matching ``model`` element)

    Raises:
        :exc:`~LagoException`: If no such CPU family exists
    """
    cpus = cls.get_cpus_by_arch(arch)
    # XPath lookup by model name; IndexError means no match was found.
    try:
        return cpus.xpath('model[@name="{0}"]'.format(family))[0]
    except IndexError:
raise LagoException('No such CPU family: {0}'.format(family)) | def function[get_cpu_props, parameter[cls, family, arch]]:
constant[
Get CPU info XML
Args:
family(str): CPU family
arch(str): CPU arch
Returns:
lxml.etree.Element: CPU xml
Raises:
:exc:`~LagoException`: If no such CPU family exists
]
variable[cpus] assign[=] call[name[cls].get_cpus_by_arch, parameter[name[arch]]]
<ast.Try object at 0x7da2041d88e0> | keyword[def] identifier[get_cpu_props] ( identifier[cls] , identifier[family] , identifier[arch] = literal[string] ):
literal[string]
identifier[cpus] = identifier[cls] . identifier[get_cpus_by_arch] ( identifier[arch] )
keyword[try] :
keyword[return] identifier[cpus] . identifier[xpath] ( literal[string] . identifier[format] ( identifier[family] ))[ literal[int] ]
keyword[except] identifier[IndexError] :
keyword[raise] identifier[LagoException] ( literal[string] . identifier[format] ( identifier[family] )) | def get_cpu_props(cls, family, arch='x86'):
"""
Get CPU info XML
Args:
family(str): CPU family
arch(str): CPU arch
Returns:
lxml.etree.Element: CPU xml
Raises:
:exc:`~LagoException`: If no such CPU family exists
"""
cpus = cls.get_cpus_by_arch(arch)
try:
return cpus.xpath('model[@name="{0}"]'.format(family))[0] # depends on [control=['try'], data=[]]
except IndexError:
raise LagoException('No such CPU family: {0}'.format(family)) # depends on [control=['except'], data=[]] |
def _show_help(self, txt,
               mode_to_set=MAIN_HELP_MODE,
               caption=' Help ',
               prompt=' Press any key to hide ',
               too_small_msg='Window too small to show message',
               is_message=False):
    """ Display a help, info or question window.

    Draws a bordered curses window centred on the screen. In config mode
    (for operation modes above CONFIG_HELP_MODE) the window is drawn inside
    a slightly larger empty container window. Text markup in ``txt``:
    lines starting with '%' become horizontal separator rules with
    right-aligned text; '|' splits a line into segments drawn in
    alternating colours (order of the two colours depends on
    ``is_message``). Falls back to ``too_small_msg`` when the screen is
    too small, and gives up entirely (resetting to NORMAL_MODE) when even
    that does not fit.
    """
    self.helpWinContainer = None
    self.helpWin = None
    self.operation_mode = mode_to_set
    txt_col = curses.color_pair(5)
    box_col = curses.color_pair(3)
    caption_col = curses.color_pair(4)
    # Normalise the text: split lines, drop CRs, strip surrounding blanks.
    lines = txt.split('\n')
    st_lines = [item.replace('\r','') for item in lines]
    lines = [item.strip() for item in st_lines]
    # Inner window: text plus border; outer: optional container window.
    inner_height = len(lines) + 2
    inner_width = self._get_message_width_from_list(lines) + 4
    outer_height = inner_height + 2
    outer_width = inner_width + 2
    if self.window_mode == CONFIG_MODE and \
            self.operation_mode > CONFIG_HELP_MODE:
        use_empty_win = True
        height_to_use = outer_height
        width_to_use = outer_width
    else:
        use_empty_win = False
        height_to_use = inner_height
        width_to_use = inner_width
    # Screen too small for the requested text: show the fallback message.
    if self.maxY - 2 < outer_height or self.maxX < outer_width:
        txt = too_small_msg
        inner_height = 3
        inner_width = len(txt) + 4
        if use_empty_win:
            height_to_use = inner_height +2
            width_to_use = inner_width + 2
        else:
            height_to_use = inner_height
            width_to_use = inner_width
        # Even the fallback does not fit: abort and reset the mode.
        if self.maxX < width_to_use:
            if logger.isEnabledFor(logging.DEBUG):
                logger.debug(' *** Window too small even to show help warning ***')
            self.operation_mode = self.window_mode = NORMAL_MODE
            return
        lines = [ txt , ]
    if use_empty_win:
        self.helpWinContainer = curses.newwin(height_to_use,width_to_use,int((self.maxY-height_to_use)/2),int((self.maxX-width_to_use)/2))
        self.helpWinContainer.bkgdset(' ', box_col)
        self.helpWinContainer.erase()
    self.helpWin = curses.newwin(inner_height,inner_width,int((self.maxY-inner_height)/2),int((self.maxX-inner_width)/2))
    self.helpWin.bkgdset(' ', box_col)
    self.helpWin.erase()
    self.helpWin.box()
    # Messages start in text colour; help screens start in caption colour.
    if is_message:
        start_with = txt_col
        follow = caption_col
    else:
        start_with = caption_col
        follow = txt_col
    if caption.strip():
        self.helpWin.addstr(0, int((inner_width-len(caption))/2), caption, caption_col)
    splited = []
    for i, n in enumerate(lines):
        a_line = self._replace_starting_undesscore(n)
        if a_line.startswith('%'):
            # '%'-prefixed line: draw a full-width separator rule, then the
            # remaining text right-aligned (underscores shown as spaces).
            self.helpWin.move(i + 1, 0)
            try:
                self.helpWin.addstr('├', curses.color_pair(3))
                self.helpWin.addstr('─' * (inner_width - 2), curses.color_pair(3))
                self.helpWin.addstr('┤', curses.color_pair(3))
            except:
                # Some curses builds only accept byte strings here.
                self.helpWin.addstr('├'.encode('utf-8'), curses.color_pair(3))
                self.helpWin.addstr('─'.encode('utf-8') * (inner_width - 2), curses.color_pair(3))
                self.helpWin.addstr('┤'.encode('utf-8'), curses.color_pair(3))
            self.helpWin.addstr(i + 1, inner_width-len(a_line[1:]) - 1, a_line[1:].replace('_', ' '), caption_col)
            #self.helpWin.addstr(i + 1, int((inner_width-len(a_line[1:]))/2), a_line[1:].replace('_', ' '), caption_col)
        else:
            # Ordinary line: '|' alternates the two colours per segment.
            splited = a_line.split('|')
            self.helpWin.move(i + 1, 2)
            for part, part_string in enumerate(splited):
                if part_string.strip():
                    if part == 0 or part % 2 == 0:
                        self.helpWin.addstr(splited[part], start_with)
                    else:
                        self.helpWin.addstr(splited[part], follow)
    if prompt.strip():
        self.helpWin.addstr(inner_height - 1, int(inner_width-len(prompt)-1), prompt)
    if use_empty_win:
        self.helpWinContainer.refresh()
self.helpWin.refresh() | def function[_show_help, parameter[self, txt, mode_to_set, caption, prompt, too_small_msg, is_message]]:
constant[ Display a help, info or question window. ]
name[self].helpWinContainer assign[=] constant[None]
name[self].helpWin assign[=] constant[None]
name[self].operation_mode assign[=] name[mode_to_set]
variable[txt_col] assign[=] call[name[curses].color_pair, parameter[constant[5]]]
variable[box_col] assign[=] call[name[curses].color_pair, parameter[constant[3]]]
variable[caption_col] assign[=] call[name[curses].color_pair, parameter[constant[4]]]
variable[lines] assign[=] call[name[txt].split, parameter[constant[
]]]
variable[st_lines] assign[=] <ast.ListComp object at 0x7da1b1152680>
variable[lines] assign[=] <ast.ListComp object at 0x7da1b11518d0>
variable[inner_height] assign[=] binary_operation[call[name[len], parameter[name[lines]]] + constant[2]]
variable[inner_width] assign[=] binary_operation[call[name[self]._get_message_width_from_list, parameter[name[lines]]] + constant[4]]
variable[outer_height] assign[=] binary_operation[name[inner_height] + constant[2]]
variable[outer_width] assign[=] binary_operation[name[inner_width] + constant[2]]
if <ast.BoolOp object at 0x7da1b1152ad0> begin[:]
variable[use_empty_win] assign[=] constant[True]
variable[height_to_use] assign[=] name[outer_height]
variable[width_to_use] assign[=] name[outer_width]
if <ast.BoolOp object at 0x7da1b11a9060> begin[:]
variable[txt] assign[=] name[too_small_msg]
variable[inner_height] assign[=] constant[3]
variable[inner_width] assign[=] binary_operation[call[name[len], parameter[name[txt]]] + constant[4]]
if name[use_empty_win] begin[:]
variable[height_to_use] assign[=] binary_operation[name[inner_height] + constant[2]]
variable[width_to_use] assign[=] binary_operation[name[inner_width] + constant[2]]
if compare[name[self].maxX less[<] name[width_to_use]] begin[:]
if call[name[logger].isEnabledFor, parameter[name[logging].DEBUG]] begin[:]
call[name[logger].debug, parameter[constant[ *** Window too small even to show help warning ***]]]
name[self].operation_mode assign[=] name[NORMAL_MODE]
return[None]
variable[lines] assign[=] list[[<ast.Name object at 0x7da1b101b520>]]
if name[use_empty_win] begin[:]
name[self].helpWinContainer assign[=] call[name[curses].newwin, parameter[name[height_to_use], name[width_to_use], call[name[int], parameter[binary_operation[binary_operation[name[self].maxY - name[height_to_use]] / constant[2]]]], call[name[int], parameter[binary_operation[binary_operation[name[self].maxX - name[width_to_use]] / constant[2]]]]]]
call[name[self].helpWinContainer.bkgdset, parameter[constant[ ], name[box_col]]]
call[name[self].helpWinContainer.erase, parameter[]]
name[self].helpWin assign[=] call[name[curses].newwin, parameter[name[inner_height], name[inner_width], call[name[int], parameter[binary_operation[binary_operation[name[self].maxY - name[inner_height]] / constant[2]]]], call[name[int], parameter[binary_operation[binary_operation[name[self].maxX - name[inner_width]] / constant[2]]]]]]
call[name[self].helpWin.bkgdset, parameter[constant[ ], name[box_col]]]
call[name[self].helpWin.erase, parameter[]]
call[name[self].helpWin.box, parameter[]]
if name[is_message] begin[:]
variable[start_with] assign[=] name[txt_col]
variable[follow] assign[=] name[caption_col]
if call[name[caption].strip, parameter[]] begin[:]
call[name[self].helpWin.addstr, parameter[constant[0], call[name[int], parameter[binary_operation[binary_operation[name[inner_width] - call[name[len], parameter[name[caption]]]] / constant[2]]]], name[caption], name[caption_col]]]
variable[splited] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b101b8b0>, <ast.Name object at 0x7da1b1019720>]]] in starred[call[name[enumerate], parameter[name[lines]]]] begin[:]
variable[a_line] assign[=] call[name[self]._replace_starting_undesscore, parameter[name[n]]]
if call[name[a_line].startswith, parameter[constant[%]]] begin[:]
call[name[self].helpWin.move, parameter[binary_operation[name[i] + constant[1]], constant[0]]]
<ast.Try object at 0x7da1b1004790>
call[name[self].helpWin.addstr, parameter[binary_operation[name[i] + constant[1]], binary_operation[binary_operation[name[inner_width] - call[name[len], parameter[call[name[a_line]][<ast.Slice object at 0x7da1b1005f60>]]]] - constant[1]], call[call[name[a_line]][<ast.Slice object at 0x7da1b1004fa0>].replace, parameter[constant[_], constant[ ]]], name[caption_col]]]
if call[name[prompt].strip, parameter[]] begin[:]
call[name[self].helpWin.addstr, parameter[binary_operation[name[inner_height] - constant[1]], call[name[int], parameter[binary_operation[binary_operation[name[inner_width] - call[name[len], parameter[name[prompt]]]] - constant[1]]]], name[prompt]]]
if name[use_empty_win] begin[:]
call[name[self].helpWinContainer.refresh, parameter[]]
call[name[self].helpWin.refresh, parameter[]] | keyword[def] identifier[_show_help] ( identifier[self] , identifier[txt] ,
identifier[mode_to_set] = identifier[MAIN_HELP_MODE] ,
identifier[caption] = literal[string] ,
identifier[prompt] = literal[string] ,
identifier[too_small_msg] = literal[string] ,
identifier[is_message] = keyword[False] ):
literal[string]
identifier[self] . identifier[helpWinContainer] = keyword[None]
identifier[self] . identifier[helpWin] = keyword[None]
identifier[self] . identifier[operation_mode] = identifier[mode_to_set]
identifier[txt_col] = identifier[curses] . identifier[color_pair] ( literal[int] )
identifier[box_col] = identifier[curses] . identifier[color_pair] ( literal[int] )
identifier[caption_col] = identifier[curses] . identifier[color_pair] ( literal[int] )
identifier[lines] = identifier[txt] . identifier[split] ( literal[string] )
identifier[st_lines] =[ identifier[item] . identifier[replace] ( literal[string] , literal[string] ) keyword[for] identifier[item] keyword[in] identifier[lines] ]
identifier[lines] =[ identifier[item] . identifier[strip] () keyword[for] identifier[item] keyword[in] identifier[st_lines] ]
identifier[inner_height] = identifier[len] ( identifier[lines] )+ literal[int]
identifier[inner_width] = identifier[self] . identifier[_get_message_width_from_list] ( identifier[lines] )+ literal[int]
identifier[outer_height] = identifier[inner_height] + literal[int]
identifier[outer_width] = identifier[inner_width] + literal[int]
keyword[if] identifier[self] . identifier[window_mode] == identifier[CONFIG_MODE] keyword[and] identifier[self] . identifier[operation_mode] > identifier[CONFIG_HELP_MODE] :
identifier[use_empty_win] = keyword[True]
identifier[height_to_use] = identifier[outer_height]
identifier[width_to_use] = identifier[outer_width]
keyword[else] :
identifier[use_empty_win] = keyword[False]
identifier[height_to_use] = identifier[inner_height]
identifier[width_to_use] = identifier[inner_width]
keyword[if] identifier[self] . identifier[maxY] - literal[int] < identifier[outer_height] keyword[or] identifier[self] . identifier[maxX] < identifier[outer_width] :
identifier[txt] = identifier[too_small_msg]
identifier[inner_height] = literal[int]
identifier[inner_width] = identifier[len] ( identifier[txt] )+ literal[int]
keyword[if] identifier[use_empty_win] :
identifier[height_to_use] = identifier[inner_height] + literal[int]
identifier[width_to_use] = identifier[inner_width] + literal[int]
keyword[else] :
identifier[height_to_use] = identifier[inner_height]
identifier[width_to_use] = identifier[inner_width]
keyword[if] identifier[self] . identifier[maxX] < identifier[width_to_use] :
keyword[if] identifier[logger] . identifier[isEnabledFor] ( identifier[logging] . identifier[DEBUG] ):
identifier[logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[operation_mode] = identifier[self] . identifier[window_mode] = identifier[NORMAL_MODE]
keyword[return]
identifier[lines] =[ identifier[txt] ,]
keyword[if] identifier[use_empty_win] :
identifier[self] . identifier[helpWinContainer] = identifier[curses] . identifier[newwin] ( identifier[height_to_use] , identifier[width_to_use] , identifier[int] (( identifier[self] . identifier[maxY] - identifier[height_to_use] )/ literal[int] ), identifier[int] (( identifier[self] . identifier[maxX] - identifier[width_to_use] )/ literal[int] ))
identifier[self] . identifier[helpWinContainer] . identifier[bkgdset] ( literal[string] , identifier[box_col] )
identifier[self] . identifier[helpWinContainer] . identifier[erase] ()
identifier[self] . identifier[helpWin] = identifier[curses] . identifier[newwin] ( identifier[inner_height] , identifier[inner_width] , identifier[int] (( identifier[self] . identifier[maxY] - identifier[inner_height] )/ literal[int] ), identifier[int] (( identifier[self] . identifier[maxX] - identifier[inner_width] )/ literal[int] ))
identifier[self] . identifier[helpWin] . identifier[bkgdset] ( literal[string] , identifier[box_col] )
identifier[self] . identifier[helpWin] . identifier[erase] ()
identifier[self] . identifier[helpWin] . identifier[box] ()
keyword[if] identifier[is_message] :
identifier[start_with] = identifier[txt_col]
identifier[follow] = identifier[caption_col]
keyword[else] :
identifier[start_with] = identifier[caption_col]
identifier[follow] = identifier[txt_col]
keyword[if] identifier[caption] . identifier[strip] ():
identifier[self] . identifier[helpWin] . identifier[addstr] ( literal[int] , identifier[int] (( identifier[inner_width] - identifier[len] ( identifier[caption] ))/ literal[int] ), identifier[caption] , identifier[caption_col] )
identifier[splited] =[]
keyword[for] identifier[i] , identifier[n] keyword[in] identifier[enumerate] ( identifier[lines] ):
identifier[a_line] = identifier[self] . identifier[_replace_starting_undesscore] ( identifier[n] )
keyword[if] identifier[a_line] . identifier[startswith] ( literal[string] ):
identifier[self] . identifier[helpWin] . identifier[move] ( identifier[i] + literal[int] , literal[int] )
keyword[try] :
identifier[self] . identifier[helpWin] . identifier[addstr] ( literal[string] , identifier[curses] . identifier[color_pair] ( literal[int] ))
identifier[self] . identifier[helpWin] . identifier[addstr] ( literal[string] *( identifier[inner_width] - literal[int] ), identifier[curses] . identifier[color_pair] ( literal[int] ))
identifier[self] . identifier[helpWin] . identifier[addstr] ( literal[string] , identifier[curses] . identifier[color_pair] ( literal[int] ))
keyword[except] :
identifier[self] . identifier[helpWin] . identifier[addstr] ( literal[string] . identifier[encode] ( literal[string] ), identifier[curses] . identifier[color_pair] ( literal[int] ))
identifier[self] . identifier[helpWin] . identifier[addstr] ( literal[string] . identifier[encode] ( literal[string] )*( identifier[inner_width] - literal[int] ), identifier[curses] . identifier[color_pair] ( literal[int] ))
identifier[self] . identifier[helpWin] . identifier[addstr] ( literal[string] . identifier[encode] ( literal[string] ), identifier[curses] . identifier[color_pair] ( literal[int] ))
identifier[self] . identifier[helpWin] . identifier[addstr] ( identifier[i] + literal[int] , identifier[inner_width] - identifier[len] ( identifier[a_line] [ literal[int] :])- literal[int] , identifier[a_line] [ literal[int] :]. identifier[replace] ( literal[string] , literal[string] ), identifier[caption_col] )
keyword[else] :
identifier[splited] = identifier[a_line] . identifier[split] ( literal[string] )
identifier[self] . identifier[helpWin] . identifier[move] ( identifier[i] + literal[int] , literal[int] )
keyword[for] identifier[part] , identifier[part_string] keyword[in] identifier[enumerate] ( identifier[splited] ):
keyword[if] identifier[part_string] . identifier[strip] ():
keyword[if] identifier[part] == literal[int] keyword[or] identifier[part] % literal[int] == literal[int] :
identifier[self] . identifier[helpWin] . identifier[addstr] ( identifier[splited] [ identifier[part] ], identifier[start_with] )
keyword[else] :
identifier[self] . identifier[helpWin] . identifier[addstr] ( identifier[splited] [ identifier[part] ], identifier[follow] )
keyword[if] identifier[prompt] . identifier[strip] ():
identifier[self] . identifier[helpWin] . identifier[addstr] ( identifier[inner_height] - literal[int] , identifier[int] ( identifier[inner_width] - identifier[len] ( identifier[prompt] )- literal[int] ), identifier[prompt] )
keyword[if] identifier[use_empty_win] :
identifier[self] . identifier[helpWinContainer] . identifier[refresh] ()
identifier[self] . identifier[helpWin] . identifier[refresh] () | def _show_help(self, txt, mode_to_set=MAIN_HELP_MODE, caption=' Help ', prompt=' Press any key to hide ', too_small_msg='Window too small to show message', is_message=False):
""" Display a help, info or question window. """
self.helpWinContainer = None
self.helpWin = None
self.operation_mode = mode_to_set
txt_col = curses.color_pair(5)
box_col = curses.color_pair(3)
caption_col = curses.color_pair(4)
lines = txt.split('\n')
st_lines = [item.replace('\r', '') for item in lines]
lines = [item.strip() for item in st_lines]
inner_height = len(lines) + 2
inner_width = self._get_message_width_from_list(lines) + 4
outer_height = inner_height + 2
outer_width = inner_width + 2
if self.window_mode == CONFIG_MODE and self.operation_mode > CONFIG_HELP_MODE:
use_empty_win = True
height_to_use = outer_height
width_to_use = outer_width # depends on [control=['if'], data=[]]
else:
use_empty_win = False
height_to_use = inner_height
width_to_use = inner_width
if self.maxY - 2 < outer_height or self.maxX < outer_width:
txt = too_small_msg
inner_height = 3
inner_width = len(txt) + 4
if use_empty_win:
height_to_use = inner_height + 2
width_to_use = inner_width + 2 # depends on [control=['if'], data=[]]
else:
height_to_use = inner_height
width_to_use = inner_width
if self.maxX < width_to_use:
if logger.isEnabledFor(logging.DEBUG):
logger.debug(' *** Window too small even to show help warning ***') # depends on [control=['if'], data=[]]
self.operation_mode = self.window_mode = NORMAL_MODE
return # depends on [control=['if'], data=[]]
lines = [txt] # depends on [control=['if'], data=[]]
if use_empty_win:
self.helpWinContainer = curses.newwin(height_to_use, width_to_use, int((self.maxY - height_to_use) / 2), int((self.maxX - width_to_use) / 2))
self.helpWinContainer.bkgdset(' ', box_col)
self.helpWinContainer.erase() # depends on [control=['if'], data=[]]
self.helpWin = curses.newwin(inner_height, inner_width, int((self.maxY - inner_height) / 2), int((self.maxX - inner_width) / 2))
self.helpWin.bkgdset(' ', box_col)
self.helpWin.erase()
self.helpWin.box()
if is_message:
start_with = txt_col
follow = caption_col # depends on [control=['if'], data=[]]
else:
start_with = caption_col
follow = txt_col
if caption.strip():
self.helpWin.addstr(0, int((inner_width - len(caption)) / 2), caption, caption_col) # depends on [control=['if'], data=[]]
splited = []
for (i, n) in enumerate(lines):
a_line = self._replace_starting_undesscore(n)
if a_line.startswith('%'):
self.helpWin.move(i + 1, 0)
try:
self.helpWin.addstr('├', curses.color_pair(3))
self.helpWin.addstr('─' * (inner_width - 2), curses.color_pair(3))
self.helpWin.addstr('┤', curses.color_pair(3)) # depends on [control=['try'], data=[]]
except:
self.helpWin.addstr('├'.encode('utf-8'), curses.color_pair(3))
self.helpWin.addstr('─'.encode('utf-8') * (inner_width - 2), curses.color_pair(3))
self.helpWin.addstr('┤'.encode('utf-8'), curses.color_pair(3)) # depends on [control=['except'], data=[]]
self.helpWin.addstr(i + 1, inner_width - len(a_line[1:]) - 1, a_line[1:].replace('_', ' '), caption_col) # depends on [control=['if'], data=[]]
else:
#self.helpWin.addstr(i + 1, int((inner_width-len(a_line[1:]))/2), a_line[1:].replace('_', ' '), caption_col)
splited = a_line.split('|')
self.helpWin.move(i + 1, 2)
for (part, part_string) in enumerate(splited):
if part_string.strip():
if part == 0 or part % 2 == 0:
self.helpWin.addstr(splited[part], start_with) # depends on [control=['if'], data=[]]
else:
self.helpWin.addstr(splited[part], follow) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
if prompt.strip():
self.helpWin.addstr(inner_height - 1, int(inner_width - len(prompt) - 1), prompt) # depends on [control=['if'], data=[]]
if use_empty_win:
self.helpWinContainer.refresh() # depends on [control=['if'], data=[]]
self.helpWin.refresh() |
def is_forced_retry(self, method, status_code):
    """ Is this method/status code retryable? (Based on method/codes whitelists)
    """
    # A non-whitelisted method is never retried (case-insensitive match).
    if self.method_whitelist and method.upper() not in self.method_whitelist:
        return False
return self.status_forcelist and status_code in self.status_forcelist | def function[is_forced_retry, parameter[self, method, status_code]]:
constant[ Is this method/status code retryable? (Based on method/codes whitelists)
]
if <ast.BoolOp object at 0x7da1b11ec340> begin[:]
return[constant[False]]
return[<ast.BoolOp object at 0x7da1b11ed9f0>] | keyword[def] identifier[is_forced_retry] ( identifier[self] , identifier[method] , identifier[status_code] ):
literal[string]
keyword[if] identifier[self] . identifier[method_whitelist] keyword[and] identifier[method] . identifier[upper] () keyword[not] keyword[in] identifier[self] . identifier[method_whitelist] :
keyword[return] keyword[False]
keyword[return] identifier[self] . identifier[status_forcelist] keyword[and] identifier[status_code] keyword[in] identifier[self] . identifier[status_forcelist] | def is_forced_retry(self, method, status_code):
""" Is this method/status code retryable? (Based on method/codes whitelists)
"""
if self.method_whitelist and method.upper() not in self.method_whitelist:
return False # depends on [control=['if'], data=[]]
return self.status_forcelist and status_code in self.status_forcelist |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.