code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def saveCertPem(self, cert, path):
'''
Save a certificate in PEM format to a file outside the certdir.
'''
with s_common.genfile(path) as fd:
fd.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert)) | def function[saveCertPem, parameter[self, cert, path]]:
constant[
Save a certificate in PEM format to a file outside the certdir.
]
with call[name[s_common].genfile, parameter[name[path]]] begin[:]
call[name[fd].write, parameter[call[name[crypto].dump_certificate, parameter[name[crypto].FILETYPE_PEM, name[cert]]]]] | keyword[def] identifier[saveCertPem] ( identifier[self] , identifier[cert] , identifier[path] ):
literal[string]
keyword[with] identifier[s_common] . identifier[genfile] ( identifier[path] ) keyword[as] identifier[fd] :
identifier[fd] . identifier[write] ( identifier[crypto] . identifier[dump_certificate] ( identifier[crypto] . identifier[FILETYPE_PEM] , identifier[cert] )) | def saveCertPem(self, cert, path):
"""
Save a certificate in PEM format to a file outside the certdir.
"""
with s_common.genfile(path) as fd:
fd.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert)) # depends on [control=['with'], data=['fd']] |
def DbGetHostList(self, argin):
""" Get host list with name matching the specified filter
:param argin: The filter
:type: tango.DevString
:return: Host name list
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetHostList()")
argin = replace_wildcard(argin)
return self.db.get_host_list(argin) | def function[DbGetHostList, parameter[self, argin]]:
constant[ Get host list with name matching the specified filter
:param argin: The filter
:type: tango.DevString
:return: Host name list
:rtype: tango.DevVarStringArray ]
call[name[self]._log.debug, parameter[constant[In DbGetHostList()]]]
variable[argin] assign[=] call[name[replace_wildcard], parameter[name[argin]]]
return[call[name[self].db.get_host_list, parameter[name[argin]]]] | keyword[def] identifier[DbGetHostList] ( identifier[self] , identifier[argin] ):
literal[string]
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] )
identifier[argin] = identifier[replace_wildcard] ( identifier[argin] )
keyword[return] identifier[self] . identifier[db] . identifier[get_host_list] ( identifier[argin] ) | def DbGetHostList(self, argin):
""" Get host list with name matching the specified filter
:param argin: The filter
:type: tango.DevString
:return: Host name list
:rtype: tango.DevVarStringArray """
self._log.debug('In DbGetHostList()')
argin = replace_wildcard(argin)
return self.db.get_host_list(argin) |
def get_nameserver_detail_output_show_nameserver_nameserver_real(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_nameserver_detail = ET.Element("get_nameserver_detail")
config = get_nameserver_detail
output = ET.SubElement(get_nameserver_detail, "output")
show_nameserver = ET.SubElement(output, "show-nameserver")
nameserver_portid_key = ET.SubElement(show_nameserver, "nameserver-portid")
nameserver_portid_key.text = kwargs.pop('nameserver_portid')
nameserver_real = ET.SubElement(show_nameserver, "nameserver-real")
nameserver_real.text = kwargs.pop('nameserver_real')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[get_nameserver_detail_output_show_nameserver_nameserver_real, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[get_nameserver_detail] assign[=] call[name[ET].Element, parameter[constant[get_nameserver_detail]]]
variable[config] assign[=] name[get_nameserver_detail]
variable[output] assign[=] call[name[ET].SubElement, parameter[name[get_nameserver_detail], constant[output]]]
variable[show_nameserver] assign[=] call[name[ET].SubElement, parameter[name[output], constant[show-nameserver]]]
variable[nameserver_portid_key] assign[=] call[name[ET].SubElement, parameter[name[show_nameserver], constant[nameserver-portid]]]
name[nameserver_portid_key].text assign[=] call[name[kwargs].pop, parameter[constant[nameserver_portid]]]
variable[nameserver_real] assign[=] call[name[ET].SubElement, parameter[name[show_nameserver], constant[nameserver-real]]]
name[nameserver_real].text assign[=] call[name[kwargs].pop, parameter[constant[nameserver_real]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[get_nameserver_detail_output_show_nameserver_nameserver_real] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[get_nameserver_detail] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[get_nameserver_detail]
identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[get_nameserver_detail] , literal[string] )
identifier[show_nameserver] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] )
identifier[nameserver_portid_key] = identifier[ET] . identifier[SubElement] ( identifier[show_nameserver] , literal[string] )
identifier[nameserver_portid_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[nameserver_real] = identifier[ET] . identifier[SubElement] ( identifier[show_nameserver] , literal[string] )
identifier[nameserver_real] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def get_nameserver_detail_output_show_nameserver_nameserver_real(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
get_nameserver_detail = ET.Element('get_nameserver_detail')
config = get_nameserver_detail
output = ET.SubElement(get_nameserver_detail, 'output')
show_nameserver = ET.SubElement(output, 'show-nameserver')
nameserver_portid_key = ET.SubElement(show_nameserver, 'nameserver-portid')
nameserver_portid_key.text = kwargs.pop('nameserver_portid')
nameserver_real = ET.SubElement(show_nameserver, 'nameserver-real')
nameserver_real.text = kwargs.pop('nameserver_real')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def makeSer(segID, N, CA, C, O, geo):
'''Creates a Serine residue'''
##R-Group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_OG_length=geo.CB_OG_length
CA_CB_OG_angle=geo.CA_CB_OG_angle
N_CA_CB_OG_diangle=geo.N_CA_CB_OG_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
oxygen_g= calculateCoordinates(N, CA, CB, CB_OG_length, CA_CB_OG_angle, N_CA_CB_OG_diangle)
OG= Atom("OG", oxygen_g, 0.0, 1.0, " ", " OG", 0, "O")
##Create Reside Data Structure
res= Residue((' ', segID, ' '), "SER", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(OG)
##print(res)
return res | def function[makeSer, parameter[segID, N, CA, C, O, geo]]:
constant[Creates a Serine residue]
variable[CA_CB_length] assign[=] name[geo].CA_CB_length
variable[C_CA_CB_angle] assign[=] name[geo].C_CA_CB_angle
variable[N_C_CA_CB_diangle] assign[=] name[geo].N_C_CA_CB_diangle
variable[CB_OG_length] assign[=] name[geo].CB_OG_length
variable[CA_CB_OG_angle] assign[=] name[geo].CA_CB_OG_angle
variable[N_CA_CB_OG_diangle] assign[=] name[geo].N_CA_CB_OG_diangle
variable[carbon_b] assign[=] call[name[calculateCoordinates], parameter[name[N], name[C], name[CA], name[CA_CB_length], name[C_CA_CB_angle], name[N_C_CA_CB_diangle]]]
variable[CB] assign[=] call[name[Atom], parameter[constant[CB], name[carbon_b], constant[0.0], constant[1.0], constant[ ], constant[ CB], constant[0], constant[C]]]
variable[oxygen_g] assign[=] call[name[calculateCoordinates], parameter[name[N], name[CA], name[CB], name[CB_OG_length], name[CA_CB_OG_angle], name[N_CA_CB_OG_diangle]]]
variable[OG] assign[=] call[name[Atom], parameter[constant[OG], name[oxygen_g], constant[0.0], constant[1.0], constant[ ], constant[ OG], constant[0], constant[O]]]
variable[res] assign[=] call[name[Residue], parameter[tuple[[<ast.Constant object at 0x7da207f9b4f0>, <ast.Name object at 0x7da207f99cf0>, <ast.Constant object at 0x7da207f99d20>]], constant[SER], constant[ ]]]
call[name[res].add, parameter[name[N]]]
call[name[res].add, parameter[name[CA]]]
call[name[res].add, parameter[name[C]]]
call[name[res].add, parameter[name[O]]]
call[name[res].add, parameter[name[CB]]]
call[name[res].add, parameter[name[OG]]]
return[name[res]] | keyword[def] identifier[makeSer] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] ):
literal[string]
identifier[CA_CB_length] = identifier[geo] . identifier[CA_CB_length]
identifier[C_CA_CB_angle] = identifier[geo] . identifier[C_CA_CB_angle]
identifier[N_C_CA_CB_diangle] = identifier[geo] . identifier[N_C_CA_CB_diangle]
identifier[CB_OG_length] = identifier[geo] . identifier[CB_OG_length]
identifier[CA_CB_OG_angle] = identifier[geo] . identifier[CA_CB_OG_angle]
identifier[N_CA_CB_OG_diangle] = identifier[geo] . identifier[N_CA_CB_OG_diangle]
identifier[carbon_b] = identifier[calculateCoordinates] ( identifier[N] , identifier[C] , identifier[CA] , identifier[CA_CB_length] , identifier[C_CA_CB_angle] , identifier[N_C_CA_CB_diangle] )
identifier[CB] = identifier[Atom] ( literal[string] , identifier[carbon_b] , literal[int] , literal[int] , literal[string] , literal[string] , literal[int] , literal[string] )
identifier[oxygen_g] = identifier[calculateCoordinates] ( identifier[N] , identifier[CA] , identifier[CB] , identifier[CB_OG_length] , identifier[CA_CB_OG_angle] , identifier[N_CA_CB_OG_diangle] )
identifier[OG] = identifier[Atom] ( literal[string] , identifier[oxygen_g] , literal[int] , literal[int] , literal[string] , literal[string] , literal[int] , literal[string] )
identifier[res] = identifier[Residue] (( literal[string] , identifier[segID] , literal[string] ), literal[string] , literal[string] )
identifier[res] . identifier[add] ( identifier[N] )
identifier[res] . identifier[add] ( identifier[CA] )
identifier[res] . identifier[add] ( identifier[C] )
identifier[res] . identifier[add] ( identifier[O] )
identifier[res] . identifier[add] ( identifier[CB] )
identifier[res] . identifier[add] ( identifier[OG] )
keyword[return] identifier[res] | def makeSer(segID, N, CA, C, O, geo):
"""Creates a Serine residue"""
##R-Group
CA_CB_length = geo.CA_CB_length
C_CA_CB_angle = geo.C_CA_CB_angle
N_C_CA_CB_diangle = geo.N_C_CA_CB_diangle
CB_OG_length = geo.CB_OG_length
CA_CB_OG_angle = geo.CA_CB_OG_angle
N_CA_CB_OG_diangle = geo.N_CA_CB_OG_diangle
carbon_b = calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB = Atom('CB', carbon_b, 0.0, 1.0, ' ', ' CB', 0, 'C')
oxygen_g = calculateCoordinates(N, CA, CB, CB_OG_length, CA_CB_OG_angle, N_CA_CB_OG_diangle)
OG = Atom('OG', oxygen_g, 0.0, 1.0, ' ', ' OG', 0, 'O')
##Create Reside Data Structure
res = Residue((' ', segID, ' '), 'SER', ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(OG)
##print(res)
return res |
def operation_recorder_stage_result(self, ret, exc):
"""
**Experimental:** Low-level method used by the operation-specific
methods of this class.
*New in pywbem 0.9 as experimental.*
It forwards the operation results including exceptions that were
raised, to all recorders of this connection, and causes the forwarded
information to be recorded by all recorders of this connection.
"""
for recorder in self._operation_recorders:
recorder.stage_pywbem_result(ret, exc)
recorder.record_staged() | def function[operation_recorder_stage_result, parameter[self, ret, exc]]:
constant[
**Experimental:** Low-level method used by the operation-specific
methods of this class.
*New in pywbem 0.9 as experimental.*
It forwards the operation results including exceptions that were
raised, to all recorders of this connection, and causes the forwarded
information to be recorded by all recorders of this connection.
]
for taget[name[recorder]] in starred[name[self]._operation_recorders] begin[:]
call[name[recorder].stage_pywbem_result, parameter[name[ret], name[exc]]]
call[name[recorder].record_staged, parameter[]] | keyword[def] identifier[operation_recorder_stage_result] ( identifier[self] , identifier[ret] , identifier[exc] ):
literal[string]
keyword[for] identifier[recorder] keyword[in] identifier[self] . identifier[_operation_recorders] :
identifier[recorder] . identifier[stage_pywbem_result] ( identifier[ret] , identifier[exc] )
identifier[recorder] . identifier[record_staged] () | def operation_recorder_stage_result(self, ret, exc):
"""
**Experimental:** Low-level method used by the operation-specific
methods of this class.
*New in pywbem 0.9 as experimental.*
It forwards the operation results including exceptions that were
raised, to all recorders of this connection, and causes the forwarded
information to be recorded by all recorders of this connection.
"""
for recorder in self._operation_recorders:
recorder.stage_pywbem_result(ret, exc)
recorder.record_staged() # depends on [control=['for'], data=['recorder']] |
def angsep(lon1, lat1, lon2, lat2):
"""
Angular separation (deg) between two sky coordinates.
Borrowed from astropy (www.astropy.org)
Notes
-----
The angular separation is calculated using the Vincenty formula [1],
which is slighly more complex and computationally expensive than
some alternatives, but is stable at at all distances, including the
poles and antipodes.
[1] http://en.wikipedia.org/wiki/Great-circle_distance
"""
lon1,lat1 = np.radians([lon1,lat1])
lon2,lat2 = np.radians([lon2,lat2])
sdlon = np.sin(lon2 - lon1)
cdlon = np.cos(lon2 - lon1)
slat1 = np.sin(lat1)
slat2 = np.sin(lat2)
clat1 = np.cos(lat1)
clat2 = np.cos(lat2)
num1 = clat2 * sdlon
num2 = clat1 * slat2 - slat1 * clat2 * cdlon
denominator = slat1 * slat2 + clat1 * clat2 * cdlon
return np.degrees(np.arctan2(np.hypot(num1,num2), denominator)) | def function[angsep, parameter[lon1, lat1, lon2, lat2]]:
constant[
Angular separation (deg) between two sky coordinates.
Borrowed from astropy (www.astropy.org)
Notes
-----
The angular separation is calculated using the Vincenty formula [1],
which is slighly more complex and computationally expensive than
some alternatives, but is stable at at all distances, including the
poles and antipodes.
[1] http://en.wikipedia.org/wiki/Great-circle_distance
]
<ast.Tuple object at 0x7da18bc70280> assign[=] call[name[np].radians, parameter[list[[<ast.Name object at 0x7da18bc72bc0>, <ast.Name object at 0x7da18bc719c0>]]]]
<ast.Tuple object at 0x7da18bc71c00> assign[=] call[name[np].radians, parameter[list[[<ast.Name object at 0x7da18bc71330>, <ast.Name object at 0x7da18bc71d80>]]]]
variable[sdlon] assign[=] call[name[np].sin, parameter[binary_operation[name[lon2] - name[lon1]]]]
variable[cdlon] assign[=] call[name[np].cos, parameter[binary_operation[name[lon2] - name[lon1]]]]
variable[slat1] assign[=] call[name[np].sin, parameter[name[lat1]]]
variable[slat2] assign[=] call[name[np].sin, parameter[name[lat2]]]
variable[clat1] assign[=] call[name[np].cos, parameter[name[lat1]]]
variable[clat2] assign[=] call[name[np].cos, parameter[name[lat2]]]
variable[num1] assign[=] binary_operation[name[clat2] * name[sdlon]]
variable[num2] assign[=] binary_operation[binary_operation[name[clat1] * name[slat2]] - binary_operation[binary_operation[name[slat1] * name[clat2]] * name[cdlon]]]
variable[denominator] assign[=] binary_operation[binary_operation[name[slat1] * name[slat2]] + binary_operation[binary_operation[name[clat1] * name[clat2]] * name[cdlon]]]
return[call[name[np].degrees, parameter[call[name[np].arctan2, parameter[call[name[np].hypot, parameter[name[num1], name[num2]]], name[denominator]]]]]] | keyword[def] identifier[angsep] ( identifier[lon1] , identifier[lat1] , identifier[lon2] , identifier[lat2] ):
literal[string]
identifier[lon1] , identifier[lat1] = identifier[np] . identifier[radians] ([ identifier[lon1] , identifier[lat1] ])
identifier[lon2] , identifier[lat2] = identifier[np] . identifier[radians] ([ identifier[lon2] , identifier[lat2] ])
identifier[sdlon] = identifier[np] . identifier[sin] ( identifier[lon2] - identifier[lon1] )
identifier[cdlon] = identifier[np] . identifier[cos] ( identifier[lon2] - identifier[lon1] )
identifier[slat1] = identifier[np] . identifier[sin] ( identifier[lat1] )
identifier[slat2] = identifier[np] . identifier[sin] ( identifier[lat2] )
identifier[clat1] = identifier[np] . identifier[cos] ( identifier[lat1] )
identifier[clat2] = identifier[np] . identifier[cos] ( identifier[lat2] )
identifier[num1] = identifier[clat2] * identifier[sdlon]
identifier[num2] = identifier[clat1] * identifier[slat2] - identifier[slat1] * identifier[clat2] * identifier[cdlon]
identifier[denominator] = identifier[slat1] * identifier[slat2] + identifier[clat1] * identifier[clat2] * identifier[cdlon]
keyword[return] identifier[np] . identifier[degrees] ( identifier[np] . identifier[arctan2] ( identifier[np] . identifier[hypot] ( identifier[num1] , identifier[num2] ), identifier[denominator] )) | def angsep(lon1, lat1, lon2, lat2):
"""
Angular separation (deg) between two sky coordinates.
Borrowed from astropy (www.astropy.org)
Notes
-----
The angular separation is calculated using the Vincenty formula [1],
which is slighly more complex and computationally expensive than
some alternatives, but is stable at at all distances, including the
poles and antipodes.
[1] http://en.wikipedia.org/wiki/Great-circle_distance
"""
(lon1, lat1) = np.radians([lon1, lat1])
(lon2, lat2) = np.radians([lon2, lat2])
sdlon = np.sin(lon2 - lon1)
cdlon = np.cos(lon2 - lon1)
slat1 = np.sin(lat1)
slat2 = np.sin(lat2)
clat1 = np.cos(lat1)
clat2 = np.cos(lat2)
num1 = clat2 * sdlon
num2 = clat1 * slat2 - slat1 * clat2 * cdlon
denominator = slat1 * slat2 + clat1 * clat2 * cdlon
return np.degrees(np.arctan2(np.hypot(num1, num2), denominator)) |
def init_app(self, app):
"""Initialize Flask application."""
if self.module:
app.config.from_object(self.module) | def function[init_app, parameter[self, app]]:
constant[Initialize Flask application.]
if name[self].module begin[:]
call[name[app].config.from_object, parameter[name[self].module]] | keyword[def] identifier[init_app] ( identifier[self] , identifier[app] ):
literal[string]
keyword[if] identifier[self] . identifier[module] :
identifier[app] . identifier[config] . identifier[from_object] ( identifier[self] . identifier[module] ) | def init_app(self, app):
"""Initialize Flask application."""
if self.module:
app.config.from_object(self.module) # depends on [control=['if'], data=[]] |
def __clear_bp(self, aThread):
"""
Clears this breakpoint from the debug registers.
@type aThread: L{Thread}
@param aThread: Thread object.
"""
if self.__slot is not None:
aThread.suspend()
try:
ctx = aThread.get_context(win32.CONTEXT_DEBUG_REGISTERS)
DebugRegister.clear_bp(ctx, self.__slot)
aThread.set_context(ctx)
self.__slot = None
finally:
aThread.resume() | def function[__clear_bp, parameter[self, aThread]]:
constant[
Clears this breakpoint from the debug registers.
@type aThread: L{Thread}
@param aThread: Thread object.
]
if compare[name[self].__slot is_not constant[None]] begin[:]
call[name[aThread].suspend, parameter[]]
<ast.Try object at 0x7da18fe927a0> | keyword[def] identifier[__clear_bp] ( identifier[self] , identifier[aThread] ):
literal[string]
keyword[if] identifier[self] . identifier[__slot] keyword[is] keyword[not] keyword[None] :
identifier[aThread] . identifier[suspend] ()
keyword[try] :
identifier[ctx] = identifier[aThread] . identifier[get_context] ( identifier[win32] . identifier[CONTEXT_DEBUG_REGISTERS] )
identifier[DebugRegister] . identifier[clear_bp] ( identifier[ctx] , identifier[self] . identifier[__slot] )
identifier[aThread] . identifier[set_context] ( identifier[ctx] )
identifier[self] . identifier[__slot] = keyword[None]
keyword[finally] :
identifier[aThread] . identifier[resume] () | def __clear_bp(self, aThread):
"""
Clears this breakpoint from the debug registers.
@type aThread: L{Thread}
@param aThread: Thread object.
"""
if self.__slot is not None:
aThread.suspend()
try:
ctx = aThread.get_context(win32.CONTEXT_DEBUG_REGISTERS)
DebugRegister.clear_bp(ctx, self.__slot)
aThread.set_context(ctx)
self.__slot = None # depends on [control=['try'], data=[]]
finally:
aThread.resume() # depends on [control=['if'], data=[]] |
def setup(self):
"""Method runs the plugin"""
if self.dry_run is not True:
self.client = self._get_client()
self._disable_access_key() | def function[setup, parameter[self]]:
constant[Method runs the plugin]
if compare[name[self].dry_run is_not constant[True]] begin[:]
name[self].client assign[=] call[name[self]._get_client, parameter[]]
call[name[self]._disable_access_key, parameter[]] | keyword[def] identifier[setup] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[dry_run] keyword[is] keyword[not] keyword[True] :
identifier[self] . identifier[client] = identifier[self] . identifier[_get_client] ()
identifier[self] . identifier[_disable_access_key] () | def setup(self):
"""Method runs the plugin"""
if self.dry_run is not True:
self.client = self._get_client()
self._disable_access_key() # depends on [control=['if'], data=[]] |
def _set_enum_list_local_type(self, specifications):
""" This method is needed until get_type_name() is enhanced to include specification subtype and local_name
"""
for rest_name, specification in specifications.items():
for attribute in specification.attributes:
if attribute.type == "enum":
enum_type = attribute.local_name[0:1].upper() + attribute.local_name[1:]
attribute.local_type = enum_type
elif attribute.type == "object":
attr_type = "Object"
if self.attrs_types.has_option(specification.entity_name, attribute.local_name):
type = self.attrs_types.get(specification.entity_name, attribute.local_name)
if type:
attr_type = type
attribute.local_type = attr_type
elif attribute.type == "list":
if attribute.subtype == "enum":
enum_subtype = attribute.local_name[0:1].upper() + attribute.local_name[1:]
attribute.local_type = "System.Collections.Generic.List<E" + enum_subtype + ">"
elif attribute.subtype == "object":
attr_subtype = "JObject"
if self.attrs_types.has_option(specification.entity_name, attribute.local_name):
subtype = self.attrs_types.get(specification.entity_name, attribute.local_name)
if subtype:
attr_subtype = subtype
attribute.local_type = "System.Collections.Generic.List<" + attr_subtype + ">"
elif attribute.subtype == "entity":
attribute.local_type = "System.Collections.Generic.List<JObject>"
else:
attribute.local_type = "System.Collections.Generic.List<String>" | def function[_set_enum_list_local_type, parameter[self, specifications]]:
constant[ This method is needed until get_type_name() is enhanced to include specification subtype and local_name
]
for taget[tuple[[<ast.Name object at 0x7da1b04080d0>, <ast.Name object at 0x7da1b0408a60>]]] in starred[call[name[specifications].items, parameter[]]] begin[:]
for taget[name[attribute]] in starred[name[specification].attributes] begin[:]
if compare[name[attribute].type equal[==] constant[enum]] begin[:]
variable[enum_type] assign[=] binary_operation[call[call[name[attribute].local_name][<ast.Slice object at 0x7da1b040ac20>].upper, parameter[]] + call[name[attribute].local_name][<ast.Slice object at 0x7da1b040a710>]]
name[attribute].local_type assign[=] name[enum_type] | keyword[def] identifier[_set_enum_list_local_type] ( identifier[self] , identifier[specifications] ):
literal[string]
keyword[for] identifier[rest_name] , identifier[specification] keyword[in] identifier[specifications] . identifier[items] ():
keyword[for] identifier[attribute] keyword[in] identifier[specification] . identifier[attributes] :
keyword[if] identifier[attribute] . identifier[type] == literal[string] :
identifier[enum_type] = identifier[attribute] . identifier[local_name] [ literal[int] : literal[int] ]. identifier[upper] ()+ identifier[attribute] . identifier[local_name] [ literal[int] :]
identifier[attribute] . identifier[local_type] = identifier[enum_type]
keyword[elif] identifier[attribute] . identifier[type] == literal[string] :
identifier[attr_type] = literal[string]
keyword[if] identifier[self] . identifier[attrs_types] . identifier[has_option] ( identifier[specification] . identifier[entity_name] , identifier[attribute] . identifier[local_name] ):
identifier[type] = identifier[self] . identifier[attrs_types] . identifier[get] ( identifier[specification] . identifier[entity_name] , identifier[attribute] . identifier[local_name] )
keyword[if] identifier[type] :
identifier[attr_type] = identifier[type]
identifier[attribute] . identifier[local_type] = identifier[attr_type]
keyword[elif] identifier[attribute] . identifier[type] == literal[string] :
keyword[if] identifier[attribute] . identifier[subtype] == literal[string] :
identifier[enum_subtype] = identifier[attribute] . identifier[local_name] [ literal[int] : literal[int] ]. identifier[upper] ()+ identifier[attribute] . identifier[local_name] [ literal[int] :]
identifier[attribute] . identifier[local_type] = literal[string] + identifier[enum_subtype] + literal[string]
keyword[elif] identifier[attribute] . identifier[subtype] == literal[string] :
identifier[attr_subtype] = literal[string]
keyword[if] identifier[self] . identifier[attrs_types] . identifier[has_option] ( identifier[specification] . identifier[entity_name] , identifier[attribute] . identifier[local_name] ):
identifier[subtype] = identifier[self] . identifier[attrs_types] . identifier[get] ( identifier[specification] . identifier[entity_name] , identifier[attribute] . identifier[local_name] )
keyword[if] identifier[subtype] :
identifier[attr_subtype] = identifier[subtype]
identifier[attribute] . identifier[local_type] = literal[string] + identifier[attr_subtype] + literal[string]
keyword[elif] identifier[attribute] . identifier[subtype] == literal[string] :
identifier[attribute] . identifier[local_type] = literal[string]
keyword[else] :
identifier[attribute] . identifier[local_type] = literal[string] | def _set_enum_list_local_type(self, specifications):
""" This method is needed until get_type_name() is enhanced to include specification subtype and local_name
"""
for (rest_name, specification) in specifications.items():
for attribute in specification.attributes:
if attribute.type == 'enum':
enum_type = attribute.local_name[0:1].upper() + attribute.local_name[1:]
attribute.local_type = enum_type # depends on [control=['if'], data=[]]
elif attribute.type == 'object':
attr_type = 'Object'
if self.attrs_types.has_option(specification.entity_name, attribute.local_name):
type = self.attrs_types.get(specification.entity_name, attribute.local_name)
if type:
attr_type = type # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
attribute.local_type = attr_type # depends on [control=['if'], data=[]]
elif attribute.type == 'list':
if attribute.subtype == 'enum':
enum_subtype = attribute.local_name[0:1].upper() + attribute.local_name[1:]
attribute.local_type = 'System.Collections.Generic.List<E' + enum_subtype + '>' # depends on [control=['if'], data=[]]
elif attribute.subtype == 'object':
attr_subtype = 'JObject'
if self.attrs_types.has_option(specification.entity_name, attribute.local_name):
subtype = self.attrs_types.get(specification.entity_name, attribute.local_name)
if subtype:
attr_subtype = subtype # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
attribute.local_type = 'System.Collections.Generic.List<' + attr_subtype + '>' # depends on [control=['if'], data=[]]
elif attribute.subtype == 'entity':
attribute.local_type = 'System.Collections.Generic.List<JObject>' # depends on [control=['if'], data=[]]
else:
attribute.local_type = 'System.Collections.Generic.List<String>' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['attribute']] # depends on [control=['for'], data=[]] |
def group(self, indent: int = DEFAULT_INDENT, add_line: bool = True) -> _TextGroup:
"""
Returns a context manager which adds an indentation before each line.
:param indent: Number of spaces to print.
:param add_line: If True, a new line will be printed after the group.
:return: A TextGroup context manager.
"""
return _TextGroup(self, indent, add_line) | def function[group, parameter[self, indent, add_line]]:
constant[
Returns a context manager which adds an indentation before each line.
:param indent: Number of spaces to print.
:param add_line: If True, a new line will be printed after the group.
:return: A TextGroup context manager.
]
return[call[name[_TextGroup], parameter[name[self], name[indent], name[add_line]]]] | keyword[def] identifier[group] ( identifier[self] , identifier[indent] : identifier[int] = identifier[DEFAULT_INDENT] , identifier[add_line] : identifier[bool] = keyword[True] )-> identifier[_TextGroup] :
literal[string]
keyword[return] identifier[_TextGroup] ( identifier[self] , identifier[indent] , identifier[add_line] ) | def group(self, indent: int=DEFAULT_INDENT, add_line: bool=True) -> _TextGroup:
"""
Returns a context manager which adds an indentation before each line.
:param indent: Number of spaces to print.
:param add_line: If True, a new line will be printed after the group.
:return: A TextGroup context manager.
"""
return _TextGroup(self, indent, add_line) |
def _tasks_to_reinsert(tasks, transactional):
"""Return a list containing the tasks that should be reinserted based on the
was_enqueued property and whether the insert is transactional or not.
"""
if transactional:
return tasks
return [task for task in tasks if not task.was_enqueued] | def function[_tasks_to_reinsert, parameter[tasks, transactional]]:
constant[Return a list containing the tasks that should be reinserted based on the
was_enqueued property and whether the insert is transactional or not.
]
if name[transactional] begin[:]
return[name[tasks]]
return[<ast.ListComp object at 0x7da20c6a8d30>] | keyword[def] identifier[_tasks_to_reinsert] ( identifier[tasks] , identifier[transactional] ):
literal[string]
keyword[if] identifier[transactional] :
keyword[return] identifier[tasks]
keyword[return] [ identifier[task] keyword[for] identifier[task] keyword[in] identifier[tasks] keyword[if] keyword[not] identifier[task] . identifier[was_enqueued] ] | def _tasks_to_reinsert(tasks, transactional):
"""Return a list containing the tasks that should be reinserted based on the
was_enqueued property and whether the insert is transactional or not.
"""
if transactional:
return tasks # depends on [control=['if'], data=[]]
return [task for task in tasks if not task.was_enqueued] |
def items(self):
"""Get an iter of VenvDirs and VenvFiles within the directory."""
contents = self.paths
contents = (
VenvFile(path.path) if path.is_file else VenvDir(path.path)
for path in contents
)
return contents | def function[items, parameter[self]]:
constant[Get an iter of VenvDirs and VenvFiles within the directory.]
variable[contents] assign[=] name[self].paths
variable[contents] assign[=] <ast.GeneratorExp object at 0x7da2044c1870>
return[name[contents]] | keyword[def] identifier[items] ( identifier[self] ):
literal[string]
identifier[contents] = identifier[self] . identifier[paths]
identifier[contents] =(
identifier[VenvFile] ( identifier[path] . identifier[path] ) keyword[if] identifier[path] . identifier[is_file] keyword[else] identifier[VenvDir] ( identifier[path] . identifier[path] )
keyword[for] identifier[path] keyword[in] identifier[contents]
)
keyword[return] identifier[contents] | def items(self):
"""Get an iter of VenvDirs and VenvFiles within the directory."""
contents = self.paths
contents = (VenvFile(path.path) if path.is_file else VenvDir(path.path) for path in contents)
return contents |
def import_json():
'''
Import a json module, starting with the quick ones and going down the list)
'''
for fast_json in ('ujson', 'yajl', 'json'):
try:
mod = __import__(fast_json)
log.trace('loaded %s json lib', fast_json)
return mod
except ImportError:
continue | def function[import_json, parameter[]]:
constant[
Import a json module, starting with the quick ones and going down the list)
]
for taget[name[fast_json]] in starred[tuple[[<ast.Constant object at 0x7da18ede7f70>, <ast.Constant object at 0x7da18ede7bb0>, <ast.Constant object at 0x7da18ede5390>]]] begin[:]
<ast.Try object at 0x7da18ede56f0> | keyword[def] identifier[import_json] ():
literal[string]
keyword[for] identifier[fast_json] keyword[in] ( literal[string] , literal[string] , literal[string] ):
keyword[try] :
identifier[mod] = identifier[__import__] ( identifier[fast_json] )
identifier[log] . identifier[trace] ( literal[string] , identifier[fast_json] )
keyword[return] identifier[mod]
keyword[except] identifier[ImportError] :
keyword[continue] | def import_json():
"""
Import a json module, starting with the quick ones and going down the list)
"""
for fast_json in ('ujson', 'yajl', 'json'):
try:
mod = __import__(fast_json)
log.trace('loaded %s json lib', fast_json)
return mod # depends on [control=['try'], data=[]]
except ImportError:
continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['fast_json']] |
def get_voltage(self, channel, unit='V'):
'''Reading voltage
'''
adc_ch = self._ch_map[channel]['ADCV']['adc_ch']
address = self._ch_map[channel]['ADCV']['address']
raw = self._get_adc_value(address=address)[adc_ch]
dac_offset = self._ch_cal[channel]['ADCV']['offset']
dac_gain = self._ch_cal[channel]['ADCV']['gain']
voltage = ((raw - dac_offset) / dac_gain)
if unit == 'raw':
return raw
elif unit == 'V':
return voltage / 1000
elif unit == 'mV':
return voltage
else:
raise TypeError("Invalid unit type.") | def function[get_voltage, parameter[self, channel, unit]]:
constant[Reading voltage
]
variable[adc_ch] assign[=] call[call[call[name[self]._ch_map][name[channel]]][constant[ADCV]]][constant[adc_ch]]
variable[address] assign[=] call[call[call[name[self]._ch_map][name[channel]]][constant[ADCV]]][constant[address]]
variable[raw] assign[=] call[call[name[self]._get_adc_value, parameter[]]][name[adc_ch]]
variable[dac_offset] assign[=] call[call[call[name[self]._ch_cal][name[channel]]][constant[ADCV]]][constant[offset]]
variable[dac_gain] assign[=] call[call[call[name[self]._ch_cal][name[channel]]][constant[ADCV]]][constant[gain]]
variable[voltage] assign[=] binary_operation[binary_operation[name[raw] - name[dac_offset]] / name[dac_gain]]
if compare[name[unit] equal[==] constant[raw]] begin[:]
return[name[raw]] | keyword[def] identifier[get_voltage] ( identifier[self] , identifier[channel] , identifier[unit] = literal[string] ):
literal[string]
identifier[adc_ch] = identifier[self] . identifier[_ch_map] [ identifier[channel] ][ literal[string] ][ literal[string] ]
identifier[address] = identifier[self] . identifier[_ch_map] [ identifier[channel] ][ literal[string] ][ literal[string] ]
identifier[raw] = identifier[self] . identifier[_get_adc_value] ( identifier[address] = identifier[address] )[ identifier[adc_ch] ]
identifier[dac_offset] = identifier[self] . identifier[_ch_cal] [ identifier[channel] ][ literal[string] ][ literal[string] ]
identifier[dac_gain] = identifier[self] . identifier[_ch_cal] [ identifier[channel] ][ literal[string] ][ literal[string] ]
identifier[voltage] =(( identifier[raw] - identifier[dac_offset] )/ identifier[dac_gain] )
keyword[if] identifier[unit] == literal[string] :
keyword[return] identifier[raw]
keyword[elif] identifier[unit] == literal[string] :
keyword[return] identifier[voltage] / literal[int]
keyword[elif] identifier[unit] == literal[string] :
keyword[return] identifier[voltage]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] ) | def get_voltage(self, channel, unit='V'):
"""Reading voltage
"""
adc_ch = self._ch_map[channel]['ADCV']['adc_ch']
address = self._ch_map[channel]['ADCV']['address']
raw = self._get_adc_value(address=address)[adc_ch]
dac_offset = self._ch_cal[channel]['ADCV']['offset']
dac_gain = self._ch_cal[channel]['ADCV']['gain']
voltage = (raw - dac_offset) / dac_gain
if unit == 'raw':
return raw # depends on [control=['if'], data=[]]
elif unit == 'V':
return voltage / 1000 # depends on [control=['if'], data=[]]
elif unit == 'mV':
return voltage # depends on [control=['if'], data=[]]
else:
raise TypeError('Invalid unit type.') |
def set_parameters(self):
    """Setup all the parameters for the binaries to be evaluated.

    Grid values and store necessary parameters for input into the SNR
    function: builds the flattened 2D grid of x/y values (log-spaced
    unless the corresponding scale is 'lin'), aliases the flattened
    arrays under the attribute names held in ``xval_name``/``yval_name``,
    copies a single ``spin`` to ``spin_1``/``spin_2`` for circular
    binaries, records which distance-like and initial-condition
    attributes were provided, and derives the component masses
    ``m1``/``m2`` from ``total_mass`` and ``mass_ratio``.

    Raises:
        ValueError: for eccentric binaries when neither
            ``observation_time`` nor ``start_time`` is set.
    """
    # declare 1D arrays of both parameters
    if self.xscale != 'lin':
        self.xvals = np.logspace(np.log10(float(self.x_low)),
                                 np.log10(float(self.x_high)),
                                 self.num_x)
    else:
        self.xvals = np.linspace(float(self.x_low),
                                 float(self.x_high),
                                 self.num_x)
    if self.yscale != 'lin':
        self.yvals = np.logspace(np.log10(float(self.y_low)),
                                 np.log10(float(self.y_high)),
                                 self.num_y)
    else:
        self.yvals = np.linspace(float(self.y_low),
                                 float(self.y_high),
                                 self.num_y)
    # full cartesian grid, flattened to matching 1D arrays
    self.xvals, self.yvals = np.meshgrid(self.xvals, self.yvals)
    self.xvals, self.yvals = self.xvals.ravel(), self.yvals.ravel()
    # expose the grids under their user-facing attribute names
    for which in ['x', 'y']:
        setattr(self, getattr(self, which + 'val_name'), getattr(self, which + 'vals'))
    self.ecc = 'eccentricity' in self.__dict__
    if self.ecc:
        if 'observation_time' not in self.__dict__:
            if 'start_time' not in self.__dict__:
                # fixed message: original concatenation produced
                # "...time beforemerger..." and misspelled "initial"
                raise ValueError('If no observation time is provided, the time '
                                 'before merger must be the initial starting '
                                 'condition.')
            self.observation_time = self.start_time  # small number so it is not zero
    else:
        if 'spin' in self.__dict__:
            # a single 'spin' value applies to both components
            self.spin_1 = self.spin
            self.spin_2 = self.spin
    # remember which distance-like quantity the user supplied
    for key in ['redshift', 'luminosity_distance', 'comoving_distance']:
        if key in self.__dict__:
            self.dist_type = key
            self.z_or_dist = getattr(self, key)
    if self.ecc:
        # record the initial-condition flavor: 'frequency'/'time'/'separation'
        for key in ['start_frequency', 'start_time', 'start_separation']:
            if key in self.__dict__:
                self.initial_cond_type = key.split('_')[-1]
                self.initial_point = getattr(self, key)
    # add m1 and m2
    self.m1 = (self.total_mass / (1. + self.mass_ratio))
    self.m2 = (self.total_mass * self.mass_ratio / (1. + self.mass_ratio))
return | def function[set_parameters, parameter[self]]:
constant[Setup all the parameters for the binaries to be evaluated.
Grid values and store necessary parameters for input into the SNR function.
]
if compare[name[self].xscale not_equal[!=] constant[lin]] begin[:]
name[self].xvals assign[=] call[name[np].logspace, parameter[call[name[np].log10, parameter[call[name[float], parameter[name[self].x_low]]]], call[name[np].log10, parameter[call[name[float], parameter[name[self].x_high]]]], name[self].num_x]]
if compare[name[self].yscale not_equal[!=] constant[lin]] begin[:]
name[self].yvals assign[=] call[name[np].logspace, parameter[call[name[np].log10, parameter[call[name[float], parameter[name[self].y_low]]]], call[name[np].log10, parameter[call[name[float], parameter[name[self].y_high]]]], name[self].num_y]]
<ast.Tuple object at 0x7da1b0a6c2e0> assign[=] call[name[np].meshgrid, parameter[name[self].xvals, name[self].yvals]]
<ast.Tuple object at 0x7da1b0a6cee0> assign[=] tuple[[<ast.Call object at 0x7da1b0a6da50>, <ast.Call object at 0x7da1b0a6d600>]]
for taget[name[which]] in starred[list[[<ast.Constant object at 0x7da1b0cffeb0>, <ast.Constant object at 0x7da1b0cfc340>]]] begin[:]
call[name[setattr], parameter[name[self], call[name[getattr], parameter[name[self], binary_operation[name[which] + constant[val_name]]]], call[name[getattr], parameter[name[self], binary_operation[name[which] + constant[vals]]]]]]
name[self].ecc assign[=] compare[constant[eccentricity] in name[self].__dict__]
if name[self].ecc begin[:]
if compare[constant[observation_time] <ast.NotIn object at 0x7da2590d7190> name[self].__dict__] begin[:]
if compare[constant[start_time] <ast.NotIn object at 0x7da2590d7190> name[self].__dict__] begin[:]
<ast.Raise object at 0x7da1b0cfe350>
name[self].observation_time assign[=] name[self].start_time
for taget[name[key]] in starred[list[[<ast.Constant object at 0x7da1b0cff1f0>, <ast.Constant object at 0x7da18c4cdc30>, <ast.Constant object at 0x7da18c4cefe0>]]] begin[:]
if compare[name[key] in name[self].__dict__] begin[:]
name[self].dist_type assign[=] name[key]
name[self].z_or_dist assign[=] call[name[getattr], parameter[name[self], name[key]]]
if name[self].ecc begin[:]
for taget[name[key]] in starred[list[[<ast.Constant object at 0x7da18c4cdff0>, <ast.Constant object at 0x7da18c4ce890>, <ast.Constant object at 0x7da18c4cf6d0>]]] begin[:]
if compare[name[key] in name[self].__dict__] begin[:]
name[self].initial_cond_type assign[=] call[call[name[key].split, parameter[constant[_]]]][<ast.UnaryOp object at 0x7da18c4ce8f0>]
name[self].initial_point assign[=] call[name[getattr], parameter[name[self], name[key]]]
name[self].m1 assign[=] binary_operation[name[self].total_mass / binary_operation[constant[1.0] + name[self].mass_ratio]]
name[self].m2 assign[=] binary_operation[binary_operation[name[self].total_mass * name[self].mass_ratio] / binary_operation[constant[1.0] + name[self].mass_ratio]]
return[None] | keyword[def] identifier[set_parameters] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[xscale] != literal[string] :
identifier[self] . identifier[xvals] = identifier[np] . identifier[logspace] ( identifier[np] . identifier[log10] ( identifier[float] ( identifier[self] . identifier[x_low] )),
identifier[np] . identifier[log10] ( identifier[float] ( identifier[self] . identifier[x_high] )),
identifier[self] . identifier[num_x] )
keyword[else] :
identifier[self] . identifier[xvals] = identifier[np] . identifier[linspace] ( identifier[float] ( identifier[self] . identifier[x_low] ),
identifier[float] ( identifier[self] . identifier[x_high] ),
identifier[self] . identifier[num_x] )
keyword[if] identifier[self] . identifier[yscale] != literal[string] :
identifier[self] . identifier[yvals] = identifier[np] . identifier[logspace] ( identifier[np] . identifier[log10] ( identifier[float] ( identifier[self] . identifier[y_low] )),
identifier[np] . identifier[log10] ( identifier[float] ( identifier[self] . identifier[y_high] )),
identifier[self] . identifier[num_y] )
keyword[else] :
identifier[self] . identifier[yvals] = identifier[np] . identifier[linspace] ( identifier[float] ( identifier[self] . identifier[y_low] ),
identifier[float] ( identifier[self] . identifier[y_high] ),
identifier[self] . identifier[num_y] )
identifier[self] . identifier[xvals] , identifier[self] . identifier[yvals] = identifier[np] . identifier[meshgrid] ( identifier[self] . identifier[xvals] , identifier[self] . identifier[yvals] )
identifier[self] . identifier[xvals] , identifier[self] . identifier[yvals] = identifier[self] . identifier[xvals] . identifier[ravel] (), identifier[self] . identifier[yvals] . identifier[ravel] ()
keyword[for] identifier[which] keyword[in] [ literal[string] , literal[string] ]:
identifier[setattr] ( identifier[self] , identifier[getattr] ( identifier[self] , identifier[which] + literal[string] ), identifier[getattr] ( identifier[self] , identifier[which] + literal[string] ))
identifier[self] . identifier[ecc] = literal[string] keyword[in] identifier[self] . identifier[__dict__]
keyword[if] identifier[self] . identifier[ecc] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[__dict__] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[__dict__] :
keyword[raise] identifier[ValueError] ( literal[string]
+ literal[string] )
identifier[self] . identifier[observation_time] = identifier[self] . identifier[start_time]
keyword[else] :
keyword[if] literal[string] keyword[in] identifier[self] . identifier[__dict__] :
identifier[self] . identifier[spin_1] = identifier[self] . identifier[spin]
identifier[self] . identifier[spin_2] = identifier[self] . identifier[spin]
keyword[for] identifier[key] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
keyword[if] identifier[key] keyword[in] identifier[self] . identifier[__dict__] :
identifier[self] . identifier[dist_type] = identifier[key]
identifier[self] . identifier[z_or_dist] = identifier[getattr] ( identifier[self] , identifier[key] )
keyword[if] identifier[self] . identifier[ecc] :
keyword[for] identifier[key] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
keyword[if] identifier[key] keyword[in] identifier[self] . identifier[__dict__] :
identifier[self] . identifier[initial_cond_type] = identifier[key] . identifier[split] ( literal[string] )[- literal[int] ]
identifier[self] . identifier[initial_point] = identifier[getattr] ( identifier[self] , identifier[key] )
identifier[self] . identifier[m1] =( identifier[self] . identifier[total_mass] /( literal[int] + identifier[self] . identifier[mass_ratio] ))
identifier[self] . identifier[m2] =( identifier[self] . identifier[total_mass] * identifier[self] . identifier[mass_ratio] /( literal[int] + identifier[self] . identifier[mass_ratio] ))
keyword[return] | def set_parameters(self):
"""Setup all the parameters for the binaries to be evaluated.
Grid values and store necessary parameters for input into the SNR function.
"""
# declare 1D arrays of both paramters
if self.xscale != 'lin':
self.xvals = np.logspace(np.log10(float(self.x_low)), np.log10(float(self.x_high)), self.num_x) # depends on [control=['if'], data=[]]
else:
self.xvals = np.linspace(float(self.x_low), float(self.x_high), self.num_x)
if self.yscale != 'lin':
self.yvals = np.logspace(np.log10(float(self.y_low)), np.log10(float(self.y_high)), self.num_y) # depends on [control=['if'], data=[]]
else:
self.yvals = np.linspace(float(self.y_low), float(self.y_high), self.num_y)
(self.xvals, self.yvals) = np.meshgrid(self.xvals, self.yvals)
(self.xvals, self.yvals) = (self.xvals.ravel(), self.yvals.ravel())
for which in ['x', 'y']:
setattr(self, getattr(self, which + 'val_name'), getattr(self, which + 'vals')) # depends on [control=['for'], data=['which']]
self.ecc = 'eccentricity' in self.__dict__
if self.ecc:
if 'observation_time' not in self.__dict__:
if 'start_time' not in self.__dict__:
raise ValueError('If no observation time is provided, the time before' + 'merger must be the inital starting condition.') # depends on [control=['if'], data=[]]
self.observation_time = self.start_time # small number so it is not zero # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif 'spin' in self.__dict__:
self.spin_1 = self.spin
self.spin_2 = self.spin # depends on [control=['if'], data=[]]
for key in ['redshift', 'luminosity_distance', 'comoving_distance']:
if key in self.__dict__:
self.dist_type = key
self.z_or_dist = getattr(self, key) # depends on [control=['if'], data=['key']]
if self.ecc:
for key in ['start_frequency', 'start_time', 'start_separation']:
if key in self.__dict__:
self.initial_cond_type = key.split('_')[-1]
self.initial_point = getattr(self, key) # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
# add m1 and m2
self.m1 = self.total_mass / (1.0 + self.mass_ratio)
self.m2 = self.total_mass * self.mass_ratio / (1.0 + self.mass_ratio)
return |
def popitem(self):
    """Remove and return an arbitrary ``(key, value)`` pair from
    the dictionary.

    Useful to destructively iterate over the dictionary, as often
    done in set algorithms. Raises :exc:`KeyError` when the
    dictionary is empty.
    """
    def _take_one(pipe):
        # pick any stored (pickled) field; an empty hash means an empty dict
        hashed_keys = pipe.hkeys(self.key)
        if not hashed_keys:
            raise KeyError
        pickled_key = hashed_keys[0]
        # atomically read and delete that field
        pipe.multi()
        pipe.hget(self.key, pickled_key)
        pipe.hdel(self.key, pickled_key)
        pickled_value, _deleted = pipe.execute()
        return (
            self._unpickle_key(pickled_key), self._unpickle(pickled_value)
        )
    key, value = self._transaction(_take_one)
return key, self.cache.pop(key, value) | def function[popitem, parameter[self]]:
constant[Remove and return an arbitrary ``(key, value)`` pair from
the dictionary.
:func:`popitem` is useful to destructively iterate over
a dictionary, as often used in set algorithms. If
the dictionary is empty, calling :func:`popitem` raises
a :exc:`KeyError`.
]
def function[popitem_trans, parameter[pipe]]:
<ast.Try object at 0x7da207f98370>
call[name[pipe].multi, parameter[]]
call[name[pipe].hget, parameter[name[self].key, name[pickled_key]]]
call[name[pipe].hdel, parameter[name[self].key, name[pickled_key]]]
<ast.Tuple object at 0x7da18eb57910> assign[=] call[name[pipe].execute, parameter[]]
return[tuple[[<ast.Call object at 0x7da18eb561a0>, <ast.Call object at 0x7da18eb57700>]]]
<ast.Tuple object at 0x7da18eb554e0> assign[=] call[name[self]._transaction, parameter[name[popitem_trans]]]
return[tuple[[<ast.Name object at 0x7da18eb568c0>, <ast.Call object at 0x7da18eb57a60>]]] | keyword[def] identifier[popitem] ( identifier[self] ):
literal[string]
keyword[def] identifier[popitem_trans] ( identifier[pipe] ):
keyword[try] :
identifier[pickled_key] = identifier[pipe] . identifier[hkeys] ( identifier[self] . identifier[key] )[ literal[int] ]
keyword[except] identifier[IndexError] :
keyword[raise] identifier[KeyError]
identifier[pipe] . identifier[multi] ()
identifier[pipe] . identifier[hget] ( identifier[self] . identifier[key] , identifier[pickled_key] )
identifier[pipe] . identifier[hdel] ( identifier[self] . identifier[key] , identifier[pickled_key] )
identifier[pickled_value] , identifier[__] = identifier[pipe] . identifier[execute] ()
keyword[return] (
identifier[self] . identifier[_unpickle_key] ( identifier[pickled_key] ), identifier[self] . identifier[_unpickle] ( identifier[pickled_value] )
)
identifier[key] , identifier[value] = identifier[self] . identifier[_transaction] ( identifier[popitem_trans] )
keyword[return] identifier[key] , identifier[self] . identifier[cache] . identifier[pop] ( identifier[key] , identifier[value] ) | def popitem(self):
"""Remove and return an arbitrary ``(key, value)`` pair from
the dictionary.
:func:`popitem` is useful to destructively iterate over
a dictionary, as often used in set algorithms. If
the dictionary is empty, calling :func:`popitem` raises
a :exc:`KeyError`.
"""
def popitem_trans(pipe):
try:
pickled_key = pipe.hkeys(self.key)[0] # depends on [control=['try'], data=[]]
except IndexError:
raise KeyError # depends on [control=['except'], data=[]]
# pop its value
pipe.multi()
pipe.hget(self.key, pickled_key)
pipe.hdel(self.key, pickled_key)
(pickled_value, __) = pipe.execute()
return (self._unpickle_key(pickled_key), self._unpickle(pickled_value))
(key, value) = self._transaction(popitem_trans)
return (key, self.cache.pop(key, value)) |
def setup_lookup(apps, lookup_class=TemplateLookup):
    """Register each app's template directory with a template lookup.

    ``_lookups`` is rebuilt as a dict keyed by app module name; each
    value is a ``lookup_class`` instance rooted at the app's
    ``templates`` directory (a directory named 'templates' directly
    under the app's package directory).
    """
    global _lookups
    _lookups = {}
    for app in apps:
        templates_dir = os.path.join(
            os.path.dirname(app.__file__), 'templates')
        app_lookup = lookup_class(directories=[templates_dir],
                                  output_encoding='utf-8',
                                  encoding_errors='replace')
_lookups[app.__name__] = app_lookup | def function[setup_lookup, parameter[apps, lookup_class]]:
constant[ Registering template directories of apps to Lookup.
Lookups will be set up as dictionary, app name
as key and lookup for this app will be it's value.
Each lookups is correspond to each template directories of apps._lookups.
The directory should be named 'templates', and put under app directory.
]
<ast.Global object at 0x7da1b143e470>
variable[_lookups] assign[=] dictionary[[], []]
for taget[name[app]] in starred[name[apps]] begin[:]
variable[app_template_dir] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[app].__file__]], constant[templates]]]
variable[app_lookup] assign[=] call[name[lookup_class], parameter[]]
call[name[_lookups]][name[app].__name__] assign[=] name[app_lookup] | keyword[def] identifier[setup_lookup] ( identifier[apps] , identifier[lookup_class] = identifier[TemplateLookup] ):
literal[string]
keyword[global] identifier[_lookups]
identifier[_lookups] ={}
keyword[for] identifier[app] keyword[in] identifier[apps] :
identifier[app_template_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[app] . identifier[__file__] ),
literal[string] )
identifier[app_lookup] = identifier[lookup_class] ( identifier[directories] =[ identifier[app_template_dir] ],
identifier[output_encoding] = literal[string] ,
identifier[encoding_errors] = literal[string] )
identifier[_lookups] [ identifier[app] . identifier[__name__] ]= identifier[app_lookup] | def setup_lookup(apps, lookup_class=TemplateLookup):
""" Registering template directories of apps to Lookup.
Lookups will be set up as dictionary, app name
as key and lookup for this app will be it's value.
Each lookups is correspond to each template directories of apps._lookups.
The directory should be named 'templates', and put under app directory.
"""
global _lookups
_lookups = {}
for app in apps:
app_template_dir = os.path.join(os.path.dirname(app.__file__), 'templates')
app_lookup = lookup_class(directories=[app_template_dir], output_encoding='utf-8', encoding_errors='replace')
_lookups[app.__name__] = app_lookup # depends on [control=['for'], data=['app']] |
def hide_virtual_ip_holder_chassis_virtual_ip(self, **kwargs):
    """Auto Generated Code

    Builds the ``config/hide-virtual-ip-holder/chassis/virtual-ip``
    element tree (brocade-chassis namespace) from
    ``kwargs['virtual_ip']`` and hands it to ``kwargs['callback']``
    (default: ``self._callback``).
    """
    config = ET.Element("config")
    holder = ET.SubElement(config, "hide-virtual-ip-holder",
                           xmlns="urn:brocade.com:mgmt:brocade-chassis")
    chassis_node = ET.SubElement(holder, "chassis")
    vip_node = ET.SubElement(chassis_node, "virtual-ip")
    vip_node.text = kwargs.pop('virtual_ip')
    callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[hide_virtual_ip_holder_chassis_virtual_ip, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[hide_virtual_ip_holder] assign[=] call[name[ET].SubElement, parameter[name[config], constant[hide-virtual-ip-holder]]]
variable[chassis] assign[=] call[name[ET].SubElement, parameter[name[hide_virtual_ip_holder], constant[chassis]]]
variable[virtual_ip] assign[=] call[name[ET].SubElement, parameter[name[chassis], constant[virtual-ip]]]
name[virtual_ip].text assign[=] call[name[kwargs].pop, parameter[constant[virtual_ip]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[hide_virtual_ip_holder_chassis_virtual_ip] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[hide_virtual_ip_holder] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[chassis] = identifier[ET] . identifier[SubElement] ( identifier[hide_virtual_ip_holder] , literal[string] )
identifier[virtual_ip] = identifier[ET] . identifier[SubElement] ( identifier[chassis] , literal[string] )
identifier[virtual_ip] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def hide_virtual_ip_holder_chassis_virtual_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
hide_virtual_ip_holder = ET.SubElement(config, 'hide-virtual-ip-holder', xmlns='urn:brocade.com:mgmt:brocade-chassis')
chassis = ET.SubElement(hide_virtual_ip_holder, 'chassis')
virtual_ip = ET.SubElement(chassis, 'virtual-ip')
virtual_ip.text = kwargs.pop('virtual_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def add_connection(self, addr, **kwargs):
    """Register the agent at *addr* in :attr:`connections`.

    Does nothing when *addr* is already present in
    :attr:`connections`. The given ``**kwargs`` are stored as
    key-value pairs under ``connections[addr]``.

    :param str addr:
        Address of the agent to be added
    :returns:
        ``True`` if the agent was successfully added, ``False`` otherwise.
    """
    if addr not in self._connections:
        self.connections[addr] = {}
        entry = self.connections[addr]
        for field, value in kwargs.items():
            entry[field] = value
        return True
return False | def function[add_connection, parameter[self, addr]]:
constant[Add an agent with given address to current :attr:`connections` with
given information.
Does nothing if address is already in :attr:`connections`. Given
``**kwargs`` are stored as key-value pairs to ``connections[addr]``
dictionary.
:param str addr:
Address of the agent to be added
:returns:
``True`` if the agent was successfully added, ``False`` otherwise.
]
if compare[name[addr] <ast.NotIn object at 0x7da2590d7190> name[self]._connections] begin[:]
call[name[self].connections][name[addr]] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b24d74c0>, <ast.Name object at 0x7da1b24d76a0>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:]
call[call[name[self].connections][name[addr]]][name[k]] assign[=] name[v]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[add_connection] ( identifier[self] , identifier[addr] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[addr] keyword[not] keyword[in] identifier[self] . identifier[_connections] :
identifier[self] . identifier[connections] [ identifier[addr] ]={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kwargs] . identifier[items] ():
identifier[self] . identifier[connections] [ identifier[addr] ][ identifier[k] ]= identifier[v]
keyword[return] keyword[True]
keyword[return] keyword[False] | def add_connection(self, addr, **kwargs):
"""Add an agent with given address to current :attr:`connections` with
given information.
Does nothing if address is already in :attr:`connections`. Given
``**kwargs`` are stored as key-value pairs to ``connections[addr]``
dictionary.
:param str addr:
Address of the agent to be added
:returns:
``True`` if the agent was successfully added, ``False`` otherwise.
"""
if addr not in self._connections:
self.connections[addr] = {}
for (k, v) in kwargs.items():
self.connections[addr][k] = v # depends on [control=['for'], data=[]]
return True # depends on [control=['if'], data=['addr']]
return False |
def move_file(source_file_pathname, destination_file_pathname):
    """
    Move the specified file to another location. If the destination
    already exists, it is replaced silently.

    This function is an alternative to ``shutil.move(src, dst)``, which
    might raise ``OSError`` if the destination already exists.

    @param source_file_pathname: the complete path and name of the file to
        move.

    @param destination_file_pathname: the complete path and name of the
        file once moved.
    """
    # Remove the destination unconditionally and suppress the "not there"
    # error: the previous ``os.path.exists`` check missed broken symlinks
    # (exists() follows links) and was racy between check and remove.
    try:
        os.remove(destination_file_pathname)
    except FileNotFoundError:
        pass
shutil.move(source_file_pathname, destination_file_pathname) | def function[move_file, parameter[source_file_pathname, destination_file_pathname]]:
constant[
Move the the specified file to another location. If the destination
already exists, it is replaced silently.
This function is an alternative to ``shutil.move(src, dst)``, which
might raise ``OSError`` if the destination already exists.
@param source_file_pathname: the complete path and name of the file to
move.
@param destination_file_pathname: the complete path and name of the
file once moved.
]
if call[name[os].path.exists, parameter[name[destination_file_pathname]]] begin[:]
call[name[os].remove, parameter[name[destination_file_pathname]]]
call[name[shutil].move, parameter[name[source_file_pathname], name[destination_file_pathname]]] | keyword[def] identifier[move_file] ( identifier[source_file_pathname] , identifier[destination_file_pathname] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[destination_file_pathname] ):
identifier[os] . identifier[remove] ( identifier[destination_file_pathname] )
identifier[shutil] . identifier[move] ( identifier[source_file_pathname] , identifier[destination_file_pathname] ) | def move_file(source_file_pathname, destination_file_pathname):
"""
Move the the specified file to another location. If the destination
already exists, it is replaced silently.
This function is an alternative to ``shutil.move(src, dst)``, which
might raise ``OSError`` if the destination already exists.
@param source_file_pathname: the complete path and name of the file to
move.
@param destination_file_pathname: the complete path and name of the
file once moved.
"""
if os.path.exists(destination_file_pathname):
os.remove(destination_file_pathname) # depends on [control=['if'], data=[]]
shutil.move(source_file_pathname, destination_file_pathname) |
def match_score(self, supported: 'Language') -> int:
    """Score how well *supported* serves a user who desires *self*.

    Returns an integer from 0 to 100; higher means a closer match, and
    an exact match scores 100. The relation is not symmetric.

    The algorithm is described (badly) in a Unicode technical report at
    http://unicode.org/reports/tr35/#LanguageMatching, with our own
    tweaks such as macrolanguage matching. See :func:`tag_match_score`
    for a string-based version with further documentation and examples.
    """
    if supported == self:
        return 100
    # expand both sides to their most likely full (language, script,
    # region) form before measuring distance
    want = self.prefer_macrolanguage().maximize()
    have = supported.prefer_macrolanguage().maximize()
    desired_triple = (want.language, want.script, want.region)
    supported_triple = (have.language, have.script, have.region)
return 100 - raw_distance(desired_triple, supported_triple) | def function[match_score, parameter[self, supported]]:
constant[
Suppose that `self` is the language that the user desires, and
`supported` is a language that is actually supported. This method
returns a number from 0 to 100 indicating how similar the supported
language is (higher numbers are better). This is not a symmetric
relation.
The algorithm here is described (badly) in a Unicode technical report
at http://unicode.org/reports/tr35/#LanguageMatching. If you find these
results bothersome, take it up with Unicode, unless it's particular
tweaks we implemented such as macrolanguage matching.
See :func:`tag_match_score` for a function that works on strings,
instead of requiring you to instantiate Language objects first.
Further documentation and examples appear with that function.
]
if compare[name[supported] equal[==] name[self]] begin[:]
return[constant[100]]
variable[desired_complete] assign[=] call[call[name[self].prefer_macrolanguage, parameter[]].maximize, parameter[]]
variable[supported_complete] assign[=] call[call[name[supported].prefer_macrolanguage, parameter[]].maximize, parameter[]]
variable[desired_triple] assign[=] tuple[[<ast.Attribute object at 0x7da204620f70>, <ast.Attribute object at 0x7da204620430>, <ast.Attribute object at 0x7da204622620>]]
variable[supported_triple] assign[=] tuple[[<ast.Attribute object at 0x7da204622860>, <ast.Attribute object at 0x7da2046231f0>, <ast.Attribute object at 0x7da2046226e0>]]
return[binary_operation[constant[100] - call[name[raw_distance], parameter[name[desired_triple], name[supported_triple]]]]] | keyword[def] identifier[match_score] ( identifier[self] , identifier[supported] : literal[string] )-> identifier[int] :
literal[string]
keyword[if] identifier[supported] == identifier[self] :
keyword[return] literal[int]
identifier[desired_complete] = identifier[self] . identifier[prefer_macrolanguage] (). identifier[maximize] ()
identifier[supported_complete] = identifier[supported] . identifier[prefer_macrolanguage] (). identifier[maximize] ()
identifier[desired_triple] =( identifier[desired_complete] . identifier[language] , identifier[desired_complete] . identifier[script] , identifier[desired_complete] . identifier[region] )
identifier[supported_triple] =( identifier[supported_complete] . identifier[language] , identifier[supported_complete] . identifier[script] , identifier[supported_complete] . identifier[region] )
keyword[return] literal[int] - identifier[raw_distance] ( identifier[desired_triple] , identifier[supported_triple] ) | def match_score(self, supported: 'Language') -> int:
"""
Suppose that `self` is the language that the user desires, and
`supported` is a language that is actually supported. This method
returns a number from 0 to 100 indicating how similar the supported
language is (higher numbers are better). This is not a symmetric
relation.
The algorithm here is described (badly) in a Unicode technical report
at http://unicode.org/reports/tr35/#LanguageMatching. If you find these
results bothersome, take it up with Unicode, unless it's particular
tweaks we implemented such as macrolanguage matching.
See :func:`tag_match_score` for a function that works on strings,
instead of requiring you to instantiate Language objects first.
Further documentation and examples appear with that function.
"""
if supported == self:
return 100 # depends on [control=['if'], data=[]]
desired_complete = self.prefer_macrolanguage().maximize()
supported_complete = supported.prefer_macrolanguage().maximize()
desired_triple = (desired_complete.language, desired_complete.script, desired_complete.region)
supported_triple = (supported_complete.language, supported_complete.script, supported_complete.region)
return 100 - raw_distance(desired_triple, supported_triple) |
def delete(self, name):
'''
Delete all the data in a named timeseries.
'''
keys = self._client.keys('%s%s:*'%(self._prefix,name))
pipe = self._client.pipeline(transaction=False)
for key in keys:
pipe.delete( key )
pipe.execute()
# Could be not technically the exact number of keys deleted, but is a close
# enough approximation
return len(keys) | def function[delete, parameter[self, name]]:
constant[
Delete all the data in a named timeseries.
]
variable[keys] assign[=] call[name[self]._client.keys, parameter[binary_operation[constant[%s%s:*] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b02e4520>, <ast.Name object at 0x7da1b02e6860>]]]]]
variable[pipe] assign[=] call[name[self]._client.pipeline, parameter[]]
for taget[name[key]] in starred[name[keys]] begin[:]
call[name[pipe].delete, parameter[name[key]]]
call[name[pipe].execute, parameter[]]
return[call[name[len], parameter[name[keys]]]] | keyword[def] identifier[delete] ( identifier[self] , identifier[name] ):
literal[string]
identifier[keys] = identifier[self] . identifier[_client] . identifier[keys] ( literal[string] %( identifier[self] . identifier[_prefix] , identifier[name] ))
identifier[pipe] = identifier[self] . identifier[_client] . identifier[pipeline] ( identifier[transaction] = keyword[False] )
keyword[for] identifier[key] keyword[in] identifier[keys] :
identifier[pipe] . identifier[delete] ( identifier[key] )
identifier[pipe] . identifier[execute] ()
keyword[return] identifier[len] ( identifier[keys] ) | def delete(self, name):
"""
Delete all the data in a named timeseries.
"""
keys = self._client.keys('%s%s:*' % (self._prefix, name))
pipe = self._client.pipeline(transaction=False)
for key in keys:
pipe.delete(key) # depends on [control=['for'], data=['key']]
pipe.execute()
# Could be not technically the exact number of keys deleted, but is a close
# enough approximation
return len(keys) |
def extract_and_process(path_in, path_out):
"""Run Eidos on a set of text files and process output with INDRA.
The output is produced in the specified output folder but
the output files aren't processed by this function.
Parameters
----------
path_in : str
Path to an input folder with some text files
path_out : str
Path to an output folder in which Eidos places the output
JSON-LD files
Returns
-------
stmts : list[indra.statements.Statements]
A list of INDRA Statements
"""
path_in = os.path.realpath(os.path.expanduser(path_in))
path_out = os.path.realpath(os.path.expanduser(path_out))
extract_from_directory(path_in, path_out)
jsons = glob.glob(os.path.join(path_out, '*.jsonld'))
logger.info('Found %d JSON-LD files to process in %s' %
(len(jsons), path_out))
stmts = []
for json in jsons:
ep = process_json_file(json)
if ep:
stmts += ep.statements
return stmts | def function[extract_and_process, parameter[path_in, path_out]]:
constant[Run Eidos on a set of text files and process output with INDRA.
The output is produced in the specified output folder but
the output files aren't processed by this function.
Parameters
----------
path_in : str
Path to an input folder with some text files
path_out : str
Path to an output folder in which Eidos places the output
JSON-LD files
Returns
-------
stmts : list[indra.statements.Statements]
A list of INDRA Statements
]
variable[path_in] assign[=] call[name[os].path.realpath, parameter[call[name[os].path.expanduser, parameter[name[path_in]]]]]
variable[path_out] assign[=] call[name[os].path.realpath, parameter[call[name[os].path.expanduser, parameter[name[path_out]]]]]
call[name[extract_from_directory], parameter[name[path_in], name[path_out]]]
variable[jsons] assign[=] call[name[glob].glob, parameter[call[name[os].path.join, parameter[name[path_out], constant[*.jsonld]]]]]
call[name[logger].info, parameter[binary_operation[constant[Found %d JSON-LD files to process in %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18f00cb50>, <ast.Name object at 0x7da18f00c370>]]]]]
variable[stmts] assign[=] list[[]]
for taget[name[json]] in starred[name[jsons]] begin[:]
variable[ep] assign[=] call[name[process_json_file], parameter[name[json]]]
if name[ep] begin[:]
<ast.AugAssign object at 0x7da18f00d450>
return[name[stmts]] | keyword[def] identifier[extract_and_process] ( identifier[path_in] , identifier[path_out] ):
literal[string]
identifier[path_in] = identifier[os] . identifier[path] . identifier[realpath] ( identifier[os] . identifier[path] . identifier[expanduser] ( identifier[path_in] ))
identifier[path_out] = identifier[os] . identifier[path] . identifier[realpath] ( identifier[os] . identifier[path] . identifier[expanduser] ( identifier[path_out] ))
identifier[extract_from_directory] ( identifier[path_in] , identifier[path_out] )
identifier[jsons] = identifier[glob] . identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path_out] , literal[string] ))
identifier[logger] . identifier[info] ( literal[string] %
( identifier[len] ( identifier[jsons] ), identifier[path_out] ))
identifier[stmts] =[]
keyword[for] identifier[json] keyword[in] identifier[jsons] :
identifier[ep] = identifier[process_json_file] ( identifier[json] )
keyword[if] identifier[ep] :
identifier[stmts] += identifier[ep] . identifier[statements]
keyword[return] identifier[stmts] | def extract_and_process(path_in, path_out):
"""Run Eidos on a set of text files and process output with INDRA.
The output is produced in the specified output folder but
the output files aren't processed by this function.
Parameters
----------
path_in : str
Path to an input folder with some text files
path_out : str
Path to an output folder in which Eidos places the output
JSON-LD files
Returns
-------
stmts : list[indra.statements.Statements]
A list of INDRA Statements
"""
path_in = os.path.realpath(os.path.expanduser(path_in))
path_out = os.path.realpath(os.path.expanduser(path_out))
extract_from_directory(path_in, path_out)
jsons = glob.glob(os.path.join(path_out, '*.jsonld'))
logger.info('Found %d JSON-LD files to process in %s' % (len(jsons), path_out))
stmts = []
for json in jsons:
ep = process_json_file(json)
if ep:
stmts += ep.statements # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['json']]
return stmts |
def set_config_file(self, path):
"""
Set the config file. The contents must be valid YAML and there
must be a top-level element 'tasks'. The listed tasks will be
started according to their configuration, and the file will
be watched for future changes. The changes will be activated
by appropriate changes to the running tasks.
"""
log = self._params.get('log', self._discard)
if path != self._config_file:
if self._config_file:
log.info("Config file changed from '%s' to '%s'", self._config_file, path)
self.file_del(self, paths=[self._config_file])
else:
log.info("Config file set to '%s'", path)
self._config_file = path
self.file_add(event_target(self, 'legion_config', log=log), path)
return self._load_config() | def function[set_config_file, parameter[self, path]]:
constant[
Set the config file. The contents must be valid YAML and there
must be a top-level element 'tasks'. The listed tasks will be
started according to their configuration, and the file will
be watched for future changes. The changes will be activated
by appropriate changes to the running tasks.
]
variable[log] assign[=] call[name[self]._params.get, parameter[constant[log], name[self]._discard]]
if compare[name[path] not_equal[!=] name[self]._config_file] begin[:]
if name[self]._config_file begin[:]
call[name[log].info, parameter[constant[Config file changed from '%s' to '%s'], name[self]._config_file, name[path]]]
call[name[self].file_del, parameter[name[self]]]
name[self]._config_file assign[=] name[path]
call[name[self].file_add, parameter[call[name[event_target], parameter[name[self], constant[legion_config]]], name[path]]]
return[call[name[self]._load_config, parameter[]]] | keyword[def] identifier[set_config_file] ( identifier[self] , identifier[path] ):
literal[string]
identifier[log] = identifier[self] . identifier[_params] . identifier[get] ( literal[string] , identifier[self] . identifier[_discard] )
keyword[if] identifier[path] != identifier[self] . identifier[_config_file] :
keyword[if] identifier[self] . identifier[_config_file] :
identifier[log] . identifier[info] ( literal[string] , identifier[self] . identifier[_config_file] , identifier[path] )
identifier[self] . identifier[file_del] ( identifier[self] , identifier[paths] =[ identifier[self] . identifier[_config_file] ])
keyword[else] :
identifier[log] . identifier[info] ( literal[string] , identifier[path] )
identifier[self] . identifier[_config_file] = identifier[path]
identifier[self] . identifier[file_add] ( identifier[event_target] ( identifier[self] , literal[string] , identifier[log] = identifier[log] ), identifier[path] )
keyword[return] identifier[self] . identifier[_load_config] () | def set_config_file(self, path):
"""
Set the config file. The contents must be valid YAML and there
must be a top-level element 'tasks'. The listed tasks will be
started according to their configuration, and the file will
be watched for future changes. The changes will be activated
by appropriate changes to the running tasks.
"""
log = self._params.get('log', self._discard)
if path != self._config_file:
if self._config_file:
log.info("Config file changed from '%s' to '%s'", self._config_file, path)
self.file_del(self, paths=[self._config_file]) # depends on [control=['if'], data=[]]
else:
log.info("Config file set to '%s'", path)
self._config_file = path
self.file_add(event_target(self, 'legion_config', log=log), path) # depends on [control=['if'], data=['path']]
return self._load_config() |
def _input_to_dict(i, records, remapped=None):
"""Convert CWL input into dictionary required for a cwl2wdl Input object.
"""
if not remapped: remapped = {}
var_type, records = _to_variable_type(i["type"], records)
if var_type.startswith("Array") and "inputBinding" in i.get("type", {}):
ib = i["type"]["inputBinding"]
elif "inputBinding" in i:
ib = i["inputBinding"]
else:
ib = {"prefix": None, "itemSeparator": ";;", "position": None}
name = _id_to_localname(i["id"]) if "id" in i else i["name"]
return {"name": remapped.get(name) or name,
"variable_type": var_type,
"prefix": ib["prefix"], "separator": ib["itemSeparator"],
"position": ib["position"], "is_required": True,
"default": i.get("default", None), "separate": ib.get("separate", True)}, records | def function[_input_to_dict, parameter[i, records, remapped]]:
constant[Convert CWL input into dictionary required for a cwl2wdl Input object.
]
if <ast.UnaryOp object at 0x7da1b19d9720> begin[:]
variable[remapped] assign[=] dictionary[[], []]
<ast.Tuple object at 0x7da1b19d8460> assign[=] call[name[_to_variable_type], parameter[call[name[i]][constant[type]], name[records]]]
if <ast.BoolOp object at 0x7da1b19d9360> begin[:]
variable[ib] assign[=] call[call[name[i]][constant[type]]][constant[inputBinding]]
variable[name] assign[=] <ast.IfExp object at 0x7da1b19da0e0>
return[tuple[[<ast.Dict object at 0x7da1b19daf80>, <ast.Name object at 0x7da1b19da320>]]] | keyword[def] identifier[_input_to_dict] ( identifier[i] , identifier[records] , identifier[remapped] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[remapped] : identifier[remapped] ={}
identifier[var_type] , identifier[records] = identifier[_to_variable_type] ( identifier[i] [ literal[string] ], identifier[records] )
keyword[if] identifier[var_type] . identifier[startswith] ( literal[string] ) keyword[and] literal[string] keyword[in] identifier[i] . identifier[get] ( literal[string] ,{}):
identifier[ib] = identifier[i] [ literal[string] ][ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[i] :
identifier[ib] = identifier[i] [ literal[string] ]
keyword[else] :
identifier[ib] ={ literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : keyword[None] }
identifier[name] = identifier[_id_to_localname] ( identifier[i] [ literal[string] ]) keyword[if] literal[string] keyword[in] identifier[i] keyword[else] identifier[i] [ literal[string] ]
keyword[return] { literal[string] : identifier[remapped] . identifier[get] ( identifier[name] ) keyword[or] identifier[name] ,
literal[string] : identifier[var_type] ,
literal[string] : identifier[ib] [ literal[string] ], literal[string] : identifier[ib] [ literal[string] ],
literal[string] : identifier[ib] [ literal[string] ], literal[string] : keyword[True] ,
literal[string] : identifier[i] . identifier[get] ( literal[string] , keyword[None] ), literal[string] : identifier[ib] . identifier[get] ( literal[string] , keyword[True] )}, identifier[records] | def _input_to_dict(i, records, remapped=None):
"""Convert CWL input into dictionary required for a cwl2wdl Input object.
"""
if not remapped:
remapped = {} # depends on [control=['if'], data=[]]
(var_type, records) = _to_variable_type(i['type'], records)
if var_type.startswith('Array') and 'inputBinding' in i.get('type', {}):
ib = i['type']['inputBinding'] # depends on [control=['if'], data=[]]
elif 'inputBinding' in i:
ib = i['inputBinding'] # depends on [control=['if'], data=['i']]
else:
ib = {'prefix': None, 'itemSeparator': ';;', 'position': None}
name = _id_to_localname(i['id']) if 'id' in i else i['name']
return ({'name': remapped.get(name) or name, 'variable_type': var_type, 'prefix': ib['prefix'], 'separator': ib['itemSeparator'], 'position': ib['position'], 'is_required': True, 'default': i.get('default', None), 'separate': ib.get('separate', True)}, records) |
def r_date_num(obj, multiple=False):
"""Read date-value table."""
if isinstance(obj, (list, tuple)):
it = iter
else:
it = LinesIterator
if multiple:
datasets = {}
for line in it(obj):
label = line[2]
if label not in datasets:
datasets[label] = Dataset([Dataset.DATE, Dataset.FLOAT])
datasets[label].name = label
datasets[label].parse_elements(line[0:2])
return datasets.values()
dataset = Dataset([Dataset.DATE, Dataset.FLOAT])
return dataset.load(it(obj)) | def function[r_date_num, parameter[obj, multiple]]:
constant[Read date-value table.]
if call[name[isinstance], parameter[name[obj], tuple[[<ast.Name object at 0x7da207f00c70>, <ast.Name object at 0x7da207f03610>]]]] begin[:]
variable[it] assign[=] name[iter]
if name[multiple] begin[:]
variable[datasets] assign[=] dictionary[[], []]
for taget[name[line]] in starred[call[name[it], parameter[name[obj]]]] begin[:]
variable[label] assign[=] call[name[line]][constant[2]]
if compare[name[label] <ast.NotIn object at 0x7da2590d7190> name[datasets]] begin[:]
call[name[datasets]][name[label]] assign[=] call[name[Dataset], parameter[list[[<ast.Attribute object at 0x7da18fe93940>, <ast.Attribute object at 0x7da18fe930d0>]]]]
call[name[datasets]][name[label]].name assign[=] name[label]
call[call[name[datasets]][name[label]].parse_elements, parameter[call[name[line]][<ast.Slice object at 0x7da1b14c7d90>]]]
return[call[name[datasets].values, parameter[]]]
variable[dataset] assign[=] call[name[Dataset], parameter[list[[<ast.Attribute object at 0x7da20e9551b0>, <ast.Attribute object at 0x7da20e954250>]]]]
return[call[name[dataset].load, parameter[call[name[it], parameter[name[obj]]]]]] | keyword[def] identifier[r_date_num] ( identifier[obj] , identifier[multiple] = keyword[False] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[obj] ,( identifier[list] , identifier[tuple] )):
identifier[it] = identifier[iter]
keyword[else] :
identifier[it] = identifier[LinesIterator]
keyword[if] identifier[multiple] :
identifier[datasets] ={}
keyword[for] identifier[line] keyword[in] identifier[it] ( identifier[obj] ):
identifier[label] = identifier[line] [ literal[int] ]
keyword[if] identifier[label] keyword[not] keyword[in] identifier[datasets] :
identifier[datasets] [ identifier[label] ]= identifier[Dataset] ([ identifier[Dataset] . identifier[DATE] , identifier[Dataset] . identifier[FLOAT] ])
identifier[datasets] [ identifier[label] ]. identifier[name] = identifier[label]
identifier[datasets] [ identifier[label] ]. identifier[parse_elements] ( identifier[line] [ literal[int] : literal[int] ])
keyword[return] identifier[datasets] . identifier[values] ()
identifier[dataset] = identifier[Dataset] ([ identifier[Dataset] . identifier[DATE] , identifier[Dataset] . identifier[FLOAT] ])
keyword[return] identifier[dataset] . identifier[load] ( identifier[it] ( identifier[obj] )) | def r_date_num(obj, multiple=False):
"""Read date-value table."""
if isinstance(obj, (list, tuple)):
it = iter # depends on [control=['if'], data=[]]
else:
it = LinesIterator
if multiple:
datasets = {}
for line in it(obj):
label = line[2]
if label not in datasets:
datasets[label] = Dataset([Dataset.DATE, Dataset.FLOAT])
datasets[label].name = label # depends on [control=['if'], data=['label', 'datasets']]
datasets[label].parse_elements(line[0:2]) # depends on [control=['for'], data=['line']]
return datasets.values() # depends on [control=['if'], data=[]]
dataset = Dataset([Dataset.DATE, Dataset.FLOAT])
return dataset.load(it(obj)) |
def memory_objects_for_hash(self, n):
"""
Returns a set of :class:`SimMemoryObjects` that contain expressions that contain a variable with the hash
`h`.
"""
return set([self[i] for i in self.addrs_for_hash(n)]) | def function[memory_objects_for_hash, parameter[self, n]]:
constant[
Returns a set of :class:`SimMemoryObjects` that contain expressions that contain a variable with the hash
`h`.
]
return[call[name[set], parameter[<ast.ListComp object at 0x7da18c4cdae0>]]] | keyword[def] identifier[memory_objects_for_hash] ( identifier[self] , identifier[n] ):
literal[string]
keyword[return] identifier[set] ([ identifier[self] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[self] . identifier[addrs_for_hash] ( identifier[n] )]) | def memory_objects_for_hash(self, n):
"""
Returns a set of :class:`SimMemoryObjects` that contain expressions that contain a variable with the hash
`h`.
"""
return set([self[i] for i in self.addrs_for_hash(n)]) |
def ip_address(address):
"""Take an IP string/int and return an object of the correct type.
Args:
address: A string or integer, the IP address. Either IPv4 or
IPv6 addresses may be supplied; integers less than 2**32 will
be considered to be IPv4 by default.
Returns:
An IPv4Address or IPv6Address object.
Raises:
ValueError: if the *address* passed isn't either a v4 or a v6
address
"""
try:
return IPv4Address(address)
except (AddressValueError, NetmaskValueError):
pass
try:
return IPv6Address(address)
except (AddressValueError, NetmaskValueError):
pass
raise ValueError('%r does not appear to be an IPv4 or IPv6 address' %
address) | def function[ip_address, parameter[address]]:
constant[Take an IP string/int and return an object of the correct type.
Args:
address: A string or integer, the IP address. Either IPv4 or
IPv6 addresses may be supplied; integers less than 2**32 will
be considered to be IPv4 by default.
Returns:
An IPv4Address or IPv6Address object.
Raises:
ValueError: if the *address* passed isn't either a v4 or a v6
address
]
<ast.Try object at 0x7da18f813c40>
<ast.Try object at 0x7da18f812920>
<ast.Raise object at 0x7da18f812410> | keyword[def] identifier[ip_address] ( identifier[address] ):
literal[string]
keyword[try] :
keyword[return] identifier[IPv4Address] ( identifier[address] )
keyword[except] ( identifier[AddressValueError] , identifier[NetmaskValueError] ):
keyword[pass]
keyword[try] :
keyword[return] identifier[IPv6Address] ( identifier[address] )
keyword[except] ( identifier[AddressValueError] , identifier[NetmaskValueError] ):
keyword[pass]
keyword[raise] identifier[ValueError] ( literal[string] %
identifier[address] ) | def ip_address(address):
"""Take an IP string/int and return an object of the correct type.
Args:
address: A string or integer, the IP address. Either IPv4 or
IPv6 addresses may be supplied; integers less than 2**32 will
be considered to be IPv4 by default.
Returns:
An IPv4Address or IPv6Address object.
Raises:
ValueError: if the *address* passed isn't either a v4 or a v6
address
"""
try:
return IPv4Address(address) # depends on [control=['try'], data=[]]
except (AddressValueError, NetmaskValueError):
pass # depends on [control=['except'], data=[]]
try:
return IPv6Address(address) # depends on [control=['try'], data=[]]
except (AddressValueError, NetmaskValueError):
pass # depends on [control=['except'], data=[]]
raise ValueError('%r does not appear to be an IPv4 or IPv6 address' % address) |
def department_delete(self, department_id, **kwargs):
"https://developer.zendesk.com/rest_api/docs/chat/departments#delete-department-by-id"
api_path = "/api/v2/departments/{department_id}"
api_path = api_path.format(department_id=department_id)
return self.call(api_path, method="DELETE", **kwargs) | def function[department_delete, parameter[self, department_id]]:
constant[https://developer.zendesk.com/rest_api/docs/chat/departments#delete-department-by-id]
variable[api_path] assign[=] constant[/api/v2/departments/{department_id}]
variable[api_path] assign[=] call[name[api_path].format, parameter[]]
return[call[name[self].call, parameter[name[api_path]]]] | keyword[def] identifier[department_delete] ( identifier[self] , identifier[department_id] ,** identifier[kwargs] ):
literal[string]
identifier[api_path] = literal[string]
identifier[api_path] = identifier[api_path] . identifier[format] ( identifier[department_id] = identifier[department_id] )
keyword[return] identifier[self] . identifier[call] ( identifier[api_path] , identifier[method] = literal[string] ,** identifier[kwargs] ) | def department_delete(self, department_id, **kwargs):
"""https://developer.zendesk.com/rest_api/docs/chat/departments#delete-department-by-id"""
api_path = '/api/v2/departments/{department_id}'
api_path = api_path.format(department_id=department_id)
return self.call(api_path, method='DELETE', **kwargs) |
def get_active_terms_ids():
"""Returns a list of the IDs of of all terms and conditions"""
active_terms_ids = cache.get('tandc.active_terms_ids')
if active_terms_ids is None:
active_terms_dict = {}
active_terms_ids = []
active_terms_set = TermsAndConditions.objects.filter(date_active__isnull=False, date_active__lte=timezone.now()).order_by('date_active')
for active_terms in active_terms_set:
active_terms_dict[active_terms.slug] = active_terms.id
active_terms_dict = OrderedDict(sorted(active_terms_dict.items(), key=lambda t: t[0]))
for terms in active_terms_dict:
active_terms_ids.append(active_terms_dict[terms])
cache.set('tandc.active_terms_ids', active_terms_ids, TERMS_CACHE_SECONDS)
return active_terms_ids | def function[get_active_terms_ids, parameter[]]:
constant[Returns a list of the IDs of of all terms and conditions]
variable[active_terms_ids] assign[=] call[name[cache].get, parameter[constant[tandc.active_terms_ids]]]
if compare[name[active_terms_ids] is constant[None]] begin[:]
variable[active_terms_dict] assign[=] dictionary[[], []]
variable[active_terms_ids] assign[=] list[[]]
variable[active_terms_set] assign[=] call[call[name[TermsAndConditions].objects.filter, parameter[]].order_by, parameter[constant[date_active]]]
for taget[name[active_terms]] in starred[name[active_terms_set]] begin[:]
call[name[active_terms_dict]][name[active_terms].slug] assign[=] name[active_terms].id
variable[active_terms_dict] assign[=] call[name[OrderedDict], parameter[call[name[sorted], parameter[call[name[active_terms_dict].items, parameter[]]]]]]
for taget[name[terms]] in starred[name[active_terms_dict]] begin[:]
call[name[active_terms_ids].append, parameter[call[name[active_terms_dict]][name[terms]]]]
call[name[cache].set, parameter[constant[tandc.active_terms_ids], name[active_terms_ids], name[TERMS_CACHE_SECONDS]]]
return[name[active_terms_ids]] | keyword[def] identifier[get_active_terms_ids] ():
literal[string]
identifier[active_terms_ids] = identifier[cache] . identifier[get] ( literal[string] )
keyword[if] identifier[active_terms_ids] keyword[is] keyword[None] :
identifier[active_terms_dict] ={}
identifier[active_terms_ids] =[]
identifier[active_terms_set] = identifier[TermsAndConditions] . identifier[objects] . identifier[filter] ( identifier[date_active__isnull] = keyword[False] , identifier[date_active__lte] = identifier[timezone] . identifier[now] ()). identifier[order_by] ( literal[string] )
keyword[for] identifier[active_terms] keyword[in] identifier[active_terms_set] :
identifier[active_terms_dict] [ identifier[active_terms] . identifier[slug] ]= identifier[active_terms] . identifier[id]
identifier[active_terms_dict] = identifier[OrderedDict] ( identifier[sorted] ( identifier[active_terms_dict] . identifier[items] (), identifier[key] = keyword[lambda] identifier[t] : identifier[t] [ literal[int] ]))
keyword[for] identifier[terms] keyword[in] identifier[active_terms_dict] :
identifier[active_terms_ids] . identifier[append] ( identifier[active_terms_dict] [ identifier[terms] ])
identifier[cache] . identifier[set] ( literal[string] , identifier[active_terms_ids] , identifier[TERMS_CACHE_SECONDS] )
keyword[return] identifier[active_terms_ids] | def get_active_terms_ids():
"""Returns a list of the IDs of of all terms and conditions"""
active_terms_ids = cache.get('tandc.active_terms_ids')
if active_terms_ids is None:
active_terms_dict = {}
active_terms_ids = []
active_terms_set = TermsAndConditions.objects.filter(date_active__isnull=False, date_active__lte=timezone.now()).order_by('date_active')
for active_terms in active_terms_set:
active_terms_dict[active_terms.slug] = active_terms.id # depends on [control=['for'], data=['active_terms']]
active_terms_dict = OrderedDict(sorted(active_terms_dict.items(), key=lambda t: t[0]))
for terms in active_terms_dict:
active_terms_ids.append(active_terms_dict[terms]) # depends on [control=['for'], data=['terms']]
cache.set('tandc.active_terms_ids', active_terms_ids, TERMS_CACHE_SECONDS) # depends on [control=['if'], data=['active_terms_ids']]
return active_terms_ids |
def getValue(words):
"""Computes the sum of the values of the words."""
value = 0
for word in words:
for letter in word:
# shared.getConst will evaluate to the dictionary broadcasted by
# the root Future
value += shared.getConst('lettersValue')[letter]
return value | def function[getValue, parameter[words]]:
constant[Computes the sum of the values of the words.]
variable[value] assign[=] constant[0]
for taget[name[word]] in starred[name[words]] begin[:]
for taget[name[letter]] in starred[name[word]] begin[:]
<ast.AugAssign object at 0x7da18c4cd930>
return[name[value]] | keyword[def] identifier[getValue] ( identifier[words] ):
literal[string]
identifier[value] = literal[int]
keyword[for] identifier[word] keyword[in] identifier[words] :
keyword[for] identifier[letter] keyword[in] identifier[word] :
identifier[value] += identifier[shared] . identifier[getConst] ( literal[string] )[ identifier[letter] ]
keyword[return] identifier[value] | def getValue(words):
"""Computes the sum of the values of the words."""
value = 0
for word in words:
for letter in word:
# shared.getConst will evaluate to the dictionary broadcasted by
# the root Future
value += shared.getConst('lettersValue')[letter] # depends on [control=['for'], data=['letter']] # depends on [control=['for'], data=['word']]
return value |
def _rnd_datetime(self, start, end):
"""Internal random datetime generator.
"""
return self.from_utctimestamp(
random.randint(
int(self.to_utctimestamp(start)),
int(self.to_utctimestamp(end)),
)
) | def function[_rnd_datetime, parameter[self, start, end]]:
constant[Internal random datetime generator.
]
return[call[name[self].from_utctimestamp, parameter[call[name[random].randint, parameter[call[name[int], parameter[call[name[self].to_utctimestamp, parameter[name[start]]]]], call[name[int], parameter[call[name[self].to_utctimestamp, parameter[name[end]]]]]]]]]] | keyword[def] identifier[_rnd_datetime] ( identifier[self] , identifier[start] , identifier[end] ):
literal[string]
keyword[return] identifier[self] . identifier[from_utctimestamp] (
identifier[random] . identifier[randint] (
identifier[int] ( identifier[self] . identifier[to_utctimestamp] ( identifier[start] )),
identifier[int] ( identifier[self] . identifier[to_utctimestamp] ( identifier[end] )),
)
) | def _rnd_datetime(self, start, end):
"""Internal random datetime generator.
"""
return self.from_utctimestamp(random.randint(int(self.to_utctimestamp(start)), int(self.to_utctimestamp(end)))) |
def median(self, **kwargs):
"""
Compute median of groups, excluding missing values.
For multiple groupings, the result index will be a MultiIndex
"""
try:
return self._cython_agg_general('median', **kwargs)
except GroupByError:
raise
except Exception: # pragma: no cover
def f(x):
if isinstance(x, np.ndarray):
x = Series(x)
return x.median(axis=self.axis, **kwargs)
with _group_selection_context(self):
return self._python_agg_general(f) | def function[median, parameter[self]]:
constant[
Compute median of groups, excluding missing values.
For multiple groupings, the result index will be a MultiIndex
]
<ast.Try object at 0x7da1b2068bb0> | keyword[def] identifier[median] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[_cython_agg_general] ( literal[string] ,** identifier[kwargs] )
keyword[except] identifier[GroupByError] :
keyword[raise]
keyword[except] identifier[Exception] :
keyword[def] identifier[f] ( identifier[x] ):
keyword[if] identifier[isinstance] ( identifier[x] , identifier[np] . identifier[ndarray] ):
identifier[x] = identifier[Series] ( identifier[x] )
keyword[return] identifier[x] . identifier[median] ( identifier[axis] = identifier[self] . identifier[axis] ,** identifier[kwargs] )
keyword[with] identifier[_group_selection_context] ( identifier[self] ):
keyword[return] identifier[self] . identifier[_python_agg_general] ( identifier[f] ) | def median(self, **kwargs):
"""
Compute median of groups, excluding missing values.
For multiple groupings, the result index will be a MultiIndex
"""
try:
return self._cython_agg_general('median', **kwargs) # depends on [control=['try'], data=[]]
except GroupByError:
raise # depends on [control=['except'], data=[]]
except Exception: # pragma: no cover
def f(x):
if isinstance(x, np.ndarray):
x = Series(x) # depends on [control=['if'], data=[]]
return x.median(axis=self.axis, **kwargs)
with _group_selection_context(self):
return self._python_agg_general(f) # depends on [control=['with'], data=[]] # depends on [control=['except'], data=[]] |
def _is_valid_dataset(config_value):
'''Datasets must be of form "project.dataset" or "dataset"
'''
return re.match(
# regex matches: project.table -- OR -- table
r'^' + RE_PROJECT + r'\.' + RE_DS_TABLE + r'$|^' + RE_DS_TABLE + r'$',
config_value,
) | def function[_is_valid_dataset, parameter[config_value]]:
constant[Datasets must be of form "project.dataset" or "dataset"
]
return[call[name[re].match, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[^] + name[RE_PROJECT]] + constant[\.]] + name[RE_DS_TABLE]] + constant[$|^]] + name[RE_DS_TABLE]] + constant[$]], name[config_value]]]] | keyword[def] identifier[_is_valid_dataset] ( identifier[config_value] ):
literal[string]
keyword[return] identifier[re] . identifier[match] (
literal[string] + identifier[RE_PROJECT] + literal[string] + identifier[RE_DS_TABLE] + literal[string] + identifier[RE_DS_TABLE] + literal[string] ,
identifier[config_value] ,
) | def _is_valid_dataset(config_value):
"""Datasets must be of form "project.dataset" or "dataset"
"""
# regex matches: project.table -- OR -- table
return re.match('^' + RE_PROJECT + '\\.' + RE_DS_TABLE + '$|^' + RE_DS_TABLE + '$', config_value) |
def load(overlay, path=""):
    """
    load - Load a DTB Overlay
    Inputs:
    overlay - Overlay Key: SPI2, PWM0, CUST
    path - Full Path to where the custom overlay is stored
    Returns:
    0 - Successful Load
    1 - Unsuccessful Load
    2 - Overlay was previously set
    """
    global DEBUG
    global _LOADED
    if DEBUG:
        print("LOAD OVERLAY: {0} @ {1}".format(overlay,path))
    # Normalize the key once instead of calling overlay.upper() at every test.
    key = overlay.upper()
    # Reject unknown overlay names up front (guard clause instead of a
    # trailing else around the whole body).
    if key not in _OVERLAYS.keys():
        raise ValueError("Invalid Overlay name specified! Choose between: SPI2, PWM0, CUST")
    cpath = OVERLAYCONFIGPATH + "/" + _FOLDERS[key]
    if DEBUG:
        print("VALID OVERLAY")
        print("CONFIG PATH: {0}".format(cpath))
    # CHECK TO SEE IF WE HAVE A PATH FOR CUSTOM OVERLAY
    if key == "CUST" and path == "":
        raise ValueError("Path must be specified for Custom Overlay Choice")
    elif key == "CUST" and _LOADED[key]:
        print("Custom Overlay already loaded")
        return 2
    elif key == "CUST" and not os.path.exists(path):
        print("Custom Overlay path does not exist")
        return 1
    # DETERMINE IF WE ARE A CHIP PRO AND WE ARE COMMANDED TO LOAD PWM0
    if is_chip_pro() and key == "PWM0":
        print("CHIP Pro supports PWM0 in base DTB, exiting")
        return 1
    # SET UP THE OVERLAY PATH FOR OUR USE
    if key != "CUST":
        opath = OVERLAYINSTALLPATH + "/" + _OVERLAYS[key]
    else:
        opath = path
    if DEBUG:
        print("OVERLAY PATH: {0}".format(opath))
    # PWM0 and SPI2 share the same "already loaded" handling; the printed
    # messages are identical to the previously duplicated branches.
    if key in ("PWM0", "SPI2") and _LOADED[key]:
        print("{0} Overlay already loaded".format(key))
        return 2
    # LOAD THE OVERLAY
    errc = _set_overlay_verify(key, opath, cpath)
    if DEBUG:
        print("_SET_OVERLAY_VERIFY ERRC: {0}".format(errc))
    if errc == 0:
        _LOADED[key] = True
    # BUG FIX: the docstring promises a 0/1/2 return code, but the original
    # fell off the end and returned None on both success and verify-failure.
    # Propagate the verify result (0 on success, non-zero on failure).
    return errc
constant[
load - Load a DTB Overlay
Inputs:
overlay - Overlay Key: SPI2, PWM0, CUST
path - Full Path to where the custom overlay is stored
Returns:
0 - Successful Load
1 - Unsuccessful Load
2 - Overlay was previously set
]
<ast.Global object at 0x7da1b0b34220>
<ast.Global object at 0x7da1b0b34280>
if name[DEBUG] begin[:]
call[name[print], parameter[call[constant[LOAD OVERLAY: {0} @ {1}].format, parameter[name[overlay], name[path]]]]]
if compare[call[name[overlay].upper, parameter[]] in call[name[_OVERLAYS].keys, parameter[]]] begin[:]
variable[cpath] assign[=] binary_operation[binary_operation[name[OVERLAYCONFIGPATH] + constant[/]] + call[name[_FOLDERS]][call[name[overlay].upper, parameter[]]]]
if name[DEBUG] begin[:]
call[name[print], parameter[constant[VALID OVERLAY]]]
call[name[print], parameter[call[constant[CONFIG PATH: {0}].format, parameter[name[cpath]]]]]
if <ast.BoolOp object at 0x7da1b0b34b80> begin[:]
<ast.Raise object at 0x7da1b0b34d30>
if <ast.BoolOp object at 0x7da1b0b594e0> begin[:]
call[name[print], parameter[constant[CHIP Pro supports PWM0 in base DTB, exiting]]]
return[constant[1]]
if compare[call[name[overlay].upper, parameter[]] not_equal[!=] constant[CUST]] begin[:]
variable[opath] assign[=] name[OVERLAYINSTALLPATH]
<ast.AugAssign object at 0x7da1b0b5a200>
if name[DEBUG] begin[:]
call[name[print], parameter[call[constant[OVERLAY PATH: {0}].format, parameter[name[opath]]]]]
if <ast.BoolOp object at 0x7da1b0b5a2f0> begin[:]
call[name[print], parameter[constant[PWM0 Overlay already loaded]]]
return[constant[2]]
if <ast.BoolOp object at 0x7da1b0b59600> begin[:]
call[name[print], parameter[constant[SPI2 Overlay already loaded]]]
return[constant[2]]
variable[errc] assign[=] call[name[_set_overlay_verify], parameter[call[name[overlay].upper, parameter[]], name[opath], name[cpath]]]
if name[DEBUG] begin[:]
call[name[print], parameter[call[constant[_SET_OVERLAY_VERIFY ERRC: {0}].format, parameter[name[errc]]]]]
if compare[name[errc] equal[==] constant[0]] begin[:]
call[name[_LOADED]][call[name[overlay].upper, parameter[]]] assign[=] constant[True] | keyword[def] identifier[load] ( identifier[overlay] , identifier[path] = literal[string] ):
literal[string]
keyword[global] identifier[DEBUG]
keyword[global] identifier[_LOADED]
keyword[if] identifier[DEBUG] :
identifier[print] ( literal[string] . identifier[format] ( identifier[overlay] , identifier[path] ))
keyword[if] identifier[overlay] . identifier[upper] () keyword[in] identifier[_OVERLAYS] . identifier[keys] ():
identifier[cpath] = identifier[OVERLAYCONFIGPATH] + literal[string] + identifier[_FOLDERS] [ identifier[overlay] . identifier[upper] ()]
keyword[if] identifier[DEBUG] :
identifier[print] ( literal[string] )
identifier[print] ( literal[string] . identifier[format] ( identifier[cpath] ))
keyword[if] identifier[overlay] . identifier[upper] ()== literal[string] keyword[and] identifier[path] == literal[string] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[elif] identifier[overlay] . identifier[upper] ()== literal[string] keyword[and] identifier[_LOADED] [ identifier[overlay] . identifier[upper] ()]:
identifier[print] ( literal[string] )
keyword[return] literal[int]
keyword[elif] identifier[overlay] . identifier[upper] ()== literal[string] keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ):
identifier[print] ( literal[string] )
keyword[return] literal[int]
keyword[if] identifier[is_chip_pro] () keyword[and] identifier[overlay] . identifier[upper] ()== literal[string] :
identifier[print] ( literal[string] )
keyword[return] literal[int]
keyword[if] identifier[overlay] . identifier[upper] ()!= literal[string] :
identifier[opath] = identifier[OVERLAYINSTALLPATH]
identifier[opath] += literal[string] + identifier[_OVERLAYS] [ identifier[overlay] . identifier[upper] ()]
keyword[else] :
identifier[opath] = identifier[path]
keyword[if] identifier[DEBUG] :
identifier[print] ( literal[string] . identifier[format] ( identifier[opath] ))
keyword[if] identifier[overlay] . identifier[upper] ()== literal[string] keyword[and] identifier[_LOADED] [ identifier[overlay] . identifier[upper] ()]:
identifier[print] ( literal[string] )
keyword[return] literal[int]
keyword[if] identifier[overlay] . identifier[upper] ()== literal[string] keyword[and] identifier[_LOADED] [ identifier[overlay] . identifier[upper] ()]:
identifier[print] ( literal[string] )
keyword[return] literal[int]
identifier[errc] = identifier[_set_overlay_verify] ( identifier[overlay] . identifier[upper] (), identifier[opath] , identifier[cpath] )
keyword[if] identifier[DEBUG] :
identifier[print] ( literal[string] . identifier[format] ( identifier[errc] ))
keyword[if] identifier[errc] == literal[int] :
identifier[_LOADED] [ identifier[overlay] . identifier[upper] ()]= keyword[True]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] ) | def load(overlay, path=''):
"""
load - Load a DTB Overlay
Inputs:
overlay - Overlay Key: SPI2, PWM0, CUST
path - Full Path to where the custom overlay is stored
Returns:
0 - Successful Load
1 - Unsuccessful Load
2 - Overlay was previously set
"""
global DEBUG
global _LOADED
if DEBUG:
print('LOAD OVERLAY: {0} @ {1}'.format(overlay, path)) # depends on [control=['if'], data=[]]
# SEE IF OUR OVERLAY NAME IS IN THE KEYS
if overlay.upper() in _OVERLAYS.keys():
cpath = OVERLAYCONFIGPATH + '/' + _FOLDERS[overlay.upper()]
if DEBUG:
print('VALID OVERLAY')
print('CONFIG PATH: {0}'.format(cpath)) # depends on [control=['if'], data=[]]
# CHECK TO SEE IF WE HAVE A PATH FOR CUSTOM OVERLAY
if overlay.upper() == 'CUST' and path == '':
raise ValueError('Path must be specified for Custom Overlay Choice') # depends on [control=['if'], data=[]]
elif overlay.upper() == 'CUST' and _LOADED[overlay.upper()]:
print('Custom Overlay already loaded')
return 2 # depends on [control=['if'], data=[]]
elif overlay.upper() == 'CUST' and (not os.path.exists(path)):
print('Custom Overlay path does not exist')
return 1 # depends on [control=['if'], data=[]]
# DETERMINE IF WE ARE A CHIP PRO AND WE ARE COMMANDED TO LOAD PWM0
if is_chip_pro() and overlay.upper() == 'PWM0':
print('CHIP Pro supports PWM0 in base DTB, exiting')
return 1 # depends on [control=['if'], data=[]]
# SET UP THE OVERLAY PATH FOR OUR USE
if overlay.upper() != 'CUST':
opath = OVERLAYINSTALLPATH
opath += '/' + _OVERLAYS[overlay.upper()] # depends on [control=['if'], data=[]]
else:
opath = path
if DEBUG:
print('OVERLAY PATH: {0}'.format(opath)) # depends on [control=['if'], data=[]]
if overlay.upper() == 'PWM0' and _LOADED[overlay.upper()]:
print('PWM0 Overlay already loaded')
return 2 # depends on [control=['if'], data=[]]
if overlay.upper() == 'SPI2' and _LOADED[overlay.upper()]:
print('SPI2 Overlay already loaded')
return 2 # depends on [control=['if'], data=[]]
# LOAD THE OVERLAY
errc = _set_overlay_verify(overlay.upper(), opath, cpath)
if DEBUG:
print('_SET_OVERLAY_VERIFY ERRC: {0}'.format(errc)) # depends on [control=['if'], data=[]]
if errc == 0:
_LOADED[overlay.upper()] = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
raise ValueError('Invalid Overlay name specified! Choose between: SPI2, PWM0, CUST') |
def sens_from_spec(spec, labels, scores, *args, **kwargs):
    r"""Find the sensitivity that corresponds to the indicated specificity (ROC function)
    sensitivity = Num_True_Positive / (Num_True_Postive + Num_False_Negative)
    specificity = Num_True_Negative / (Num_True_Negative + Num_False_Positive)
    """
    thresh = thresh_from_spec(spec, labels, scores)
    # BUG FIX: the original used `scores[scores > thresh]`, which *filters*
    # the score array -- zip() then silently truncates and misaligns it
    # against `labels`. The intent is to binarize every score at the
    # threshold so each true label is paired with its own prediction.
    predictions = (scores > thresh).astype(int)
    df = pd.DataFrame(list(zip(labels, predictions)))
    c = Confusion(df, *args, **kwargs)
    return c._binary_sensitivity
constant[Find the sensitivity that corresponds to the indicated specificity (ROC function)
sensitivity = Num_True_Positive / (Num_True_Postive + Num_False_Negative)
specificity = Num_True_Negative / (Num_True_Negative + Num_False_Positive)
]
variable[thresh] assign[=] call[name[thresh_from_spec], parameter[name[spec], name[labels], name[scores]]]
variable[df] assign[=] call[name[pd].DataFrame, parameter[call[name[list], parameter[call[name[zip], parameter[name[labels], call[call[name[scores]][compare[name[scores] greater[>] name[thresh]]].astype, parameter[name[int]]]]]]]]]
variable[c] assign[=] call[name[Confusion], parameter[name[df], <ast.Starred object at 0x7da2043473a0>]]
return[name[c]._binary_sensitivity] | keyword[def] identifier[sens_from_spec] ( identifier[spec] , identifier[labels] , identifier[scores] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[thresh] = identifier[thresh_from_spec] ( identifier[spec] , identifier[labels] , identifier[scores] )
identifier[df] = identifier[pd] . identifier[DataFrame] ( identifier[list] ( identifier[zip] ( identifier[labels] , identifier[scores] [ identifier[scores] > identifier[thresh] ]. identifier[astype] ( identifier[int] ))))
identifier[c] = identifier[Confusion] ( identifier[df] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[c] . identifier[_binary_sensitivity] | def sens_from_spec(spec, labels, scores, *args, **kwargs):
"""Find the sensitivity that corresponds to the indicated specificity (ROC function)
sensitivity = Num_True_Positive / (Num_True_Postive + Num_False_Negative)
specificity = Num_True_Negative / (Num_True_Negative + Num_False_Positive)
"""
thresh = thresh_from_spec(spec, labels, scores)
df = pd.DataFrame(list(zip(labels, scores[scores > thresh].astype(int))))
c = Confusion(df, *args, **kwargs)
return c._binary_sensitivity |
def cmd_show(docid):
    """
    Arguments: <doc_id>
    Show document information (but not its content, see 'dump').
    See 'search' for the document id.
    Possible JSON replies:
    --
    {
        "status": "error", "exception": "yyy",
        "reason": "xxxx", "args": "(xxxx, )"
    }
    --
    {
        "status": "ok",
        "type": "ImgDoc",
        "nb_pages": 3,
        "pages": [
            {"n": 1, "lines": 10, "words": 22},
            {"n": 2, "lines": 20, "words": 22},
            {"n": 3, "lines": 30, "words": 34},
        ],
        "labels": ["aaa", "bbb"],
        "first_line": "vwklsd wldkwq",
    }
    """
    dsearch = get_docsearch()
    doc = dsearch.get(docid)
    summary = {
        'type': str(type(doc)),
        'nb_pages': doc.nb_pages,
        'labels': [label.name for label in doc.labels],
        'first_line': _get_first_line(doc),
        'pages': []
    }
    for page in doc.pages:
        # One pass over the page's line boxes: collect each line's word
        # count, then derive both totals from the collected list.
        words_per_line = [len(line.word_boxes) for line in page.boxes]
        summary['pages'].append({
            "n": page.page_nb + 1,
            "lines": len(words_per_line),
            "words": sum(words_per_line),
        })
    reply(summary)
constant[
Arguments: <doc_id>
Show document information (but not its content, see 'dump').
See 'search' for the document id.
Possible JSON replies:
--
{
"status": "error", "exception": "yyy",
"reason": "xxxx", "args": "(xxxx, )"
}
--
{
"status": "ok",
"type": "ImgDoc",
"nb_pages": 3,
"pages": [
{"n": 1, "lines": 10, "words": 22},
{"n": 2, "lines": 20, "words": 22},
{"n": 3, "lines": 30, "words": 34},
],
"labels": ["aaa", "bbb"],
"first_line": "vwklsd wldkwq",
}
]
variable[dsearch] assign[=] call[name[get_docsearch], parameter[]]
variable[doc] assign[=] call[name[dsearch].get, parameter[name[docid]]]
variable[r] assign[=] dictionary[[<ast.Constant object at 0x7da18ede4dc0>, <ast.Constant object at 0x7da18ede7640>, <ast.Constant object at 0x7da18ede7e80>, <ast.Constant object at 0x7da18ede5600>, <ast.Constant object at 0x7da18ede6080>], [<ast.Call object at 0x7da18ede4e80>, <ast.Attribute object at 0x7da18ede5a20>, <ast.ListComp object at 0x7da18ede56c0>, <ast.Call object at 0x7da18ede6d10>, <ast.List object at 0x7da18ede4eb0>]]
for taget[name[page]] in starred[name[doc].pages] begin[:]
variable[nb_lines] assign[=] constant[0]
variable[nb_words] assign[=] constant[0]
for taget[name[line]] in starred[name[page].boxes] begin[:]
<ast.AugAssign object at 0x7da18ede49a0>
<ast.AugAssign object at 0x7da18ede7730>
call[call[name[r]][constant[pages]].append, parameter[dictionary[[<ast.Constant object at 0x7da18ede5750>, <ast.Constant object at 0x7da18ede7eb0>, <ast.Constant object at 0x7da18ede4160>], [<ast.BinOp object at 0x7da18ede73d0>, <ast.Name object at 0x7da18ede7e50>, <ast.Name object at 0x7da18ede6680>]]]]
call[name[reply], parameter[name[r]]] | keyword[def] identifier[cmd_show] ( identifier[docid] ):
literal[string]
identifier[dsearch] = identifier[get_docsearch] ()
identifier[doc] = identifier[dsearch] . identifier[get] ( identifier[docid] )
identifier[r] ={
literal[string] : identifier[str] ( identifier[type] ( identifier[doc] )),
literal[string] : identifier[doc] . identifier[nb_pages] ,
literal[string] :[ identifier[l] . identifier[name] keyword[for] identifier[l] keyword[in] identifier[doc] . identifier[labels] ],
literal[string] : identifier[_get_first_line] ( identifier[doc] ),
literal[string] :[]
}
keyword[for] identifier[page] keyword[in] identifier[doc] . identifier[pages] :
identifier[nb_lines] = literal[int]
identifier[nb_words] = literal[int]
keyword[for] identifier[line] keyword[in] identifier[page] . identifier[boxes] :
identifier[nb_lines] += literal[int]
identifier[nb_words] += identifier[len] ( identifier[line] . identifier[word_boxes] )
identifier[r] [ literal[string] ]. identifier[append] ({
literal[string] : identifier[page] . identifier[page_nb] + literal[int] ,
literal[string] : identifier[nb_lines] ,
literal[string] : identifier[nb_words] ,
})
identifier[reply] ( identifier[r] ) | def cmd_show(docid):
"""
Arguments: <doc_id>
Show document information (but not its content, see 'dump').
See 'search' for the document id.
Possible JSON replies:
--
{
"status": "error", "exception": "yyy",
"reason": "xxxx", "args": "(xxxx, )"
}
--
{
"status": "ok",
"type": "ImgDoc",
"nb_pages": 3,
"pages": [
{"n": 1, "lines": 10, "words": 22},
{"n": 2, "lines": 20, "words": 22},
{"n": 3, "lines": 30, "words": 34},
],
"labels": ["aaa", "bbb"],
"first_line": "vwklsd wldkwq",
}
"""
dsearch = get_docsearch()
doc = dsearch.get(docid)
r = {'type': str(type(doc)), 'nb_pages': doc.nb_pages, 'labels': [l.name for l in doc.labels], 'first_line': _get_first_line(doc), 'pages': []}
for page in doc.pages:
nb_lines = 0
nb_words = 0
for line in page.boxes:
nb_lines += 1
nb_words += len(line.word_boxes) # depends on [control=['for'], data=['line']]
r['pages'].append({'n': page.page_nb + 1, 'lines': nb_lines, 'words': nb_words}) # depends on [control=['for'], data=['page']]
reply(r) |
def sample_N(a, N):
    """ When size of N is >= size of a, random.sample() will emit an error:
    ValueError: sample larger than population
    This method handles such restrictions by repeatedly sampling when that
    happens.
    Examples:
    >>> sample_N([1, 2, 3], 2)
    >>> sample_N([1, 2, 3], 3)
    >>> sample_N([1, 2, 3], 4)
    """
    import random
    # Sampling without replacement only works while the request is strictly
    # smaller than the population.
    if len(a) > N:
        return random.sample(a, N)
    # Otherwise draw N independent picks (with replacement).
    picks = []
    for _ in range(N):
        picks.append(random.choice(a))
    return picks
constant[ When size of N is >= size of a, random.sample() will emit an error:
ValueError: sample larger than population
This method handles such restrictions by repeatedly sampling when that
happens.
Examples:
>>> sample_N([1, 2, 3], 2)
>>> sample_N([1, 2, 3], 3)
>>> sample_N([1, 2, 3], 4)
]
import module[random]
if compare[name[N] less[<] call[name[len], parameter[name[a]]]] begin[:]
return[call[name[random].sample, parameter[name[a], name[N]]]]
return[<ast.ListComp object at 0x7da207f98730>] | keyword[def] identifier[sample_N] ( identifier[a] , identifier[N] ):
literal[string]
keyword[import] identifier[random]
keyword[if] identifier[N] < identifier[len] ( identifier[a] ):
keyword[return] identifier[random] . identifier[sample] ( identifier[a] , identifier[N] )
keyword[return] [ identifier[random] . identifier[choice] ( identifier[a] ) keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[N] )] | def sample_N(a, N):
""" When size of N is >= size of a, random.sample() will emit an error:
ValueError: sample larger than population
This method handles such restrictions by repeatedly sampling when that
happens.
Examples:
>>> sample_N([1, 2, 3], 2)
>>> sample_N([1, 2, 3], 3)
>>> sample_N([1, 2, 3], 4)
"""
import random
if N < len(a):
return random.sample(a, N) # depends on [control=['if'], data=['N']]
return [random.choice(a) for x in range(N)] |
def search_packages_info(query):
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distributions '.egg-info'
    directory.
    """
    # Index every installed distribution by its canonical project name.
    installed = {}
    for p in pkg_resources.working_set:
        installed[canonicalize_name(p.project_name)] = p
    query_names = [canonicalize_name(name) for name in query]
    for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
        package = {
            'name': dist.project_name,
            'version': dist.version,
            'location': dist.location,
            'requires': [dep.project_name for dep in dist.requires()],
        }
        file_list = None
        metadata = None
        if isinstance(dist, pkg_resources.DistInfoDistribution):
            # RECORDs should be part of .dist-info metadatas
            if dist.has_metadata('RECORD'):
                lines = dist.get_metadata_lines('RECORD')
                # RECORD rows are "path,hash,size" -- only the path is needed.
                paths = [l.split(',')[0] for l in lines]
                paths = [os.path.join(dist.location, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]
            if dist.has_metadata('METADATA'):
                metadata = dist.get_metadata('METADATA')
        else:
            # Otherwise use pip's log for .egg-info's
            if dist.has_metadata('installed-files.txt'):
                paths = dist.get_metadata_lines('installed-files.txt')
                paths = [os.path.join(dist.egg_info, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]
            if dist.has_metadata('PKG-INFO'):
                metadata = dist.get_metadata('PKG-INFO')
        if dist.has_metadata('entry_points.txt'):
            entry_points = dist.get_metadata_lines('entry_points.txt')
            package['entry_points'] = entry_points
        if dist.has_metadata('INSTALLER'):
            # Record only the first non-blank INSTALLER line.
            for line in dist.get_metadata_lines('INSTALLER'):
                if line.strip():
                    package['installer'] = line.strip()
                    break
        # BUG FIX: `metadata` stays None when the distribution ships neither
        # METADATA nor PKG-INFO; FeedParser.feed(None) (and the later
        # metadata.splitlines()) would raise. Fall back to an empty string so
        # such packages still yield their partial info.
        if metadata is None:
            metadata = ''
        # @todo: Should pkg_resources.Distribution have a
        # `get_pkg_info` method?
        feed_parser = FeedParser()
        feed_parser.feed(metadata)
        pkg_info_dict = feed_parser.close()
        for key in ('metadata-version', 'summary',
                    'home-page', 'author', 'author-email', 'license'):
            package[key] = pkg_info_dict.get(key)
        # It looks like FeedParser cannot deal with repeated headers
        classifiers = []
        for line in metadata.splitlines():
            if line.startswith('Classifier: '):
                classifiers.append(line[len('Classifier: '):])
        package['classifiers'] = classifiers
        if file_list:
            package['files'] = sorted(file_list)
        yield package
constant[
Gather details from installed distributions. Print distribution name,
version, location, and installed files. Installed files requires a
pip generated 'installed-files.txt' in the distributions '.egg-info'
directory.
]
variable[installed] assign[=] dictionary[[], []]
for taget[name[p]] in starred[name[pkg_resources].working_set] begin[:]
call[name[installed]][call[name[canonicalize_name], parameter[name[p].project_name]]] assign[=] name[p]
variable[query_names] assign[=] <ast.ListComp object at 0x7da2041da530>
for taget[name[dist]] in starred[<ast.ListComp object at 0x7da2041dacb0>] begin[:]
variable[package] assign[=] dictionary[[<ast.Constant object at 0x7da2041da6b0>, <ast.Constant object at 0x7da2041db1c0>, <ast.Constant object at 0x7da2041d8f40>, <ast.Constant object at 0x7da2041db6d0>], [<ast.Attribute object at 0x7da2041db1f0>, <ast.Attribute object at 0x7da2041d8ac0>, <ast.Attribute object at 0x7da2041da140>, <ast.ListComp object at 0x7da2041d9030>]]
variable[file_list] assign[=] constant[None]
variable[metadata] assign[=] constant[None]
if call[name[isinstance], parameter[name[dist], name[pkg_resources].DistInfoDistribution]] begin[:]
if call[name[dist].has_metadata, parameter[constant[RECORD]]] begin[:]
variable[lines] assign[=] call[name[dist].get_metadata_lines, parameter[constant[RECORD]]]
variable[paths] assign[=] <ast.ListComp object at 0x7da2041d8df0>
variable[paths] assign[=] <ast.ListComp object at 0x7da2041da650>
variable[file_list] assign[=] <ast.ListComp object at 0x7da2041d9d80>
if call[name[dist].has_metadata, parameter[constant[METADATA]]] begin[:]
variable[metadata] assign[=] call[name[dist].get_metadata, parameter[constant[METADATA]]]
if call[name[dist].has_metadata, parameter[constant[entry_points.txt]]] begin[:]
variable[entry_points] assign[=] call[name[dist].get_metadata_lines, parameter[constant[entry_points.txt]]]
call[name[package]][constant[entry_points]] assign[=] name[entry_points]
if call[name[dist].has_metadata, parameter[constant[INSTALLER]]] begin[:]
for taget[name[line]] in starred[call[name[dist].get_metadata_lines, parameter[constant[INSTALLER]]]] begin[:]
if call[name[line].strip, parameter[]] begin[:]
call[name[package]][constant[installer]] assign[=] call[name[line].strip, parameter[]]
break
variable[feed_parser] assign[=] call[name[FeedParser], parameter[]]
call[name[feed_parser].feed, parameter[name[metadata]]]
variable[pkg_info_dict] assign[=] call[name[feed_parser].close, parameter[]]
for taget[name[key]] in starred[tuple[[<ast.Constant object at 0x7da18bcc9ed0>, <ast.Constant object at 0x7da18bcc9270>, <ast.Constant object at 0x7da18bccbaf0>, <ast.Constant object at 0x7da18bcca290>, <ast.Constant object at 0x7da18bccad10>, <ast.Constant object at 0x7da18bcc8fa0>]]] begin[:]
call[name[package]][name[key]] assign[=] call[name[pkg_info_dict].get, parameter[name[key]]]
variable[classifiers] assign[=] list[[]]
for taget[name[line]] in starred[call[name[metadata].splitlines, parameter[]]] begin[:]
if call[name[line].startswith, parameter[constant[Classifier: ]]] begin[:]
call[name[classifiers].append, parameter[call[name[line]][<ast.Slice object at 0x7da18bcca440>]]]
call[name[package]][constant[classifiers]] assign[=] name[classifiers]
if name[file_list] begin[:]
call[name[package]][constant[files]] assign[=] call[name[sorted], parameter[name[file_list]]]
<ast.Yield object at 0x7da18bccb0d0> | keyword[def] identifier[search_packages_info] ( identifier[query] ):
literal[string]
identifier[installed] ={}
keyword[for] identifier[p] keyword[in] identifier[pkg_resources] . identifier[working_set] :
identifier[installed] [ identifier[canonicalize_name] ( identifier[p] . identifier[project_name] )]= identifier[p]
identifier[query_names] =[ identifier[canonicalize_name] ( identifier[name] ) keyword[for] identifier[name] keyword[in] identifier[query] ]
keyword[for] identifier[dist] keyword[in] [ identifier[installed] [ identifier[pkg] ] keyword[for] identifier[pkg] keyword[in] identifier[query_names] keyword[if] identifier[pkg] keyword[in] identifier[installed] ]:
identifier[package] ={
literal[string] : identifier[dist] . identifier[project_name] ,
literal[string] : identifier[dist] . identifier[version] ,
literal[string] : identifier[dist] . identifier[location] ,
literal[string] :[ identifier[dep] . identifier[project_name] keyword[for] identifier[dep] keyword[in] identifier[dist] . identifier[requires] ()],
}
identifier[file_list] = keyword[None]
identifier[metadata] = keyword[None]
keyword[if] identifier[isinstance] ( identifier[dist] , identifier[pkg_resources] . identifier[DistInfoDistribution] ):
keyword[if] identifier[dist] . identifier[has_metadata] ( literal[string] ):
identifier[lines] = identifier[dist] . identifier[get_metadata_lines] ( literal[string] )
identifier[paths] =[ identifier[l] . identifier[split] ( literal[string] )[ literal[int] ] keyword[for] identifier[l] keyword[in] identifier[lines] ]
identifier[paths] =[ identifier[os] . identifier[path] . identifier[join] ( identifier[dist] . identifier[location] , identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[paths] ]
identifier[file_list] =[ identifier[os] . identifier[path] . identifier[relpath] ( identifier[p] , identifier[dist] . identifier[location] ) keyword[for] identifier[p] keyword[in] identifier[paths] ]
keyword[if] identifier[dist] . identifier[has_metadata] ( literal[string] ):
identifier[metadata] = identifier[dist] . identifier[get_metadata] ( literal[string] )
keyword[else] :
keyword[if] identifier[dist] . identifier[has_metadata] ( literal[string] ):
identifier[paths] = identifier[dist] . identifier[get_metadata_lines] ( literal[string] )
identifier[paths] =[ identifier[os] . identifier[path] . identifier[join] ( identifier[dist] . identifier[egg_info] , identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[paths] ]
identifier[file_list] =[ identifier[os] . identifier[path] . identifier[relpath] ( identifier[p] , identifier[dist] . identifier[location] ) keyword[for] identifier[p] keyword[in] identifier[paths] ]
keyword[if] identifier[dist] . identifier[has_metadata] ( literal[string] ):
identifier[metadata] = identifier[dist] . identifier[get_metadata] ( literal[string] )
keyword[if] identifier[dist] . identifier[has_metadata] ( literal[string] ):
identifier[entry_points] = identifier[dist] . identifier[get_metadata_lines] ( literal[string] )
identifier[package] [ literal[string] ]= identifier[entry_points]
keyword[if] identifier[dist] . identifier[has_metadata] ( literal[string] ):
keyword[for] identifier[line] keyword[in] identifier[dist] . identifier[get_metadata_lines] ( literal[string] ):
keyword[if] identifier[line] . identifier[strip] ():
identifier[package] [ literal[string] ]= identifier[line] . identifier[strip] ()
keyword[break]
identifier[feed_parser] = identifier[FeedParser] ()
identifier[feed_parser] . identifier[feed] ( identifier[metadata] )
identifier[pkg_info_dict] = identifier[feed_parser] . identifier[close] ()
keyword[for] identifier[key] keyword[in] ( literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] ):
identifier[package] [ identifier[key] ]= identifier[pkg_info_dict] . identifier[get] ( identifier[key] )
identifier[classifiers] =[]
keyword[for] identifier[line] keyword[in] identifier[metadata] . identifier[splitlines] ():
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[classifiers] . identifier[append] ( identifier[line] [ identifier[len] ( literal[string] ):])
identifier[package] [ literal[string] ]= identifier[classifiers]
keyword[if] identifier[file_list] :
identifier[package] [ literal[string] ]= identifier[sorted] ( identifier[file_list] )
keyword[yield] identifier[package] | def search_packages_info(query):
"""
Gather details from installed distributions. Print distribution name,
version, location, and installed files. Installed files requires a
pip generated 'installed-files.txt' in the distributions '.egg-info'
directory.
"""
installed = {}
for p in pkg_resources.working_set:
installed[canonicalize_name(p.project_name)] = p # depends on [control=['for'], data=['p']]
query_names = [canonicalize_name(name) for name in query]
for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
package = {'name': dist.project_name, 'version': dist.version, 'location': dist.location, 'requires': [dep.project_name for dep in dist.requires()]}
file_list = None
metadata = None
if isinstance(dist, pkg_resources.DistInfoDistribution):
# RECORDs should be part of .dist-info metadatas
if dist.has_metadata('RECORD'):
lines = dist.get_metadata_lines('RECORD')
paths = [l.split(',')[0] for l in lines]
paths = [os.path.join(dist.location, p) for p in paths]
file_list = [os.path.relpath(p, dist.location) for p in paths] # depends on [control=['if'], data=[]]
if dist.has_metadata('METADATA'):
metadata = dist.get_metadata('METADATA') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# Otherwise use pip's log for .egg-info's
if dist.has_metadata('installed-files.txt'):
paths = dist.get_metadata_lines('installed-files.txt')
paths = [os.path.join(dist.egg_info, p) for p in paths]
file_list = [os.path.relpath(p, dist.location) for p in paths] # depends on [control=['if'], data=[]]
if dist.has_metadata('PKG-INFO'):
metadata = dist.get_metadata('PKG-INFO') # depends on [control=['if'], data=[]]
if dist.has_metadata('entry_points.txt'):
entry_points = dist.get_metadata_lines('entry_points.txt')
package['entry_points'] = entry_points # depends on [control=['if'], data=[]]
if dist.has_metadata('INSTALLER'):
for line in dist.get_metadata_lines('INSTALLER'):
if line.strip():
package['installer'] = line.strip()
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['if'], data=[]]
# @todo: Should pkg_resources.Distribution have a
# `get_pkg_info` method?
feed_parser = FeedParser()
feed_parser.feed(metadata)
pkg_info_dict = feed_parser.close()
for key in ('metadata-version', 'summary', 'home-page', 'author', 'author-email', 'license'):
package[key] = pkg_info_dict.get(key) # depends on [control=['for'], data=['key']]
# It looks like FeedParser cannot deal with repeated headers
classifiers = []
for line in metadata.splitlines():
if line.startswith('Classifier: '):
classifiers.append(line[len('Classifier: '):]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
package['classifiers'] = classifiers
if file_list:
package['files'] = sorted(file_list) # depends on [control=['if'], data=[]]
yield package # depends on [control=['for'], data=['dist']] |
def sync(self):
""" Send and fetch all outstanding messages.
:return: 2-tuple of number of detail messages and number of summary messages fetched
"""
self.send()
detail_count = summary_count = 0
while self.responses:
response = self.responses[0]
while not response.complete:
detail_delta, summary_delta = self.fetch()
detail_count += detail_delta
summary_count += summary_delta
return detail_count, summary_count | def function[sync, parameter[self]]:
constant[ Send and fetch all outstanding messages.
:return: 2-tuple of number of detail messages and number of summary messages fetched
]
call[name[self].send, parameter[]]
variable[detail_count] assign[=] constant[0]
while name[self].responses begin[:]
variable[response] assign[=] call[name[self].responses][constant[0]]
while <ast.UnaryOp object at 0x7da207f9a950> begin[:]
<ast.Tuple object at 0x7da207f99f30> assign[=] call[name[self].fetch, parameter[]]
<ast.AugAssign object at 0x7da207f03190>
<ast.AugAssign object at 0x7da207f03850>
return[tuple[[<ast.Name object at 0x7da207f01bd0>, <ast.Name object at 0x7da207f03130>]]] | keyword[def] identifier[sync] ( identifier[self] ):
literal[string]
identifier[self] . identifier[send] ()
identifier[detail_count] = identifier[summary_count] = literal[int]
keyword[while] identifier[self] . identifier[responses] :
identifier[response] = identifier[self] . identifier[responses] [ literal[int] ]
keyword[while] keyword[not] identifier[response] . identifier[complete] :
identifier[detail_delta] , identifier[summary_delta] = identifier[self] . identifier[fetch] ()
identifier[detail_count] += identifier[detail_delta]
identifier[summary_count] += identifier[summary_delta]
keyword[return] identifier[detail_count] , identifier[summary_count] | def sync(self):
""" Send and fetch all outstanding messages.
:return: 2-tuple of number of detail messages and number of summary messages fetched
"""
self.send()
detail_count = summary_count = 0
while self.responses:
response = self.responses[0]
while not response.complete:
(detail_delta, summary_delta) = self.fetch()
detail_count += detail_delta
summary_count += summary_delta # depends on [control=['while'], data=[]] # depends on [control=['while'], data=[]]
return (detail_count, summary_count) |
def replay_bundle(
self,
transaction,
depth=3,
min_weight_magnitude=None,
):
# type: (TransactionHash, int, Optional[int]) -> dict
"""
Takes a tail transaction hash as input, gets the bundle
associated with the transaction and then replays the bundle by
attaching it to the Tangle.
:param transaction:
Transaction hash. Must be a tail.
:param depth:
Depth at which to attach the bundle.
Defaults to 3.
:param min_weight_magnitude:
Min weight magnitude, used by the node to calibrate Proof of
Work.
If not provided, a default value will be used.
:return:
Dict with the following structure::
{
'trytes': List[TransactionTrytes],
Raw trytes that were published to the Tangle.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#replaytransfer
"""
if min_weight_magnitude is None:
min_weight_magnitude = self.default_min_weight_magnitude
return extended.ReplayBundleCommand(self.adapter)(
transaction=transaction,
depth=depth,
minWeightMagnitude=min_weight_magnitude,
) | def function[replay_bundle, parameter[self, transaction, depth, min_weight_magnitude]]:
constant[
Takes a tail transaction hash as input, gets the bundle
associated with the transaction and then replays the bundle by
attaching it to the Tangle.
:param transaction:
Transaction hash. Must be a tail.
:param depth:
Depth at which to attach the bundle.
Defaults to 3.
:param min_weight_magnitude:
Min weight magnitude, used by the node to calibrate Proof of
Work.
If not provided, a default value will be used.
:return:
Dict with the following structure::
{
'trytes': List[TransactionTrytes],
Raw trytes that were published to the Tangle.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#replaytransfer
]
if compare[name[min_weight_magnitude] is constant[None]] begin[:]
variable[min_weight_magnitude] assign[=] name[self].default_min_weight_magnitude
return[call[call[name[extended].ReplayBundleCommand, parameter[name[self].adapter]], parameter[]]] | keyword[def] identifier[replay_bundle] (
identifier[self] ,
identifier[transaction] ,
identifier[depth] = literal[int] ,
identifier[min_weight_magnitude] = keyword[None] ,
):
literal[string]
keyword[if] identifier[min_weight_magnitude] keyword[is] keyword[None] :
identifier[min_weight_magnitude] = identifier[self] . identifier[default_min_weight_magnitude]
keyword[return] identifier[extended] . identifier[ReplayBundleCommand] ( identifier[self] . identifier[adapter] )(
identifier[transaction] = identifier[transaction] ,
identifier[depth] = identifier[depth] ,
identifier[minWeightMagnitude] = identifier[min_weight_magnitude] ,
) | def replay_bundle(self, transaction, depth=3, min_weight_magnitude=None):
# type: (TransactionHash, int, Optional[int]) -> dict
"\n Takes a tail transaction hash as input, gets the bundle\n associated with the transaction and then replays the bundle by\n attaching it to the Tangle.\n\n :param transaction:\n Transaction hash. Must be a tail.\n\n :param depth:\n Depth at which to attach the bundle.\n Defaults to 3.\n\n :param min_weight_magnitude:\n Min weight magnitude, used by the node to calibrate Proof of\n Work.\n\n If not provided, a default value will be used.\n\n :return:\n Dict with the following structure::\n\n {\n 'trytes': List[TransactionTrytes],\n Raw trytes that were published to the Tangle.\n }\n\n References:\n\n - https://github.com/iotaledger/wiki/blob/master/api-proposal.md#replaytransfer\n "
if min_weight_magnitude is None:
min_weight_magnitude = self.default_min_weight_magnitude # depends on [control=['if'], data=['min_weight_magnitude']]
return extended.ReplayBundleCommand(self.adapter)(transaction=transaction, depth=depth, minWeightMagnitude=min_weight_magnitude) |
def add_job():
"""Adds a new job."""
data = request.get_json(force=True)
try:
job = current_app.apscheduler.add_job(**data)
return jsonify(job)
except ConflictingIdError:
return jsonify(dict(error_message='Job %s already exists.' % data.get('id')), status=409)
except Exception as e:
return jsonify(dict(error_message=str(e)), status=500) | def function[add_job, parameter[]]:
constant[Adds a new job.]
variable[data] assign[=] call[name[request].get_json, parameter[]]
<ast.Try object at 0x7da18eb569e0> | keyword[def] identifier[add_job] ():
literal[string]
identifier[data] = identifier[request] . identifier[get_json] ( identifier[force] = keyword[True] )
keyword[try] :
identifier[job] = identifier[current_app] . identifier[apscheduler] . identifier[add_job] (** identifier[data] )
keyword[return] identifier[jsonify] ( identifier[job] )
keyword[except] identifier[ConflictingIdError] :
keyword[return] identifier[jsonify] ( identifier[dict] ( identifier[error_message] = literal[string] % identifier[data] . identifier[get] ( literal[string] )), identifier[status] = literal[int] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[return] identifier[jsonify] ( identifier[dict] ( identifier[error_message] = identifier[str] ( identifier[e] )), identifier[status] = literal[int] ) | def add_job():
"""Adds a new job."""
data = request.get_json(force=True)
try:
job = current_app.apscheduler.add_job(**data)
return jsonify(job) # depends on [control=['try'], data=[]]
except ConflictingIdError:
return jsonify(dict(error_message='Job %s already exists.' % data.get('id')), status=409) # depends on [control=['except'], data=[]]
except Exception as e:
return jsonify(dict(error_message=str(e)), status=500) # depends on [control=['except'], data=['e']] |
def load(self, options):
"""
Load the schema objects for the root nodes.
- de-references schemas
- merge schemas
@param options: An options dictionary.
@type options: L{options.Options}
@return: The merged schema.
@rtype: L{Schema}
"""
if options.autoblend:
self.autoblend()
for child in self.children:
child.build()
for child in self.children:
child.open_imports(options)
for child in self.children:
child.dereference()
log.debug('loaded:\n%s', self)
merged = self.merge()
log.debug('MERGED:\n%s', merged)
return merged | def function[load, parameter[self, options]]:
constant[
Load the schema objects for the root nodes.
- de-references schemas
- merge schemas
@param options: An options dictionary.
@type options: L{options.Options}
@return: The merged schema.
@rtype: L{Schema}
]
if name[options].autoblend begin[:]
call[name[self].autoblend, parameter[]]
for taget[name[child]] in starred[name[self].children] begin[:]
call[name[child].build, parameter[]]
for taget[name[child]] in starred[name[self].children] begin[:]
call[name[child].open_imports, parameter[name[options]]]
for taget[name[child]] in starred[name[self].children] begin[:]
call[name[child].dereference, parameter[]]
call[name[log].debug, parameter[constant[loaded:
%s], name[self]]]
variable[merged] assign[=] call[name[self].merge, parameter[]]
call[name[log].debug, parameter[constant[MERGED:
%s], name[merged]]]
return[name[merged]] | keyword[def] identifier[load] ( identifier[self] , identifier[options] ):
literal[string]
keyword[if] identifier[options] . identifier[autoblend] :
identifier[self] . identifier[autoblend] ()
keyword[for] identifier[child] keyword[in] identifier[self] . identifier[children] :
identifier[child] . identifier[build] ()
keyword[for] identifier[child] keyword[in] identifier[self] . identifier[children] :
identifier[child] . identifier[open_imports] ( identifier[options] )
keyword[for] identifier[child] keyword[in] identifier[self] . identifier[children] :
identifier[child] . identifier[dereference] ()
identifier[log] . identifier[debug] ( literal[string] , identifier[self] )
identifier[merged] = identifier[self] . identifier[merge] ()
identifier[log] . identifier[debug] ( literal[string] , identifier[merged] )
keyword[return] identifier[merged] | def load(self, options):
"""
Load the schema objects for the root nodes.
- de-references schemas
- merge schemas
@param options: An options dictionary.
@type options: L{options.Options}
@return: The merged schema.
@rtype: L{Schema}
"""
if options.autoblend:
self.autoblend() # depends on [control=['if'], data=[]]
for child in self.children:
child.build() # depends on [control=['for'], data=['child']]
for child in self.children:
child.open_imports(options) # depends on [control=['for'], data=['child']]
for child in self.children:
child.dereference() # depends on [control=['for'], data=['child']]
log.debug('loaded:\n%s', self)
merged = self.merge()
log.debug('MERGED:\n%s', merged)
return merged |
def withdraw(self, amount):
"""Extends withdraw method to make sure enough funds are in the account, then call withdraw from superclass"""
if amount > self.balance:
raise ValueError('Insufficient Funds')
super().withdraw(amount) | def function[withdraw, parameter[self, amount]]:
constant[Extends withdraw method to make sure enough funds are in the account, then call withdraw from superclass]
if compare[name[amount] greater[>] name[self].balance] begin[:]
<ast.Raise object at 0x7da20c7cb460>
call[call[name[super], parameter[]].withdraw, parameter[name[amount]]] | keyword[def] identifier[withdraw] ( identifier[self] , identifier[amount] ):
literal[string]
keyword[if] identifier[amount] > identifier[self] . identifier[balance] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[super] (). identifier[withdraw] ( identifier[amount] ) | def withdraw(self, amount):
"""Extends withdraw method to make sure enough funds are in the account, then call withdraw from superclass"""
if amount > self.balance:
raise ValueError('Insufficient Funds') # depends on [control=['if'], data=[]]
super().withdraw(amount) |
def scopes(self, scopes):
"""Set scopes.
:param scopes: The list of scopes.
"""
validate_scopes(scopes)
self._scopes = " ".join(set(scopes)) if scopes else "" | def function[scopes, parameter[self, scopes]]:
constant[Set scopes.
:param scopes: The list of scopes.
]
call[name[validate_scopes], parameter[name[scopes]]]
name[self]._scopes assign[=] <ast.IfExp object at 0x7da1b253ee60> | keyword[def] identifier[scopes] ( identifier[self] , identifier[scopes] ):
literal[string]
identifier[validate_scopes] ( identifier[scopes] )
identifier[self] . identifier[_scopes] = literal[string] . identifier[join] ( identifier[set] ( identifier[scopes] )) keyword[if] identifier[scopes] keyword[else] literal[string] | def scopes(self, scopes):
"""Set scopes.
:param scopes: The list of scopes.
"""
validate_scopes(scopes)
self._scopes = ' '.join(set(scopes)) if scopes else '' |
def getTextWords(page):
"""Return the text words as a list with the bbox for each word.
"""
CheckParent(page)
dl = page.getDisplayList()
tp = dl.getTextPage()
l = tp._extractTextWords_AsList()
del dl
del tp
return l | def function[getTextWords, parameter[page]]:
constant[Return the text words as a list with the bbox for each word.
]
call[name[CheckParent], parameter[name[page]]]
variable[dl] assign[=] call[name[page].getDisplayList, parameter[]]
variable[tp] assign[=] call[name[dl].getTextPage, parameter[]]
variable[l] assign[=] call[name[tp]._extractTextWords_AsList, parameter[]]
<ast.Delete object at 0x7da20c6a99c0>
<ast.Delete object at 0x7da20c6ab250>
return[name[l]] | keyword[def] identifier[getTextWords] ( identifier[page] ):
literal[string]
identifier[CheckParent] ( identifier[page] )
identifier[dl] = identifier[page] . identifier[getDisplayList] ()
identifier[tp] = identifier[dl] . identifier[getTextPage] ()
identifier[l] = identifier[tp] . identifier[_extractTextWords_AsList] ()
keyword[del] identifier[dl]
keyword[del] identifier[tp]
keyword[return] identifier[l] | def getTextWords(page):
"""Return the text words as a list with the bbox for each word.
"""
CheckParent(page)
dl = page.getDisplayList()
tp = dl.getTextPage()
l = tp._extractTextWords_AsList()
del dl
del tp
return l |
def list(self, master=True):
"""List current reminders.
"""
params = {}
params.update(self.static_params)
if master:
params.update({
"recurrenceOptions": {
"collapseMode": "MASTER_ONLY",
},
"includeArchived": True,
"includeDeleted": False,
})
else:
current_time = time.time()
start_time = int((current_time - (365 * 24 * 60 * 60)) * 1000)
end_time = int((current_time + (24 * 60 * 60)) * 1000)
params.update({
"recurrenceOptions": {
"collapseMode":"INSTANCES_ONLY",
"recurrencesOnly": True,
},
"includeArchived": False,
"includeCompleted": False,
"includeDeleted": False,
"dueAfterMs": start_time,
"dueBeforeMs": end_time,
"recurrenceId": [],
})
return self.send(
url=self._base_url + 'list',
method='POST',
json=params
) | def function[list, parameter[self, master]]:
constant[List current reminders.
]
variable[params] assign[=] dictionary[[], []]
call[name[params].update, parameter[name[self].static_params]]
if name[master] begin[:]
call[name[params].update, parameter[dictionary[[<ast.Constant object at 0x7da1b26aeb00>, <ast.Constant object at 0x7da1b26ad510>, <ast.Constant object at 0x7da1b26afdf0>], [<ast.Dict object at 0x7da1b26afc70>, <ast.Constant object at 0x7da1b26acbb0>, <ast.Constant object at 0x7da1b26af7f0>]]]]
return[call[name[self].send, parameter[]]] | keyword[def] identifier[list] ( identifier[self] , identifier[master] = keyword[True] ):
literal[string]
identifier[params] ={}
identifier[params] . identifier[update] ( identifier[self] . identifier[static_params] )
keyword[if] identifier[master] :
identifier[params] . identifier[update] ({
literal[string] :{
literal[string] : literal[string] ,
},
literal[string] : keyword[True] ,
literal[string] : keyword[False] ,
})
keyword[else] :
identifier[current_time] = identifier[time] . identifier[time] ()
identifier[start_time] = identifier[int] (( identifier[current_time] -( literal[int] * literal[int] * literal[int] * literal[int] ))* literal[int] )
identifier[end_time] = identifier[int] (( identifier[current_time] +( literal[int] * literal[int] * literal[int] ))* literal[int] )
identifier[params] . identifier[update] ({
literal[string] :{
literal[string] : literal[string] ,
literal[string] : keyword[True] ,
},
literal[string] : keyword[False] ,
literal[string] : keyword[False] ,
literal[string] : keyword[False] ,
literal[string] : identifier[start_time] ,
literal[string] : identifier[end_time] ,
literal[string] :[],
})
keyword[return] identifier[self] . identifier[send] (
identifier[url] = identifier[self] . identifier[_base_url] + literal[string] ,
identifier[method] = literal[string] ,
identifier[json] = identifier[params]
) | def list(self, master=True):
"""List current reminders.
"""
params = {}
params.update(self.static_params)
if master:
params.update({'recurrenceOptions': {'collapseMode': 'MASTER_ONLY'}, 'includeArchived': True, 'includeDeleted': False}) # depends on [control=['if'], data=[]]
else:
current_time = time.time()
start_time = int((current_time - 365 * 24 * 60 * 60) * 1000)
end_time = int((current_time + 24 * 60 * 60) * 1000)
params.update({'recurrenceOptions': {'collapseMode': 'INSTANCES_ONLY', 'recurrencesOnly': True}, 'includeArchived': False, 'includeCompleted': False, 'includeDeleted': False, 'dueAfterMs': start_time, 'dueBeforeMs': end_time, 'recurrenceId': []})
return self.send(url=self._base_url + 'list', method='POST', json=params) |
def ovsdb_server_ip_address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ovsdb_server = ET.SubElement(config, "ovsdb-server", xmlns="urn:brocade.com:mgmt:brocade-tunnels")
name_key = ET.SubElement(ovsdb_server, "name")
name_key.text = kwargs.pop('name')
ip = ET.SubElement(ovsdb_server, "ip")
address = ET.SubElement(ip, "address")
address.text = kwargs.pop('address')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[ovsdb_server_ip_address, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[ovsdb_server] assign[=] call[name[ET].SubElement, parameter[name[config], constant[ovsdb-server]]]
variable[name_key] assign[=] call[name[ET].SubElement, parameter[name[ovsdb_server], constant[name]]]
name[name_key].text assign[=] call[name[kwargs].pop, parameter[constant[name]]]
variable[ip] assign[=] call[name[ET].SubElement, parameter[name[ovsdb_server], constant[ip]]]
variable[address] assign[=] call[name[ET].SubElement, parameter[name[ip], constant[address]]]
name[address].text assign[=] call[name[kwargs].pop, parameter[constant[address]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[ovsdb_server_ip_address] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[ovsdb_server] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[name_key] = identifier[ET] . identifier[SubElement] ( identifier[ovsdb_server] , literal[string] )
identifier[name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[ip] = identifier[ET] . identifier[SubElement] ( identifier[ovsdb_server] , literal[string] )
identifier[address] = identifier[ET] . identifier[SubElement] ( identifier[ip] , literal[string] )
identifier[address] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def ovsdb_server_ip_address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
ovsdb_server = ET.SubElement(config, 'ovsdb-server', xmlns='urn:brocade.com:mgmt:brocade-tunnels')
name_key = ET.SubElement(ovsdb_server, 'name')
name_key.text = kwargs.pop('name')
ip = ET.SubElement(ovsdb_server, 'ip')
address = ET.SubElement(ip, 'address')
address.text = kwargs.pop('address')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def fcoe_get_interface_output_fcoe_intf_list_fcoe_intf_tx_disc_unsol_adv(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe_get_interface = ET.Element("fcoe_get_interface")
config = fcoe_get_interface
output = ET.SubElement(fcoe_get_interface, "output")
fcoe_intf_list = ET.SubElement(output, "fcoe-intf-list")
fcoe_intf_fcoe_port_id_key = ET.SubElement(fcoe_intf_list, "fcoe-intf-fcoe-port-id")
fcoe_intf_fcoe_port_id_key.text = kwargs.pop('fcoe_intf_fcoe_port_id')
fcoe_intf_tx_disc_unsol_adv = ET.SubElement(fcoe_intf_list, "fcoe-intf-tx-disc-unsol-adv")
fcoe_intf_tx_disc_unsol_adv.text = kwargs.pop('fcoe_intf_tx_disc_unsol_adv')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[fcoe_get_interface_output_fcoe_intf_list_fcoe_intf_tx_disc_unsol_adv, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[fcoe_get_interface] assign[=] call[name[ET].Element, parameter[constant[fcoe_get_interface]]]
variable[config] assign[=] name[fcoe_get_interface]
variable[output] assign[=] call[name[ET].SubElement, parameter[name[fcoe_get_interface], constant[output]]]
variable[fcoe_intf_list] assign[=] call[name[ET].SubElement, parameter[name[output], constant[fcoe-intf-list]]]
variable[fcoe_intf_fcoe_port_id_key] assign[=] call[name[ET].SubElement, parameter[name[fcoe_intf_list], constant[fcoe-intf-fcoe-port-id]]]
name[fcoe_intf_fcoe_port_id_key].text assign[=] call[name[kwargs].pop, parameter[constant[fcoe_intf_fcoe_port_id]]]
variable[fcoe_intf_tx_disc_unsol_adv] assign[=] call[name[ET].SubElement, parameter[name[fcoe_intf_list], constant[fcoe-intf-tx-disc-unsol-adv]]]
name[fcoe_intf_tx_disc_unsol_adv].text assign[=] call[name[kwargs].pop, parameter[constant[fcoe_intf_tx_disc_unsol_adv]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[fcoe_get_interface_output_fcoe_intf_list_fcoe_intf_tx_disc_unsol_adv] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[fcoe_get_interface] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[fcoe_get_interface]
identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[fcoe_get_interface] , literal[string] )
identifier[fcoe_intf_list] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] )
identifier[fcoe_intf_fcoe_port_id_key] = identifier[ET] . identifier[SubElement] ( identifier[fcoe_intf_list] , literal[string] )
identifier[fcoe_intf_fcoe_port_id_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[fcoe_intf_tx_disc_unsol_adv] = identifier[ET] . identifier[SubElement] ( identifier[fcoe_intf_list] , literal[string] )
identifier[fcoe_intf_tx_disc_unsol_adv] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def fcoe_get_interface_output_fcoe_intf_list_fcoe_intf_tx_disc_unsol_adv(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
fcoe_get_interface = ET.Element('fcoe_get_interface')
config = fcoe_get_interface
output = ET.SubElement(fcoe_get_interface, 'output')
fcoe_intf_list = ET.SubElement(output, 'fcoe-intf-list')
fcoe_intf_fcoe_port_id_key = ET.SubElement(fcoe_intf_list, 'fcoe-intf-fcoe-port-id')
fcoe_intf_fcoe_port_id_key.text = kwargs.pop('fcoe_intf_fcoe_port_id')
fcoe_intf_tx_disc_unsol_adv = ET.SubElement(fcoe_intf_list, 'fcoe-intf-tx-disc-unsol-adv')
fcoe_intf_tx_disc_unsol_adv.text = kwargs.pop('fcoe_intf_tx_disc_unsol_adv')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def add_contact(self, contact_id, scope='contact/invite'):
"""
Add a contact
contact_id can either be the mxit ID of a service or a Mxit user
User authentication required with the following scope: 'contact/invite'
"""
return _put(
token=self.oauth.get_user_token(scope),
uri='/user/socialgraph/contact/' + urllib.quote(contact_id)
) | def function[add_contact, parameter[self, contact_id, scope]]:
constant[
Add a contact
contact_id can either be the mxit ID of a service or a Mxit user
User authentication required with the following scope: 'contact/invite'
]
return[call[name[_put], parameter[]]] | keyword[def] identifier[add_contact] ( identifier[self] , identifier[contact_id] , identifier[scope] = literal[string] ):
literal[string]
keyword[return] identifier[_put] (
identifier[token] = identifier[self] . identifier[oauth] . identifier[get_user_token] ( identifier[scope] ),
identifier[uri] = literal[string] + identifier[urllib] . identifier[quote] ( identifier[contact_id] )
) | def add_contact(self, contact_id, scope='contact/invite'):
"""
Add a contact
contact_id can either be the mxit ID of a service or a Mxit user
User authentication required with the following scope: 'contact/invite'
"""
return _put(token=self.oauth.get_user_token(scope), uri='/user/socialgraph/contact/' + urllib.quote(contact_id)) |
def make_update(cls, table, set_query, where=None):
"""
Make UPDATE query.
:param str table: Table name of executing the query.
:param str set_query: SET part of the UPDATE query.
:param str where:
Add a WHERE clause to execute query,
if the value is not |None|.
:return: Query of SQLite.
:rtype: str
:raises ValueError: If ``set_query`` is empty string.
:raises simplesqlite.NameValidationError:
|raises_validate_table_name|
"""
validate_table_name(table)
if typepy.is_null_string(set_query):
raise ValueError("SET query is null")
query_list = ["UPDATE {:s}".format(Table(table)), "SET {:s}".format(set_query)]
if where and isinstance(where, (six.text_type, Where, And, Or)):
query_list.append("WHERE {:s}".format(where))
return " ".join(query_list) | def function[make_update, parameter[cls, table, set_query, where]]:
constant[
Make UPDATE query.
:param str table: Table name of executing the query.
:param str set_query: SET part of the UPDATE query.
:param str where:
Add a WHERE clause to execute query,
if the value is not |None|.
:return: Query of SQLite.
:rtype: str
:raises ValueError: If ``set_query`` is empty string.
:raises simplesqlite.NameValidationError:
|raises_validate_table_name|
]
call[name[validate_table_name], parameter[name[table]]]
if call[name[typepy].is_null_string, parameter[name[set_query]]] begin[:]
<ast.Raise object at 0x7da1b04f9870>
variable[query_list] assign[=] list[[<ast.Call object at 0x7da1b04f9f60>, <ast.Call object at 0x7da1b04f9c90>]]
if <ast.BoolOp object at 0x7da1b04f94e0> begin[:]
call[name[query_list].append, parameter[call[constant[WHERE {:s}].format, parameter[name[where]]]]]
return[call[constant[ ].join, parameter[name[query_list]]]] | keyword[def] identifier[make_update] ( identifier[cls] , identifier[table] , identifier[set_query] , identifier[where] = keyword[None] ):
literal[string]
identifier[validate_table_name] ( identifier[table] )
keyword[if] identifier[typepy] . identifier[is_null_string] ( identifier[set_query] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[query_list] =[ literal[string] . identifier[format] ( identifier[Table] ( identifier[table] )), literal[string] . identifier[format] ( identifier[set_query] )]
keyword[if] identifier[where] keyword[and] identifier[isinstance] ( identifier[where] ,( identifier[six] . identifier[text_type] , identifier[Where] , identifier[And] , identifier[Or] )):
identifier[query_list] . identifier[append] ( literal[string] . identifier[format] ( identifier[where] ))
keyword[return] literal[string] . identifier[join] ( identifier[query_list] ) | def make_update(cls, table, set_query, where=None):
"""
Make UPDATE query.
:param str table: Table name of executing the query.
:param str set_query: SET part of the UPDATE query.
:param str where:
Add a WHERE clause to execute query,
if the value is not |None|.
:return: Query of SQLite.
:rtype: str
:raises ValueError: If ``set_query`` is empty string.
:raises simplesqlite.NameValidationError:
|raises_validate_table_name|
"""
validate_table_name(table)
if typepy.is_null_string(set_query):
raise ValueError('SET query is null') # depends on [control=['if'], data=[]]
query_list = ['UPDATE {:s}'.format(Table(table)), 'SET {:s}'.format(set_query)]
if where and isinstance(where, (six.text_type, Where, And, Or)):
query_list.append('WHERE {:s}'.format(where)) # depends on [control=['if'], data=[]]
return ' '.join(query_list) |
def main(input_filename, format):
"""
Calculate the fingerprint hashses of the referenced audio file and save
to disk as a pickle file
"""
# open the file & convert to wav
song_data = AudioSegment.from_file(input_filename, format=format)
song_data = song_data.set_channels(1) # convert to mono
wav_tmp = song_data.export(format="wav") # write to a tmp file buffer
wav_tmp.seek(0)
rate, wav_data = wavfile.read(wav_tmp)
rows_per_second = (1 + (rate - WIDTH)) // FRAME_STRIDE
# Calculate a coarser window for matching
window_size = (rows_per_second // TIME_STRIDE, (WIDTH // 2) // FREQ_STRIDE)
peaks = resound.get_peaks(np.array(wav_data), window_size=window_size)
# half width (nyquist freq) & half size (window is +/- around the middle)
f_width = WIDTH // (2 * FREQ_STRIDE) * 2
t_gap = 1 * rows_per_second
t_width = 2 * rows_per_second
fingerprints = resound.hashes(peaks, f_width=f_width, t_gap=t_gap, t_width=t_width) # hash, offset pairs
return fingerprints | def function[main, parameter[input_filename, format]]:
constant[
Calculate the fingerprint hashses of the referenced audio file and save
to disk as a pickle file
]
variable[song_data] assign[=] call[name[AudioSegment].from_file, parameter[name[input_filename]]]
variable[song_data] assign[=] call[name[song_data].set_channels, parameter[constant[1]]]
variable[wav_tmp] assign[=] call[name[song_data].export, parameter[]]
call[name[wav_tmp].seek, parameter[constant[0]]]
<ast.Tuple object at 0x7da18dc04880> assign[=] call[name[wavfile].read, parameter[name[wav_tmp]]]
variable[rows_per_second] assign[=] binary_operation[binary_operation[constant[1] + binary_operation[name[rate] - name[WIDTH]]] <ast.FloorDiv object at 0x7da2590d6bc0> name[FRAME_STRIDE]]
variable[window_size] assign[=] tuple[[<ast.BinOp object at 0x7da18dc06a70>, <ast.BinOp object at 0x7da18dc07760>]]
variable[peaks] assign[=] call[name[resound].get_peaks, parameter[call[name[np].array, parameter[name[wav_data]]]]]
variable[f_width] assign[=] binary_operation[binary_operation[name[WIDTH] <ast.FloorDiv object at 0x7da2590d6bc0> binary_operation[constant[2] * name[FREQ_STRIDE]]] * constant[2]]
variable[t_gap] assign[=] binary_operation[constant[1] * name[rows_per_second]]
variable[t_width] assign[=] binary_operation[constant[2] * name[rows_per_second]]
variable[fingerprints] assign[=] call[name[resound].hashes, parameter[name[peaks]]]
return[name[fingerprints]] | keyword[def] identifier[main] ( identifier[input_filename] , identifier[format] ):
literal[string]
identifier[song_data] = identifier[AudioSegment] . identifier[from_file] ( identifier[input_filename] , identifier[format] = identifier[format] )
identifier[song_data] = identifier[song_data] . identifier[set_channels] ( literal[int] )
identifier[wav_tmp] = identifier[song_data] . identifier[export] ( identifier[format] = literal[string] )
identifier[wav_tmp] . identifier[seek] ( literal[int] )
identifier[rate] , identifier[wav_data] = identifier[wavfile] . identifier[read] ( identifier[wav_tmp] )
identifier[rows_per_second] =( literal[int] +( identifier[rate] - identifier[WIDTH] ))// identifier[FRAME_STRIDE]
identifier[window_size] =( identifier[rows_per_second] // identifier[TIME_STRIDE] ,( identifier[WIDTH] // literal[int] )// identifier[FREQ_STRIDE] )
identifier[peaks] = identifier[resound] . identifier[get_peaks] ( identifier[np] . identifier[array] ( identifier[wav_data] ), identifier[window_size] = identifier[window_size] )
identifier[f_width] = identifier[WIDTH] //( literal[int] * identifier[FREQ_STRIDE] )* literal[int]
identifier[t_gap] = literal[int] * identifier[rows_per_second]
identifier[t_width] = literal[int] * identifier[rows_per_second]
identifier[fingerprints] = identifier[resound] . identifier[hashes] ( identifier[peaks] , identifier[f_width] = identifier[f_width] , identifier[t_gap] = identifier[t_gap] , identifier[t_width] = identifier[t_width] )
keyword[return] identifier[fingerprints] | def main(input_filename, format):
"""
Calculate the fingerprint hashses of the referenced audio file and save
to disk as a pickle file
"""
# open the file & convert to wav
song_data = AudioSegment.from_file(input_filename, format=format)
song_data = song_data.set_channels(1) # convert to mono
wav_tmp = song_data.export(format='wav') # write to a tmp file buffer
wav_tmp.seek(0)
(rate, wav_data) = wavfile.read(wav_tmp)
rows_per_second = (1 + (rate - WIDTH)) // FRAME_STRIDE
# Calculate a coarser window for matching
window_size = (rows_per_second // TIME_STRIDE, WIDTH // 2 // FREQ_STRIDE)
peaks = resound.get_peaks(np.array(wav_data), window_size=window_size)
# half width (nyquist freq) & half size (window is +/- around the middle)
f_width = WIDTH // (2 * FREQ_STRIDE) * 2
t_gap = 1 * rows_per_second
t_width = 2 * rows_per_second
fingerprints = resound.hashes(peaks, f_width=f_width, t_gap=t_gap, t_width=t_width) # hash, offset pairs
return fingerprints |
def list_healthchecks(self, service_id, version_number):
        """List all of the healthchecks for a particular service and version."""
        endpoint = "/service/%s/version/%d/healthcheck" % (service_id, version_number)
        # map() is kept (rather than a list) so callers still receive a lazy iterator.
        return map(lambda check: FastlyHealthCheck(self, check), self._fetch(endpoint))
constant[List all of the healthchecks for a particular service and version.]
variable[content] assign[=] call[name[self]._fetch, parameter[binary_operation[constant[/service/%s/version/%d/healthcheck] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0f41810>, <ast.Name object at 0x7da1b0f407f0>]]]]]
return[call[name[map], parameter[<ast.Lambda object at 0x7da1b0f42e60>, name[content]]]] | keyword[def] identifier[list_healthchecks] ( identifier[self] , identifier[service_id] , identifier[version_number] ):
literal[string]
identifier[content] = identifier[self] . identifier[_fetch] ( literal[string] %( identifier[service_id] , identifier[version_number] ))
keyword[return] identifier[map] ( keyword[lambda] identifier[x] : identifier[FastlyHealthCheck] ( identifier[self] , identifier[x] ), identifier[content] ) | def list_healthchecks(self, service_id, version_number):
"""List all of the healthchecks for a particular service and version."""
content = self._fetch('/service/%s/version/%d/healthcheck' % (service_id, version_number))
return map(lambda x: FastlyHealthCheck(self, x), content) |
def plural_adj(self, text, count=None):
        """
        Return the plural form of *text*, treated as an adjective.
        When *count* is one of 1, a, an, one, each, every, this, that,
        the text is returned unchanged; otherwise the plural is returned.
        Leading and trailing whitespace is preserved.
        """
        pre, word, post = self.partition_word(text)
        if not word:
            # Nothing but whitespace -- hand it back untouched.
            return text
        special = self._pl_special_adjective(word, count)
        plural = self.postprocess(word, special if special else word)
        return f"{pre}{plural}{post}"
constant[
Return the plural of text, where text is an adjective.
If count supplied, then return text if count is one of:
1, a, an, one, each, every, this, that
otherwise return the plural.
Whitespace at the start and end is preserved.
]
<ast.Tuple object at 0x7da1b12c9c00> assign[=] call[name[self].partition_word, parameter[name[text]]]
if <ast.UnaryOp object at 0x7da1b12c88b0> begin[:]
return[name[text]]
variable[plural] assign[=] call[name[self].postprocess, parameter[name[word], <ast.BoolOp object at 0x7da1b12c90f0>]]
return[call[constant[{}{}{}].format, parameter[name[pre], name[plural], name[post]]]] | keyword[def] identifier[plural_adj] ( identifier[self] , identifier[text] , identifier[count] = keyword[None] ):
literal[string]
identifier[pre] , identifier[word] , identifier[post] = identifier[self] . identifier[partition_word] ( identifier[text] )
keyword[if] keyword[not] identifier[word] :
keyword[return] identifier[text]
identifier[plural] = identifier[self] . identifier[postprocess] ( identifier[word] , identifier[self] . identifier[_pl_special_adjective] ( identifier[word] , identifier[count] ) keyword[or] identifier[word] )
keyword[return] literal[string] . identifier[format] ( identifier[pre] , identifier[plural] , identifier[post] ) | def plural_adj(self, text, count=None):
"""
Return the plural of text, where text is an adjective.
If count supplied, then return text if count is one of:
1, a, an, one, each, every, this, that
otherwise return the plural.
Whitespace at the start and end is preserved.
"""
(pre, word, post) = self.partition_word(text)
if not word:
return text # depends on [control=['if'], data=[]]
plural = self.postprocess(word, self._pl_special_adjective(word, count) or word)
return '{}{}{}'.format(pre, plural, post) |
def _validate_build_target(self, spec, target):
        """
        Validate that *target* resolves to a path inside the build_dir.
        :raises ValueError: if *target* lies outside ``spec[BUILD_DIR]``.
        """
        import os  # local import keeps the fix self-contained
        build_dir = spec[BUILD_DIR]
        resolved = realpath(target)
        # A plain startswith() check wrongly accepts sibling paths such as
        # "/build2" when build_dir is "/build"; require an exact match or a
        # separator-delimited prefix instead.
        boundary = build_dir.rstrip(os.sep) + os.sep
        if resolved != build_dir and not resolved.startswith(boundary):
            raise ValueError('build_target %s is outside build_dir' % target)
constant[
Essentially validate that the target is inside the build_dir.
]
if <ast.UnaryOp object at 0x7da1b19b5540> begin[:]
<ast.Raise object at 0x7da1b19b68c0> | keyword[def] identifier[_validate_build_target] ( identifier[self] , identifier[spec] , identifier[target] ):
literal[string]
keyword[if] keyword[not] identifier[realpath] ( identifier[target] ). identifier[startswith] ( identifier[spec] [ identifier[BUILD_DIR] ]):
keyword[raise] identifier[ValueError] ( literal[string] % identifier[target] ) | def _validate_build_target(self, spec, target):
"""
Essentially validate that the target is inside the build_dir.
"""
if not realpath(target).startswith(spec[BUILD_DIR]):
raise ValueError('build_target %s is outside build_dir' % target) # depends on [control=['if'], data=[]] |
def schedule_servicegroup_svc_downtime(self, servicegroup, start_time, end_time,
                                           fixed, trigger_id, duration, author, comment):
        """Schedule a downtime on every service of a servicegroup.
        Triggered by the external command::
            SCHEDULE_SERVICEGROUP_SVC_DOWNTIME;<servicegroup_name>;<start_time>;<end_time>;
            <fixed>;<trigger_id>;<duration>;<author>;<comment>
        :param servicegroup: servicegroup to schedule downtime
        :type servicegroup: alignak.object.servicegroup.Servicegroup
        :param start_time: downtime start time
        :param end_time: downtime end time
        :param fixed: is downtime fixed
        :type fixed: bool
        :param trigger_id: downtime id that triggered this one
        :type trigger_id: str
        :param duration: downtime duration
        :type duration: int
        :param author: downtime author
        :type author: str
        :param comment: downtime comment
        :type comment: str
        :return: None
        """
        # All services of the group share the same downtime parameters.
        downtime_args = (start_time, end_time, fixed,
                         trigger_id, duration, author, comment)
        for service in servicegroup.get_services():
            self.schedule_svc_downtime(service, *downtime_args)
constant[Schedule a service downtime for each service of a servicegroup
Format of the line that triggers function call::
SCHEDULE_SERVICEGROUP_SVC_DOWNTIME;<servicegroup_name>;<start_time>;<end_time>;
<fixed>;<trigger_id>;<duration>;<author>;<comment>
:param servicegroup: servicegroup to schedule downtime
:type servicegroup: alignak.object.servicegroup.Servicegroup
:param start_time: downtime start time
:type start_time:
:param end_time: downtime end time
:type end_time:
:param fixed: is downtime fixed
:type fixed: bool
:param trigger_id: downtime id that triggered this one
:type trigger_id: str
:param duration: downtime duration
:type duration: int
:param author: downtime author
:type author: str
:param comment: downtime comment
:type comment: str
:return: None
]
for taget[name[serv]] in starred[call[name[servicegroup].get_services, parameter[]]] begin[:]
call[name[self].schedule_svc_downtime, parameter[name[serv], name[start_time], name[end_time], name[fixed], name[trigger_id], name[duration], name[author], name[comment]]] | keyword[def] identifier[schedule_servicegroup_svc_downtime] ( identifier[self] , identifier[servicegroup] , identifier[start_time] , identifier[end_time] ,
identifier[fixed] , identifier[trigger_id] , identifier[duration] , identifier[author] , identifier[comment] ):
literal[string]
keyword[for] identifier[serv] keyword[in] identifier[servicegroup] . identifier[get_services] ():
identifier[self] . identifier[schedule_svc_downtime] ( identifier[serv] , identifier[start_time] , identifier[end_time] , identifier[fixed] ,
identifier[trigger_id] , identifier[duration] , identifier[author] , identifier[comment] ) | def schedule_servicegroup_svc_downtime(self, servicegroup, start_time, end_time, fixed, trigger_id, duration, author, comment):
"""Schedule a service downtime for each service of a servicegroup
Format of the line that triggers function call::
SCHEDULE_SERVICEGROUP_SVC_DOWNTIME;<servicegroup_name>;<start_time>;<end_time>;
<fixed>;<trigger_id>;<duration>;<author>;<comment>
:param servicegroup: servicegroup to schedule downtime
:type servicegroup: alignak.object.servicegroup.Servicegroup
:param start_time: downtime start time
:type start_time:
:param end_time: downtime end time
:type end_time:
:param fixed: is downtime fixed
:type fixed: bool
:param trigger_id: downtime id that triggered this one
:type trigger_id: str
:param duration: downtime duration
:type duration: int
:param author: downtime author
:type author: str
:param comment: downtime comment
:type comment: str
:return: None
"""
for serv in servicegroup.get_services():
self.schedule_svc_downtime(serv, start_time, end_time, fixed, trigger_id, duration, author, comment) # depends on [control=['for'], data=['serv']] |
def prior_names(self):
        """ Get the names of the prior information entries.
        Returns
        -------
        prior_names : list
            a list of prior information names
        """
        # Group the prior information rows by their index labels; the
        # group keys are exactly the distinct prior information names.
        grouped = self.prior_information.groupby(self.prior_information.index)
        return list(grouped.groups.keys())
constant[ get the prior information names
Returns
-------
prior_names : list
a list of prior information names
]
return[call[name[list], parameter[call[call[name[self].prior_information.groupby, parameter[name[self].prior_information.index]].groups.keys, parameter[]]]]] | keyword[def] identifier[prior_names] ( identifier[self] ):
literal[string]
keyword[return] identifier[list] ( identifier[self] . identifier[prior_information] . identifier[groupby] (
identifier[self] . identifier[prior_information] . identifier[index] ). identifier[groups] . identifier[keys] ()) | def prior_names(self):
""" get the prior information names
Returns
-------
prior_names : list
a list of prior information names
"""
return list(self.prior_information.groupby(self.prior_information.index).groups.keys()) |
def get_frequencies(self, q):
        """Calculate phonon frequencies at a given q-point.
        Parameters
        ----------
        q: array_like
            A q-vector.
            shape=(3,), dtype='double'
        Returns
        -------
        frequencies: ndarray
            Phonon frequencies.
            shape=(bands, ), dtype='double'
        """
        self._set_dynamical_matrix()
        if self._dynamical_matrix is None:
            raise RuntimeError("Dynamical matrix has not yet built.")
        self._dynamical_matrix.set_dynamical_matrix(q)
        dyn_mat = self._dynamical_matrix.get_dynamical_matrix()
        eigenvalues = np.linalg.eigvalsh(dyn_mat).real
        # A negative eigenvalue marks an imaginary mode; report it as a
        # negative frequency by taking -sqrt of its magnitude.
        frequencies = [np.sqrt(ev) if ev >= 0 else -np.sqrt(-ev)
                       for ev in eigenvalues]
        return np.array(frequencies) * self._factor
constant[Calculate phonon frequencies at a given q-point
Parameters
----------
q: array_like
A q-vector.
shape=(3,), dtype='double'
Returns
-------
frequencies: ndarray
Phonon frequencies.
shape=(bands, ), dtype='double'
]
call[name[self]._set_dynamical_matrix, parameter[]]
if compare[name[self]._dynamical_matrix is constant[None]] begin[:]
variable[msg] assign[=] constant[Dynamical matrix has not yet built.]
<ast.Raise object at 0x7da18fe913f0>
call[name[self]._dynamical_matrix.set_dynamical_matrix, parameter[name[q]]]
variable[dm] assign[=] call[name[self]._dynamical_matrix.get_dynamical_matrix, parameter[]]
variable[frequencies] assign[=] list[[]]
for taget[name[eig]] in starred[call[name[np].linalg.eigvalsh, parameter[name[dm]]].real] begin[:]
if compare[name[eig] less[<] constant[0]] begin[:]
call[name[frequencies].append, parameter[<ast.UnaryOp object at 0x7da18fe906d0>]]
return[binary_operation[call[name[np].array, parameter[name[frequencies]]] * name[self]._factor]] | keyword[def] identifier[get_frequencies] ( identifier[self] , identifier[q] ):
literal[string]
identifier[self] . identifier[_set_dynamical_matrix] ()
keyword[if] identifier[self] . identifier[_dynamical_matrix] keyword[is] keyword[None] :
identifier[msg] =( literal[string] )
keyword[raise] identifier[RuntimeError] ( identifier[msg] )
identifier[self] . identifier[_dynamical_matrix] . identifier[set_dynamical_matrix] ( identifier[q] )
identifier[dm] = identifier[self] . identifier[_dynamical_matrix] . identifier[get_dynamical_matrix] ()
identifier[frequencies] =[]
keyword[for] identifier[eig] keyword[in] identifier[np] . identifier[linalg] . identifier[eigvalsh] ( identifier[dm] ). identifier[real] :
keyword[if] identifier[eig] < literal[int] :
identifier[frequencies] . identifier[append] (- identifier[np] . identifier[sqrt] (- identifier[eig] ))
keyword[else] :
identifier[frequencies] . identifier[append] ( identifier[np] . identifier[sqrt] ( identifier[eig] ))
keyword[return] identifier[np] . identifier[array] ( identifier[frequencies] )* identifier[self] . identifier[_factor] | def get_frequencies(self, q):
"""Calculate phonon frequencies at a given q-point
Parameters
----------
q: array_like
A q-vector.
shape=(3,), dtype='double'
Returns
-------
frequencies: ndarray
Phonon frequencies.
shape=(bands, ), dtype='double'
"""
self._set_dynamical_matrix()
if self._dynamical_matrix is None:
msg = 'Dynamical matrix has not yet built.'
raise RuntimeError(msg) # depends on [control=['if'], data=[]]
self._dynamical_matrix.set_dynamical_matrix(q)
dm = self._dynamical_matrix.get_dynamical_matrix()
frequencies = []
for eig in np.linalg.eigvalsh(dm).real:
if eig < 0:
frequencies.append(-np.sqrt(-eig)) # depends on [control=['if'], data=['eig']]
else:
frequencies.append(np.sqrt(eig)) # depends on [control=['for'], data=['eig']]
return np.array(frequencies) * self._factor |
def _on_connection(self, data, unique_id):
        """Record the connection id announced by the server.
        :param data: Received data
        """
        uid = unique_id if unique_id is not None else self.stream_unique_id
        self.connection_id = data.get('connectionId')
        logger.info('[Connect: %s]: connection_id: %s' % (uid, self.connection_id))
constant[Called on collection operation
:param data: Received data
]
if compare[name[unique_id] is constant[None]] begin[:]
variable[unique_id] assign[=] name[self].stream_unique_id
name[self].connection_id assign[=] call[name[data].get, parameter[constant[connectionId]]]
call[name[logger].info, parameter[binary_operation[constant[[Connect: %s]: connection_id: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b17f9e10>, <ast.Attribute object at 0x7da1b17f9ae0>]]]]] | keyword[def] identifier[_on_connection] ( identifier[self] , identifier[data] , identifier[unique_id] ):
literal[string]
keyword[if] identifier[unique_id] keyword[is] keyword[None] :
identifier[unique_id] = identifier[self] . identifier[stream_unique_id]
identifier[self] . identifier[connection_id] = identifier[data] . identifier[get] ( literal[string] )
identifier[logger] . identifier[info] ( literal[string] %( identifier[unique_id] , identifier[self] . identifier[connection_id] )) | def _on_connection(self, data, unique_id):
"""Called on collection operation
:param data: Received data
"""
if unique_id is None:
unique_id = self.stream_unique_id # depends on [control=['if'], data=['unique_id']]
self.connection_id = data.get('connectionId')
logger.info('[Connect: %s]: connection_id: %s' % (unique_id, self.connection_id)) |
def _unpickle_collection(self, collection):
"""Unpickles all members of the specified dictionary."""
for mkey in collection:
if isinstance(collection[mkey], list):
for item in collection[mkey]:
item.unpickle(self)
else:
collection[mkey].unpickle(self) | def function[_unpickle_collection, parameter[self, collection]]:
constant[Unpickles all members of the specified dictionary.]
for taget[name[mkey]] in starred[name[collection]] begin[:]
if call[name[isinstance], parameter[call[name[collection]][name[mkey]], name[list]]] begin[:]
for taget[name[item]] in starred[call[name[collection]][name[mkey]]] begin[:]
call[name[item].unpickle, parameter[name[self]]] | keyword[def] identifier[_unpickle_collection] ( identifier[self] , identifier[collection] ):
literal[string]
keyword[for] identifier[mkey] keyword[in] identifier[collection] :
keyword[if] identifier[isinstance] ( identifier[collection] [ identifier[mkey] ], identifier[list] ):
keyword[for] identifier[item] keyword[in] identifier[collection] [ identifier[mkey] ]:
identifier[item] . identifier[unpickle] ( identifier[self] )
keyword[else] :
identifier[collection] [ identifier[mkey] ]. identifier[unpickle] ( identifier[self] ) | def _unpickle_collection(self, collection):
"""Unpickles all members of the specified dictionary."""
for mkey in collection:
if isinstance(collection[mkey], list):
for item in collection[mkey]:
item.unpickle(self) # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]]
else:
collection[mkey].unpickle(self) # depends on [control=['for'], data=['mkey']] |
def html(self, page, version=None):
        """Returns HTML content of *page*. The HTML content of the last version
        of the page is returned if *version* is not set.
        """
        if version is None:
            return self._dokuwiki.send('wiki.getPageHTML', page)
        return self._dokuwiki.send('wiki.getPageHTMLVersion', page, version)
constant[Returns HTML content of *page*. The HTML content of the last version
of the page is returned if *version* is not set.
]
return[<ast.IfExp object at 0x7da20c76fa00>] | keyword[def] identifier[html] ( identifier[self] , identifier[page] , identifier[version] = keyword[None] ):
literal[string]
keyword[return] ( identifier[self] . identifier[_dokuwiki] . identifier[send] ( literal[string] , identifier[page] , identifier[version] )
keyword[if] identifier[version] keyword[is] keyword[not] keyword[None]
keyword[else] identifier[self] . identifier[_dokuwiki] . identifier[send] ( literal[string] , identifier[page] )) | def html(self, page, version=None):
"""Returns HTML content of *page*. The HTML content of the last version
of the page is returned if *version* is not set.
"""
return self._dokuwiki.send('wiki.getPageHTMLVersion', page, version) if version is not None else self._dokuwiki.send('wiki.getPageHTML', page) |
def find_package(find_pkg, directory):
    """Return installed package file names in *directory* whose names start
    with *find_pkg*, skipping hidden entries and blacklisted packages.
    :param find_pkg: package name prefix to match
    :param directory: directory holding installed package files
    :return: sorted list of matching package file names
    """
    # os.path.isdir() already implies existence, so the former nested
    # os.path.exists() re-check was redundant and has been dropped.
    if not os.path.isdir(directory):
        return []
    installed = sorted(os.listdir(directory))
    blacklist = BlackList().packages(pkgs=installed, repo="local")
    return [pkg for pkg in installed
            if not pkg.startswith(".")
            and pkg.startswith(find_pkg)
            and split_package(pkg)[0] not in blacklist]
constant[Find packages
]
variable[pkgs] assign[=] list[[]]
if call[name[os].path.isdir, parameter[name[directory]]] begin[:]
variable[installed] assign[=] call[name[sorted], parameter[call[name[os].listdir, parameter[name[directory]]]]]
variable[blacklist] assign[=] call[call[name[BlackList], parameter[]].packages, parameter[]]
if call[name[os].path.exists, parameter[name[directory]]] begin[:]
for taget[name[pkg]] in starred[name[installed]] begin[:]
if <ast.BoolOp object at 0x7da1b2828bb0> begin[:]
call[name[pkgs].append, parameter[name[pkg]]]
return[name[pkgs]] | keyword[def] identifier[find_package] ( identifier[find_pkg] , identifier[directory] ):
literal[string]
identifier[pkgs] =[]
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[directory] ):
identifier[installed] = identifier[sorted] ( identifier[os] . identifier[listdir] ( identifier[directory] ))
identifier[blacklist] = identifier[BlackList] (). identifier[packages] ( identifier[pkgs] = identifier[installed] , identifier[repo] = literal[string] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[directory] ):
keyword[for] identifier[pkg] keyword[in] identifier[installed] :
keyword[if] ( keyword[not] identifier[pkg] . identifier[startswith] ( literal[string] ) keyword[and] identifier[pkg] . identifier[startswith] ( identifier[find_pkg] ) keyword[and]
identifier[split_package] ( identifier[pkg] )[ literal[int] ] keyword[not] keyword[in] identifier[blacklist] ):
identifier[pkgs] . identifier[append] ( identifier[pkg] )
keyword[return] identifier[pkgs] | def find_package(find_pkg, directory):
"""Find packages
"""
pkgs = []
if os.path.isdir(directory):
installed = sorted(os.listdir(directory))
blacklist = BlackList().packages(pkgs=installed, repo='local')
if os.path.exists(directory):
for pkg in installed:
if not pkg.startswith('.') and pkg.startswith(find_pkg) and (split_package(pkg)[0] not in blacklist):
pkgs.append(pkg) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pkg']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return pkgs |
def uploader(func):
    """This method is only used for CKEditor under version 4.5; in newer
    versions, you should use ``upload_success()`` and ``upload_fail()`` instead.
    Decorates the view function that handles the file upload. The upload
    view must return the uploaded image's url. For example::
        from flask import send_from_directory
        app.config['CKEDITOR_FILE_UPLOADER'] = 'upload'  # this value can be endpoint or url
        @app.route('/files/<filename>')
        def uploaded_files(filename):
            path = '/the/uploaded/directory'
            return send_from_directory(path, filename)
        @app.route('/upload', methods=['POST'])
        @ckeditor.uploader
        def upload():
            f = request.files.get('upload')
            f.save(os.path.join('/the/uploaded/directory', f.filename))
            url = url_for('uploaded_files', filename=f.filename)
            return url
    .. versionadded:: 0.3
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        callback_num = request.args.get('CKEditorFuncNum')
        # ckeditor = request.args.get('CKEditor')
        # language code used for error message, not used yet.
        # lang_code = request.args.get('langCode')
        # the error message to display when upload failed.
        error_message = current_app.config['CKEDITOR_UPLOAD_ERROR_MESSAGE']
        file_url = func(*args, **kwargs)
        script = '''<script type="text/javascript">
            window.parent.CKEDITOR.tools.callFunction(%s, "%s", "%s");</script>''' % (
            callback_num, file_url, error_message)
        return Markup(script)
    return wrapper
constant[This method only used for CKEditor under version 4.5, in newer version,
you should use ``upload_success()`` and ``upload_fail()`` instead.
Decorated the view function that handle the file upload. The upload
view must return the uploaded image's url. For example::
from flask import send_from_directory
app.config['CKEDITOR_FILE_UPLOADER'] = 'upload' # this value can be endpoint or url
@app.route('/files/<filename>')
def uploaded_files(filename):
path = '/the/uploaded/directory'
return send_from_directory(path, filename)
@app.route('/upload', methods=['POST'])
@ckeditor.uploader
def upload():
f = request.files.get('upload')
f.save(os.path.join('/the/uploaded/directory', f.filename))
url = url_for('uploaded_files', filename=f.filename)
return url
.. versionadded:: 0.3
]
def function[wrapper, parameter[]]:
variable[func_num] assign[=] call[name[request].args.get, parameter[constant[CKEditorFuncNum]]]
variable[message] assign[=] call[name[current_app].config][constant[CKEDITOR_UPLOAD_ERROR_MESSAGE]]
variable[url] assign[=] call[name[func], parameter[<ast.Starred object at 0x7da20c7968c0>]]
return[call[name[Markup], parameter[binary_operation[constant[<script type="text/javascript">
window.parent.CKEDITOR.tools.callFunction(%s, "%s", "%s");</script>] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c796440>, <ast.Name object at 0x7da20c795de0>, <ast.Name object at 0x7da20c794550>]]]]]]
return[name[wrapper]] | keyword[def] identifier[uploader] ( identifier[func] ):
literal[string]
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
identifier[func_num] = identifier[request] . identifier[args] . identifier[get] ( literal[string] )
identifier[message] = identifier[current_app] . identifier[config] [ literal[string] ]
identifier[url] = identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[Markup] ( literal[string] %( identifier[func_num] , identifier[url] , identifier[message] ))
keyword[return] identifier[wrapper] | def uploader(func):
"""This method only used for CKEditor under version 4.5, in newer version,
you should use ``upload_success()`` and ``upload_fail()`` instead.
Decorated the view function that handle the file upload. The upload
view must return the uploaded image's url. For example::
from flask import send_from_directory
app.config['CKEDITOR_FILE_UPLOADER'] = 'upload' # this value can be endpoint or url
@app.route('/files/<filename>')
def uploaded_files(filename):
path = '/the/uploaded/directory'
return send_from_directory(path, filename)
@app.route('/upload', methods=['POST'])
@ckeditor.uploader
def upload():
f = request.files.get('upload')
f.save(os.path.join('/the/uploaded/directory', f.filename))
url = url_for('uploaded_files', filename=f.filename)
return url
.. versionadded:: 0.3
"""
@wraps(func)
def wrapper(*args, **kwargs):
func_num = request.args.get('CKEditorFuncNum')
# ckeditor = request.args.get('CKEditor')
# language code used for error message, not used yet.
# lang_code = request.args.get('langCode')
# the error message to display when upload failed.
message = current_app.config['CKEDITOR_UPLOAD_ERROR_MESSAGE']
url = func(*args, **kwargs)
return Markup('<script type="text/javascript">\n window.parent.CKEDITOR.tools.callFunction(%s, "%s", "%s");</script>' % (func_num, url, message))
return wrapper |
async def send_files_preconf(filepaths, config_path=CONFIG_PATH):
    """Send files using the config.ini settings.
    Args:
        filepaths (list(str)): A list of filepaths.
    """
    subject = "PDF files from pdfebc"
    body = ""
    config = read_config(config_path)
    await send_with_attachments(subject, body, filepaths, config)
literal[string]
identifier[config] = identifier[read_config] ( identifier[config_path] )
identifier[subject] = literal[string]
identifier[message] = literal[string]
keyword[await] identifier[send_with_attachments] ( identifier[subject] , identifier[message] , identifier[filepaths] , identifier[config] ) | async def send_files_preconf(filepaths, config_path=CONFIG_PATH):
"""Send files using the config.ini settings.
Args:
filepaths (list(str)): A list of filepaths.
"""
config = read_config(config_path)
subject = 'PDF files from pdfebc'
message = ''
await send_with_attachments(subject, message, filepaths, config) |
def _multiplex(self, target_gate, list_of_angles):
        """
        Return a recursive implementation of a multiplexor circuit,
        where each instruction itself has a decomposition based on
        smaller multiplexors.
        The LSB is the multiplexor "data" and the other bits are multiplexor "select".
        Args:
            target_gate (Gate): Ry or Rz gate to apply to target qubit, multiplexed
                over all other "select" qubits
            list_of_angles (list[float]): list of rotation angles to apply Ry and Rz
        Returns:
            QuantumCircuit: the circuit implementing the multiplexor's action
        """
        list_len = len(list_of_angles)
        # one "data" qubit plus log2(len) "select" qubits
        local_num_qubits = int(math.log2(list_len)) + 1
        q = QuantumRegister(local_num_qubits)
        circuit = QuantumCircuit(q, name="multiplex" + local_num_qubits.__str__())
        lsb = q[0]
        msb = q[local_num_qubits - 1]
        # case of no multiplexing: base case for recursion
        if local_num_qubits == 1:
            circuit.append(target_gate(list_of_angles[0]), [q[0]])
            return circuit
        # calc angle weights, assuming recursion (that is the lower-level
        # requested angles have been correctly implemented by recursion
        angle_weight = scipy.kron([[0.5, 0.5], [0.5, -0.5]],
                                  np.identity(2 ** (local_num_qubits - 2)))
        # calc the combo angles
        list_of_angles = angle_weight.dot(np.array(list_of_angles)).tolist()
        # recursive step on half the angles fulfilling the above assumption
        multiplex_1 = self._multiplex(target_gate, list_of_angles[0:(list_len // 2)])
        circuit.append(multiplex_1.to_instruction(), q[0:-1])
        # attach CNOT as follows, thereby flipping the LSB qubit
        circuit.append(CnotGate(), [msb, lsb])
        # implement extra efficiency from the paper of cancelling adjacent
        # CNOTs (by leaving out last CNOT and reversing (NOT inverting) the
        # second lower-level multiplex)
        multiplex_2 = self._multiplex(target_gate, list_of_angles[(list_len // 2):])
        if list_len > 1:
            circuit.append(multiplex_2.to_instruction().mirror(), q[0:-1])
        else:
            circuit.append(multiplex_2.to_instruction(), q[0:-1])
        # attach a final CNOT
        circuit.append(CnotGate(), [msb, lsb])
        return circuit
constant[
Return a recursive implementation of a multiplexor circuit,
where each instruction itself has a decomposition based on
smaller multiplexors.
The LSB is the multiplexor "data" and the other bits are multiplexor "select".
Args:
target_gate (Gate): Ry or Rz gate to apply to target qubit, multiplexed
over all other "select" qubits
list_of_angles (list[float]): list of rotation angles to apply Ry and Rz
Returns:
DAGCircuit: the circuit implementing the multiplexor's action
]
variable[list_len] assign[=] call[name[len], parameter[name[list_of_angles]]]
variable[local_num_qubits] assign[=] binary_operation[call[name[int], parameter[call[name[math].log2, parameter[name[list_len]]]]] + constant[1]]
variable[q] assign[=] call[name[QuantumRegister], parameter[name[local_num_qubits]]]
variable[circuit] assign[=] call[name[QuantumCircuit], parameter[name[q]]]
variable[lsb] assign[=] call[name[q]][constant[0]]
variable[msb] assign[=] call[name[q]][binary_operation[name[local_num_qubits] - constant[1]]]
if compare[name[local_num_qubits] equal[==] constant[1]] begin[:]
call[name[circuit].append, parameter[call[name[target_gate], parameter[call[name[list_of_angles]][constant[0]]]], list[[<ast.Subscript object at 0x7da1b059f1c0>]]]]
return[name[circuit]]
variable[angle_weight] assign[=] call[name[scipy].kron, parameter[list[[<ast.List object at 0x7da1b059eef0>, <ast.List object at 0x7da1b059ee60>]], call[name[np].identity, parameter[binary_operation[constant[2] ** binary_operation[name[local_num_qubits] - constant[2]]]]]]]
variable[list_of_angles] assign[=] call[call[name[angle_weight].dot, parameter[call[name[np].array, parameter[name[list_of_angles]]]]].tolist, parameter[]]
variable[multiplex_1] assign[=] call[name[self]._multiplex, parameter[name[target_gate], call[name[list_of_angles]][<ast.Slice object at 0x7da1b059d540>]]]
call[name[circuit].append, parameter[call[name[multiplex_1].to_instruction, parameter[]], call[name[q]][<ast.Slice object at 0x7da1b059d7e0>]]]
call[name[circuit].append, parameter[call[name[CnotGate], parameter[]], list[[<ast.Name object at 0x7da1b059e170>, <ast.Name object at 0x7da1b059e1a0>]]]]
variable[multiplex_2] assign[=] call[name[self]._multiplex, parameter[name[target_gate], call[name[list_of_angles]][<ast.Slice object at 0x7da1b059e380>]]]
if compare[name[list_len] greater[>] constant[1]] begin[:]
call[name[circuit].append, parameter[call[call[name[multiplex_2].to_instruction, parameter[]].mirror, parameter[]], call[name[q]][<ast.Slice object at 0x7da1b059e710>]]]
call[name[circuit].append, parameter[call[name[CnotGate], parameter[]], list[[<ast.Name object at 0x7da1b059d270>, <ast.Name object at 0x7da1b059d240>]]]]
return[name[circuit]] | keyword[def] identifier[_multiplex] ( identifier[self] , identifier[target_gate] , identifier[list_of_angles] ):
literal[string]
identifier[list_len] = identifier[len] ( identifier[list_of_angles] )
identifier[local_num_qubits] = identifier[int] ( identifier[math] . identifier[log2] ( identifier[list_len] ))+ literal[int]
identifier[q] = identifier[QuantumRegister] ( identifier[local_num_qubits] )
identifier[circuit] = identifier[QuantumCircuit] ( identifier[q] , identifier[name] = literal[string] + identifier[local_num_qubits] . identifier[__str__] ())
identifier[lsb] = identifier[q] [ literal[int] ]
identifier[msb] = identifier[q] [ identifier[local_num_qubits] - literal[int] ]
keyword[if] identifier[local_num_qubits] == literal[int] :
identifier[circuit] . identifier[append] ( identifier[target_gate] ( identifier[list_of_angles] [ literal[int] ]),[ identifier[q] [ literal[int] ]])
keyword[return] identifier[circuit]
identifier[angle_weight] = identifier[scipy] . identifier[kron] ([[ literal[int] , literal[int] ],[ literal[int] ,- literal[int] ]],
identifier[np] . identifier[identity] ( literal[int] **( identifier[local_num_qubits] - literal[int] )))
identifier[list_of_angles] = identifier[angle_weight] . identifier[dot] ( identifier[np] . identifier[array] ( identifier[list_of_angles] )). identifier[tolist] ()
identifier[multiplex_1] = identifier[self] . identifier[_multiplex] ( identifier[target_gate] , identifier[list_of_angles] [ literal[int] :( identifier[list_len] // literal[int] )])
identifier[circuit] . identifier[append] ( identifier[multiplex_1] . identifier[to_instruction] (), identifier[q] [ literal[int] :- literal[int] ])
identifier[circuit] . identifier[append] ( identifier[CnotGate] (),[ identifier[msb] , identifier[lsb] ])
identifier[multiplex_2] = identifier[self] . identifier[_multiplex] ( identifier[target_gate] , identifier[list_of_angles] [( identifier[list_len] // literal[int] ):])
keyword[if] identifier[list_len] > literal[int] :
identifier[circuit] . identifier[append] ( identifier[multiplex_2] . identifier[to_instruction] (). identifier[mirror] (), identifier[q] [ literal[int] :- literal[int] ])
keyword[else] :
identifier[circuit] . identifier[append] ( identifier[multiplex_2] . identifier[to_instruction] (), identifier[q] [ literal[int] :- literal[int] ])
identifier[circuit] . identifier[append] ( identifier[CnotGate] (),[ identifier[msb] , identifier[lsb] ])
keyword[return] identifier[circuit] | def _multiplex(self, target_gate, list_of_angles):
"""
Return a recursive implementation of a multiplexor circuit,
where each instruction itself has a decomposition based on
smaller multiplexors.
The LSB is the multiplexor "data" and the other bits are multiplexor "select".
Args:
target_gate (Gate): Ry or Rz gate to apply to target qubit, multiplexed
over all other "select" qubits
list_of_angles (list[float]): list of rotation angles to apply Ry and Rz
Returns:
DAGCircuit: the circuit implementing the multiplexor's action
"""
list_len = len(list_of_angles)
local_num_qubits = int(math.log2(list_len)) + 1
q = QuantumRegister(local_num_qubits)
circuit = QuantumCircuit(q, name='multiplex' + local_num_qubits.__str__())
lsb = q[0]
msb = q[local_num_qubits - 1]
# case of no multiplexing: base case for recursion
if local_num_qubits == 1:
circuit.append(target_gate(list_of_angles[0]), [q[0]])
return circuit # depends on [control=['if'], data=[]]
# calc angle weights, assuming recursion (that is the lower-level
# requested angles have been correctly implemented by recursion
angle_weight = scipy.kron([[0.5, 0.5], [0.5, -0.5]], np.identity(2 ** (local_num_qubits - 2)))
# calc the combo angles
list_of_angles = angle_weight.dot(np.array(list_of_angles)).tolist()
# recursive step on half the angles fulfilling the above assumption
multiplex_1 = self._multiplex(target_gate, list_of_angles[0:list_len // 2])
circuit.append(multiplex_1.to_instruction(), q[0:-1])
# attach CNOT as follows, thereby flipping the LSB qubit
circuit.append(CnotGate(), [msb, lsb])
# implement extra efficiency from the paper of cancelling adjacent
# CNOTs (by leaving out last CNOT and reversing (NOT inverting) the
# second lower-level multiplex)
multiplex_2 = self._multiplex(target_gate, list_of_angles[list_len // 2:])
if list_len > 1:
circuit.append(multiplex_2.to_instruction().mirror(), q[0:-1]) # depends on [control=['if'], data=[]]
else:
circuit.append(multiplex_2.to_instruction(), q[0:-1])
# attach a final CNOT
circuit.append(CnotGate(), [msb, lsb])
return circuit |
def parse_enum_values_definition(lexer: Lexer) -> List[EnumValueDefinitionNode]:
    """EnumValuesDefinition: {EnumValueDefinition+}"""
    # Without an opening brace there is no values block: yield an empty list.
    if not peek(lexer, TokenKind.BRACE_L):
        return []
    nodes = many_nodes(
        lexer, TokenKind.BRACE_L, parse_enum_value_definition, TokenKind.BRACE_R
    )
    return cast(List[EnumValueDefinitionNode], nodes)
constant[EnumValuesDefinition: {EnumValueDefinition+}]
return[<ast.IfExp object at 0x7da1b1d80760>] | keyword[def] identifier[parse_enum_values_definition] ( identifier[lexer] : identifier[Lexer] )-> identifier[List] [ identifier[EnumValueDefinitionNode] ]:
literal[string]
keyword[return] (
identifier[cast] (
identifier[List] [ identifier[EnumValueDefinitionNode] ],
identifier[many_nodes] (
identifier[lexer] , identifier[TokenKind] . identifier[BRACE_L] , identifier[parse_enum_value_definition] , identifier[TokenKind] . identifier[BRACE_R]
),
)
keyword[if] identifier[peek] ( identifier[lexer] , identifier[TokenKind] . identifier[BRACE_L] )
keyword[else] []
) | def parse_enum_values_definition(lexer: Lexer) -> List[EnumValueDefinitionNode]:
"""EnumValuesDefinition: {EnumValueDefinition+}"""
return cast(List[EnumValueDefinitionNode], many_nodes(lexer, TokenKind.BRACE_L, parse_enum_value_definition, TokenKind.BRACE_R)) if peek(lexer, TokenKind.BRACE_L) else [] |
def search_packages_info(query):
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distributions '.egg-info'
    directory.
    """
    # Index every working-set distribution by its canonical project name.
    installed = {
        canonicalize_name(dist.project_name): dist
        for dist in pkg_resources.working_set
    }
    for name in query:
        dist = installed.get(canonicalize_name(name))
        if dist is None:
            # Silently skip names that are not installed, like the original.
            continue
        package = {
            'name': dist.project_name,
            'version': dist.version,
            'location': dist.location,
            'requires': [dep.project_name for dep in dist.requires()],
        }
        file_list = None
        if isinstance(dist, pkg_resources.DistInfoDistribution):
            # RECORDs should be part of .dist-info metadatas
            if dist.has_metadata('RECORD'):
                record_lines = dist.get_metadata_lines('RECORD')
                absolute = [
                    os.path.join(dist.location, line.split(',')[0])
                    for line in record_lines
                ]
                file_list = [
                    os.path.relpath(path, dist.location) for path in absolute
                ]
        # Otherwise use pip's log for .egg-info's
        elif dist.has_metadata('installed-files.txt'):
            logged = dist.get_metadata_lines('installed-files.txt')
            absolute = [os.path.join(dist.egg_info, entry) for entry in logged]
            file_list = [
                os.path.relpath(path, dist.location) for path in absolute
            ]
        if file_list:
            package['files'] = sorted(file_list)
        yield package
constant[
Gather details from installed distributions. Print distribution name,
version, location, and installed files. Installed files requires a
pip generated 'installed-files.txt' in the distributions '.egg-info'
directory.
]
variable[installed] assign[=] dictionary[[], []]
for taget[name[p]] in starred[name[pkg_resources].working_set] begin[:]
call[name[installed]][call[name[canonicalize_name], parameter[name[p].project_name]]] assign[=] name[p]
variable[query_names] assign[=] <ast.ListComp object at 0x7da2047ebcd0>
for taget[name[dist]] in starred[<ast.ListComp object at 0x7da2047ea650>] begin[:]
variable[package] assign[=] dictionary[[<ast.Constant object at 0x7da2047e86d0>, <ast.Constant object at 0x7da2047e87f0>, <ast.Constant object at 0x7da2047eaa70>, <ast.Constant object at 0x7da2047e9180>], [<ast.Attribute object at 0x7da2047e8af0>, <ast.Attribute object at 0x7da2047e8850>, <ast.Attribute object at 0x7da2047eb190>, <ast.ListComp object at 0x7da2047e9c90>]]
variable[file_list] assign[=] constant[None]
if call[name[isinstance], parameter[name[dist], name[pkg_resources].DistInfoDistribution]] begin[:]
if call[name[dist].has_metadata, parameter[constant[RECORD]]] begin[:]
variable[lines] assign[=] call[name[dist].get_metadata_lines, parameter[constant[RECORD]]]
variable[paths] assign[=] <ast.ListComp object at 0x7da20c6aaa10>
variable[paths] assign[=] <ast.ListComp object at 0x7da20c6aac20>
variable[file_list] assign[=] <ast.ListComp object at 0x7da20c6aaf20>
if name[file_list] begin[:]
call[name[package]][constant[files]] assign[=] call[name[sorted], parameter[name[file_list]]]
<ast.Yield object at 0x7da20c6a9c30> | keyword[def] identifier[search_packages_info] ( identifier[query] ):
literal[string]
identifier[installed] ={}
keyword[for] identifier[p] keyword[in] identifier[pkg_resources] . identifier[working_set] :
identifier[installed] [ identifier[canonicalize_name] ( identifier[p] . identifier[project_name] )]= identifier[p]
identifier[query_names] =[ identifier[canonicalize_name] ( identifier[name] ) keyword[for] identifier[name] keyword[in] identifier[query] ]
keyword[for] identifier[dist] keyword[in] [ identifier[installed] [ identifier[pkg] ] keyword[for] identifier[pkg] keyword[in] identifier[query_names] keyword[if] identifier[pkg] keyword[in] identifier[installed] ]:
identifier[package] ={
literal[string] : identifier[dist] . identifier[project_name] ,
literal[string] : identifier[dist] . identifier[version] ,
literal[string] : identifier[dist] . identifier[location] ,
literal[string] :[ identifier[dep] . identifier[project_name] keyword[for] identifier[dep] keyword[in] identifier[dist] . identifier[requires] ()],
}
identifier[file_list] = keyword[None]
keyword[if] identifier[isinstance] ( identifier[dist] , identifier[pkg_resources] . identifier[DistInfoDistribution] ):
keyword[if] identifier[dist] . identifier[has_metadata] ( literal[string] ):
identifier[lines] = identifier[dist] . identifier[get_metadata_lines] ( literal[string] )
identifier[paths] =[ identifier[l] . identifier[split] ( literal[string] )[ literal[int] ] keyword[for] identifier[l] keyword[in] identifier[lines] ]
identifier[paths] =[ identifier[os] . identifier[path] . identifier[join] ( identifier[dist] . identifier[location] , identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[paths] ]
identifier[file_list] =[ identifier[os] . identifier[path] . identifier[relpath] ( identifier[p] , identifier[dist] . identifier[location] ) keyword[for] identifier[p] keyword[in] identifier[paths] ]
keyword[else] :
keyword[if] identifier[dist] . identifier[has_metadata] ( literal[string] ):
identifier[paths] = identifier[dist] . identifier[get_metadata_lines] ( literal[string] )
identifier[paths] =[ identifier[os] . identifier[path] . identifier[join] ( identifier[dist] . identifier[egg_info] , identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[paths] ]
identifier[file_list] =[ identifier[os] . identifier[path] . identifier[relpath] ( identifier[p] , identifier[dist] . identifier[location] ) keyword[for] identifier[p] keyword[in] identifier[paths] ]
keyword[if] identifier[file_list] :
identifier[package] [ literal[string] ]= identifier[sorted] ( identifier[file_list] )
keyword[yield] identifier[package] | def search_packages_info(query):
"""
Gather details from installed distributions. Print distribution name,
version, location, and installed files. Installed files requires a
pip generated 'installed-files.txt' in the distributions '.egg-info'
directory.
"""
installed = {}
for p in pkg_resources.working_set:
installed[canonicalize_name(p.project_name)] = p # depends on [control=['for'], data=['p']]
query_names = [canonicalize_name(name) for name in query]
for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
package = {'name': dist.project_name, 'version': dist.version, 'location': dist.location, 'requires': [dep.project_name for dep in dist.requires()]}
file_list = None
if isinstance(dist, pkg_resources.DistInfoDistribution):
# RECORDs should be part of .dist-info metadatas
if dist.has_metadata('RECORD'):
lines = dist.get_metadata_lines('RECORD')
paths = [l.split(',')[0] for l in lines]
paths = [os.path.join(dist.location, p) for p in paths]
file_list = [os.path.relpath(p, dist.location) for p in paths] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Otherwise use pip's log for .egg-info's
elif dist.has_metadata('installed-files.txt'):
paths = dist.get_metadata_lines('installed-files.txt')
paths = [os.path.join(dist.egg_info, p) for p in paths]
file_list = [os.path.relpath(p, dist.location) for p in paths] # depends on [control=['if'], data=[]]
if file_list:
package['files'] = sorted(file_list) # depends on [control=['if'], data=[]]
yield package # depends on [control=['for'], data=['dist']] |
def image_export(self, image_name, dest_url, remote_host=None):
    """Export the specific image to remote host or local file system.

    :param image_name: image name that can be uniquely identify an image
    :param dest_url: the location to store the exported image, eg.
                     file:///opt/images, the image will be stored in folder
                     /opt/images/
    :param remote_host: the server that export image to, the format is
                        username@IP eg. nova@192.168.99.1, if remote_host is
                        None, it means the image will be stored in local server
    :returns a dictionary that contains the exported image info
    {
    'image_name': the image_name that exported
    'image_path': the image_path after exported
    'os_version': the os version of the exported image
    'md5sum': the md5sum of the original image
    }
    """
    # Look the image up in the repository DB; an empty result means it was
    # never imported.
    image_info = self._ImageDbOperator.image_query_record(image_name)
    if not image_info:
        msg = ("The image %s does not exist in image repository"
               % image_name)
        LOG.error(msg)
        raise exception.SDKImageOperationError(rs=20, img=image_name)
    image_type = image_info[0]['type']
    # TODO: (nafei) according to image_type, detect image exported path
    # For multiple disk image, make the tgz firstly, the specify the
    # source_path to be something like: 0100-0101-0102.tgz
    if image_type == 'rootonly':
        # Exported file lives under
        # <repo>/DEPLOY/<osdistro>/<image_name>/<root_vdev>.
        source_path = '/'.join([CONF.image.sdk_image_repository,
                       const.IMAGE_TYPE['DEPLOY'],
                       image_info[0]['imageosdistro'],
                       image_name,
                       CONF.zvm.user_root_vdev])
    else:
        # NOTE(review): this branch leaves source_path unassigned, so the
        # export call below would raise NameError for non-'rootonly' images
        # — presumably multi-disk support is intentionally unfinished (see
        # TODO above); confirm before relying on other image types.
        pass
    # Dispatch on the URL scheme (e.g. file://) to pick the export backend.
    self._scheme2backend(urlparse.urlparse(dest_url).scheme).image_export(
        source_path, dest_url,
        remote_host=remote_host)
    # TODO: (nafei) for multiple disks image, update the expect_dict
    # to be the tgz's md5sum
    export_dict = {'image_name': image_name,
                   'image_path': dest_url,
                   'os_version': image_info[0]['imageosdistro'],
                   'md5sum': image_info[0]['md5sum']}
    LOG.info("Image %s export successfully" % image_name)
    return export_dict
constant[Export the specific image to remote host or local file system
:param image_name: image name that can be uniquely identify an image
:param dest_path: the location to store exported image, eg.
/opt/images, the image will be stored in folder
/opt/images/
:param remote_host: the server that export image to, the format is
username@IP eg. nova@192.168.99.1, if remote_host is
None, it means the image will be stored in local server
:returns a dictionary that contains the exported image info
{
'image_name': the image_name that exported
'image_path': the image_path after exported
'os_version': the os version of the exported image
'md5sum': the md5sum of the original image
}
]
variable[image_info] assign[=] call[name[self]._ImageDbOperator.image_query_record, parameter[name[image_name]]]
if <ast.UnaryOp object at 0x7da20c7cb400> begin[:]
variable[msg] assign[=] binary_operation[constant[The image %s does not exist in image repository] <ast.Mod object at 0x7da2590d6920> name[image_name]]
call[name[LOG].error, parameter[name[msg]]]
<ast.Raise object at 0x7da1b26af730>
variable[image_type] assign[=] call[call[name[image_info]][constant[0]]][constant[type]]
if compare[name[image_type] equal[==] constant[rootonly]] begin[:]
variable[source_path] assign[=] call[constant[/].join, parameter[list[[<ast.Attribute object at 0x7da1b26af970>, <ast.Subscript object at 0x7da1b26affa0>, <ast.Subscript object at 0x7da1b26ae050>, <ast.Name object at 0x7da1b26ac7c0>, <ast.Attribute object at 0x7da1b26aeaa0>]]]]
call[call[name[self]._scheme2backend, parameter[call[name[urlparse].urlparse, parameter[name[dest_url]]].scheme]].image_export, parameter[name[source_path], name[dest_url]]]
variable[export_dict] assign[=] dictionary[[<ast.Constant object at 0x7da1b26ad0c0>, <ast.Constant object at 0x7da1b26aea10>, <ast.Constant object at 0x7da1b26afa60>, <ast.Constant object at 0x7da1b26af6d0>], [<ast.Name object at 0x7da1b26ae590>, <ast.Name object at 0x7da1b26af910>, <ast.Subscript object at 0x7da1b26ac4f0>, <ast.Subscript object at 0x7da1b26ae0e0>]]
call[name[LOG].info, parameter[binary_operation[constant[Image %s export successfully] <ast.Mod object at 0x7da2590d6920> name[image_name]]]]
return[name[export_dict]] | keyword[def] identifier[image_export] ( identifier[self] , identifier[image_name] , identifier[dest_url] , identifier[remote_host] = keyword[None] ):
literal[string]
identifier[image_info] = identifier[self] . identifier[_ImageDbOperator] . identifier[image_query_record] ( identifier[image_name] )
keyword[if] keyword[not] identifier[image_info] :
identifier[msg] =( literal[string]
% identifier[image_name] )
identifier[LOG] . identifier[error] ( identifier[msg] )
keyword[raise] identifier[exception] . identifier[SDKImageOperationError] ( identifier[rs] = literal[int] , identifier[img] = identifier[image_name] )
identifier[image_type] = identifier[image_info] [ literal[int] ][ literal[string] ]
keyword[if] identifier[image_type] == literal[string] :
identifier[source_path] = literal[string] . identifier[join] ([ identifier[CONF] . identifier[image] . identifier[sdk_image_repository] ,
identifier[const] . identifier[IMAGE_TYPE] [ literal[string] ],
identifier[image_info] [ literal[int] ][ literal[string] ],
identifier[image_name] ,
identifier[CONF] . identifier[zvm] . identifier[user_root_vdev] ])
keyword[else] :
keyword[pass]
identifier[self] . identifier[_scheme2backend] ( identifier[urlparse] . identifier[urlparse] ( identifier[dest_url] ). identifier[scheme] ). identifier[image_export] (
identifier[source_path] , identifier[dest_url] ,
identifier[remote_host] = identifier[remote_host] )
identifier[export_dict] ={ literal[string] : identifier[image_name] ,
literal[string] : identifier[dest_url] ,
literal[string] : identifier[image_info] [ literal[int] ][ literal[string] ],
literal[string] : identifier[image_info] [ literal[int] ][ literal[string] ]}
identifier[LOG] . identifier[info] ( literal[string] % identifier[image_name] )
keyword[return] identifier[export_dict] | def image_export(self, image_name, dest_url, remote_host=None):
"""Export the specific image to remote host or local file system
:param image_name: image name that can be uniquely identify an image
:param dest_path: the location to store exported image, eg.
/opt/images, the image will be stored in folder
/opt/images/
:param remote_host: the server that export image to, the format is
username@IP eg. nova@192.168.99.1, if remote_host is
None, it means the image will be stored in local server
:returns a dictionary that contains the exported image info
{
'image_name': the image_name that exported
'image_path': the image_path after exported
'os_version': the os version of the exported image
'md5sum': the md5sum of the original image
}
"""
image_info = self._ImageDbOperator.image_query_record(image_name)
if not image_info:
msg = 'The image %s does not exist in image repository' % image_name
LOG.error(msg)
raise exception.SDKImageOperationError(rs=20, img=image_name) # depends on [control=['if'], data=[]]
image_type = image_info[0]['type']
# TODO: (nafei) according to image_type, detect image exported path
# For multiple disk image, make the tgz firstly, the specify the
# source_path to be something like: 0100-0101-0102.tgz
if image_type == 'rootonly':
source_path = '/'.join([CONF.image.sdk_image_repository, const.IMAGE_TYPE['DEPLOY'], image_info[0]['imageosdistro'], image_name, CONF.zvm.user_root_vdev]) # depends on [control=['if'], data=[]]
else:
pass
self._scheme2backend(urlparse.urlparse(dest_url).scheme).image_export(source_path, dest_url, remote_host=remote_host)
# TODO: (nafei) for multiple disks image, update the expect_dict
# to be the tgz's md5sum
export_dict = {'image_name': image_name, 'image_path': dest_url, 'os_version': image_info[0]['imageosdistro'], 'md5sum': image_info[0]['md5sum']}
LOG.info('Image %s export successfully' % image_name)
return export_dict |
def simxGetInMessageInfo(clientID, infoType):
    '''
    Please have a look at the function description/documentation in the V-REP user manual
    '''
    # Output parameter filled in by the C call; return code comes first.
    out_value = ct.c_int()
    ret_code = c_GetInMessageInfo(clientID, infoType, ct.byref(out_value))
    return ret_code, out_value.value
constant[
Please have a look at the function description/documentation in the V-REP user manual
]
variable[info] assign[=] call[name[ct].c_int, parameter[]]
return[tuple[[<ast.Call object at 0x7da18dc04a30>, <ast.Attribute object at 0x7da18dc07df0>]]] | keyword[def] identifier[simxGetInMessageInfo] ( identifier[clientID] , identifier[infoType] ):
literal[string]
identifier[info] = identifier[ct] . identifier[c_int] ()
keyword[return] identifier[c_GetInMessageInfo] ( identifier[clientID] , identifier[infoType] , identifier[ct] . identifier[byref] ( identifier[info] )), identifier[info] . identifier[value] | def simxGetInMessageInfo(clientID, infoType):
"""
Please have a look at the function description/documentation in the V-REP user manual
"""
info = ct.c_int()
return (c_GetInMessageInfo(clientID, infoType, ct.byref(info)), info.value) |
def cat(self, paths, check_crc=False):
    ''' Fetch all files that match the source file pattern
    and display their content on stdout.
    :param paths: Paths to display
    :type paths: list of strings
    :param check_crc: Check for checksum errors
    :type check_crc: boolean
    :returns: a generator that yields strings
    '''
    # Validate the argument shape before walking anything.
    if not isinstance(paths, list):
        raise InvalidInputException("Paths should be a list")
    if not paths:
        raise InvalidInputException("cat: no path given")

    def processor(path, node, check_crc=check_crc):
        # Bind check_crc as a default so the per-node callback carries it.
        return self._handle_cat(path, node, check_crc)

    found = self._find_items(paths, processor, include_toplevel=True,
                             include_children=False, recurse=False)
    for entry in found:
        if entry:
            yield entry
constant[ Fetch all files that match the source file pattern
and display their content on stdout.
:param paths: Paths to display
:type paths: list of strings
:param check_crc: Check for checksum errors
:type check_crc: boolean
:returns: a generator that yields strings
]
if <ast.UnaryOp object at 0x7da1b0716740> begin[:]
<ast.Raise object at 0x7da1b08f8bb0>
if <ast.UnaryOp object at 0x7da1b08fa230> begin[:]
<ast.Raise object at 0x7da1b08fb190>
variable[processor] assign[=] <ast.Lambda object at 0x7da1b08f84c0>
for taget[name[item]] in starred[call[name[self]._find_items, parameter[name[paths], name[processor]]]] begin[:]
if name[item] begin[:]
<ast.Yield object at 0x7da1b08fb9d0> | keyword[def] identifier[cat] ( identifier[self] , identifier[paths] , identifier[check_crc] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[paths] , identifier[list] ):
keyword[raise] identifier[InvalidInputException] ( literal[string] )
keyword[if] keyword[not] identifier[paths] :
keyword[raise] identifier[InvalidInputException] ( literal[string] )
identifier[processor] = keyword[lambda] identifier[path] , identifier[node] , identifier[check_crc] = identifier[check_crc] : identifier[self] . identifier[_handle_cat] ( identifier[path] , identifier[node] , identifier[check_crc] )
keyword[for] identifier[item] keyword[in] identifier[self] . identifier[_find_items] ( identifier[paths] , identifier[processor] , identifier[include_toplevel] = keyword[True] ,
identifier[include_children] = keyword[False] , identifier[recurse] = keyword[False] ):
keyword[if] identifier[item] :
keyword[yield] identifier[item] | def cat(self, paths, check_crc=False):
""" Fetch all files that match the source file pattern
and display their content on stdout.
:param paths: Paths to display
:type paths: list of strings
:param check_crc: Check for checksum errors
:type check_crc: boolean
:returns: a generator that yields strings
"""
if not isinstance(paths, list):
raise InvalidInputException('Paths should be a list') # depends on [control=['if'], data=[]]
if not paths:
raise InvalidInputException('cat: no path given') # depends on [control=['if'], data=[]]
processor = lambda path, node, check_crc=check_crc: self._handle_cat(path, node, check_crc)
for item in self._find_items(paths, processor, include_toplevel=True, include_children=False, recurse=False):
if item:
yield item # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] |
def set_string(self, string):
    """Install *string* as the working text, record its length, and reset
    the scan positions via ``reset_position``."""
    self.string, self.length = string, len(string)
    self.reset_position()
constant[Set the working string and its length then reset positions.]
name[self].string assign[=] name[string]
name[self].length assign[=] call[name[len], parameter[name[string]]]
call[name[self].reset_position, parameter[]] | keyword[def] identifier[set_string] ( identifier[self] , identifier[string] ):
literal[string]
identifier[self] . identifier[string] = identifier[string]
identifier[self] . identifier[length] = identifier[len] ( identifier[string] )
identifier[self] . identifier[reset_position] () | def set_string(self, string):
"""Set the working string and its length then reset positions."""
self.string = string
self.length = len(string)
self.reset_position() |
def is_valid_uuid(id):
    """Return True if id is a valid UUID string, False otherwise.

    :param id: candidate value; anything that is not a ``str`` is rejected.
    :returns: bool
    """
    # ``basestring`` only exists on Python 2; the original check raised
    # NameError on Python 3 for every call. Use ``str`` instead.
    if not isinstance(id, str):
        return False
    try:
        # Parsing is the validity test; the constructed UUID is not needed.
        UUID(id, version=4)
    except ValueError:
        return False
    return True
constant[Return True if id is a valid UUID, False otherwise.]
if <ast.UnaryOp object at 0x7da1b025d840> begin[:]
return[constant[False]]
<ast.Try object at 0x7da1b025fdc0>
return[constant[True]] | keyword[def] identifier[is_valid_uuid] ( identifier[id] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[id] , identifier[basestring] ):
keyword[return] keyword[False]
keyword[try] :
identifier[val] = identifier[UUID] ( identifier[id] , identifier[version] = literal[int] )
keyword[except] identifier[ValueError] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def is_valid_uuid(id):
"""Return True if id is a valid UUID, False otherwise."""
if not isinstance(id, basestring):
return False # depends on [control=['if'], data=[]]
try:
val = UUID(id, version=4) # depends on [control=['try'], data=[]]
except ValueError:
return False # depends on [control=['except'], data=[]]
return True |
def env_int(name: str, required: bool=False, default: Union[Type[empty], int]=empty) -> int:
    """Pulls an environment variable out of the environment and casts it to an
    integer. If the name is not present in the environment and no default is
    specified then a ``ValueError`` will be raised. Similarly, if the
    environment value is not castable to an integer, a ``ValueError`` will be
    raised.
    :param name: The name of the environment variable be pulled
    :type name: str
    :param required: Whether the environment variable is required. If ``True``
    and the variable is not present, a ``KeyError`` is raised.
    :type required: bool
    :param default: The value to return if the environment variable is not
    present. (Providing a default alongside setting ``required=True`` will raise
    a ``ValueError``)
    :type default: int
    """
    value = get_env_value(name, required=required, default=default)
    # ``empty`` is the sentinel meaning "no value and no default supplied".
    if value is not empty:
        return int(value)
    raise ValueError(
        "`env_int` requires either a default value to be specified, or for "
        "the variable to be present in the environment"
    )
constant[Pulls an environment variable out of the environment and casts it to an
integer. If the name is not present in the environment and no default is
specified then a ``ValueError`` will be raised. Similarly, if the
environment value is not castable to an integer, a ``ValueError`` will be
raised.
:param name: The name of the environment variable be pulled
:type name: str
:param required: Whether the environment variable is required. If ``True``
and the variable is not present, a ``KeyError`` is raised.
:type required: bool
:param default: The value to return if the environment variable is not
present. (Providing a default alongside setting ``required=True`` will raise
a ``ValueError``)
:type default: bool
]
variable[value] assign[=] call[name[get_env_value], parameter[name[name]]]
if compare[name[value] is name[empty]] begin[:]
<ast.Raise object at 0x7da1b17f96f0>
return[call[name[int], parameter[name[value]]]] | keyword[def] identifier[env_int] ( identifier[name] : identifier[str] , identifier[required] : identifier[bool] = keyword[False] , identifier[default] : identifier[Union] [ identifier[Type] [ identifier[empty] ], identifier[int] ]= identifier[empty] )-> identifier[int] :
literal[string]
identifier[value] = identifier[get_env_value] ( identifier[name] , identifier[required] = identifier[required] , identifier[default] = identifier[default] )
keyword[if] identifier[value] keyword[is] identifier[empty] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string]
)
keyword[return] identifier[int] ( identifier[value] ) | def env_int(name: str, required: bool=False, default: Union[Type[empty], int]=empty) -> int:
"""Pulls an environment variable out of the environment and casts it to an
integer. If the name is not present in the environment and no default is
specified then a ``ValueError`` will be raised. Similarly, if the
environment value is not castable to an integer, a ``ValueError`` will be
raised.
:param name: The name of the environment variable be pulled
:type name: str
:param required: Whether the environment variable is required. If ``True``
and the variable is not present, a ``KeyError`` is raised.
:type required: bool
:param default: The value to return if the environment variable is not
present. (Providing a default alongside setting ``required=True`` will raise
a ``ValueError``)
:type default: bool
"""
value = get_env_value(name, required=required, default=default)
if value is empty:
raise ValueError('`env_int` requires either a default value to be specified, or for the variable to be present in the environment') # depends on [control=['if'], data=[]]
return int(value) |
def get_extr_license_text(self, extr_lic):
    """
    Return extracted text from an ExtractedLicense or None.
    """
    pattern = (extr_lic, self.spdx_namespace['extractedText'], None)
    matches = list(self.graph.triples(pattern))
    if not matches:
        # Missing property is a validation error: record it and bail out.
        self.error = True
        msg = 'Extracted license must have extractedText property'
        self.logger.log(msg)
        return
    if len(matches) > 1:
        # Exactly one extractedText triple is allowed per license.
        self.more_than_one_error('extracted license text')
        return
    _subject, _predicate, text = matches[0]
    return text
constant[
Return extracted text from an ExtractedLicense or None.
]
variable[text_tripples] assign[=] call[name[list], parameter[call[name[self].graph.triples, parameter[tuple[[<ast.Name object at 0x7da18dc9a860>, <ast.Subscript object at 0x7da18dc98ca0>, <ast.Constant object at 0x7da18dc99c90>]]]]]]
if <ast.UnaryOp object at 0x7da18dc9b820> begin[:]
name[self].error assign[=] constant[True]
variable[msg] assign[=] constant[Extracted license must have extractedText property]
call[name[self].logger.log, parameter[name[msg]]]
return[None]
if compare[call[name[len], parameter[name[text_tripples]]] greater[>] constant[1]] begin[:]
call[name[self].more_than_one_error, parameter[constant[extracted license text]]]
return[None]
variable[text_tripple] assign[=] call[name[text_tripples]][constant[0]]
<ast.Tuple object at 0x7da1b01d9d20> assign[=] name[text_tripple]
return[name[text]] | keyword[def] identifier[get_extr_license_text] ( identifier[self] , identifier[extr_lic] ):
literal[string]
identifier[text_tripples] = identifier[list] ( identifier[self] . identifier[graph] . identifier[triples] (( identifier[extr_lic] , identifier[self] . identifier[spdx_namespace] [ literal[string] ], keyword[None] )))
keyword[if] keyword[not] identifier[text_tripples] :
identifier[self] . identifier[error] = keyword[True]
identifier[msg] = literal[string]
identifier[self] . identifier[logger] . identifier[log] ( identifier[msg] )
keyword[return]
keyword[if] identifier[len] ( identifier[text_tripples] )> literal[int] :
identifier[self] . identifier[more_than_one_error] ( literal[string] )
keyword[return]
identifier[text_tripple] = identifier[text_tripples] [ literal[int] ]
identifier[_s] , identifier[_p] , identifier[text] = identifier[text_tripple]
keyword[return] identifier[text] | def get_extr_license_text(self, extr_lic):
"""
Return extracted text from an ExtractedLicense or None.
"""
text_tripples = list(self.graph.triples((extr_lic, self.spdx_namespace['extractedText'], None)))
if not text_tripples:
self.error = True
msg = 'Extracted license must have extractedText property'
self.logger.log(msg)
return # depends on [control=['if'], data=[]]
if len(text_tripples) > 1:
self.more_than_one_error('extracted license text')
return # depends on [control=['if'], data=[]]
text_tripple = text_tripples[0]
(_s, _p, text) = text_tripple
return text |
def push(self, lines):
    """Push one or more lines of input.

    The given lines are stored, and the method reports whether the
    accumulated source now forms a complete Python block.  Exceptions
    raised during compilation are swallowed, but an exception still
    makes the method return True.

    Parameters
    ----------
    lines : string
        One or more lines of Python input.

    Returns
    -------
    is_complete : boolean
        True if the current input source (the result of the current input
        plus prior inputs) forms a complete Python execution block.  The
        value is also cached in the private ``_is_complete`` attribute, so
        it can be queried at any time.
    """
    # Cell mode starts from a clean slate on every push.
    if self.input_mode == 'cell':
        self.reset()
    self._store(lines)
    source = self.source
    # Clear stale state up front so an exception raised during
    # compilation cannot leave inconsistent code/source attributes.
    self.code = None
    self._is_complete = None
    # An explicit line continuation always means more input is needed.
    if source.rstrip().endswith('\\'):
        return False
    self._update_indent(lines)
    try:
        self.code = self._compile(source, symbol="exec")
    except (SyntaxError, OverflowError, ValueError, TypeError,
            MemoryError):
        # Invalid syntax can surface as any of these compiler errors.
        # Treat the block as "ready" so the broken source reaches the
        # kernel, which may still apply ipython special-syntax rewrites.
        self._is_complete = True
    else:
        # Compilation succeeded; the block is complete iff we actually
        # got a code object back (None means "needs more input").
        self._is_complete = self.code is not None
    return self._is_complete
constant[Push one or more lines of input.
This stores the given lines and returns a status code indicating
whether the code forms a complete Python block or not.
Any exceptions generated in compilation are swallowed, but if an
exception was produced, the method returns True.
Parameters
----------
lines : string
One or more lines of Python input.
Returns
-------
is_complete : boolean
True if the current input source (the result of the current input
plus prior inputs) forms a complete Python execution block. Note that
this value is also stored as a private attribute (``_is_complete``), so it
can be queried at any time.
]
if compare[name[self].input_mode equal[==] constant[cell]] begin[:]
call[name[self].reset, parameter[]]
call[name[self]._store, parameter[name[lines]]]
variable[source] assign[=] name[self].source
<ast.Tuple object at 0x7da1b26ada80> assign[=] tuple[[<ast.Constant object at 0x7da1b26ac490>, <ast.Constant object at 0x7da1b26afa60>]]
if call[call[name[source].rstrip, parameter[]].endswith, parameter[constant[\]]] begin[:]
return[constant[False]]
call[name[self]._update_indent, parameter[name[lines]]]
<ast.Try object at 0x7da1b26ad990>
return[name[self]._is_complete] | keyword[def] identifier[push] ( identifier[self] , identifier[lines] ):
literal[string]
keyword[if] identifier[self] . identifier[input_mode] == literal[string] :
identifier[self] . identifier[reset] ()
identifier[self] . identifier[_store] ( identifier[lines] )
identifier[source] = identifier[self] . identifier[source]
identifier[self] . identifier[code] , identifier[self] . identifier[_is_complete] = keyword[None] , keyword[None]
keyword[if] identifier[source] . identifier[rstrip] (). identifier[endswith] ( literal[string] ):
keyword[return] keyword[False]
identifier[self] . identifier[_update_indent] ( identifier[lines] )
keyword[try] :
identifier[self] . identifier[code] = identifier[self] . identifier[_compile] ( identifier[source] , identifier[symbol] = literal[string] )
keyword[except] ( identifier[SyntaxError] , identifier[OverflowError] , identifier[ValueError] , identifier[TypeError] ,
identifier[MemoryError] ):
identifier[self] . identifier[_is_complete] = keyword[True]
keyword[else] :
identifier[self] . identifier[_is_complete] = identifier[self] . identifier[code] keyword[is] keyword[not] keyword[None]
keyword[return] identifier[self] . identifier[_is_complete] | def push(self, lines):
"""Push one or more lines of input.
This stores the given lines and returns a status code indicating
whether the code forms a complete Python block or not.
Any exceptions generated in compilation are swallowed, but if an
exception was produced, the method returns True.
Parameters
----------
lines : string
One or more lines of Python input.
Returns
-------
is_complete : boolean
True if the current input source (the result of the current input
plus prior inputs) forms a complete Python execution block. Note that
this value is also stored as a private attribute (``_is_complete``), so it
can be queried at any time.
"""
if self.input_mode == 'cell':
self.reset() # depends on [control=['if'], data=[]]
self._store(lines)
source = self.source
# Before calling _compile(), reset the code object to None so that if an
# exception is raised in compilation, we don't mislead by having
# inconsistent code/source attributes.
(self.code, self._is_complete) = (None, None)
# Honor termination lines properly
if source.rstrip().endswith('\\'):
return False # depends on [control=['if'], data=[]]
self._update_indent(lines)
try:
self.code = self._compile(source, symbol='exec') # depends on [control=['try'], data=[]]
# Invalid syntax can produce any of a number of different errors from
# inside the compiler, so we have to catch them all. Syntax errors
# immediately produce a 'ready' block, so the invalid Python can be
# sent to the kernel for evaluation with possible ipython
# special-syntax conversion.
except (SyntaxError, OverflowError, ValueError, TypeError, MemoryError):
self._is_complete = True # depends on [control=['except'], data=[]]
else:
# Compilation didn't produce any exceptions (though it may not have
# given a complete code object)
self._is_complete = self.code is not None
return self._is_complete |
def _getEventsOnDay(self, request, day):
    """Return my child events for a given day."""
    # Query a one-day range [day, day] and unwrap the single result.
    events_by_day = getAllEventsByDay(request, day, day, home=self)
    return events_by_day[0]
constant[Return my child events for a given day.]
return[call[call[name[getAllEventsByDay], parameter[name[request], name[day], name[day]]]][constant[0]]] | keyword[def] identifier[_getEventsOnDay] ( identifier[self] , identifier[request] , identifier[day] ):
literal[string]
keyword[return] identifier[getAllEventsByDay] ( identifier[request] , identifier[day] , identifier[day] , identifier[home] = identifier[self] )[ literal[int] ] | def _getEventsOnDay(self, request, day):
"""Return my child events for a given day."""
return getAllEventsByDay(request, day, day, home=self)[0] |
def track(context, file_names):
    """Keep track of each file in list file_names.

    Tracking does not create or delete the actual file, it only tells the
    version control system whether to maintain versions (to keep track) of
    the file.
    """
    repo = context.obj
    repo.find_repo_type()
    for file_name in file_names:
        repo.call([repo.vc_name, 'add', file_name])
constant[Keep track of each file in list file_names.
Tracking does not create or delete the actual file, it only tells the
version control system whether to maintain versions (to keep track) of
the file.
]
call[name[context].obj.find_repo_type, parameter[]]
for taget[name[fn]] in starred[name[file_names]] begin[:]
call[name[context].obj.call, parameter[list[[<ast.Attribute object at 0x7da18f723310>, <ast.Constant object at 0x7da18f720100>, <ast.Name object at 0x7da18f7207c0>]]]] | keyword[def] identifier[track] ( identifier[context] , identifier[file_names] ):
literal[string]
identifier[context] . identifier[obj] . identifier[find_repo_type] ()
keyword[for] identifier[fn] keyword[in] identifier[file_names] :
identifier[context] . identifier[obj] . identifier[call] ([ identifier[context] . identifier[obj] . identifier[vc_name] , literal[string] , identifier[fn] ]) | def track(context, file_names):
"""Keep track of each file in list file_names.
Tracking does not create or delete the actual file, it only tells the
version control system whether to maintain versions (to keep track) of
the file.
"""
context.obj.find_repo_type()
for fn in file_names:
context.obj.call([context.obj.vc_name, 'add', fn]) # depends on [control=['for'], data=['fn']] |
def save_config(self, cmd="save force", confirm=False, confirm_response=""):
"""Save Config."""
return super(HPComwareBase, self).save_config(
cmd=cmd, confirm=confirm, confirm_response=confirm_response
) | def function[save_config, parameter[self, cmd, confirm, confirm_response]]:
constant[Save Config.]
return[call[call[name[super], parameter[name[HPComwareBase], name[self]]].save_config, parameter[]]] | keyword[def] identifier[save_config] ( identifier[self] , identifier[cmd] = literal[string] , identifier[confirm] = keyword[False] , identifier[confirm_response] = literal[string] ):
literal[string]
keyword[return] identifier[super] ( identifier[HPComwareBase] , identifier[self] ). identifier[save_config] (
identifier[cmd] = identifier[cmd] , identifier[confirm] = identifier[confirm] , identifier[confirm_response] = identifier[confirm_response]
) | def save_config(self, cmd='save force', confirm=False, confirm_response=''):
"""Save Config."""
return super(HPComwareBase, self).save_config(cmd=cmd, confirm=confirm, confirm_response=confirm_response) |
def get_alias(self, index=None, name=None, params=None):
    """
    Retrieve one or more index aliases.
    `<http://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html>`_
    :arg index: A comma-separated list of index names to filter aliases
    :arg name: A comma-separated list of alias names to return
    :arg allow_no_indices: Whether to ignore if a wildcard indices
        expression resolves into no concrete indices. (This includes `_all`
        string or when no indices have been specified)
    :arg expand_wildcards: Whether to expand wildcard expression to concrete
        indices that are open, closed or both., default 'all', valid choices
        are: 'open', 'closed', 'none', 'all'
    :arg ignore_unavailable: Whether specified concrete indices should be
        ignored when unavailable (missing or closed)
    :arg local: Return local information, do not retrieve the state from
        master node (default: false)
    """
    endpoint = _make_path(index, "_alias", name)
    return self.transport.perform_request("GET", endpoint, params=params)
constant[
Retrieve a specified alias.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html>`_
:arg index: A comma-separated list of index names to filter aliases
:arg name: A comma-separated list of alias names to return
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices. (This includes `_all`
string or when no indices have been specified)
:arg expand_wildcards: Whether to expand wildcard expression to concrete
indices that are open, closed or both., default 'all', valid choices
are: 'open', 'closed', 'none', 'all'
:arg ignore_unavailable: Whether specified concrete indices should be
ignored when unavailable (missing or closed)
:arg local: Return local information, do not retrieve the state from
master node (default: false)
]
return[call[name[self].transport.perform_request, parameter[constant[GET], call[name[_make_path], parameter[name[index], constant[_alias], name[name]]]]]] | keyword[def] identifier[get_alias] ( identifier[self] , identifier[index] = keyword[None] , identifier[name] = keyword[None] , identifier[params] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[transport] . identifier[perform_request] (
literal[string] , identifier[_make_path] ( identifier[index] , literal[string] , identifier[name] ), identifier[params] = identifier[params]
) | def get_alias(self, index=None, name=None, params=None):
"""
Retrieve a specified alias.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html>`_
:arg index: A comma-separated list of index names to filter aliases
:arg name: A comma-separated list of alias names to return
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices. (This includes `_all`
string or when no indices have been specified)
:arg expand_wildcards: Whether to expand wildcard expression to concrete
indices that are open, closed or both., default 'all', valid choices
are: 'open', 'closed', 'none', 'all'
:arg ignore_unavailable: Whether specified concrete indices should be
ignored when unavailable (missing or closed)
:arg local: Return local information, do not retrieve the state from
master node (default: false)
"""
return self.transport.perform_request('GET', _make_path(index, '_alias', name), params=params) |
def _paload8(ins):
    ''' Loads an 8 bit value from a memory address
    If 2nd arg. start with '*', it is always treated as
    an indirect value.
    '''
    # _paddr leaves the target address in HL; read the byte there and
    # push it (in A) onto the stack.
    output = _paddr(ins.quad[2])
    output.extend(['ld a, (hl)', 'push af'])
    return output
constant[ Loads an 8 bit value from a memory address
If 2nd arg. start with '*', it is always treated as
an indirect value.
]
variable[output] assign[=] call[name[_paddr], parameter[call[name[ins].quad][constant[2]]]]
call[name[output].append, parameter[constant[ld a, (hl)]]]
call[name[output].append, parameter[constant[push af]]]
return[name[output]] | keyword[def] identifier[_paload8] ( identifier[ins] ):
literal[string]
identifier[output] = identifier[_paddr] ( identifier[ins] . identifier[quad] [ literal[int] ])
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
keyword[return] identifier[output] | def _paload8(ins):
""" Loads an 8 bit value from a memory address
If 2nd arg. start with '*', it is always treated as
an indirect value.
"""
output = _paddr(ins.quad[2])
output.append('ld a, (hl)')
output.append('push af')
return output |
def pre_attention(self, segment_number, query_antecedent,
                  memory_antecedent, bias):
  """Called prior to self-attention, to incorporate memory items.
  Args:
    segment_number: an integer Tensor with shape [batch]
    query_antecedent: a Tensor with shape [batch, length_q, channels]
    memory_antecedent: must be None. Attention normally allows this to be a
      Tensor with shape [batch, length_m, channels], but we currently only
      support memory for decoder-side self-attention.
    bias: bias Tensor (see attention_bias())
  Returns:
    (data, new_query_antecedent, new_memory_antecedent, new_bias)
  """
  with tf.variable_scope(self.name + "/pre_attention", reuse=tf.AUTO_REUSE):
    assert memory_antecedent is None, "We only support language modeling"
    # Runtime guard: the incoming (sub-)batch may be smaller than the
    # memory's fixed batch size, but never larger.
    with tf.control_dependencies([
        tf.assert_greater_equal(self.batch_size, tf.size(segment_number))]):
      # Pad the batch dimension up to the memory's fixed batch size.
      difference = self.batch_size - tf.size(segment_number)
      segment_number = tf.pad(segment_number, [[0, difference]])
      # Reset memory slots whose new segment number is *lower* than the
      # stored one, i.e. batch entries that started a fresh segment.
      reset_op = self.reset(tf.reshape(tf.where(
          tf.less(segment_number, self.segment_number)), [-1]))
    memory_results = {}
    # Ordering matters: reset stale slots first, then record the new
    # segment numbers, and only then read from the memory.
    with tf.control_dependencies([reset_op]):
      with tf.control_dependencies([
          self.update_segment_number(segment_number)]):
        # Pad the queries along batch the same way as segment_number so
        # the shapes agree with the memory's batch size.
        x = tf.pad(query_antecedent, [
            [0, difference], [0, 0], [0, 0]])
        access_logits, retrieved_mem = self.read(x)
    # Stash the read results for later use -- presumably consumed by the
    # matching post-attention step; not visible from this block.
    memory_results["x"] = x
    memory_results["access_logits"] = access_logits
    memory_results["retrieved_mem"] = retrieved_mem
    return memory_results, query_antecedent, memory_antecedent, bias
constant[Called prior to self-attention, to incorporate memory items.
Args:
segment_number: an integer Tensor with shape [batch]
query_antecedent: a Tensor with shape [batch, length_q, channels]
memory_antecedent: must be None. Attention normally allows this to be a
Tensor with shape [batch, length_m, channels], but we currently only
support memory for decoder-side self-attention.
bias: bias Tensor (see attention_bias())
Returns:
(data, new_query_antecedent, new_memory_antecedent, new_bias)
]
with call[name[tf].variable_scope, parameter[binary_operation[name[self].name + constant[/pre_attention]]]] begin[:]
assert[compare[name[memory_antecedent] is constant[None]]]
with call[name[tf].control_dependencies, parameter[list[[<ast.Call object at 0x7da18fe90790>]]]] begin[:]
variable[difference] assign[=] binary_operation[name[self].batch_size - call[name[tf].size, parameter[name[segment_number]]]]
variable[segment_number] assign[=] call[name[tf].pad, parameter[name[segment_number], list[[<ast.List object at 0x7da18fe90520>]]]]
variable[reset_op] assign[=] call[name[self].reset, parameter[call[name[tf].reshape, parameter[call[name[tf].where, parameter[call[name[tf].less, parameter[name[segment_number], name[self].segment_number]]]], list[[<ast.UnaryOp object at 0x7da18fe92110>]]]]]]
variable[memory_results] assign[=] dictionary[[], []]
with call[name[tf].control_dependencies, parameter[list[[<ast.Name object at 0x7da18fe92c50>]]]] begin[:]
with call[name[tf].control_dependencies, parameter[list[[<ast.Call object at 0x7da18fe93820>]]]] begin[:]
variable[x] assign[=] call[name[tf].pad, parameter[name[query_antecedent], list[[<ast.List object at 0x7da18fe90850>, <ast.List object at 0x7da18fe91c00>, <ast.List object at 0x7da18fe930d0>]]]]
<ast.Tuple object at 0x7da18fe91a50> assign[=] call[name[self].read, parameter[name[x]]]
call[name[memory_results]][constant[x]] assign[=] name[x]
call[name[memory_results]][constant[access_logits]] assign[=] name[access_logits]
call[name[memory_results]][constant[retrieved_mem]] assign[=] name[retrieved_mem]
return[tuple[[<ast.Name object at 0x7da18fe93190>, <ast.Name object at 0x7da18fe93f70>, <ast.Name object at 0x7da18fe928f0>, <ast.Name object at 0x7da18fe90910>]]] | keyword[def] identifier[pre_attention] ( identifier[self] , identifier[segment_number] , identifier[query_antecedent] ,
identifier[memory_antecedent] , identifier[bias] ):
literal[string]
keyword[with] identifier[tf] . identifier[variable_scope] ( identifier[self] . identifier[name] + literal[string] , identifier[reuse] = identifier[tf] . identifier[AUTO_REUSE] ):
keyword[assert] identifier[memory_antecedent] keyword[is] keyword[None] , literal[string]
keyword[with] identifier[tf] . identifier[control_dependencies] ([
identifier[tf] . identifier[assert_greater_equal] ( identifier[self] . identifier[batch_size] , identifier[tf] . identifier[size] ( identifier[segment_number] ))]):
identifier[difference] = identifier[self] . identifier[batch_size] - identifier[tf] . identifier[size] ( identifier[segment_number] )
identifier[segment_number] = identifier[tf] . identifier[pad] ( identifier[segment_number] ,[[ literal[int] , identifier[difference] ]])
identifier[reset_op] = identifier[self] . identifier[reset] ( identifier[tf] . identifier[reshape] ( identifier[tf] . identifier[where] (
identifier[tf] . identifier[less] ( identifier[segment_number] , identifier[self] . identifier[segment_number] )),[- literal[int] ]))
identifier[memory_results] ={}
keyword[with] identifier[tf] . identifier[control_dependencies] ([ identifier[reset_op] ]):
keyword[with] identifier[tf] . identifier[control_dependencies] ([
identifier[self] . identifier[update_segment_number] ( identifier[segment_number] )]):
identifier[x] = identifier[tf] . identifier[pad] ( identifier[query_antecedent] ,[
[ literal[int] , identifier[difference] ],[ literal[int] , literal[int] ],[ literal[int] , literal[int] ]])
identifier[access_logits] , identifier[retrieved_mem] = identifier[self] . identifier[read] ( identifier[x] )
identifier[memory_results] [ literal[string] ]= identifier[x]
identifier[memory_results] [ literal[string] ]= identifier[access_logits]
identifier[memory_results] [ literal[string] ]= identifier[retrieved_mem]
keyword[return] identifier[memory_results] , identifier[query_antecedent] , identifier[memory_antecedent] , identifier[bias] | def pre_attention(self, segment_number, query_antecedent, memory_antecedent, bias):
"""Called prior to self-attention, to incorporate memory items.
Args:
segment_number: an integer Tensor with shape [batch]
query_antecedent: a Tensor with shape [batch, length_q, channels]
memory_antecedent: must be None. Attention normally allows this to be a
Tensor with shape [batch, length_m, channels], but we currently only
support memory for decoder-side self-attention.
bias: bias Tensor (see attention_bias())
Returns:
(data, new_query_antecedent, new_memory_antecedent, new_bias)
"""
with tf.variable_scope(self.name + '/pre_attention', reuse=tf.AUTO_REUSE):
assert memory_antecedent is None, 'We only support language modeling'
with tf.control_dependencies([tf.assert_greater_equal(self.batch_size, tf.size(segment_number))]):
difference = self.batch_size - tf.size(segment_number)
segment_number = tf.pad(segment_number, [[0, difference]])
reset_op = self.reset(tf.reshape(tf.where(tf.less(segment_number, self.segment_number)), [-1])) # depends on [control=['with'], data=[]]
memory_results = {}
with tf.control_dependencies([reset_op]):
with tf.control_dependencies([self.update_segment_number(segment_number)]):
x = tf.pad(query_antecedent, [[0, difference], [0, 0], [0, 0]])
(access_logits, retrieved_mem) = self.read(x) # depends on [control=['with'], data=[]] # depends on [control=['with'], data=[]]
memory_results['x'] = x
memory_results['access_logits'] = access_logits
memory_results['retrieved_mem'] = retrieved_mem
return (memory_results, query_antecedent, memory_antecedent, bias) # depends on [control=['with'], data=[]] |
def all_origins(m):
    '''
    Generate all unique statement origins in the given model
    '''
    emitted = set()
    for link in m.match():
        origin = link[ORIGIN]
        # Skip origins we have already yielded.
        if origin in emitted:
            continue
        emitted.add(origin)
        yield origin
constant[
Generate all unique statement origins in the given model
]
variable[seen] assign[=] call[name[set], parameter[]]
for taget[name[link]] in starred[call[name[m].match, parameter[]]] begin[:]
variable[origin] assign[=] call[name[link]][name[ORIGIN]]
if compare[name[origin] <ast.NotIn object at 0x7da2590d7190> name[seen]] begin[:]
call[name[seen].add, parameter[name[origin]]]
<ast.Yield object at 0x7da18ede52a0> | keyword[def] identifier[all_origins] ( identifier[m] ):
literal[string]
identifier[seen] = identifier[set] ()
keyword[for] identifier[link] keyword[in] identifier[m] . identifier[match] ():
identifier[origin] = identifier[link] [ identifier[ORIGIN] ]
keyword[if] identifier[origin] keyword[not] keyword[in] identifier[seen] :
identifier[seen] . identifier[add] ( identifier[origin] )
keyword[yield] identifier[origin] | def all_origins(m):
"""
Generate all unique statement origins in the given model
"""
seen = set()
for link in m.match():
origin = link[ORIGIN]
if origin not in seen:
seen.add(origin)
yield origin # depends on [control=['if'], data=['origin', 'seen']] # depends on [control=['for'], data=['link']] |
def lint():
"report pylint results"
# report according to file extension
report_formats = {
".html": "html",
".log": "parseable",
".txt": "text",
}
lint_build_dir = easy.path("build/lint")
lint_build_dir.exists() or lint_build_dir.makedirs() # pylint: disable=expression-not-assigned
argv = []
rcfile = easy.options.lint.get("rcfile")
if not rcfile and easy.path("pylint.cfg").exists():
rcfile = "pylint.cfg"
if rcfile:
argv += ["--rcfile", os.path.abspath(rcfile)]
if easy.options.lint.get("msg_only", False):
argv += ["-rn"]
argv += [
"--import-graph", (lint_build_dir / "imports.dot").abspath(),
]
argv += support.toplevel_packages()
sys.stderr.write("Running %s::pylint '%s'\n" % (sys.argv[0], "' '".join(argv)))
outfile = easy.options.lint.get("output", None)
if outfile:
outfile = os.path.abspath(outfile)
try:
with easy.pushd("src" if easy.path("src").exists() else "."):
if outfile:
argv.extend(["-f", report_formats.get(easy.path(outfile).ext, "text")])
sys.stderr.write("Writing output to %r\n" % (str(outfile),))
outhandle = open(outfile, "w")
try:
subprocess.check_call(["pylint"] + argv, stdout=outhandle)
finally:
outhandle.close()
else:
subprocess.check_call(["pylint"] + argv, )
sys.stderr.write("paver::lint - No problems found.\n")
except subprocess.CalledProcessError as exc:
if exc.returncode & 32:
# usage error (internal error in this code)
sys.stderr.write("paver::lint - Usage error, bad arguments %r?!\n" % (argv,))
sys.exit(exc.returncode)
else:
bits = {
1: "fatal",
2: "error",
4: "warning",
8: "refactor",
16: "convention",
}
sys.stderr.write("paver::lint - Some %s message(s) issued.\n" % (
", ".join([text for bit, text in bits.items() if exc.returncode & bit])
))
if exc.returncode & 3:
sys.stderr.write("paver::lint - Exiting due to fatal / error message.\n")
sys.exit(exc.returncode) | def function[lint, parameter[]]:
constant[report pylint results]
variable[report_formats] assign[=] dictionary[[<ast.Constant object at 0x7da18f58d9c0>, <ast.Constant object at 0x7da18f58c520>, <ast.Constant object at 0x7da18f58d0c0>], [<ast.Constant object at 0x7da18f58d2d0>, <ast.Constant object at 0x7da18f58ebc0>, <ast.Constant object at 0x7da18f58d420>]]
variable[lint_build_dir] assign[=] call[name[easy].path, parameter[constant[build/lint]]]
<ast.BoolOp object at 0x7da18f58de40>
variable[argv] assign[=] list[[]]
variable[rcfile] assign[=] call[name[easy].options.lint.get, parameter[constant[rcfile]]]
if <ast.BoolOp object at 0x7da18f58de10> begin[:]
variable[rcfile] assign[=] constant[pylint.cfg]
if name[rcfile] begin[:]
<ast.AugAssign object at 0x7da18f58c2b0>
if call[name[easy].options.lint.get, parameter[constant[msg_only], constant[False]]] begin[:]
<ast.AugAssign object at 0x7da18f58e680>
<ast.AugAssign object at 0x7da20e9b2e90>
<ast.AugAssign object at 0x7da20e9b2650>
call[name[sys].stderr.write, parameter[binary_operation[constant[Running %s::pylint '%s'
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da20e9b1b40>, <ast.Call object at 0x7da20e9b28c0>]]]]]
variable[outfile] assign[=] call[name[easy].options.lint.get, parameter[constant[output], constant[None]]]
if name[outfile] begin[:]
variable[outfile] assign[=] call[name[os].path.abspath, parameter[name[outfile]]]
<ast.Try object at 0x7da20e9b3100> | keyword[def] identifier[lint] ():
literal[string]
identifier[report_formats] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
identifier[lint_build_dir] = identifier[easy] . identifier[path] ( literal[string] )
identifier[lint_build_dir] . identifier[exists] () keyword[or] identifier[lint_build_dir] . identifier[makedirs] ()
identifier[argv] =[]
identifier[rcfile] = identifier[easy] . identifier[options] . identifier[lint] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[rcfile] keyword[and] identifier[easy] . identifier[path] ( literal[string] ). identifier[exists] ():
identifier[rcfile] = literal[string]
keyword[if] identifier[rcfile] :
identifier[argv] +=[ literal[string] , identifier[os] . identifier[path] . identifier[abspath] ( identifier[rcfile] )]
keyword[if] identifier[easy] . identifier[options] . identifier[lint] . identifier[get] ( literal[string] , keyword[False] ):
identifier[argv] +=[ literal[string] ]
identifier[argv] +=[
literal[string] ,( identifier[lint_build_dir] / literal[string] ). identifier[abspath] (),
]
identifier[argv] += identifier[support] . identifier[toplevel_packages] ()
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] %( identifier[sys] . identifier[argv] [ literal[int] ], literal[string] . identifier[join] ( identifier[argv] )))
identifier[outfile] = identifier[easy] . identifier[options] . identifier[lint] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[outfile] :
identifier[outfile] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[outfile] )
keyword[try] :
keyword[with] identifier[easy] . identifier[pushd] ( literal[string] keyword[if] identifier[easy] . identifier[path] ( literal[string] ). identifier[exists] () keyword[else] literal[string] ):
keyword[if] identifier[outfile] :
identifier[argv] . identifier[extend] ([ literal[string] , identifier[report_formats] . identifier[get] ( identifier[easy] . identifier[path] ( identifier[outfile] ). identifier[ext] , literal[string] )])
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] %( identifier[str] ( identifier[outfile] ),))
identifier[outhandle] = identifier[open] ( identifier[outfile] , literal[string] )
keyword[try] :
identifier[subprocess] . identifier[check_call] ([ literal[string] ]+ identifier[argv] , identifier[stdout] = identifier[outhandle] )
keyword[finally] :
identifier[outhandle] . identifier[close] ()
keyword[else] :
identifier[subprocess] . identifier[check_call] ([ literal[string] ]+ identifier[argv] ,)
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] )
keyword[except] identifier[subprocess] . identifier[CalledProcessError] keyword[as] identifier[exc] :
keyword[if] identifier[exc] . identifier[returncode] & literal[int] :
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] %( identifier[argv] ,))
identifier[sys] . identifier[exit] ( identifier[exc] . identifier[returncode] )
keyword[else] :
identifier[bits] ={
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
}
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] %(
literal[string] . identifier[join] ([ identifier[text] keyword[for] identifier[bit] , identifier[text] keyword[in] identifier[bits] . identifier[items] () keyword[if] identifier[exc] . identifier[returncode] & identifier[bit] ])
))
keyword[if] identifier[exc] . identifier[returncode] & literal[int] :
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] )
identifier[sys] . identifier[exit] ( identifier[exc] . identifier[returncode] ) | def lint():
"""report pylint results"""
# report according to file extension
report_formats = {'.html': 'html', '.log': 'parseable', '.txt': 'text'}
lint_build_dir = easy.path('build/lint')
lint_build_dir.exists() or lint_build_dir.makedirs() # pylint: disable=expression-not-assigned
argv = []
rcfile = easy.options.lint.get('rcfile')
if not rcfile and easy.path('pylint.cfg').exists():
rcfile = 'pylint.cfg' # depends on [control=['if'], data=[]]
if rcfile:
argv += ['--rcfile', os.path.abspath(rcfile)] # depends on [control=['if'], data=[]]
if easy.options.lint.get('msg_only', False):
argv += ['-rn'] # depends on [control=['if'], data=[]]
argv += ['--import-graph', (lint_build_dir / 'imports.dot').abspath()]
argv += support.toplevel_packages()
sys.stderr.write("Running %s::pylint '%s'\n" % (sys.argv[0], "' '".join(argv)))
outfile = easy.options.lint.get('output', None)
if outfile:
outfile = os.path.abspath(outfile) # depends on [control=['if'], data=[]]
try:
with easy.pushd('src' if easy.path('src').exists() else '.'):
if outfile:
argv.extend(['-f', report_formats.get(easy.path(outfile).ext, 'text')])
sys.stderr.write('Writing output to %r\n' % (str(outfile),))
outhandle = open(outfile, 'w')
try:
subprocess.check_call(['pylint'] + argv, stdout=outhandle) # depends on [control=['try'], data=[]]
finally:
outhandle.close() # depends on [control=['if'], data=[]]
else:
subprocess.check_call(['pylint'] + argv)
sys.stderr.write('paver::lint - No problems found.\n') # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]]
except subprocess.CalledProcessError as exc:
if exc.returncode & 32:
# usage error (internal error in this code)
sys.stderr.write('paver::lint - Usage error, bad arguments %r?!\n' % (argv,))
sys.exit(exc.returncode) # depends on [control=['if'], data=[]]
else:
bits = {1: 'fatal', 2: 'error', 4: 'warning', 8: 'refactor', 16: 'convention'}
sys.stderr.write('paver::lint - Some %s message(s) issued.\n' % ', '.join([text for (bit, text) in bits.items() if exc.returncode & bit]))
if exc.returncode & 3:
sys.stderr.write('paver::lint - Exiting due to fatal / error message.\n')
sys.exit(exc.returncode) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['exc']] |
def create_logger(app):
    """Create a logger for *app*.

    The returned logger behaves like a regular Python logger, except that
    its effective level drops to ``DEBUG`` whenever ``app.debug`` is true
    and no explicit level has been set, and its handler only emits while
    the application is in debug mode.  Any handlers already attached under
    ``app.logger_name`` are removed first.
    """
    base_cls = getLoggerClass()

    class DebugLogger(base_cls):
        def getEffectiveLevel(self):
            # Unset level (0) combined with debug mode means DEBUG.
            if self.level == 0 and app.debug:
                return DEBUG
            return base_cls.getEffectiveLevel(self)

    class DebugHandler(StreamHandler):
        def emit(self, record):
            # Suppress output entirely outside of debug mode.
            if app.debug:
                StreamHandler.emit(self, record)

    debug_handler = DebugHandler()
    debug_handler.setLevel(DEBUG)
    debug_handler.setFormatter(Formatter(app.debug_log_format))

    logger = getLogger(app.logger_name)
    # The logger may have existed before; drop every previously attached
    # handler so ours is the only one.
    del logger.handlers[:]
    logger.__class__ = DebugLogger
    logger.addHandler(debug_handler)
    return logger
constant[Creates a logger for the given application. This logger works
similar to a regular Python logger but changes the effective logging
level based on the application's debug flag. Furthermore this
function also removes all attached handlers in case there was a
logger with the log name before.
]
variable[Logger] assign[=] call[name[getLoggerClass], parameter[]]
class class[DebugLogger, parameter[]] begin[:]
def function[getEffectiveLevel, parameter[x]]:
if <ast.BoolOp object at 0x7da204567bb0> begin[:]
return[name[DEBUG]]
return[call[name[Logger].getEffectiveLevel, parameter[name[x]]]]
class class[DebugHandler, parameter[]] begin[:]
def function[emit, parameter[x, record]]:
<ast.IfExp object at 0x7da1b26ac070>
variable[handler] assign[=] call[name[DebugHandler], parameter[]]
call[name[handler].setLevel, parameter[name[DEBUG]]]
call[name[handler].setFormatter, parameter[call[name[Formatter], parameter[name[app].debug_log_format]]]]
variable[logger] assign[=] call[name[getLogger], parameter[name[app].logger_name]]
<ast.Delete object at 0x7da1b26ae9e0>
name[logger].__class__ assign[=] name[DebugLogger]
call[name[logger].addHandler, parameter[name[handler]]]
return[name[logger]] | keyword[def] identifier[create_logger] ( identifier[app] ):
literal[string]
identifier[Logger] = identifier[getLoggerClass] ()
keyword[class] identifier[DebugLogger] ( identifier[Logger] ):
keyword[def] identifier[getEffectiveLevel] ( identifier[x] ):
keyword[if] identifier[x] . identifier[level] == literal[int] keyword[and] identifier[app] . identifier[debug] :
keyword[return] identifier[DEBUG]
keyword[return] identifier[Logger] . identifier[getEffectiveLevel] ( identifier[x] )
keyword[class] identifier[DebugHandler] ( identifier[StreamHandler] ):
keyword[def] identifier[emit] ( identifier[x] , identifier[record] ):
identifier[StreamHandler] . identifier[emit] ( identifier[x] , identifier[record] ) keyword[if] identifier[app] . identifier[debug] keyword[else] keyword[None]
identifier[handler] = identifier[DebugHandler] ()
identifier[handler] . identifier[setLevel] ( identifier[DEBUG] )
identifier[handler] . identifier[setFormatter] ( identifier[Formatter] ( identifier[app] . identifier[debug_log_format] ))
identifier[logger] = identifier[getLogger] ( identifier[app] . identifier[logger_name] )
keyword[del] identifier[logger] . identifier[handlers] [:]
identifier[logger] . identifier[__class__] = identifier[DebugLogger]
identifier[logger] . identifier[addHandler] ( identifier[handler] )
keyword[return] identifier[logger] | def create_logger(app):
"""Creates a logger for the given application. This logger works
similar to a regular Python logger but changes the effective logging
level based on the application's debug flag. Furthermore this
function also removes all attached handlers in case there was a
logger with the log name before.
"""
Logger = getLoggerClass()
class DebugLogger(Logger):
def getEffectiveLevel(x):
if x.level == 0 and app.debug:
return DEBUG # depends on [control=['if'], data=[]]
return Logger.getEffectiveLevel(x)
class DebugHandler(StreamHandler):
def emit(x, record):
StreamHandler.emit(x, record) if app.debug else None
handler = DebugHandler()
handler.setLevel(DEBUG)
handler.setFormatter(Formatter(app.debug_log_format))
logger = getLogger(app.logger_name)
# just in case that was not a new logger, get rid of all the handlers
# already attached to it.
del logger.handlers[:]
logger.__class__ = DebugLogger
logger.addHandler(handler)
return logger |
def get_asset_query_session(self):
"""Gets an asset query session.
return: (osid.repository.AssetQuerySession) - an
``AssetQuerySession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_query()`` is ``true``.*
"""
if not self.supports_asset_query():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.AssetQuerySession(runtime=self._runtime) | def function[get_asset_query_session, parameter[self]]:
constant[Gets an asset query session.
return: (osid.repository.AssetQuerySession) - an
``AssetQuerySession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_query()`` is ``true``.*
]
if <ast.UnaryOp object at 0x7da2054a4b50> begin[:]
<ast.Raise object at 0x7da2054a6590>
return[call[name[sessions].AssetQuerySession, parameter[]]] | keyword[def] identifier[get_asset_query_session] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[supports_asset_query] ():
keyword[raise] identifier[errors] . identifier[Unimplemented] ()
keyword[return] identifier[sessions] . identifier[AssetQuerySession] ( identifier[runtime] = identifier[self] . identifier[_runtime] ) | def get_asset_query_session(self):
"""Gets an asset query session.
return: (osid.repository.AssetQuerySession) - an
``AssetQuerySession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_query()`` is ``true``.*
"""
if not self.supports_asset_query():
raise errors.Unimplemented() # depends on [control=['if'], data=[]]
# pylint: disable=no-member
return sessions.AssetQuerySession(runtime=self._runtime) |
def _GetHWInfos(client_list, batch_size=10000, token=None):
  """Opens the given clients in batches and returns hardware information.

  Returns a dict mapping each client URN to the set of hardware serial
  numbers (as strings) reported by that client.
  """
  serials_by_client = {}
  logging.info("%d clients to process.", len(client_list))
  processed = 0
  for client_batch in collection.Batch(client_list, batch_size):
    logging.info("Processing batch: %d-%d", processed, processed + batch_size)
    processed += len(client_batch)
    opened = aff4.FACTORY.MultiOpen(
        client_batch, age=aff4.ALL_TIMES, token=token)
    for client in opened:
      hw_values = client.GetValuesForAttribute(client.Schema.HARDWARE_INFO)
      serials_by_client[client.urn] = set(
          "%s" % info.serial_number for info in hw_values)
  return serials_by_client
constant[Opens the given clients in batches and returns hardware information.]
variable[hw_infos] assign[=] dictionary[[], []]
call[name[logging].info, parameter[constant[%d clients to process.], call[name[len], parameter[name[client_list]]]]]
variable[c] assign[=] constant[0]
for taget[name[batch]] in starred[call[name[collection].Batch, parameter[name[client_list], name[batch_size]]]] begin[:]
call[name[logging].info, parameter[constant[Processing batch: %d-%d], name[c], binary_operation[name[c] + name[batch_size]]]]
<ast.AugAssign object at 0x7da1b1c3cdf0>
variable[client_objs] assign[=] call[name[aff4].FACTORY.MultiOpen, parameter[name[batch]]]
for taget[name[client]] in starred[name[client_objs]] begin[:]
variable[hwi] assign[=] call[name[client].GetValuesForAttribute, parameter[name[client].Schema.HARDWARE_INFO]]
call[name[hw_infos]][name[client].urn] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da1b1b05d80>]]
return[name[hw_infos]] | keyword[def] identifier[_GetHWInfos] ( identifier[client_list] , identifier[batch_size] = literal[int] , identifier[token] = keyword[None] ):
literal[string]
identifier[hw_infos] ={}
identifier[logging] . identifier[info] ( literal[string] , identifier[len] ( identifier[client_list] ))
identifier[c] = literal[int]
keyword[for] identifier[batch] keyword[in] identifier[collection] . identifier[Batch] ( identifier[client_list] , identifier[batch_size] ):
identifier[logging] . identifier[info] ( literal[string] , identifier[c] , identifier[c] + identifier[batch_size] )
identifier[c] += identifier[len] ( identifier[batch] )
identifier[client_objs] = identifier[aff4] . identifier[FACTORY] . identifier[MultiOpen] ( identifier[batch] , identifier[age] = identifier[aff4] . identifier[ALL_TIMES] , identifier[token] = identifier[token] )
keyword[for] identifier[client] keyword[in] identifier[client_objs] :
identifier[hwi] = identifier[client] . identifier[GetValuesForAttribute] ( identifier[client] . identifier[Schema] . identifier[HARDWARE_INFO] )
identifier[hw_infos] [ identifier[client] . identifier[urn] ]= identifier[set] ([ literal[string] % identifier[x] . identifier[serial_number] keyword[for] identifier[x] keyword[in] identifier[hwi] ])
keyword[return] identifier[hw_infos] | def _GetHWInfos(client_list, batch_size=10000, token=None):
"""Opens the given clients in batches and returns hardware information."""
# This function returns a dict mapping each client_id to a set of reported
# hardware serial numbers reported by this client.
hw_infos = {}
logging.info('%d clients to process.', len(client_list))
c = 0
for batch in collection.Batch(client_list, batch_size):
logging.info('Processing batch: %d-%d', c, c + batch_size)
c += len(batch)
client_objs = aff4.FACTORY.MultiOpen(batch, age=aff4.ALL_TIMES, token=token)
for client in client_objs:
hwi = client.GetValuesForAttribute(client.Schema.HARDWARE_INFO)
hw_infos[client.urn] = set(['%s' % x.serial_number for x in hwi]) # depends on [control=['for'], data=['client']] # depends on [control=['for'], data=['batch']]
return hw_infos |
def process_form(request, form_class, form_dict=None, save_kwargs=None, *a, **kw):
    '''
    Build a form from ``form_dict`` (falling back to ``request.POST``),
    validate it on POST requests, and save it.

    Optional:
    =========
    You can pass kwargs to the form save function in a 'save_kwargs' dict.

    Returns a ``(success, result)`` tuple: on success ``result`` is the
    return value of the form's ``save`` method, otherwise it is the form
    instance itself (suitable for re-rendering with errors).
    '''
    data = form_dict or request.POST or None
    form = form_class(data, *a, **kw)
    if request.method != 'POST' or not form.is_valid():
        return (False, form)
    results = form.save(request=request, **(save_kwargs or {}))
    return (True, results)
constant[
Optional:
=========
You can pass kwargs to the form save function in a 'save_kwargs' dict.
Example Usage:
==============
success, form = process_form(request, DiscussionForm, initial={
'category':Category.objects.all()[0].id,
})
if success:
return form # this is the return from the forms save method.
return template(request, 'talk/create_discussion.html', {
'categories': Category.objects.all(),
'form': form
})
]
variable[form] assign[=] call[name[form_class], parameter[<ast.BoolOp object at 0x7da1b15d6500>, <ast.Starred object at 0x7da1b15d6530>]]
if compare[name[request].method equal[==] constant[POST]] begin[:]
if call[name[form].is_valid, parameter[]] begin[:]
if <ast.UnaryOp object at 0x7da1b15d5ed0> begin[:]
variable[save_kwargs] assign[=] dictionary[[], []]
variable[results] assign[=] call[name[form].save, parameter[]]
return[tuple[[<ast.Constant object at 0x7da1b15d50f0>, <ast.Name object at 0x7da1b15d4be0>]]]
return[tuple[[<ast.Constant object at 0x7da1b15d6140>, <ast.Name object at 0x7da1b15d6110>]]] | keyword[def] identifier[process_form] ( identifier[request] , identifier[form_class] , identifier[form_dict] = keyword[None] , identifier[save_kwargs] = keyword[None] ,* identifier[a] ,** identifier[kw] ):
literal[string]
identifier[form] = identifier[form_class] ( identifier[form_dict] keyword[or] identifier[request] . identifier[POST] keyword[or] keyword[None] ,* identifier[a] ,** identifier[kw] )
keyword[if] identifier[request] . identifier[method] == literal[string] :
keyword[if] identifier[form] . identifier[is_valid] ():
keyword[if] keyword[not] identifier[save_kwargs] :
identifier[save_kwargs] ={}
identifier[results] = identifier[form] . identifier[save] ( identifier[request] = identifier[request] ,** identifier[save_kwargs] )
keyword[return] ( keyword[True] , identifier[results] )
keyword[return] ( keyword[False] , identifier[form] ) | def process_form(request, form_class, form_dict=None, save_kwargs=None, *a, **kw):
"""
Optional:
=========
You can pass kwargs to the form save function in a 'save_kwargs' dict.
Example Usage:
==============
success, form = process_form(request, DiscussionForm, initial={
'category':Category.objects.all()[0].id,
})
if success:
return form # this is the return from the forms save method.
return template(request, 'talk/create_discussion.html', {
'categories': Category.objects.all(),
'form': form
})
"""
form = form_class(form_dict or request.POST or None, *a, **kw)
if request.method == 'POST':
if form.is_valid():
if not save_kwargs:
save_kwargs = {} # depends on [control=['if'], data=[]]
results = form.save(request=request, **save_kwargs)
return (True, results) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return (False, form) |
def safe_division(dividend, divisor):
    """
    Divide *dividend* by *divisor*, coercing both to float first.

    :return:
        nan: invalid arguments or division by zero
    :rtype: float
    """
    # A single try suffices: coercion failures (TypeError/ValueError/
    # AssertionError) and a zero divisor all map to NaN.
    try:
        return float(dividend) / float(divisor)
    except (TypeError, ValueError, AssertionError, ZeroDivisionError):
        return float("nan")
constant[
:return:
nan: invalid arguments
:rtype: float
]
<ast.Try object at 0x7da20e954910>
<ast.Try object at 0x7da20e954cd0> | keyword[def] identifier[safe_division] ( identifier[dividend] , identifier[divisor] ):
literal[string]
keyword[try] :
identifier[divisor] = identifier[float] ( identifier[divisor] )
identifier[dividend] = identifier[float] ( identifier[dividend] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] , identifier[AssertionError] ):
keyword[return] identifier[float] ( literal[string] )
keyword[try] :
keyword[return] identifier[dividend] / identifier[divisor]
keyword[except] ( identifier[ZeroDivisionError] ):
keyword[return] identifier[float] ( literal[string] ) | def safe_division(dividend, divisor):
"""
:return:
nan: invalid arguments
:rtype: float
"""
try:
divisor = float(divisor)
dividend = float(dividend) # depends on [control=['try'], data=[]]
except (TypeError, ValueError, AssertionError):
return float('nan') # depends on [control=['except'], data=[]]
try:
return dividend / divisor # depends on [control=['try'], data=[]]
except ZeroDivisionError:
return float('nan') # depends on [control=['except'], data=[]] |
def interface_list(env, securitygroup_id, sortby):
    """List interfaces associated with security groups.

    :param env: CLI environment providing the API client and output sink.
    :param securitygroup_id: identifier of the security group to inspect.
    :param sortby: column name used to sort the rendered table.
    """
    mgr = SoftLayer.NetworkManager(env.client)
    table = formatting.Table(COLUMNS)
    table.sortby = sortby
    # Object mask restricting the API response to the network-component
    # bindings and the guest fields rendered below.
    mask = (
        '''networkComponentBindings[
            networkComponentId,
            networkComponent[
                id,
                port,
                guest[
                    id,
                    hostname,
                    primaryBackendIpAddress,
                    primaryIpAddress
                ]
            ]
        ]'''
    )
    secgroup = mgr.get_securitygroup(securitygroup_id, mask=mask)
    for binding in secgroup.get('networkComponentBindings', []):
        interface_id = binding['networkComponentId']
        try:
            interface = binding['networkComponent']
            vsi = interface['guest']
            vsi_id = vsi['id']
            hostname = vsi['hostname']
            # Port 0 is treated as the private interface; anything else as
            # public, and the displayed IP is chosen to match.
            priv_pub = 'PRIVATE' if interface['port'] == 0 else 'PUBLIC'
            ip_address = (vsi['primaryBackendIpAddress']
                          if interface['port'] == 0
                          else vsi['primaryIpAddress'])
        except KeyError:
            # Missing keys indicate the caller lacks permission to view the
            # guest details; render placeholders instead of failing.
            vsi_id = "N/A"
            hostname = "Not enough permission to view"
            priv_pub = "N/A"
            ip_address = "N/A"
        table.add_row([
            interface_id,
            vsi_id,
            hostname,
            priv_pub,
            ip_address
        ])
    env.fout(table)
constant[List interfaces associated with security groups.]
variable[mgr] assign[=] call[name[SoftLayer].NetworkManager, parameter[name[env].client]]
variable[table] assign[=] call[name[formatting].Table, parameter[name[COLUMNS]]]
name[table].sortby assign[=] name[sortby]
variable[mask] assign[=] constant[networkComponentBindings[
networkComponentId,
networkComponent[
id,
port,
guest[
id,
hostname,
primaryBackendIpAddress,
primaryIpAddress
]
]
]]
variable[secgroup] assign[=] call[name[mgr].get_securitygroup, parameter[name[securitygroup_id]]]
for taget[name[binding]] in starred[call[name[secgroup].get, parameter[constant[networkComponentBindings], list[[]]]]] begin[:]
variable[interface_id] assign[=] call[name[binding]][constant[networkComponentId]]
<ast.Try object at 0x7da18f8136a0>
call[name[table].add_row, parameter[list[[<ast.Name object at 0x7da18fe908e0>, <ast.Name object at 0x7da18fe91060>, <ast.Name object at 0x7da18fe93af0>, <ast.Name object at 0x7da18fe93eb0>, <ast.Name object at 0x7da18fe92350>]]]]
call[name[env].fout, parameter[name[table]]] | keyword[def] identifier[interface_list] ( identifier[env] , identifier[securitygroup_id] , identifier[sortby] ):
literal[string]
identifier[mgr] = identifier[SoftLayer] . identifier[NetworkManager] ( identifier[env] . identifier[client] )
identifier[table] = identifier[formatting] . identifier[Table] ( identifier[COLUMNS] )
identifier[table] . identifier[sortby] = identifier[sortby]
identifier[mask] =(
literal[string]
)
identifier[secgroup] = identifier[mgr] . identifier[get_securitygroup] ( identifier[securitygroup_id] , identifier[mask] = identifier[mask] )
keyword[for] identifier[binding] keyword[in] identifier[secgroup] . identifier[get] ( literal[string] ,[]):
identifier[interface_id] = identifier[binding] [ literal[string] ]
keyword[try] :
identifier[interface] = identifier[binding] [ literal[string] ]
identifier[vsi] = identifier[interface] [ literal[string] ]
identifier[vsi_id] = identifier[vsi] [ literal[string] ]
identifier[hostname] = identifier[vsi] [ literal[string] ]
identifier[priv_pub] = literal[string] keyword[if] identifier[interface] [ literal[string] ]== literal[int] keyword[else] literal[string]
identifier[ip_address] =( identifier[vsi] [ literal[string] ]
keyword[if] identifier[interface] [ literal[string] ]== literal[int]
keyword[else] identifier[vsi] [ literal[string] ])
keyword[except] identifier[KeyError] :
identifier[vsi_id] = literal[string]
identifier[hostname] = literal[string]
identifier[priv_pub] = literal[string]
identifier[ip_address] = literal[string]
identifier[table] . identifier[add_row] ([
identifier[interface_id] ,
identifier[vsi_id] ,
identifier[hostname] ,
identifier[priv_pub] ,
identifier[ip_address]
])
identifier[env] . identifier[fout] ( identifier[table] ) | def interface_list(env, securitygroup_id, sortby):
"""List interfaces associated with security groups."""
mgr = SoftLayer.NetworkManager(env.client)
table = formatting.Table(COLUMNS)
table.sortby = sortby
mask = 'networkComponentBindings[\n networkComponentId,\n networkComponent[\n id,\n port,\n guest[\n id,\n hostname,\n primaryBackendIpAddress,\n primaryIpAddress\n ]\n ]\n ]'
secgroup = mgr.get_securitygroup(securitygroup_id, mask=mask)
for binding in secgroup.get('networkComponentBindings', []):
interface_id = binding['networkComponentId']
try:
interface = binding['networkComponent']
vsi = interface['guest']
vsi_id = vsi['id']
hostname = vsi['hostname']
priv_pub = 'PRIVATE' if interface['port'] == 0 else 'PUBLIC'
ip_address = vsi['primaryBackendIpAddress'] if interface['port'] == 0 else vsi['primaryIpAddress'] # depends on [control=['try'], data=[]]
except KeyError:
vsi_id = 'N/A'
hostname = 'Not enough permission to view'
priv_pub = 'N/A'
ip_address = 'N/A' # depends on [control=['except'], data=[]]
table.add_row([interface_id, vsi_id, hostname, priv_pub, ip_address]) # depends on [control=['for'], data=['binding']]
env.fout(table) |
def on_play_speed(self, *args):
"""Change the interval at which ``self.play`` is called to match my
current ``play_speed``.
"""
Clock.unschedule(self.play)
Clock.schedule_interval(self.play, 1.0 / self.play_speed) | def function[on_play_speed, parameter[self]]:
constant[Change the interval at which ``self.play`` is called to match my
current ``play_speed``.
]
call[name[Clock].unschedule, parameter[name[self].play]]
call[name[Clock].schedule_interval, parameter[name[self].play, binary_operation[constant[1.0] / name[self].play_speed]]] | keyword[def] identifier[on_play_speed] ( identifier[self] ,* identifier[args] ):
literal[string]
identifier[Clock] . identifier[unschedule] ( identifier[self] . identifier[play] )
identifier[Clock] . identifier[schedule_interval] ( identifier[self] . identifier[play] , literal[int] / identifier[self] . identifier[play_speed] ) | def on_play_speed(self, *args):
"""Change the interval at which ``self.play`` is called to match my
current ``play_speed``.
"""
Clock.unschedule(self.play)
Clock.schedule_interval(self.play, 1.0 / self.play_speed) |
def generate_jid(name, append_date=None):
    """Generates a valid JID based on the room name.
    :param append_date: appends the given date to the JID
    """
    jid = sanitize_jid(name)
    if append_date:
        return '{}-{}'.format(jid, append_date.strftime('%Y-%m-%d'))
    return jid
constant[Generates a v alid JID based on the room name.
:param append_date: appends the given date to the JID
]
if <ast.UnaryOp object at 0x7da18fe90e20> begin[:]
return[call[name[sanitize_jid], parameter[name[name]]]]
return[call[constant[{}-{}].format, parameter[call[name[sanitize_jid], parameter[name[name]]], call[name[append_date].strftime, parameter[constant[%Y-%m-%d]]]]]] | keyword[def] identifier[generate_jid] ( identifier[name] , identifier[append_date] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[append_date] :
keyword[return] identifier[sanitize_jid] ( identifier[name] )
keyword[return] literal[string] . identifier[format] ( identifier[sanitize_jid] ( identifier[name] ), identifier[append_date] . identifier[strftime] ( literal[string] )) | def generate_jid(name, append_date=None):
"""Generates a v alid JID based on the room name.
:param append_date: appends the given date to the JID
"""
if not append_date:
return sanitize_jid(name) # depends on [control=['if'], data=[]]
return '{}-{}'.format(sanitize_jid(name), append_date.strftime('%Y-%m-%d')) |
def main(argv=None):
    """Main entry point for the cdstar CLI.

    :param argv: optional argument list (docopt falls back to
        ``sys.argv[1:]`` when ``None``).
    :return: process exit code; 0 for success/help, the command's own
        return value otherwise, 256 on unexpected errors.
    """
    args = docopt(__doc__, version=pycdstar.__version__, argv=argv, options_first=True)
    subargs = [args['<command>']] + args['<args>']
    if args['<command>'] in ['help', None]:
        cmd = None
        if len(subargs) > 1:
            cmd = COMMANDS.get(subargs[1])
        if cmd:
            # `help <command>`: show that command's docstring.
            print(cmd.__doc__)
        else:
            print(__doc__)
        return 0
    cmd = COMMANDS.get(args['<command>'])
    if not cmd:
        print('unknown command')
        print(__doc__)
        return 0
    # Global connection options are popped so they are not passed on to
    # the subcommand's own docopt parse.
    cfg = Config(**dict(
        cfg=args.pop('--cfg', None),
        url=args.pop('--service', None),
        user=args.pop('--user', None),
        password=args.pop('--password', None)))
    try:
        res = cmd(
            Cdstar(cfg=cfg),
            docopt(cmd.__doc__, argv=subargs),
            verbose=args.get('--verbose'))
        if isinstance(res, types.GeneratorType):
            res = list(res)
        if isinstance(res, list):
            # List results are printed line by line and count as success.
            for line in res:
                print(line)
            res = 0
        return res or 0
    except:  # noqa: E722; # pragma: no cover
        # Deliberate catch-all at the CLI boundary: report the failure
        # (previously a silent FIXME) and signal it via the exit code.
        import traceback
        traceback.print_exc()
        return 256
constant[Main entry point for the cdstar CLI.]
variable[args] assign[=] call[name[docopt], parameter[name[__doc__]]]
variable[subargs] assign[=] binary_operation[list[[<ast.Subscript object at 0x7da1b0a82410>]] + call[name[args]][constant[<args>]]]
if compare[call[name[args]][constant[<command>]] in list[[<ast.Constant object at 0x7da1b0a82fe0>, <ast.Constant object at 0x7da1b0a81ff0>]]] begin[:]
variable[cmd] assign[=] constant[None]
if compare[call[name[len], parameter[name[subargs]]] greater[>] constant[1]] begin[:]
variable[cmd] assign[=] call[name[COMMANDS].get, parameter[call[name[subargs]][constant[1]]]]
if name[cmd] begin[:]
call[name[print], parameter[name[cmd].__doc__]]
return[constant[0]]
variable[cmd] assign[=] call[name[COMMANDS].get, parameter[call[name[args]][constant[<command>]]]]
if <ast.UnaryOp object at 0x7da1b0ab99c0> begin[:]
call[name[print], parameter[constant[unknown command]]]
call[name[print], parameter[name[__doc__]]]
return[constant[0]]
variable[cfg] assign[=] call[name[Config], parameter[]]
<ast.Try object at 0x7da1b0a83280> | keyword[def] identifier[main] ( identifier[argv] = keyword[None] ):
literal[string]
identifier[args] = identifier[docopt] ( identifier[__doc__] , identifier[version] = identifier[pycdstar] . identifier[__version__] , identifier[argv] = identifier[argv] , identifier[options_first] = keyword[True] )
identifier[subargs] =[ identifier[args] [ literal[string] ]]+ identifier[args] [ literal[string] ]
keyword[if] identifier[args] [ literal[string] ] keyword[in] [ literal[string] , keyword[None] ]:
identifier[cmd] = keyword[None]
keyword[if] identifier[len] ( identifier[subargs] )> literal[int] :
identifier[cmd] = identifier[COMMANDS] . identifier[get] ( identifier[subargs] [ literal[int] ])
keyword[if] identifier[cmd] :
identifier[print] ( identifier[cmd] . identifier[__doc__] )
keyword[else] :
identifier[print] ( identifier[__doc__] )
keyword[return] literal[int]
identifier[cmd] = identifier[COMMANDS] . identifier[get] ( identifier[args] [ literal[string] ])
keyword[if] keyword[not] identifier[cmd] :
identifier[print] ( literal[string] )
identifier[print] ( identifier[__doc__] )
keyword[return] literal[int]
identifier[cfg] = identifier[Config] (** identifier[dict] (
identifier[cfg] = identifier[args] . identifier[pop] ( literal[string] , keyword[None] ),
identifier[url] = identifier[args] . identifier[pop] ( literal[string] , keyword[None] ),
identifier[user] = identifier[args] . identifier[pop] ( literal[string] , keyword[None] ),
identifier[password] = identifier[args] . identifier[pop] ( literal[string] , keyword[None] )))
keyword[try] :
identifier[res] = identifier[cmd] (
identifier[Cdstar] ( identifier[cfg] = identifier[cfg] ),
identifier[docopt] ( identifier[cmd] . identifier[__doc__] , identifier[argv] = identifier[subargs] ),
identifier[verbose] = identifier[args] . identifier[get] ( literal[string] ))
keyword[if] identifier[isinstance] ( identifier[res] , identifier[types] . identifier[GeneratorType] ):
identifier[res] = identifier[list] ( identifier[res] )
keyword[if] identifier[isinstance] ( identifier[res] , identifier[list] ):
keyword[for] identifier[line] keyword[in] identifier[res] :
identifier[print] ( identifier[line] )
identifier[res] = literal[int]
keyword[return] identifier[res] keyword[or] literal[int]
keyword[except] :
keyword[return] literal[int] | def main(argv=None):
"""Main entry point for the cdstar CLI."""
args = docopt(__doc__, version=pycdstar.__version__, argv=argv, options_first=True)
subargs = [args['<command>']] + args['<args>']
if args['<command>'] in ['help', None]:
cmd = None
if len(subargs) > 1:
cmd = COMMANDS.get(subargs[1]) # depends on [control=['if'], data=[]]
if cmd:
print(cmd.__doc__) # depends on [control=['if'], data=[]]
else:
print(__doc__)
return 0 # depends on [control=['if'], data=[]]
cmd = COMMANDS.get(args['<command>'])
if not cmd:
print('unknown command')
print(__doc__)
return 0 # depends on [control=['if'], data=[]]
cfg = Config(**dict(cfg=args.pop('--cfg', None), url=args.pop('--service', None), user=args.pop('--user', None), password=args.pop('--password', None)))
try:
res = cmd(Cdstar(cfg=cfg), docopt(cmd.__doc__, argv=subargs), verbose=args.get('--verbose'))
if isinstance(res, types.GeneratorType):
res = list(res) # depends on [control=['if'], data=[]]
if isinstance(res, list):
for line in res:
print(line) # depends on [control=['for'], data=['line']]
res = 0 # depends on [control=['if'], data=[]]
return res or 0 # depends on [control=['try'], data=[]]
except: # noqa: E722; # pragma: no cover
# FIXME: log exception!
return 256 # depends on [control=['except'], data=[]] |
def _exec_check_pointers(executable):
"""Checks the specified executable for the pointer condition that not
all members of the derived type have had their values set.
Returns (list of offending members, parameter name).
"""
oparams = []
pmembers = {}
xassigns = map(lambda x: x.lower().strip(), executable.external_assignments())
def add_offense(pname, member):
"""Adds the specified member as an offender under the specified parameter."""
if pname not in oparams:
oparams.append(pname)
if pname not in pmembers:
pmembers[pname] = [member]
else:
pmembers[pname].append(member)
def check_buried(executable, pname, member):
"""Checks whether the member has its value changed by one of the dependency
subroutines in the executable.
"""
for d in executable.dependencies:
if pname in d.argnames:
pindex = d.argnames.index(pname)
dtarget = d.target
if dtarget is not None:
mparam = dtarget.ordered_parameters[pindex]
for pname, param in executable.parameters.items():
if param.direction == "(out)" and param.is_custom:
utype = param.customtype
if utype is None:
continue
for mname, member in utype.members.items():
key = "{}%{}".format(pname, mname).lower().strip()
if key not in xassigns:
#We also need to check the dependency calls to other, buried subroutines.
compname = "{}%{}".format(pname, mname).lower()
if executable.changed(compname) is None:
add_offense(pname, member)
return (oparams, pmembers) | def function[_exec_check_pointers, parameter[executable]]:
constant[Checks the specified executable for the pointer condition that not
all members of the derived type have had their values set.
Returns (list of offending members, parameter name).
]
variable[oparams] assign[=] list[[]]
variable[pmembers] assign[=] dictionary[[], []]
variable[xassigns] assign[=] call[name[map], parameter[<ast.Lambda object at 0x7da1b260f430>, call[name[executable].external_assignments, parameter[]]]]
def function[add_offense, parameter[pname, member]]:
constant[Adds the specified member as an offender under the specified parameter.]
if compare[name[pname] <ast.NotIn object at 0x7da2590d7190> name[oparams]] begin[:]
call[name[oparams].append, parameter[name[pname]]]
if compare[name[pname] <ast.NotIn object at 0x7da2590d7190> name[pmembers]] begin[:]
call[name[pmembers]][name[pname]] assign[=] list[[<ast.Name object at 0x7da1b260fb80>]]
def function[check_buried, parameter[executable, pname, member]]:
constant[Checks whether the member has its value changed by one of the dependency
subroutines in the executable.
]
for taget[name[d]] in starred[name[executable].dependencies] begin[:]
if compare[name[pname] in name[d].argnames] begin[:]
variable[pindex] assign[=] call[name[d].argnames.index, parameter[name[pname]]]
variable[dtarget] assign[=] name[d].target
if compare[name[dtarget] is_not constant[None]] begin[:]
variable[mparam] assign[=] call[name[dtarget].ordered_parameters][name[pindex]]
for taget[tuple[[<ast.Name object at 0x7da20e9556f0>, <ast.Name object at 0x7da20e956470>]]] in starred[call[name[executable].parameters.items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da20e9562f0> begin[:]
variable[utype] assign[=] name[param].customtype
if compare[name[utype] is constant[None]] begin[:]
continue
for taget[tuple[[<ast.Name object at 0x7da20e9547f0>, <ast.Name object at 0x7da20e957160>]]] in starred[call[name[utype].members.items, parameter[]]] begin[:]
variable[key] assign[=] call[call[call[constant[{}%{}].format, parameter[name[pname], name[mname]]].lower, parameter[]].strip, parameter[]]
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[xassigns]] begin[:]
variable[compname] assign[=] call[call[constant[{}%{}].format, parameter[name[pname], name[mname]]].lower, parameter[]]
if compare[call[name[executable].changed, parameter[name[compname]]] is constant[None]] begin[:]
call[name[add_offense], parameter[name[pname], name[member]]]
return[tuple[[<ast.Name object at 0x7da20e955ae0>, <ast.Name object at 0x7da20e9569b0>]]] | keyword[def] identifier[_exec_check_pointers] ( identifier[executable] ):
literal[string]
identifier[oparams] =[]
identifier[pmembers] ={}
identifier[xassigns] = identifier[map] ( keyword[lambda] identifier[x] : identifier[x] . identifier[lower] (). identifier[strip] (), identifier[executable] . identifier[external_assignments] ())
keyword[def] identifier[add_offense] ( identifier[pname] , identifier[member] ):
literal[string]
keyword[if] identifier[pname] keyword[not] keyword[in] identifier[oparams] :
identifier[oparams] . identifier[append] ( identifier[pname] )
keyword[if] identifier[pname] keyword[not] keyword[in] identifier[pmembers] :
identifier[pmembers] [ identifier[pname] ]=[ identifier[member] ]
keyword[else] :
identifier[pmembers] [ identifier[pname] ]. identifier[append] ( identifier[member] )
keyword[def] identifier[check_buried] ( identifier[executable] , identifier[pname] , identifier[member] ):
literal[string]
keyword[for] identifier[d] keyword[in] identifier[executable] . identifier[dependencies] :
keyword[if] identifier[pname] keyword[in] identifier[d] . identifier[argnames] :
identifier[pindex] = identifier[d] . identifier[argnames] . identifier[index] ( identifier[pname] )
identifier[dtarget] = identifier[d] . identifier[target]
keyword[if] identifier[dtarget] keyword[is] keyword[not] keyword[None] :
identifier[mparam] = identifier[dtarget] . identifier[ordered_parameters] [ identifier[pindex] ]
keyword[for] identifier[pname] , identifier[param] keyword[in] identifier[executable] . identifier[parameters] . identifier[items] ():
keyword[if] identifier[param] . identifier[direction] == literal[string] keyword[and] identifier[param] . identifier[is_custom] :
identifier[utype] = identifier[param] . identifier[customtype]
keyword[if] identifier[utype] keyword[is] keyword[None] :
keyword[continue]
keyword[for] identifier[mname] , identifier[member] keyword[in] identifier[utype] . identifier[members] . identifier[items] ():
identifier[key] = literal[string] . identifier[format] ( identifier[pname] , identifier[mname] ). identifier[lower] (). identifier[strip] ()
keyword[if] identifier[key] keyword[not] keyword[in] identifier[xassigns] :
identifier[compname] = literal[string] . identifier[format] ( identifier[pname] , identifier[mname] ). identifier[lower] ()
keyword[if] identifier[executable] . identifier[changed] ( identifier[compname] ) keyword[is] keyword[None] :
identifier[add_offense] ( identifier[pname] , identifier[member] )
keyword[return] ( identifier[oparams] , identifier[pmembers] ) | def _exec_check_pointers(executable):
"""Checks the specified executable for the pointer condition that not
all members of the derived type have had their values set.
Returns (list of offending members, parameter name).
"""
oparams = []
pmembers = {}
xassigns = map(lambda x: x.lower().strip(), executable.external_assignments())
def add_offense(pname, member):
"""Adds the specified member as an offender under the specified parameter."""
if pname not in oparams:
oparams.append(pname) # depends on [control=['if'], data=['pname', 'oparams']]
if pname not in pmembers:
pmembers[pname] = [member] # depends on [control=['if'], data=['pname', 'pmembers']]
else:
pmembers[pname].append(member)
def check_buried(executable, pname, member):
"""Checks whether the member has its value changed by one of the dependency
subroutines in the executable.
"""
for d in executable.dependencies:
if pname in d.argnames:
pindex = d.argnames.index(pname)
dtarget = d.target
if dtarget is not None:
mparam = dtarget.ordered_parameters[pindex] # depends on [control=['if'], data=['dtarget']] # depends on [control=['if'], data=['pname']] # depends on [control=['for'], data=['d']]
for (pname, param) in executable.parameters.items():
if param.direction == '(out)' and param.is_custom:
utype = param.customtype
if utype is None:
continue # depends on [control=['if'], data=[]]
for (mname, member) in utype.members.items():
key = '{}%{}'.format(pname, mname).lower().strip()
if key not in xassigns:
#We also need to check the dependency calls to other, buried subroutines.
compname = '{}%{}'.format(pname, mname).lower()
if executable.changed(compname) is None:
add_offense(pname, member) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return (oparams, pmembers) |
def class_space(classlevel=3):
    """Return the name and local namespace of the frame ``classlevel``
    levels up the call stack (the calling class' name and dictionary)."""
    caller = sys._getframe(classlevel)
    return caller.f_code.co_name, caller.f_locals
constant[returns the calling class' name and dictionary]
variable[frame] assign[=] call[name[sys]._getframe, parameter[name[classlevel]]]
variable[classname] assign[=] name[frame].f_code.co_name
variable[classdict] assign[=] name[frame].f_locals
return[tuple[[<ast.Name object at 0x7da1b0d0cf70>, <ast.Name object at 0x7da1b0d0d450>]]] | keyword[def] identifier[class_space] ( identifier[classlevel] = literal[int] ):
literal[string]
identifier[frame] = identifier[sys] . identifier[_getframe] ( identifier[classlevel] )
identifier[classname] = identifier[frame] . identifier[f_code] . identifier[co_name]
identifier[classdict] = identifier[frame] . identifier[f_locals]
keyword[return] identifier[classname] , identifier[classdict] | def class_space(classlevel=3):
"""returns the calling class' name and dictionary"""
frame = sys._getframe(classlevel)
classname = frame.f_code.co_name
classdict = frame.f_locals
return (classname, classdict) |
def _format_and_write(self, level, correlation_id, error, message, *args, **kwargs):
"""
Formats the log message and writes it to the logger destination.
:param level: a log level.
:param correlation_id: (optional) transaction id to trace execution through call chain.
:param error: an error object associated with this message.
:param message: a human-readable message to log.
:param args: arguments to parameterize the message.
:param kwargs: arguments to parameterize the message.
"""
if message != None and len(message) > 0 and len(kwargs) > 0:
message = message.format(*args, **kwargs)
self._write(level, correlation_id, error, message) | def function[_format_and_write, parameter[self, level, correlation_id, error, message]]:
constant[
Formats the log message and writes it to the logger destination.
:param level: a log level.
:param correlation_id: (optional) transaction id to trace execution through call chain.
:param error: an error object associated with this message.
:param message: a human-readable message to log.
:param args: arguments to parameterize the message.
:param kwargs: arguments to parameterize the message.
]
if <ast.BoolOp object at 0x7da1b15f7c70> begin[:]
variable[message] assign[=] call[name[message].format, parameter[<ast.Starred object at 0x7da1b15f4280>]]
call[name[self]._write, parameter[name[level], name[correlation_id], name[error], name[message]]] | keyword[def] identifier[_format_and_write] ( identifier[self] , identifier[level] , identifier[correlation_id] , identifier[error] , identifier[message] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[message] != keyword[None] keyword[and] identifier[len] ( identifier[message] )> literal[int] keyword[and] identifier[len] ( identifier[kwargs] )> literal[int] :
identifier[message] = identifier[message] . identifier[format] (* identifier[args] ,** identifier[kwargs] )
identifier[self] . identifier[_write] ( identifier[level] , identifier[correlation_id] , identifier[error] , identifier[message] ) | def _format_and_write(self, level, correlation_id, error, message, *args, **kwargs):
"""
Formats the log message and writes it to the logger destination.
:param level: a log level.
:param correlation_id: (optional) transaction id to trace execution through call chain.
:param error: an error object associated with this message.
:param message: a human-readable message to log.
:param args: arguments to parameterize the message.
:param kwargs: arguments to parameterize the message.
"""
if message != None and len(message) > 0 and (len(kwargs) > 0):
message = message.format(*args, **kwargs) # depends on [control=['if'], data=[]]
self._write(level, correlation_id, error, message) |
def prettymetrics(self) -> str:
    """
    Pretty printing for metrics
    """
    # One "name: value" row per metric pair, joined onto separate lines.
    rows = ("{}: {}".format(*metric) for metric in self.metrics())
    return "\n ".join(rows)
constant[
Pretty printing for metrics
]
variable[rendered] assign[=] <ast.ListComp object at 0x7da18dc9a920>
return[call[constant[
].join, parameter[name[rendered]]]] | keyword[def] identifier[prettymetrics] ( identifier[self] )-> identifier[str] :
literal[string]
identifier[rendered] =[ literal[string] . identifier[format] (* identifier[m] ) keyword[for] identifier[m] keyword[in] identifier[self] . identifier[metrics] ()]
keyword[return] literal[string] . identifier[join] ( identifier[rendered] ) | def prettymetrics(self) -> str:
"""
Pretty printing for metrics
"""
rendered = ['{}: {}'.format(*m) for m in self.metrics()]
return '\n '.join(rendered) |
def _restart_stream(self):
"""Restart the stream as needed after SASL and StartTLS negotiation."""
self._input_state = "restart"
self._output_state = "restart"
self.features = None
self.transport.restart()
if self.initiator:
self._send_stream_start(self.stream_id) | def function[_restart_stream, parameter[self]]:
constant[Restart the stream as needed after SASL and StartTLS negotiation.]
name[self]._input_state assign[=] constant[restart]
name[self]._output_state assign[=] constant[restart]
name[self].features assign[=] constant[None]
call[name[self].transport.restart, parameter[]]
if name[self].initiator begin[:]
call[name[self]._send_stream_start, parameter[name[self].stream_id]] | keyword[def] identifier[_restart_stream] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_input_state] = literal[string]
identifier[self] . identifier[_output_state] = literal[string]
identifier[self] . identifier[features] = keyword[None]
identifier[self] . identifier[transport] . identifier[restart] ()
keyword[if] identifier[self] . identifier[initiator] :
identifier[self] . identifier[_send_stream_start] ( identifier[self] . identifier[stream_id] ) | def _restart_stream(self):
"""Restart the stream as needed after SASL and StartTLS negotiation."""
self._input_state = 'restart'
self._output_state = 'restart'
self.features = None
self.transport.restart()
if self.initiator:
self._send_stream_start(self.stream_id) # depends on [control=['if'], data=[]] |
def compute_skew_stats(intra, inter):
    """Returns two dictionaries reporting (skew, skew_pval) for all groups"""
    def _safe_skewtest(values):
        # skewtest requires a minimum sample size; fall back to a neutral
        # (zstat, pval) = (0, 1) when the sample is too small.
        try:
            return st.skewtest(values)
        except ValueError:
            return (0, 1)

    # Intra (within a group) stats
    intra_skew = {}
    for group, values in intra.items():
        zstat, pval = _safe_skewtest(values)
        intra_skew[group] = (st.skew(values), zstat, pval)

    # Inter (between groups) stats
    inter_skew = {}
    for pair, values in inter.items():
        flat = values.flatten()
        zstat, pval = _safe_skewtest(flat)
        inter_skew['-'.join(pair)] = (st.skew(flat), zstat, pval)
        # Significance of difference between intra and inter distributions
        for group in pair:
            try:
                sep_zstat, sep_pval = mannwhitneyu(intra[group],
                                                   flat,
                                                   alternative='less')
            except ValueError:  # All numbers are identical in mannwhitneyu
                sep_zstat, sep_pval = (0, 1)
            inter_skew['{}<{}'.format(group, '-'.join(pair))] = (sep_zstat, sep_pval)
    return intra_skew, inter_skew
constant[Returns two dictionaries reporting (skew, skew_pval) for all groups]
variable[intra_skew] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da18eb57130>, <ast.Name object at 0x7da18eb552a0>]]] in starred[call[name[intra].items, parameter[]]] begin[:]
variable[skew] assign[=] call[name[st].skew, parameter[name[v]]]
<ast.Try object at 0x7da1b0e46890>
call[name[intra_skew]][name[k]] assign[=] tuple[[<ast.Name object at 0x7da1b0e46200>, <ast.Name object at 0x7da1b0e44850>, <ast.Name object at 0x7da1b0e47550>]]
variable[inter_skew] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b0e45600>, <ast.Name object at 0x7da1b0e45420>]]] in starred[call[name[inter].items, parameter[]]] begin[:]
variable[skew_sep] assign[=] call[name[st].skew, parameter[call[name[v].flatten, parameter[]]]]
<ast.Try object at 0x7da1b0e46350>
call[name[inter_skew]][call[constant[-].join, parameter[name[k]]]] assign[=] tuple[[<ast.Name object at 0x7da1b0e2ca30>, <ast.Name object at 0x7da1b0e2e7a0>, <ast.Name object at 0x7da1b0e2dcf0>]]
for taget[name[intra_key]] in starred[name[k]] begin[:]
<ast.Try object at 0x7da1b0e2d270>
call[name[inter_skew]][call[constant[{}<{}].format, parameter[name[intra_key], call[constant[-].join, parameter[name[k]]]]]] assign[=] tuple[[<ast.Name object at 0x7da204621c60>, <ast.Name object at 0x7da204621300>]]
return[tuple[[<ast.Name object at 0x7da204621b10>, <ast.Name object at 0x7da204623280>]]] | keyword[def] identifier[compute_skew_stats] ( identifier[intra] , identifier[inter] ):
literal[string]
identifier[intra_skew] ={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[intra] . identifier[items] ():
identifier[skew] = identifier[st] . identifier[skew] ( identifier[v] )
keyword[try] :
identifier[skew_zstat] , identifier[skew_pval] = identifier[st] . identifier[skewtest] ( identifier[v] )
keyword[except] identifier[ValueError] :
identifier[skew_zstat] , identifier[skew_pval] =( literal[int] , literal[int] )
identifier[intra_skew] [ identifier[k] ]=( identifier[skew] , identifier[skew_zstat] , identifier[skew_pval] )
identifier[inter_skew] ={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[inter] . identifier[items] ():
identifier[skew_sep] = identifier[st] . identifier[skew] ( identifier[v] . identifier[flatten] ())
keyword[try] :
identifier[skew_sep_zstat] , identifier[skew_sep_pval] = identifier[st] . identifier[skewtest] ( identifier[v] . identifier[flatten] ())
keyword[except] identifier[ValueError] :
identifier[skew_sep_zstat] , identifier[skew_sep_pval] =( literal[int] , literal[int] )
identifier[inter_skew] [ literal[string] . identifier[join] ( identifier[k] )]=( identifier[skew_sep] , identifier[skew_sep_zstat] , identifier[skew_sep_pval] )
keyword[for] identifier[intra_key] keyword[in] identifier[k] :
keyword[try] :
identifier[separation_zstat] , identifier[separation_pval] = identifier[mannwhitneyu] ( identifier[intra] [ identifier[intra_key] ],
identifier[v] . identifier[flatten] (),
identifier[alternative] = literal[string] )
keyword[except] identifier[ValueError] :
identifier[separation_zstat] , identifier[separation_pval] =( literal[int] , literal[int] )
identifier[inter_skew] [ literal[string] . identifier[format] ( identifier[intra_key] , literal[string] . identifier[join] ( identifier[k] ))]=( identifier[separation_zstat] , identifier[separation_pval] )
keyword[return] identifier[intra_skew] , identifier[inter_skew] | def compute_skew_stats(intra, inter):
"""Returns two dictionaries reporting (skew, skew_pval) for all groups"""
# Intra (within a group) stats
intra_skew = {}
for (k, v) in intra.items():
skew = st.skew(v)
try:
(skew_zstat, skew_pval) = st.skewtest(v) # depends on [control=['try'], data=[]]
except ValueError: # if sample size too small
(skew_zstat, skew_pval) = (0, 1) # depends on [control=['except'], data=[]]
intra_skew[k] = (skew, skew_zstat, skew_pval) # depends on [control=['for'], data=[]]
# Inter (between groups) stats
inter_skew = {}
for (k, v) in inter.items():
# Inter skew stats
skew_sep = st.skew(v.flatten())
try:
(skew_sep_zstat, skew_sep_pval) = st.skewtest(v.flatten()) # depends on [control=['try'], data=[]]
except ValueError:
(skew_sep_zstat, skew_sep_pval) = (0, 1) # depends on [control=['except'], data=[]]
inter_skew['-'.join(k)] = (skew_sep, skew_sep_zstat, skew_sep_pval)
# Significance of difference between intra and inter distributions
for intra_key in k:
try:
(separation_zstat, separation_pval) = mannwhitneyu(intra[intra_key], v.flatten(), alternative='less') # depends on [control=['try'], data=[]]
except ValueError: # All numbers are identical in mannwhitneyu
(separation_zstat, separation_pval) = (0, 1) # depends on [control=['except'], data=[]]
inter_skew['{}<{}'.format(intra_key, '-'.join(k))] = (separation_zstat, separation_pval) # depends on [control=['for'], data=['intra_key']] # depends on [control=['for'], data=[]]
return (intra_skew, inter_skew) |
def tree_analysisOutput(self, *args, **kwargs):
    """
    An optional method for looping over the <outputTree> and
    calling an outputcallback on the analysis results at each
    path.

    Only call this if self.b_persisAnalysisResults is True.

    :param kwargs: 'outputcallback' -- a callable invoked as
                   callback((path, analysis), **kwargs) for each entry
                   in self.d_outputTree.
    :return: a dict with a boolean 'status' flag.
    """
    fn_outputcallback = kwargs.get('outputcallback')
    index = 1
    # NOTE(review): the total is taken from the *input* tree while the loop
    # walks the *output* tree -- assumes the two trees are parallel; confirm.
    total = len(self.d_inputTree)
    for path, d_analysis in self.d_outputTree.items():
        self.simpleProgress_show(index, total)
        self.dp.qprint("Processing analysis results in output: %s" % path)
        fn_outputcallback((path, d_analysis), **kwargs)
        # BUGFIX: advance the progress counter; it previously stayed at 1
        # for every iteration.
        index += 1
    return {
        'status': True
    }
constant[
An optional method for looping over the <outputTree> and
calling an outputcallback on the analysis results at each
path.
Only call this if self.b_persisAnalysisResults is True.
]
variable[fn_outputcallback] assign[=] constant[None]
for taget[tuple[[<ast.Name object at 0x7da20c6a9f90>, <ast.Name object at 0x7da20c6aae90>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:]
if compare[name[k] equal[==] constant[outputcallback]] begin[:]
variable[fn_outputcallback] assign[=] name[v]
variable[index] assign[=] constant[1]
variable[total] assign[=] call[name[len], parameter[call[name[self].d_inputTree.keys, parameter[]]]]
for taget[tuple[[<ast.Name object at 0x7da20c6ab4f0>, <ast.Name object at 0x7da20c6aa800>]]] in starred[call[name[self].d_outputTree.items, parameter[]]] begin[:]
call[name[self].simpleProgress_show, parameter[name[index], name[total]]]
call[name[self].dp.qprint, parameter[binary_operation[constant[Processing analysis results in output: %s] <ast.Mod object at 0x7da2590d6920> name[path]]]]
variable[d_output] assign[=] call[name[fn_outputcallback], parameter[tuple[[<ast.Name object at 0x7da18fe90b80>, <ast.Name object at 0x7da18fe92d40>]]]]
return[dictionary[[<ast.Constant object at 0x7da18fe92110>], [<ast.Constant object at 0x7da18fe91480>]]] | keyword[def] identifier[tree_analysisOutput] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[fn_outputcallback] = keyword[None]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kwargs] . identifier[items] ():
keyword[if] identifier[k] == literal[string] : identifier[fn_outputcallback] = identifier[v]
identifier[index] = literal[int]
identifier[total] = identifier[len] ( identifier[self] . identifier[d_inputTree] . identifier[keys] ())
keyword[for] identifier[path] , identifier[d_analysis] keyword[in] identifier[self] . identifier[d_outputTree] . identifier[items] ():
identifier[self] . identifier[simpleProgress_show] ( identifier[index] , identifier[total] )
identifier[self] . identifier[dp] . identifier[qprint] ( literal[string] % identifier[path] )
identifier[d_output] = identifier[fn_outputcallback] (( identifier[path] , identifier[d_analysis] ),** identifier[kwargs] )
keyword[return] {
literal[string] : keyword[True]
} | def tree_analysisOutput(self, *args, **kwargs):
"""
An optional method for looping over the <outputTree> and
calling an outputcallback on the analysis results at each
path.
Only call this if self.b_persisAnalysisResults is True.
"""
fn_outputcallback = None
for (k, v) in kwargs.items():
if k == 'outputcallback':
fn_outputcallback = v # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
index = 1
total = len(self.d_inputTree.keys())
for (path, d_analysis) in self.d_outputTree.items():
self.simpleProgress_show(index, total)
self.dp.qprint('Processing analysis results in output: %s' % path)
d_output = fn_outputcallback((path, d_analysis), **kwargs) # depends on [control=['for'], data=[]]
return {'status': True} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.