code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def get_metadata(self):
"""
Convert dictionary returned after docker inspect command into instance of ContainerMetadata class
:return: ContainerMetadata, container metadata instance
"""
inspect_data = self.inspect(refresh=True)
inspect_to_container_metadata(self.metadata, inspect_data, self.image)
return self.metadata | def function[get_metadata, parameter[self]]:
constant[
Convert dictionary returned after docker inspect command into instance of ContainerMetadata class
:return: ContainerMetadata, container metadata instance
]
variable[inspect_data] assign[=] call[name[self].inspect, parameter[]]
call[name[inspect_to_container_metadata], parameter[name[self].metadata, name[inspect_data], name[self].image]]
return[name[self].metadata] | keyword[def] identifier[get_metadata] ( identifier[self] ):
literal[string]
identifier[inspect_data] = identifier[self] . identifier[inspect] ( identifier[refresh] = keyword[True] )
identifier[inspect_to_container_metadata] ( identifier[self] . identifier[metadata] , identifier[inspect_data] , identifier[self] . identifier[image] )
keyword[return] identifier[self] . identifier[metadata] | def get_metadata(self):
"""
Convert dictionary returned after docker inspect command into instance of ContainerMetadata class
:return: ContainerMetadata, container metadata instance
"""
inspect_data = self.inspect(refresh=True)
inspect_to_container_metadata(self.metadata, inspect_data, self.image)
return self.metadata |
def Hfus(T=298.15, P=101325, MW=None, AvailableMethods=False, Method=None, CASRN=''): # pragma: no cover
'''This function handles the calculation of a chemical's enthalpy of fusion.
Generally this, is used by the chemical class, as all parameters are passed.
Calling the function directly works okay.
Enthalpy of fusion is a weak function of pressure, and its effects are
neglected.
This API is considered experimental, and is expected to be removed in a
future release in favor of a more complete object-oriented interface.
'''
def list_methods():
methods = []
if CASRN in CRCHfus_data.index:
methods.append('CRC, at melting point')
methods.append('None')
return methods
if AvailableMethods:
return list_methods()
if not Method:
Method = list_methods()[0]
# This is the calculate, given the method section
if Method == 'CRC, at melting point':
_Hfus = CRCHfus_data.at[CASRN, 'Hfus']
elif Method == 'None' or not MW:
_Hfus = None
else:
raise Exception('Failure in in function')
_Hfus = property_molar_to_mass(_Hfus, MW)
return _Hfus | def function[Hfus, parameter[T, P, MW, AvailableMethods, Method, CASRN]]:
constant[This function handles the calculation of a chemical's enthalpy of fusion.
Generally this, is used by the chemical class, as all parameters are passed.
Calling the function directly works okay.
Enthalpy of fusion is a weak function of pressure, and its effects are
neglected.
This API is considered experimental, and is expected to be removed in a
future release in favor of a more complete object-oriented interface.
]
def function[list_methods, parameter[]]:
variable[methods] assign[=] list[[]]
if compare[name[CASRN] in name[CRCHfus_data].index] begin[:]
call[name[methods].append, parameter[constant[CRC, at melting point]]]
call[name[methods].append, parameter[constant[None]]]
return[name[methods]]
if name[AvailableMethods] begin[:]
return[call[name[list_methods], parameter[]]]
if <ast.UnaryOp object at 0x7da1b26ac4f0> begin[:]
variable[Method] assign[=] call[call[name[list_methods], parameter[]]][constant[0]]
if compare[name[Method] equal[==] constant[CRC, at melting point]] begin[:]
variable[_Hfus] assign[=] call[name[CRCHfus_data].at][tuple[[<ast.Name object at 0x7da18eb55e70>, <ast.Constant object at 0x7da18eb57220>]]]
variable[_Hfus] assign[=] call[name[property_molar_to_mass], parameter[name[_Hfus], name[MW]]]
return[name[_Hfus]] | keyword[def] identifier[Hfus] ( identifier[T] = literal[int] , identifier[P] = literal[int] , identifier[MW] = keyword[None] , identifier[AvailableMethods] = keyword[False] , identifier[Method] = keyword[None] , identifier[CASRN] = literal[string] ):
literal[string]
keyword[def] identifier[list_methods] ():
identifier[methods] =[]
keyword[if] identifier[CASRN] keyword[in] identifier[CRCHfus_data] . identifier[index] :
identifier[methods] . identifier[append] ( literal[string] )
identifier[methods] . identifier[append] ( literal[string] )
keyword[return] identifier[methods]
keyword[if] identifier[AvailableMethods] :
keyword[return] identifier[list_methods] ()
keyword[if] keyword[not] identifier[Method] :
identifier[Method] = identifier[list_methods] ()[ literal[int] ]
keyword[if] identifier[Method] == literal[string] :
identifier[_Hfus] = identifier[CRCHfus_data] . identifier[at] [ identifier[CASRN] , literal[string] ]
keyword[elif] identifier[Method] == literal[string] keyword[or] keyword[not] identifier[MW] :
identifier[_Hfus] = keyword[None]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[_Hfus] = identifier[property_molar_to_mass] ( identifier[_Hfus] , identifier[MW] )
keyword[return] identifier[_Hfus] | def Hfus(T=298.15, P=101325, MW=None, AvailableMethods=False, Method=None, CASRN=''): # pragma: no cover
"This function handles the calculation of a chemical's enthalpy of fusion.\n Generally this, is used by the chemical class, as all parameters are passed.\n Calling the function directly works okay.\n\n Enthalpy of fusion is a weak function of pressure, and its effects are\n neglected.\n\n This API is considered experimental, and is expected to be removed in a\n future release in favor of a more complete object-oriented interface.\n\n "
def list_methods():
methods = []
if CASRN in CRCHfus_data.index:
methods.append('CRC, at melting point') # depends on [control=['if'], data=[]]
methods.append('None')
return methods
if AvailableMethods:
return list_methods() # depends on [control=['if'], data=[]]
if not Method:
Method = list_methods()[0] # depends on [control=['if'], data=[]]
# This is the calculate, given the method section
if Method == 'CRC, at melting point':
_Hfus = CRCHfus_data.at[CASRN, 'Hfus'] # depends on [control=['if'], data=[]]
elif Method == 'None' or not MW:
_Hfus = None # depends on [control=['if'], data=[]]
else:
raise Exception('Failure in in function')
_Hfus = property_molar_to_mass(_Hfus, MW)
return _Hfus |
def uniq2orderipix(uniq):
"""
convert a HEALPix pixel coded as a NUNIQ number
to a (norder, ipix) tuple
"""
order = ((np.log2(uniq//4)) // 2)
order = order.astype(int)
ipix = uniq - 4 * (4**order)
return order, ipix | def function[uniq2orderipix, parameter[uniq]]:
constant[
convert a HEALPix pixel coded as a NUNIQ number
to a (norder, ipix) tuple
]
variable[order] assign[=] binary_operation[call[name[np].log2, parameter[binary_operation[name[uniq] <ast.FloorDiv object at 0x7da2590d6bc0> constant[4]]]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]]
variable[order] assign[=] call[name[order].astype, parameter[name[int]]]
variable[ipix] assign[=] binary_operation[name[uniq] - binary_operation[constant[4] * binary_operation[constant[4] ** name[order]]]]
return[tuple[[<ast.Name object at 0x7da1b0e74700>, <ast.Name object at 0x7da1b0e75240>]]] | keyword[def] identifier[uniq2orderipix] ( identifier[uniq] ):
literal[string]
identifier[order] =(( identifier[np] . identifier[log2] ( identifier[uniq] // literal[int] ))// literal[int] )
identifier[order] = identifier[order] . identifier[astype] ( identifier[int] )
identifier[ipix] = identifier[uniq] - literal[int] *( literal[int] ** identifier[order] )
keyword[return] identifier[order] , identifier[ipix] | def uniq2orderipix(uniq):
"""
convert a HEALPix pixel coded as a NUNIQ number
to a (norder, ipix) tuple
"""
order = np.log2(uniq // 4) // 2
order = order.astype(int)
ipix = uniq - 4 * 4 ** order
return (order, ipix) |
def get_input_vector(self, ind):
"""
Given an index, get the corresponding vector of the Input Matrix.
"""
dim = self.get_dimension()
b = fasttext.Vector(dim)
self.f.getInputVector(b, ind)
return np.array(b) | def function[get_input_vector, parameter[self, ind]]:
constant[
Given an index, get the corresponding vector of the Input Matrix.
]
variable[dim] assign[=] call[name[self].get_dimension, parameter[]]
variable[b] assign[=] call[name[fasttext].Vector, parameter[name[dim]]]
call[name[self].f.getInputVector, parameter[name[b], name[ind]]]
return[call[name[np].array, parameter[name[b]]]] | keyword[def] identifier[get_input_vector] ( identifier[self] , identifier[ind] ):
literal[string]
identifier[dim] = identifier[self] . identifier[get_dimension] ()
identifier[b] = identifier[fasttext] . identifier[Vector] ( identifier[dim] )
identifier[self] . identifier[f] . identifier[getInputVector] ( identifier[b] , identifier[ind] )
keyword[return] identifier[np] . identifier[array] ( identifier[b] ) | def get_input_vector(self, ind):
"""
Given an index, get the corresponding vector of the Input Matrix.
"""
dim = self.get_dimension()
b = fasttext.Vector(dim)
self.f.getInputVector(b, ind)
return np.array(b) |
def get_connection(db=DATABASE):
""" Returns a new connection to the database. """
return database.connect(host=HOST, port=PORT, user=USER, password=PASSWORD, database=db) | def function[get_connection, parameter[db]]:
constant[ Returns a new connection to the database. ]
return[call[name[database].connect, parameter[]]] | keyword[def] identifier[get_connection] ( identifier[db] = identifier[DATABASE] ):
literal[string]
keyword[return] identifier[database] . identifier[connect] ( identifier[host] = identifier[HOST] , identifier[port] = identifier[PORT] , identifier[user] = identifier[USER] , identifier[password] = identifier[PASSWORD] , identifier[database] = identifier[db] ) | def get_connection(db=DATABASE):
""" Returns a new connection to the database. """
return database.connect(host=HOST, port=PORT, user=USER, password=PASSWORD, database=db) |
def tabfile2list(fname):
"tabfile2list"
#dat = mylib1.readfileasmac(fname)
#data = string.strip(dat)
data = mylib1.readfileasmac(fname)
#data = data[:-2]#remove the last return
alist = data.split('\r')#since I read it as a mac file
blist = alist[1].split('\t')
clist = []
for num in range(0, len(alist)):
ilist = alist[num].split('\t')
clist = clist+[ilist]
cclist = clist[:-1]#the last element is turning out to be empty
return cclist | def function[tabfile2list, parameter[fname]]:
constant[tabfile2list]
variable[data] assign[=] call[name[mylib1].readfileasmac, parameter[name[fname]]]
variable[alist] assign[=] call[name[data].split, parameter[constant[
]]]
variable[blist] assign[=] call[call[name[alist]][constant[1]].split, parameter[constant[ ]]]
variable[clist] assign[=] list[[]]
for taget[name[num]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[alist]]]]]] begin[:]
variable[ilist] assign[=] call[call[name[alist]][name[num]].split, parameter[constant[ ]]]
variable[clist] assign[=] binary_operation[name[clist] + list[[<ast.Name object at 0x7da1b11ef430>]]]
variable[cclist] assign[=] call[name[clist]][<ast.Slice object at 0x7da1b11ec190>]
return[name[cclist]] | keyword[def] identifier[tabfile2list] ( identifier[fname] ):
literal[string]
identifier[data] = identifier[mylib1] . identifier[readfileasmac] ( identifier[fname] )
identifier[alist] = identifier[data] . identifier[split] ( literal[string] )
identifier[blist] = identifier[alist] [ literal[int] ]. identifier[split] ( literal[string] )
identifier[clist] =[]
keyword[for] identifier[num] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[alist] )):
identifier[ilist] = identifier[alist] [ identifier[num] ]. identifier[split] ( literal[string] )
identifier[clist] = identifier[clist] +[ identifier[ilist] ]
identifier[cclist] = identifier[clist] [:- literal[int] ]
keyword[return] identifier[cclist] | def tabfile2list(fname):
"""tabfile2list"""
#dat = mylib1.readfileasmac(fname)
#data = string.strip(dat)
data = mylib1.readfileasmac(fname)
#data = data[:-2]#remove the last return
alist = data.split('\r') #since I read it as a mac file
blist = alist[1].split('\t')
clist = []
for num in range(0, len(alist)):
ilist = alist[num].split('\t')
clist = clist + [ilist] # depends on [control=['for'], data=['num']]
cclist = clist[:-1] #the last element is turning out to be empty
return cclist |
def frameify(self, state, data):
"""Yield the data as a single frame."""
try:
yield state.recv_buf + data
except FrameSwitch:
pass
finally:
state.recv_buf = '' | def function[frameify, parameter[self, state, data]]:
constant[Yield the data as a single frame.]
<ast.Try object at 0x7da18f811690> | keyword[def] identifier[frameify] ( identifier[self] , identifier[state] , identifier[data] ):
literal[string]
keyword[try] :
keyword[yield] identifier[state] . identifier[recv_buf] + identifier[data]
keyword[except] identifier[FrameSwitch] :
keyword[pass]
keyword[finally] :
identifier[state] . identifier[recv_buf] = literal[string] | def frameify(self, state, data):
"""Yield the data as a single frame."""
try:
yield (state.recv_buf + data) # depends on [control=['try'], data=[]]
except FrameSwitch:
pass # depends on [control=['except'], data=[]]
finally:
state.recv_buf = '' |
def compose(self, *things):
'''
compose applies multiple fsts onto a base implementation.
Pass the base implementation as last parameter.
fsts are merged from RIGHT TO LEFT (like function application)
e.g.:
class MyFST(object):
#place introductions and refinements here
introduce_foo = 'bar'
compose(MyFST(), MyClass)
'''
if not len(things):
raise CompositionError('nothing to compose')
if len(things) == 1:
# composing one element is simple
return things[0]
else:
# recurse after applying last role to object
return self.compose(*(
list(things[:-2]) # all but the last two
# plus the composition of the last two
+ [self._compose_pair(things[-2], things[-1])]
)) | def function[compose, parameter[self]]:
constant[
compose applies multiple fsts onto a base implementation.
Pass the base implementation as last parameter.
fsts are merged from RIGHT TO LEFT (like function application)
e.g.:
class MyFST(object):
#place introductions and refinements here
introduce_foo = 'bar'
compose(MyFST(), MyClass)
]
if <ast.UnaryOp object at 0x7da1b184af80> begin[:]
<ast.Raise object at 0x7da1b1848be0>
if compare[call[name[len], parameter[name[things]]] equal[==] constant[1]] begin[:]
return[call[name[things]][constant[0]]] | keyword[def] identifier[compose] ( identifier[self] ,* identifier[things] ):
literal[string]
keyword[if] keyword[not] identifier[len] ( identifier[things] ):
keyword[raise] identifier[CompositionError] ( literal[string] )
keyword[if] identifier[len] ( identifier[things] )== literal[int] :
keyword[return] identifier[things] [ literal[int] ]
keyword[else] :
keyword[return] identifier[self] . identifier[compose] (*(
identifier[list] ( identifier[things] [:- literal[int] ])
+[ identifier[self] . identifier[_compose_pair] ( identifier[things] [- literal[int] ], identifier[things] [- literal[int] ])]
)) | def compose(self, *things):
"""
compose applies multiple fsts onto a base implementation.
Pass the base implementation as last parameter.
fsts are merged from RIGHT TO LEFT (like function application)
e.g.:
class MyFST(object):
#place introductions and refinements here
introduce_foo = 'bar'
compose(MyFST(), MyClass)
"""
if not len(things):
raise CompositionError('nothing to compose') # depends on [control=['if'], data=[]]
if len(things) == 1:
# composing one element is simple
return things[0] # depends on [control=['if'], data=[]]
else:
# recurse after applying last role to object
# all but the last two
# plus the composition of the last two
return self.compose(*list(things[:-2]) + [self._compose_pair(things[-2], things[-1])]) |
def permission_request_approve_link(context, perm):
"""
Renders a html link to the approve view of the given permission request.
Returns no content if the request-user has no permission to delete foreign
permissions.
"""
user = context['request'].user
if user.is_authenticated():
if user.has_perm('authority.approve_permission_requests'):
return base_link(context, perm, 'authority-approve-permission-request')
return {'url': None} | def function[permission_request_approve_link, parameter[context, perm]]:
constant[
Renders a html link to the approve view of the given permission request.
Returns no content if the request-user has no permission to delete foreign
permissions.
]
variable[user] assign[=] call[name[context]][constant[request]].user
if call[name[user].is_authenticated, parameter[]] begin[:]
if call[name[user].has_perm, parameter[constant[authority.approve_permission_requests]]] begin[:]
return[call[name[base_link], parameter[name[context], name[perm], constant[authority-approve-permission-request]]]]
return[dictionary[[<ast.Constant object at 0x7da1b045c910>], [<ast.Constant object at 0x7da1b045f580>]]] | keyword[def] identifier[permission_request_approve_link] ( identifier[context] , identifier[perm] ):
literal[string]
identifier[user] = identifier[context] [ literal[string] ]. identifier[user]
keyword[if] identifier[user] . identifier[is_authenticated] ():
keyword[if] identifier[user] . identifier[has_perm] ( literal[string] ):
keyword[return] identifier[base_link] ( identifier[context] , identifier[perm] , literal[string] )
keyword[return] { literal[string] : keyword[None] } | def permission_request_approve_link(context, perm):
"""
Renders a html link to the approve view of the given permission request.
Returns no content if the request-user has no permission to delete foreign
permissions.
"""
user = context['request'].user
if user.is_authenticated():
if user.has_perm('authority.approve_permission_requests'):
return base_link(context, perm, 'authority-approve-permission-request') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return {'url': None} |
def open_json(file_name):
"""
returns json contents as string
"""
with open(file_name, "r") as json_data:
data = json.load(json_data)
return data | def function[open_json, parameter[file_name]]:
constant[
returns json contents as string
]
with call[name[open], parameter[name[file_name], constant[r]]] begin[:]
variable[data] assign[=] call[name[json].load, parameter[name[json_data]]]
return[name[data]] | keyword[def] identifier[open_json] ( identifier[file_name] ):
literal[string]
keyword[with] identifier[open] ( identifier[file_name] , literal[string] ) keyword[as] identifier[json_data] :
identifier[data] = identifier[json] . identifier[load] ( identifier[json_data] )
keyword[return] identifier[data] | def open_json(file_name):
"""
returns json contents as string
"""
with open(file_name, 'r') as json_data:
data = json.load(json_data)
return data # depends on [control=['with'], data=['json_data']] |
def data_contains_key_builder(key: str) -> NodePredicate: # noqa: D202
"""Build a filter that passes only on nodes that have the given key in their data dictionary.
:param key: A key for the node's data dictionary
"""
def data_contains_key(_: BELGraph, node: BaseEntity) -> bool:
"""Pass only for a node that contains the enclosed key in its data dictionary.
:return: If the node contains the enclosed key in its data dictionary
"""
return key in node
return data_contains_key | def function[data_contains_key_builder, parameter[key]]:
constant[Build a filter that passes only on nodes that have the given key in their data dictionary.
:param key: A key for the node's data dictionary
]
def function[data_contains_key, parameter[_, node]]:
constant[Pass only for a node that contains the enclosed key in its data dictionary.
:return: If the node contains the enclosed key in its data dictionary
]
return[compare[name[key] in name[node]]]
return[name[data_contains_key]] | keyword[def] identifier[data_contains_key_builder] ( identifier[key] : identifier[str] )-> identifier[NodePredicate] :
literal[string]
keyword[def] identifier[data_contains_key] ( identifier[_] : identifier[BELGraph] , identifier[node] : identifier[BaseEntity] )-> identifier[bool] :
literal[string]
keyword[return] identifier[key] keyword[in] identifier[node]
keyword[return] identifier[data_contains_key] | def data_contains_key_builder(key: str) -> NodePredicate: # noqa: D202
"Build a filter that passes only on nodes that have the given key in their data dictionary.\n\n :param key: A key for the node's data dictionary\n "
def data_contains_key(_: BELGraph, node: BaseEntity) -> bool:
"""Pass only for a node that contains the enclosed key in its data dictionary.
:return: If the node contains the enclosed key in its data dictionary
"""
return key in node
return data_contains_key |
def to_surface(self, image, alpha=1.0):
"""
Converts a :py:mod:`PIL.Image` into a :class:`pygame.Surface`,
transforming it according to the ``transform`` and ``scale``
constructor arguments.
"""
assert(0.0 <= alpha <= 1.0)
if alpha < 1.0:
im = image.convert("RGBA")
black = Image.new(im.mode, im.size, "black")
im = Image.blend(black, im, alpha)
else:
im = image.convert("RGB")
mode = im.mode
size = im.size
data = im.tobytes()
del im
surface = self._pygame.image.fromstring(data, size, mode)
return self._transform(surface) | def function[to_surface, parameter[self, image, alpha]]:
constant[
Converts a :py:mod:`PIL.Image` into a :class:`pygame.Surface`,
transforming it according to the ``transform`` and ``scale``
constructor arguments.
]
assert[compare[constant[0.0] less_or_equal[<=] name[alpha]]]
if compare[name[alpha] less[<] constant[1.0]] begin[:]
variable[im] assign[=] call[name[image].convert, parameter[constant[RGBA]]]
variable[black] assign[=] call[name[Image].new, parameter[name[im].mode, name[im].size, constant[black]]]
variable[im] assign[=] call[name[Image].blend, parameter[name[black], name[im], name[alpha]]]
variable[mode] assign[=] name[im].mode
variable[size] assign[=] name[im].size
variable[data] assign[=] call[name[im].tobytes, parameter[]]
<ast.Delete object at 0x7da18eb55c60>
variable[surface] assign[=] call[name[self]._pygame.image.fromstring, parameter[name[data], name[size], name[mode]]]
return[call[name[self]._transform, parameter[name[surface]]]] | keyword[def] identifier[to_surface] ( identifier[self] , identifier[image] , identifier[alpha] = literal[int] ):
literal[string]
keyword[assert] ( literal[int] <= identifier[alpha] <= literal[int] )
keyword[if] identifier[alpha] < literal[int] :
identifier[im] = identifier[image] . identifier[convert] ( literal[string] )
identifier[black] = identifier[Image] . identifier[new] ( identifier[im] . identifier[mode] , identifier[im] . identifier[size] , literal[string] )
identifier[im] = identifier[Image] . identifier[blend] ( identifier[black] , identifier[im] , identifier[alpha] )
keyword[else] :
identifier[im] = identifier[image] . identifier[convert] ( literal[string] )
identifier[mode] = identifier[im] . identifier[mode]
identifier[size] = identifier[im] . identifier[size]
identifier[data] = identifier[im] . identifier[tobytes] ()
keyword[del] identifier[im]
identifier[surface] = identifier[self] . identifier[_pygame] . identifier[image] . identifier[fromstring] ( identifier[data] , identifier[size] , identifier[mode] )
keyword[return] identifier[self] . identifier[_transform] ( identifier[surface] ) | def to_surface(self, image, alpha=1.0):
"""
Converts a :py:mod:`PIL.Image` into a :class:`pygame.Surface`,
transforming it according to the ``transform`` and ``scale``
constructor arguments.
"""
assert 0.0 <= alpha <= 1.0
if alpha < 1.0:
im = image.convert('RGBA')
black = Image.new(im.mode, im.size, 'black')
im = Image.blend(black, im, alpha) # depends on [control=['if'], data=['alpha']]
else:
im = image.convert('RGB')
mode = im.mode
size = im.size
data = im.tobytes()
del im
surface = self._pygame.image.fromstring(data, size, mode)
return self._transform(surface) |
def _get_digest(self, info):
"""
Get a digest from a dictionary by looking at keys of the form
'algo_digest'.
Returns a 2-tuple (algo, digest) if found, else None. Currently
looks only for SHA256, then MD5.
"""
result = None
for algo in ('sha256', 'md5'):
key = '%s_digest' % algo
if key in info:
result = (algo, info[key])
break
return result | def function[_get_digest, parameter[self, info]]:
constant[
Get a digest from a dictionary by looking at keys of the form
'algo_digest'.
Returns a 2-tuple (algo, digest) if found, else None. Currently
looks only for SHA256, then MD5.
]
variable[result] assign[=] constant[None]
for taget[name[algo]] in starred[tuple[[<ast.Constant object at 0x7da18dc07cd0>, <ast.Constant object at 0x7da18dc04730>]]] begin[:]
variable[key] assign[=] binary_operation[constant[%s_digest] <ast.Mod object at 0x7da2590d6920> name[algo]]
if compare[name[key] in name[info]] begin[:]
variable[result] assign[=] tuple[[<ast.Name object at 0x7da18dc049a0>, <ast.Subscript object at 0x7da18dc05a80>]]
break
return[name[result]] | keyword[def] identifier[_get_digest] ( identifier[self] , identifier[info] ):
literal[string]
identifier[result] = keyword[None]
keyword[for] identifier[algo] keyword[in] ( literal[string] , literal[string] ):
identifier[key] = literal[string] % identifier[algo]
keyword[if] identifier[key] keyword[in] identifier[info] :
identifier[result] =( identifier[algo] , identifier[info] [ identifier[key] ])
keyword[break]
keyword[return] identifier[result] | def _get_digest(self, info):
"""
Get a digest from a dictionary by looking at keys of the form
'algo_digest'.
Returns a 2-tuple (algo, digest) if found, else None. Currently
looks only for SHA256, then MD5.
"""
result = None
for algo in ('sha256', 'md5'):
key = '%s_digest' % algo
if key in info:
result = (algo, info[key])
break # depends on [control=['if'], data=['key', 'info']] # depends on [control=['for'], data=['algo']]
return result |
def get_branching_nodes(self):
"""
Returns all nodes that has an out degree >= 2
"""
nodes = set()
for n in self.graph.nodes():
if self.graph.out_degree(n) >= 2:
nodes.add(n)
return nodes | def function[get_branching_nodes, parameter[self]]:
constant[
Returns all nodes that has an out degree >= 2
]
variable[nodes] assign[=] call[name[set], parameter[]]
for taget[name[n]] in starred[call[name[self].graph.nodes, parameter[]]] begin[:]
if compare[call[name[self].graph.out_degree, parameter[name[n]]] greater_or_equal[>=] constant[2]] begin[:]
call[name[nodes].add, parameter[name[n]]]
return[name[nodes]] | keyword[def] identifier[get_branching_nodes] ( identifier[self] ):
literal[string]
identifier[nodes] = identifier[set] ()
keyword[for] identifier[n] keyword[in] identifier[self] . identifier[graph] . identifier[nodes] ():
keyword[if] identifier[self] . identifier[graph] . identifier[out_degree] ( identifier[n] )>= literal[int] :
identifier[nodes] . identifier[add] ( identifier[n] )
keyword[return] identifier[nodes] | def get_branching_nodes(self):
"""
Returns all nodes that has an out degree >= 2
"""
nodes = set()
for n in self.graph.nodes():
if self.graph.out_degree(n) >= 2:
nodes.add(n) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['n']]
return nodes |
def link_android(self, path, pkg):
""" Link's the android project to this library.
1. Includes this project's directory in the app's
android/settings.gradle
It adds:
include ':<project-name>'
project(':<project-name>').projectDir = new File(
rootProject.projectDir, '../packages/<project-name>/android')
2. Add's this project as a dependency to the android/app/build.gradle
It adds:
compile project(':<project-name>')
to the dependencies.
3. If preset, adds the import and package statement
to the android/app/src/main/java/<bundle/id>/MainApplication.java
"""
bundle_id = self.ctx['bundle_id']
pkg_root = join(path, pkg)
#: Check if it's already linked
with open(join('android', 'settings.gradle')) as f:
settings_gradle = f.read()
with open(join('android', 'app', 'build.gradle')) as f:
build_gradle = f.read()
#: Find the MainApplication.java
main_app_java_path = join('android', 'app', 'src', 'main', 'java',
join(*bundle_id.split(".")),
'MainApplication.java')
with open(main_app_java_path) as f:
main_application_java = f.read()
try:
#: Now link all the EnamlPackages we can find in the new "package"
new_packages = Link.find_packages(join(path, pkg))
if not new_packages:
print("[Android] {} No EnamlPackages found to link!".format(
pkg))
return
#: Link settings.gradle
if not Link.is_settings_linked(settings_gradle, pkg):
#: Add two statements
new_settings = settings_gradle.split("\n")
new_settings.append("") # Blank line
new_settings.append("include ':{name}'".format(name=pkg))
new_settings.append("project(':{name}').projectDir = "
"new File(rootProject.projectDir, "
"'../{path}/android/{name}')"
.format(name=pkg, path=self.package_dir))
with open(join('android', 'settings.gradle'), 'w') as f:
f.write("\n".join(new_settings))
print("[Android] {} linked in settings.gradle!".format(pkg))
else:
print("[Android] {} was already linked in "
"settings.gradle!".format(pkg))
#: Link app/build.gradle
if not Link.is_build_linked(build_gradle, pkg):
#: Add two statements
new_build = build_gradle.split("\n")
#: Find correct line number
found = False
for i, line in enumerate(new_build):
if re.match(r"dependencies\s*{", line):
found = True
continue
if found and "}" in line:
#: Hackish way to find line of the closing bracket after
#: the dependencies { block is found
break
if not found:
raise ValueError("Unable to find dependencies in "
"{pkg}/app/build.gradle!".format(pkg=pkg))
#: Insert before the closing bracket
new_build.insert(i, " api project(':{name}')".format(
name=pkg))
with open(join('android', 'app', 'build.gradle'), 'w') as f:
f.write("\n".join(new_build))
print("[Android] {} linked in app/build.gradle!".format(pkg))
else:
print("[Android] {} was already linked in "
"app/build.gradle!".format(pkg))
new_app_java = []
for package in new_packages:
#: Add our import statement
javacls = os.path.splitext(package)[0].replace("/", ".")
if not Link.is_app_linked(main_application_java, pkg, javacls):
#: Reuse previous if avialable
new_app_java = (new_app_java or
main_application_java.split("\n"))
#: Find last import statement
j = 0
for i, line in enumerate(new_app_java):
if fnmatch.fnmatch(line, "import *;"):
j = i
new_app_java.insert(j+1, "import {};".format(javacls))
#: Add the package statement
j = 0
for i, line in enumerate(new_app_java):
if fnmatch.fnmatch(line.strip(), "new *Package()"):
j = i
if j == 0:
raise ValueError("Could not find the correct spot to "
"add package {}".format(javacls))
else:
#: Get indent and add to previous line
#: Add comma to previous line
new_app_java[j] = new_app_java[j]+ ","
#: Insert new line
new_app_java.insert(j+1, " new {}()"
.format(javacls.split(".")[-1]))
else:
print("[Android] {} was already linked in {}!".format(
pkg, main_app_java_path))
if new_app_java:
with open(main_app_java_path, 'w') as f:
f.write("\n".join(new_app_java))
print(Colors.GREEN+"[Android] {} linked successfully!".format(
pkg)+Colors.RESET)
except Exception as e:
print(Colors.GREEN+"[Android] {} Failed to link. "
"Reverting due to error: "
"{}".format(pkg, e)+Colors.RESET)
#: Undo any changes
with open(join('android', 'settings.gradle'), 'w') as f:
f.write(settings_gradle)
with open(join('android', 'app', 'build.gradle'), 'w') as f:
f.write(build_gradle)
with open(main_app_java_path, 'w') as f:
f.write(main_application_java)
#: Now blow up
raise | def function[link_android, parameter[self, path, pkg]]:
constant[ Link's the android project to this library.
1. Includes this project's directory in the app's
android/settings.gradle
It adds:
include ':<project-name>'
project(':<project-name>').projectDir = new File(
rootProject.projectDir, '../packages/<project-name>/android')
2. Add's this project as a dependency to the android/app/build.gradle
It adds:
compile project(':<project-name>')
to the dependencies.
3. If preset, adds the import and package statement
to the android/app/src/main/java/<bundle/id>/MainApplication.java
]
variable[bundle_id] assign[=] call[name[self].ctx][constant[bundle_id]]
variable[pkg_root] assign[=] call[name[join], parameter[name[path], name[pkg]]]
with call[name[open], parameter[call[name[join], parameter[constant[android], constant[settings.gradle]]]]] begin[:]
variable[settings_gradle] assign[=] call[name[f].read, parameter[]]
with call[name[open], parameter[call[name[join], parameter[constant[android], constant[app], constant[build.gradle]]]]] begin[:]
variable[build_gradle] assign[=] call[name[f].read, parameter[]]
variable[main_app_java_path] assign[=] call[name[join], parameter[constant[android], constant[app], constant[src], constant[main], constant[java], call[name[join], parameter[<ast.Starred object at 0x7da20c6c7610>]], constant[MainApplication.java]]]
with call[name[open], parameter[name[main_app_java_path]]] begin[:]
variable[main_application_java] assign[=] call[name[f].read, parameter[]]
<ast.Try object at 0x7da20c6c4160> | keyword[def] identifier[link_android] ( identifier[self] , identifier[path] , identifier[pkg] ):
literal[string]
identifier[bundle_id] = identifier[self] . identifier[ctx] [ literal[string] ]
identifier[pkg_root] = identifier[join] ( identifier[path] , identifier[pkg] )
keyword[with] identifier[open] ( identifier[join] ( literal[string] , literal[string] )) keyword[as] identifier[f] :
identifier[settings_gradle] = identifier[f] . identifier[read] ()
keyword[with] identifier[open] ( identifier[join] ( literal[string] , literal[string] , literal[string] )) keyword[as] identifier[f] :
identifier[build_gradle] = identifier[f] . identifier[read] ()
identifier[main_app_java_path] = identifier[join] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
identifier[join] (* identifier[bundle_id] . identifier[split] ( literal[string] )),
literal[string] )
keyword[with] identifier[open] ( identifier[main_app_java_path] ) keyword[as] identifier[f] :
identifier[main_application_java] = identifier[f] . identifier[read] ()
keyword[try] :
identifier[new_packages] = identifier[Link] . identifier[find_packages] ( identifier[join] ( identifier[path] , identifier[pkg] ))
keyword[if] keyword[not] identifier[new_packages] :
identifier[print] ( literal[string] . identifier[format] (
identifier[pkg] ))
keyword[return]
keyword[if] keyword[not] identifier[Link] . identifier[is_settings_linked] ( identifier[settings_gradle] , identifier[pkg] ):
identifier[new_settings] = identifier[settings_gradle] . identifier[split] ( literal[string] )
identifier[new_settings] . identifier[append] ( literal[string] )
identifier[new_settings] . identifier[append] ( literal[string] . identifier[format] ( identifier[name] = identifier[pkg] ))
identifier[new_settings] . identifier[append] ( literal[string]
literal[string]
literal[string]
. identifier[format] ( identifier[name] = identifier[pkg] , identifier[path] = identifier[self] . identifier[package_dir] ))
keyword[with] identifier[open] ( identifier[join] ( literal[string] , literal[string] ), literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( literal[string] . identifier[join] ( identifier[new_settings] ))
identifier[print] ( literal[string] . identifier[format] ( identifier[pkg] ))
keyword[else] :
identifier[print] ( literal[string]
literal[string] . identifier[format] ( identifier[pkg] ))
keyword[if] keyword[not] identifier[Link] . identifier[is_build_linked] ( identifier[build_gradle] , identifier[pkg] ):
identifier[new_build] = identifier[build_gradle] . identifier[split] ( literal[string] )
identifier[found] = keyword[False]
keyword[for] identifier[i] , identifier[line] keyword[in] identifier[enumerate] ( identifier[new_build] ):
keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[line] ):
identifier[found] = keyword[True]
keyword[continue]
keyword[if] identifier[found] keyword[and] literal[string] keyword[in] identifier[line] :
keyword[break]
keyword[if] keyword[not] identifier[found] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[pkg] = identifier[pkg] ))
identifier[new_build] . identifier[insert] ( identifier[i] , literal[string] . identifier[format] (
identifier[name] = identifier[pkg] ))
keyword[with] identifier[open] ( identifier[join] ( literal[string] , literal[string] , literal[string] ), literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( literal[string] . identifier[join] ( identifier[new_build] ))
identifier[print] ( literal[string] . identifier[format] ( identifier[pkg] ))
keyword[else] :
identifier[print] ( literal[string]
literal[string] . identifier[format] ( identifier[pkg] ))
identifier[new_app_java] =[]
keyword[for] identifier[package] keyword[in] identifier[new_packages] :
identifier[javacls] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[package] )[ literal[int] ]. identifier[replace] ( literal[string] , literal[string] )
keyword[if] keyword[not] identifier[Link] . identifier[is_app_linked] ( identifier[main_application_java] , identifier[pkg] , identifier[javacls] ):
identifier[new_app_java] =( identifier[new_app_java] keyword[or]
identifier[main_application_java] . identifier[split] ( literal[string] ))
identifier[j] = literal[int]
keyword[for] identifier[i] , identifier[line] keyword[in] identifier[enumerate] ( identifier[new_app_java] ):
keyword[if] identifier[fnmatch] . identifier[fnmatch] ( identifier[line] , literal[string] ):
identifier[j] = identifier[i]
identifier[new_app_java] . identifier[insert] ( identifier[j] + literal[int] , literal[string] . identifier[format] ( identifier[javacls] ))
identifier[j] = literal[int]
keyword[for] identifier[i] , identifier[line] keyword[in] identifier[enumerate] ( identifier[new_app_java] ):
keyword[if] identifier[fnmatch] . identifier[fnmatch] ( identifier[line] . identifier[strip] (), literal[string] ):
identifier[j] = identifier[i]
keyword[if] identifier[j] == literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[javacls] ))
keyword[else] :
identifier[new_app_java] [ identifier[j] ]= identifier[new_app_java] [ identifier[j] ]+ literal[string]
identifier[new_app_java] . identifier[insert] ( identifier[j] + literal[int] , literal[string]
. identifier[format] ( identifier[javacls] . identifier[split] ( literal[string] )[- literal[int] ]))
keyword[else] :
identifier[print] ( literal[string] . identifier[format] (
identifier[pkg] , identifier[main_app_java_path] ))
keyword[if] identifier[new_app_java] :
keyword[with] identifier[open] ( identifier[main_app_java_path] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( literal[string] . identifier[join] ( identifier[new_app_java] ))
identifier[print] ( identifier[Colors] . identifier[GREEN] + literal[string] . identifier[format] (
identifier[pkg] )+ identifier[Colors] . identifier[RESET] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[print] ( identifier[Colors] . identifier[GREEN] + literal[string]
literal[string]
literal[string] . identifier[format] ( identifier[pkg] , identifier[e] )+ identifier[Colors] . identifier[RESET] )
keyword[with] identifier[open] ( identifier[join] ( literal[string] , literal[string] ), literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[settings_gradle] )
keyword[with] identifier[open] ( identifier[join] ( literal[string] , literal[string] , literal[string] ), literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[build_gradle] )
keyword[with] identifier[open] ( identifier[main_app_java_path] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[main_application_java] )
keyword[raise] | def link_android(self, path, pkg):
""" Link's the android project to this library.
1. Includes this project's directory in the app's
android/settings.gradle
It adds:
include ':<project-name>'
project(':<project-name>').projectDir = new File(
rootProject.projectDir, '../packages/<project-name>/android')
2. Add's this project as a dependency to the android/app/build.gradle
It adds:
compile project(':<project-name>')
to the dependencies.
3. If preset, adds the import and package statement
to the android/app/src/main/java/<bundle/id>/MainApplication.java
"""
bundle_id = self.ctx['bundle_id']
pkg_root = join(path, pkg)
#: Check if it's already linked
with open(join('android', 'settings.gradle')) as f:
settings_gradle = f.read() # depends on [control=['with'], data=['f']]
with open(join('android', 'app', 'build.gradle')) as f:
build_gradle = f.read() # depends on [control=['with'], data=['f']]
#: Find the MainApplication.java
main_app_java_path = join('android', 'app', 'src', 'main', 'java', join(*bundle_id.split('.')), 'MainApplication.java')
with open(main_app_java_path) as f:
main_application_java = f.read() # depends on [control=['with'], data=['f']]
try:
#: Now link all the EnamlPackages we can find in the new "package"
new_packages = Link.find_packages(join(path, pkg))
if not new_packages:
print('[Android] {} No EnamlPackages found to link!'.format(pkg))
return # depends on [control=['if'], data=[]]
#: Link settings.gradle
if not Link.is_settings_linked(settings_gradle, pkg):
#: Add two statements
new_settings = settings_gradle.split('\n')
new_settings.append('') # Blank line
new_settings.append("include ':{name}'".format(name=pkg))
new_settings.append("project(':{name}').projectDir = new File(rootProject.projectDir, '../{path}/android/{name}')".format(name=pkg, path=self.package_dir))
with open(join('android', 'settings.gradle'), 'w') as f:
f.write('\n'.join(new_settings)) # depends on [control=['with'], data=['f']]
print('[Android] {} linked in settings.gradle!'.format(pkg)) # depends on [control=['if'], data=[]]
else:
print('[Android] {} was already linked in settings.gradle!'.format(pkg))
#: Link app/build.gradle
if not Link.is_build_linked(build_gradle, pkg):
#: Add two statements
new_build = build_gradle.split('\n')
#: Find correct line number
found = False
for (i, line) in enumerate(new_build):
if re.match('dependencies\\s*{', line):
found = True
continue # depends on [control=['if'], data=[]]
if found and '}' in line:
#: Hackish way to find line of the closing bracket after
#: the dependencies { block is found
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if not found:
raise ValueError('Unable to find dependencies in {pkg}/app/build.gradle!'.format(pkg=pkg)) # depends on [control=['if'], data=[]]
#: Insert before the closing bracket
new_build.insert(i, " api project(':{name}')".format(name=pkg))
with open(join('android', 'app', 'build.gradle'), 'w') as f:
f.write('\n'.join(new_build)) # depends on [control=['with'], data=['f']]
print('[Android] {} linked in app/build.gradle!'.format(pkg)) # depends on [control=['if'], data=[]]
else:
print('[Android] {} was already linked in app/build.gradle!'.format(pkg))
new_app_java = []
for package in new_packages:
#: Add our import statement
javacls = os.path.splitext(package)[0].replace('/', '.')
if not Link.is_app_linked(main_application_java, pkg, javacls):
#: Reuse previous if avialable
new_app_java = new_app_java or main_application_java.split('\n')
#: Find last import statement
j = 0
for (i, line) in enumerate(new_app_java):
if fnmatch.fnmatch(line, 'import *;'):
j = i # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
new_app_java.insert(j + 1, 'import {};'.format(javacls))
#: Add the package statement
j = 0
for (i, line) in enumerate(new_app_java):
if fnmatch.fnmatch(line.strip(), 'new *Package()'):
j = i # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if j == 0:
raise ValueError('Could not find the correct spot to add package {}'.format(javacls)) # depends on [control=['if'], data=[]]
else:
#: Get indent and add to previous line
#: Add comma to previous line
new_app_java[j] = new_app_java[j] + ','
#: Insert new line
new_app_java.insert(j + 1, ' new {}()'.format(javacls.split('.')[-1])) # depends on [control=['if'], data=[]]
else:
print('[Android] {} was already linked in {}!'.format(pkg, main_app_java_path)) # depends on [control=['for'], data=['package']]
if new_app_java:
with open(main_app_java_path, 'w') as f:
f.write('\n'.join(new_app_java)) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]]
print(Colors.GREEN + '[Android] {} linked successfully!'.format(pkg) + Colors.RESET) # depends on [control=['try'], data=[]]
except Exception as e:
print(Colors.GREEN + '[Android] {} Failed to link. Reverting due to error: {}'.format(pkg, e) + Colors.RESET)
#: Undo any changes
with open(join('android', 'settings.gradle'), 'w') as f:
f.write(settings_gradle) # depends on [control=['with'], data=['f']]
with open(join('android', 'app', 'build.gradle'), 'w') as f:
f.write(build_gradle) # depends on [control=['with'], data=['f']]
with open(main_app_java_path, 'w') as f:
f.write(main_application_java) # depends on [control=['with'], data=['f']]
#: Now blow up
raise # depends on [control=['except'], data=['e']] |
def set_vnic_connectivity_status(nic_spec, to_connect):
"""
sets the device spec as connected or disconnected
:param nic_spec: the specification
:param to_connect: bool
"""
nic_spec.device.connectable = vim.vm.device.VirtualDevice.ConnectInfo()
nic_spec.device.connectable.connected = to_connect
nic_spec.device.connectable.startConnected = to_connect | def function[set_vnic_connectivity_status, parameter[nic_spec, to_connect]]:
constant[
sets the device spec as connected or disconnected
:param nic_spec: the specification
:param to_connect: bool
]
name[nic_spec].device.connectable assign[=] call[name[vim].vm.device.VirtualDevice.ConnectInfo, parameter[]]
name[nic_spec].device.connectable.connected assign[=] name[to_connect]
name[nic_spec].device.connectable.startConnected assign[=] name[to_connect] | keyword[def] identifier[set_vnic_connectivity_status] ( identifier[nic_spec] , identifier[to_connect] ):
literal[string]
identifier[nic_spec] . identifier[device] . identifier[connectable] = identifier[vim] . identifier[vm] . identifier[device] . identifier[VirtualDevice] . identifier[ConnectInfo] ()
identifier[nic_spec] . identifier[device] . identifier[connectable] . identifier[connected] = identifier[to_connect]
identifier[nic_spec] . identifier[device] . identifier[connectable] . identifier[startConnected] = identifier[to_connect] | def set_vnic_connectivity_status(nic_spec, to_connect):
"""
sets the device spec as connected or disconnected
:param nic_spec: the specification
:param to_connect: bool
"""
nic_spec.device.connectable = vim.vm.device.VirtualDevice.ConnectInfo()
nic_spec.device.connectable.connected = to_connect
nic_spec.device.connectable.startConnected = to_connect |
def get_all_tags(filters=None, region=None, key=None, keyid=None, profile=None):
'''
Describe all tags matching the filter criteria, or all tags in the account otherwise.
.. versionadded:: 2018.3.0
filters
(dict) - Additional constraints on which volumes to return. Note that valid filters vary
extensively depending on the resource type. When in doubt, search first without a filter
and then use the returned data to help fine-tune your search. You can generally garner the
resource type from its ID (e.g. `vol-XXXXX` is a volume, `i-XXXXX` is an instance, etc.
CLI Example:
.. code-block:: bash
salt-call boto_ec2.get_all_tags '{"tag:Name": myInstanceNameTag, resource-type: instance}'
'''
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
try:
ret = conn.get_all_tags(filters)
tags = {}
for t in ret:
if t.res_id not in tags:
tags[t.res_id] = {}
tags[t.res_id][t.name] = t.value
return tags
except boto.exception.BotoServerError as e:
log.error(e)
return {} | def function[get_all_tags, parameter[filters, region, key, keyid, profile]]:
constant[
Describe all tags matching the filter criteria, or all tags in the account otherwise.
.. versionadded:: 2018.3.0
filters
(dict) - Additional constraints on which volumes to return. Note that valid filters vary
extensively depending on the resource type. When in doubt, search first without a filter
and then use the returned data to help fine-tune your search. You can generally garner the
resource type from its ID (e.g. `vol-XXXXX` is a volume, `i-XXXXX` is an instance, etc.
CLI Example:
.. code-block:: bash
salt-call boto_ec2.get_all_tags '{"tag:Name": myInstanceNameTag, resource-type: instance}'
]
variable[conn] assign[=] call[name[_get_conn], parameter[]]
<ast.Try object at 0x7da1b2007220> | keyword[def] identifier[get_all_tags] ( identifier[filters] = keyword[None] , identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ):
literal[string]
identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
keyword[try] :
identifier[ret] = identifier[conn] . identifier[get_all_tags] ( identifier[filters] )
identifier[tags] ={}
keyword[for] identifier[t] keyword[in] identifier[ret] :
keyword[if] identifier[t] . identifier[res_id] keyword[not] keyword[in] identifier[tags] :
identifier[tags] [ identifier[t] . identifier[res_id] ]={}
identifier[tags] [ identifier[t] . identifier[res_id] ][ identifier[t] . identifier[name] ]= identifier[t] . identifier[value]
keyword[return] identifier[tags]
keyword[except] identifier[boto] . identifier[exception] . identifier[BotoServerError] keyword[as] identifier[e] :
identifier[log] . identifier[error] ( identifier[e] )
keyword[return] {} | def get_all_tags(filters=None, region=None, key=None, keyid=None, profile=None):
"""
Describe all tags matching the filter criteria, or all tags in the account otherwise.
.. versionadded:: 2018.3.0
filters
(dict) - Additional constraints on which volumes to return. Note that valid filters vary
extensively depending on the resource type. When in doubt, search first without a filter
and then use the returned data to help fine-tune your search. You can generally garner the
resource type from its ID (e.g. `vol-XXXXX` is a volume, `i-XXXXX` is an instance, etc.
CLI Example:
.. code-block:: bash
salt-call boto_ec2.get_all_tags '{"tag:Name": myInstanceNameTag, resource-type: instance}'
"""
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
try:
ret = conn.get_all_tags(filters)
tags = {}
for t in ret:
if t.res_id not in tags:
tags[t.res_id] = {} # depends on [control=['if'], data=['tags']]
tags[t.res_id][t.name] = t.value # depends on [control=['for'], data=['t']]
return tags # depends on [control=['try'], data=[]]
except boto.exception.BotoServerError as e:
log.error(e)
return {} # depends on [control=['except'], data=['e']] |
def laea2cf(area):
"""Return the cf grid mapping for the laea projection."""
proj_dict = area.proj_dict
args = dict(latitude_of_projection_origin=proj_dict.get('lat_0'),
longitude_of_projection_origin=proj_dict.get('lon_0'),
grid_mapping_name='lambert_azimuthal_equal_area',
)
return args | def function[laea2cf, parameter[area]]:
constant[Return the cf grid mapping for the laea projection.]
variable[proj_dict] assign[=] name[area].proj_dict
variable[args] assign[=] call[name[dict], parameter[]]
return[name[args]] | keyword[def] identifier[laea2cf] ( identifier[area] ):
literal[string]
identifier[proj_dict] = identifier[area] . identifier[proj_dict]
identifier[args] = identifier[dict] ( identifier[latitude_of_projection_origin] = identifier[proj_dict] . identifier[get] ( literal[string] ),
identifier[longitude_of_projection_origin] = identifier[proj_dict] . identifier[get] ( literal[string] ),
identifier[grid_mapping_name] = literal[string] ,
)
keyword[return] identifier[args] | def laea2cf(area):
"""Return the cf grid mapping for the laea projection."""
proj_dict = area.proj_dict
args = dict(latitude_of_projection_origin=proj_dict.get('lat_0'), longitude_of_projection_origin=proj_dict.get('lon_0'), grid_mapping_name='lambert_azimuthal_equal_area')
return args |
def replace_stripe_gateway_by_id(cls, stripe_gateway_id, stripe_gateway, **kwargs):
"""Replace StripeGateway
Replace all attributes of StripeGateway
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_stripe_gateway_by_id(stripe_gateway_id, stripe_gateway, async=True)
>>> result = thread.get()
:param async bool
:param str stripe_gateway_id: ID of stripeGateway to replace (required)
:param StripeGateway stripe_gateway: Attributes of stripeGateway to replace (required)
:return: StripeGateway
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._replace_stripe_gateway_by_id_with_http_info(stripe_gateway_id, stripe_gateway, **kwargs)
else:
(data) = cls._replace_stripe_gateway_by_id_with_http_info(stripe_gateway_id, stripe_gateway, **kwargs)
return data | def function[replace_stripe_gateway_by_id, parameter[cls, stripe_gateway_id, stripe_gateway]]:
constant[Replace StripeGateway
Replace all attributes of StripeGateway
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_stripe_gateway_by_id(stripe_gateway_id, stripe_gateway, async=True)
>>> result = thread.get()
:param async bool
:param str stripe_gateway_id: ID of stripeGateway to replace (required)
:param StripeGateway stripe_gateway: Attributes of stripeGateway to replace (required)
:return: StripeGateway
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async]]] begin[:]
return[call[name[cls]._replace_stripe_gateway_by_id_with_http_info, parameter[name[stripe_gateway_id], name[stripe_gateway]]]] | keyword[def] identifier[replace_stripe_gateway_by_id] ( identifier[cls] , identifier[stripe_gateway_id] , identifier[stripe_gateway] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[cls] . identifier[_replace_stripe_gateway_by_id_with_http_info] ( identifier[stripe_gateway_id] , identifier[stripe_gateway] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[cls] . identifier[_replace_stripe_gateway_by_id_with_http_info] ( identifier[stripe_gateway_id] , identifier[stripe_gateway] ,** identifier[kwargs] )
keyword[return] identifier[data] | def replace_stripe_gateway_by_id(cls, stripe_gateway_id, stripe_gateway, **kwargs):
"""Replace StripeGateway
Replace all attributes of StripeGateway
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_stripe_gateway_by_id(stripe_gateway_id, stripe_gateway, async=True)
>>> result = thread.get()
:param async bool
:param str stripe_gateway_id: ID of stripeGateway to replace (required)
:param StripeGateway stripe_gateway: Attributes of stripeGateway to replace (required)
:return: StripeGateway
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._replace_stripe_gateway_by_id_with_http_info(stripe_gateway_id, stripe_gateway, **kwargs) # depends on [control=['if'], data=[]]
else:
data = cls._replace_stripe_gateway_by_id_with_http_info(stripe_gateway_id, stripe_gateway, **kwargs)
return data |
def output(self, resource):
"""Wrap a resource (as a flask view function).
This is for cases where the resource does not directly return
a response object. Now everything should be a Response object.
:param resource: The resource as a flask view function
"""
@wraps(resource)
def wrapper(*args, **kwargs):
rv = resource(*args, **kwargs)
rv = self.responder(rv)
return rv
return wrapper | def function[output, parameter[self, resource]]:
constant[Wrap a resource (as a flask view function).
This is for cases where the resource does not directly return
a response object. Now everything should be a Response object.
:param resource: The resource as a flask view function
]
def function[wrapper, parameter[]]:
variable[rv] assign[=] call[name[resource], parameter[<ast.Starred object at 0x7da1b0a36a40>]]
variable[rv] assign[=] call[name[self].responder, parameter[name[rv]]]
return[name[rv]]
return[name[wrapper]] | keyword[def] identifier[output] ( identifier[self] , identifier[resource] ):
literal[string]
@ identifier[wraps] ( identifier[resource] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
identifier[rv] = identifier[resource] (* identifier[args] ,** identifier[kwargs] )
identifier[rv] = identifier[self] . identifier[responder] ( identifier[rv] )
keyword[return] identifier[rv]
keyword[return] identifier[wrapper] | def output(self, resource):
"""Wrap a resource (as a flask view function).
This is for cases where the resource does not directly return
a response object. Now everything should be a Response object.
:param resource: The resource as a flask view function
"""
@wraps(resource)
def wrapper(*args, **kwargs):
rv = resource(*args, **kwargs)
rv = self.responder(rv)
return rv
return wrapper |
def tofrequency(self, rf, v0, rfq):
"""Convert a Doppler type value (e.g. in radio mode) to a
frequency. The type of frequency (e.g. LSRK) and a rest frequency
(either as a frequency quantity (e.g. ``dm.constants('HI'))`` or
a frequency measure (e.g. ``dm.frequency('rest','5100MHz'))`` should
be specified.
:param rf: frequency reference code (see :meth:`frequency`)
:param v0: a doppler measure
:param rfq: frequency measure or quantity
Example::
dop = dm.doppler('radio',0.4)
freq = dm.tofrequency('lsrk', dop, dm.constants('HI'))
"""
if is_measure(rfq) and rfq['type'] == 'frequency':
rfq = dq.quantity(rfq['m0'])
elif isinstance(rfq, str):
rfq = dq.quantity(rfq)
if is_measure(v0) and v0['type'] == 'doppler' \
and dq.is_quantity(rfq) \
and rfq.conforms(dq.quantity('Hz')):
return self.doptofreq(v0, rf, rfq)
else:
raise TypeError('Illegal Doppler or rest frequency specified') | def function[tofrequency, parameter[self, rf, v0, rfq]]:
constant[Convert a Doppler type value (e.g. in radio mode) to a
frequency. The type of frequency (e.g. LSRK) and a rest frequency
(either as a frequency quantity (e.g. ``dm.constants('HI'))`` or
a frequency measure (e.g. ``dm.frequency('rest','5100MHz'))`` should
be specified.
:param rf: frequency reference code (see :meth:`frequency`)
:param v0: a doppler measure
:param rfq: frequency measure or quantity
Example::
dop = dm.doppler('radio',0.4)
freq = dm.tofrequency('lsrk', dop, dm.constants('HI'))
]
if <ast.BoolOp object at 0x7da18dc07cd0> begin[:]
variable[rfq] assign[=] call[name[dq].quantity, parameter[call[name[rfq]][constant[m0]]]]
if <ast.BoolOp object at 0x7da18dc05d20> begin[:]
return[call[name[self].doptofreq, parameter[name[v0], name[rf], name[rfq]]]] | keyword[def] identifier[tofrequency] ( identifier[self] , identifier[rf] , identifier[v0] , identifier[rfq] ):
literal[string]
keyword[if] identifier[is_measure] ( identifier[rfq] ) keyword[and] identifier[rfq] [ literal[string] ]== literal[string] :
identifier[rfq] = identifier[dq] . identifier[quantity] ( identifier[rfq] [ literal[string] ])
keyword[elif] identifier[isinstance] ( identifier[rfq] , identifier[str] ):
identifier[rfq] = identifier[dq] . identifier[quantity] ( identifier[rfq] )
keyword[if] identifier[is_measure] ( identifier[v0] ) keyword[and] identifier[v0] [ literal[string] ]== literal[string] keyword[and] identifier[dq] . identifier[is_quantity] ( identifier[rfq] ) keyword[and] identifier[rfq] . identifier[conforms] ( identifier[dq] . identifier[quantity] ( literal[string] )):
keyword[return] identifier[self] . identifier[doptofreq] ( identifier[v0] , identifier[rf] , identifier[rfq] )
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] ) | def tofrequency(self, rf, v0, rfq):
"""Convert a Doppler type value (e.g. in radio mode) to a
frequency. The type of frequency (e.g. LSRK) and a rest frequency
(either as a frequency quantity (e.g. ``dm.constants('HI'))`` or
a frequency measure (e.g. ``dm.frequency('rest','5100MHz'))`` should
be specified.
:param rf: frequency reference code (see :meth:`frequency`)
:param v0: a doppler measure
:param rfq: frequency measure or quantity
Example::
dop = dm.doppler('radio',0.4)
freq = dm.tofrequency('lsrk', dop, dm.constants('HI'))
"""
if is_measure(rfq) and rfq['type'] == 'frequency':
rfq = dq.quantity(rfq['m0']) # depends on [control=['if'], data=[]]
elif isinstance(rfq, str):
rfq = dq.quantity(rfq) # depends on [control=['if'], data=[]]
if is_measure(v0) and v0['type'] == 'doppler' and dq.is_quantity(rfq) and rfq.conforms(dq.quantity('Hz')):
return self.doptofreq(v0, rf, rfq) # depends on [control=['if'], data=[]]
else:
raise TypeError('Illegal Doppler or rest frequency specified') |
def rm_incomplete_des_asc(des_mask, asc_mask):
'''Remove descents-ascents that have no corresponding ascent-descent
Args
----
des_mask: ndarray
Boolean mask of descents in the depth data
asc_mask: ndarray
Boolean mask of ascents in the depth data
Returns
-------
des_mask: ndarray
Boolean mask of descents with erroneous regions removed
asc_mask: ndarray
Boolean mask of ascents with erroneous regions removed
'''
from . import utils
# Get start/stop indices for descents and ascents
des_start, des_stop = utils.contiguous_regions(des_mask)
asc_start, asc_stop = utils.contiguous_regions(asc_mask)
des_mask = utils.rm_regions(des_mask, asc_mask, des_start, des_stop)
asc_mask = utils.rm_regions(asc_mask, des_mask, asc_start, asc_stop)
return des_mask, asc_mask | def function[rm_incomplete_des_asc, parameter[des_mask, asc_mask]]:
constant[Remove descents-ascents that have no corresponding ascent-descent
Args
----
des_mask: ndarray
Boolean mask of descents in the depth data
asc_mask: ndarray
Boolean mask of ascents in the depth data
Returns
-------
des_mask: ndarray
Boolean mask of descents with erroneous regions removed
asc_mask: ndarray
Boolean mask of ascents with erroneous regions removed
]
from relative_module[None] import module[utils]
<ast.Tuple object at 0x7da2045641f0> assign[=] call[name[utils].contiguous_regions, parameter[name[des_mask]]]
<ast.Tuple object at 0x7da204567a00> assign[=] call[name[utils].contiguous_regions, parameter[name[asc_mask]]]
variable[des_mask] assign[=] call[name[utils].rm_regions, parameter[name[des_mask], name[asc_mask], name[des_start], name[des_stop]]]
variable[asc_mask] assign[=] call[name[utils].rm_regions, parameter[name[asc_mask], name[des_mask], name[asc_start], name[asc_stop]]]
return[tuple[[<ast.Name object at 0x7da2045679d0>, <ast.Name object at 0x7da2045664d0>]]] | keyword[def] identifier[rm_incomplete_des_asc] ( identifier[des_mask] , identifier[asc_mask] ):
literal[string]
keyword[from] . keyword[import] identifier[utils]
identifier[des_start] , identifier[des_stop] = identifier[utils] . identifier[contiguous_regions] ( identifier[des_mask] )
identifier[asc_start] , identifier[asc_stop] = identifier[utils] . identifier[contiguous_regions] ( identifier[asc_mask] )
identifier[des_mask] = identifier[utils] . identifier[rm_regions] ( identifier[des_mask] , identifier[asc_mask] , identifier[des_start] , identifier[des_stop] )
identifier[asc_mask] = identifier[utils] . identifier[rm_regions] ( identifier[asc_mask] , identifier[des_mask] , identifier[asc_start] , identifier[asc_stop] )
keyword[return] identifier[des_mask] , identifier[asc_mask] | def rm_incomplete_des_asc(des_mask, asc_mask):
"""Remove descents-ascents that have no corresponding ascent-descent
Args
----
des_mask: ndarray
Boolean mask of descents in the depth data
asc_mask: ndarray
Boolean mask of ascents in the depth data
Returns
-------
des_mask: ndarray
Boolean mask of descents with erroneous regions removed
asc_mask: ndarray
Boolean mask of ascents with erroneous regions removed
"""
from . import utils
# Get start/stop indices for descents and ascents
(des_start, des_stop) = utils.contiguous_regions(des_mask)
(asc_start, asc_stop) = utils.contiguous_regions(asc_mask)
des_mask = utils.rm_regions(des_mask, asc_mask, des_start, des_stop)
asc_mask = utils.rm_regions(asc_mask, des_mask, asc_start, asc_stop)
return (des_mask, asc_mask) |
def Overlay_setShowFPSCounter(self, show):
    """
    Function path: Overlay.setShowFPSCounter
    Domain: Overlay
    Method name: setShowFPSCounter

    Requests that the backend shows the FPS counter.

    :param show: True for showing the FPS counter; must be a bool.
    :returns: result of the synchronous 'Overlay.setShowFPSCounter' command
        (the protocol method itself has no return value).
    """
    assert isinstance(show, (bool,)), (
        "Argument 'show' must be of type '['bool']'. Received type: '%s'"
        % type(show))
    return self.synchronous_command('Overlay.setShowFPSCounter', show=show)
return subdom_funcs | def function[Overlay_setShowFPSCounter, parameter[self, show]]:
constant[
Function path: Overlay.setShowFPSCounter
Domain: Overlay
Method name: setShowFPSCounter
Parameters:
Required arguments:
'show' (type: boolean) -> True for showing the FPS counter
No return value.
Description: Requests that backend shows the FPS counter
]
assert[call[name[isinstance], parameter[name[show], tuple[[<ast.Name object at 0x7da1b102afb0>]]]]]
variable[subdom_funcs] assign[=] call[name[self].synchronous_command, parameter[constant[Overlay.setShowFPSCounter]]]
return[name[subdom_funcs]] | keyword[def] identifier[Overlay_setShowFPSCounter] ( identifier[self] , identifier[show] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[show] ,( identifier[bool] ,)
), literal[string] % identifier[type] (
identifier[show] )
identifier[subdom_funcs] = identifier[self] . identifier[synchronous_command] ( literal[string] , identifier[show]
= identifier[show] )
keyword[return] identifier[subdom_funcs] | def Overlay_setShowFPSCounter(self, show):
"""
Function path: Overlay.setShowFPSCounter
Domain: Overlay
Method name: setShowFPSCounter
Parameters:
Required arguments:
'show' (type: boolean) -> True for showing the FPS counter
No return value.
Description: Requests that backend shows the FPS counter
"""
assert isinstance(show, (bool,)), "Argument 'show' must be of type '['bool']'. Received type: '%s'" % type(show)
subdom_funcs = self.synchronous_command('Overlay.setShowFPSCounter', show=show)
return subdom_funcs |
def convert_python_regex_to_ecma(value, flags=()):
    """Convert Python regex to ECMA 262 regex.

    If the given value is already an ECMA regex it is returned unchanged.

    :param string value: Python regex pattern.
    :param flags: Iterable of ``re`` flags (allowed flags: `re.I`, `re.M`).
        The default was changed from a mutable ``[]`` to an immutable ``()``;
        the argument is only iterated, so any iterable still works.
    :return: ECMA 262 regex of the form ``/pattern/flags``.
    :rtype: str
    """
    if is_ecma_regex(value):
        return value
    # Map each Python flag to its single-character ECMA equivalent and
    # concatenate them into the trailing flags section.
    result_flags = ''.join(PYTHON_TO_ECMA_FLAGS[f] for f in flags)
    return '/{value}/{flags}'.format(value=value, flags=result_flags)
constant[Convert Python regex to ECMA 262 regex.
If given value is already ECMA regex it will be returned unchanged.
:param string value: Python regex.
:param list flags: List of flags (allowed flags: `re.I`, `re.M`)
:return: ECMA 262 regex
:rtype: str
]
if call[name[is_ecma_regex], parameter[name[value]]] begin[:]
return[name[value]]
variable[result_flags] assign[=] <ast.ListComp object at 0x7da20c76ca30>
variable[result_flags] assign[=] call[constant[].join, parameter[name[result_flags]]]
return[call[constant[/{value}/{flags}].format, parameter[]]] | keyword[def] identifier[convert_python_regex_to_ecma] ( identifier[value] , identifier[flags] =[]):
literal[string]
keyword[if] identifier[is_ecma_regex] ( identifier[value] ):
keyword[return] identifier[value]
identifier[result_flags] =[ identifier[PYTHON_TO_ECMA_FLAGS] [ identifier[f] ] keyword[for] identifier[f] keyword[in] identifier[flags] ]
identifier[result_flags] = literal[string] . identifier[join] ( identifier[result_flags] )
keyword[return] literal[string] . identifier[format] ( identifier[value] = identifier[value] , identifier[flags] = identifier[result_flags] ) | def convert_python_regex_to_ecma(value, flags=[]):
"""Convert Python regex to ECMA 262 regex.
If given value is already ECMA regex it will be returned unchanged.
:param string value: Python regex.
:param list flags: List of flags (allowed flags: `re.I`, `re.M`)
:return: ECMA 262 regex
:rtype: str
"""
if is_ecma_regex(value):
return value # depends on [control=['if'], data=[]]
result_flags = [PYTHON_TO_ECMA_FLAGS[f] for f in flags]
result_flags = ''.join(result_flags)
return '/{value}/{flags}'.format(value=value, flags=result_flags) |
def push(self, components=None, yes=0):
    """
    Executes all satchel configurators to apply pending changes to the server.

    :param components: optional subset of component names to deploy;
        all pending components are deployed when None.
    :param yes: truthy (coerced with int()) to skip the interactive
        preview shown before deploying.
    """
    from burlap import notifier
    service = self.get_satchel('service')
    # Serialize deployments: hold the lock for the entire run; released in finally.
    self.lock()
    try:
        yes = int(yes)
        if not yes:
            # If we want to confirm the deployment with the user, and we're at the first server,
            # then run the preview.
            if self.genv.host_string == self.genv.hosts[0]:
                execute(partial(self.preview, components=components, ask=1))
        notifier.notify_pre_deployment()
        # plan_funcs: (name, callable) pairs to execute in order.
        # NOTE(review): component_order is computed but unused here.
        component_order, plan_funcs = self.get_component_funcs(components=components)
        service.pre_deploy()
        for func_name, plan_func in plan_funcs:
            print('Executing %s...' % func_name)
            plan_func()
        # presumably records the components as deployed without re-running
        # them — confirm against fake()'s implementation
        self.fake(components=components)
        service.post_deploy()
        notifier.notify_post_deployment()
    finally:
        self.unlock()
constant[
Executes all satchel configurators to apply pending changes to the server.
]
from relative_module[burlap] import module[notifier]
variable[service] assign[=] call[name[self].get_satchel, parameter[constant[service]]]
call[name[self].lock, parameter[]]
<ast.Try object at 0x7da1b00ad7e0> | keyword[def] identifier[push] ( identifier[self] , identifier[components] = keyword[None] , identifier[yes] = literal[int] ):
literal[string]
keyword[from] identifier[burlap] keyword[import] identifier[notifier]
identifier[service] = identifier[self] . identifier[get_satchel] ( literal[string] )
identifier[self] . identifier[lock] ()
keyword[try] :
identifier[yes] = identifier[int] ( identifier[yes] )
keyword[if] keyword[not] identifier[yes] :
keyword[if] identifier[self] . identifier[genv] . identifier[host_string] == identifier[self] . identifier[genv] . identifier[hosts] [ literal[int] ]:
identifier[execute] ( identifier[partial] ( identifier[self] . identifier[preview] , identifier[components] = identifier[components] , identifier[ask] = literal[int] ))
identifier[notifier] . identifier[notify_pre_deployment] ()
identifier[component_order] , identifier[plan_funcs] = identifier[self] . identifier[get_component_funcs] ( identifier[components] = identifier[components] )
identifier[service] . identifier[pre_deploy] ()
keyword[for] identifier[func_name] , identifier[plan_func] keyword[in] identifier[plan_funcs] :
identifier[print] ( literal[string] % identifier[func_name] )
identifier[plan_func] ()
identifier[self] . identifier[fake] ( identifier[components] = identifier[components] )
identifier[service] . identifier[post_deploy] ()
identifier[notifier] . identifier[notify_post_deployment] ()
keyword[finally] :
identifier[self] . identifier[unlock] () | def push(self, components=None, yes=0):
"""
Executes all satchel configurators to apply pending changes to the server.
"""
from burlap import notifier
service = self.get_satchel('service')
self.lock()
try:
yes = int(yes)
if not yes:
# If we want to confirm the deployment with the user, and we're at the first server,
# then run the preview.
if self.genv.host_string == self.genv.hosts[0]:
execute(partial(self.preview, components=components, ask=1)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
notifier.notify_pre_deployment()
(component_order, plan_funcs) = self.get_component_funcs(components=components)
service.pre_deploy()
for (func_name, plan_func) in plan_funcs:
print('Executing %s...' % func_name)
plan_func() # depends on [control=['for'], data=[]]
self.fake(components=components)
service.post_deploy()
notifier.notify_post_deployment() # depends on [control=['try'], data=[]]
finally:
self.unlock() |
def fields_equal(self, instance, fields_to_ignore=("id", "change_date", "changed_by")):
    """
    Return True if every compared field of ``instance`` equals this instance's.

    Many-to-many fields are never compared, and any field whose name appears
    in ``fields_to_ignore`` is skipped as well.

    Args:
        instance: the model instance to compare
        fields_to_ignore: List of fields that should not be compared for equality. By default
            includes `id`, `change_date`, and `changed_by`.

    Returns: True if the checked fields are all equivalent, else False
    """
    comparable = (
        field.name
        for field in self._meta.get_fields()
        if not field.many_to_many and field.name not in fields_to_ignore
    )
    return all(getattr(instance, name) == getattr(self, name) for name in comparable)
constant[
Compares this instance's fields to the supplied instance to test for equality.
This will ignore any fields in `fields_to_ignore`.
Note that this method ignores many-to-many fields.
Args:
instance: the model instance to compare
fields_to_ignore: List of fields that should not be compared for equality. By default
includes `id`, `change_date`, and `changed_by`.
Returns: True if the checked fields are all equivalent, else False
]
for taget[name[field]] in starred[call[name[self]._meta.get_fields, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da20c6c5030> begin[:]
if compare[call[name[getattr], parameter[name[instance], name[field].name]] not_equal[!=] call[name[getattr], parameter[name[self], name[field].name]]] begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[fields_equal] ( identifier[self] , identifier[instance] , identifier[fields_to_ignore] =( literal[string] , literal[string] , literal[string] )):
literal[string]
keyword[for] identifier[field] keyword[in] identifier[self] . identifier[_meta] . identifier[get_fields] ():
keyword[if] keyword[not] identifier[field] . identifier[many_to_many] keyword[and] identifier[field] . identifier[name] keyword[not] keyword[in] identifier[fields_to_ignore] :
keyword[if] identifier[getattr] ( identifier[instance] , identifier[field] . identifier[name] )!= identifier[getattr] ( identifier[self] , identifier[field] . identifier[name] ):
keyword[return] keyword[False]
keyword[return] keyword[True] | def fields_equal(self, instance, fields_to_ignore=('id', 'change_date', 'changed_by')):
"""
Compares this instance's fields to the supplied instance to test for equality.
This will ignore any fields in `fields_to_ignore`.
Note that this method ignores many-to-many fields.
Args:
instance: the model instance to compare
fields_to_ignore: List of fields that should not be compared for equality. By default
includes `id`, `change_date`, and `changed_by`.
Returns: True if the checked fields are all equivalent, else False
"""
for field in self._meta.get_fields():
if not field.many_to_many and field.name not in fields_to_ignore:
if getattr(instance, field.name) != getattr(self, field.name):
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']]
return True |
def __get_issue_comments(self, issue_number):
    """Fetch all comments of an issue, enriched with user and reaction data."""
    enriched = []
    # client.issue_comments yields pages of raw JSON text
    for page in self.client.issue_comments(issue_number):
        for item in json.loads(page):
            item['user_data'] = self.__get_user(item['user']['login'])
            item['reactions_data'] = self.__get_issue_comment_reactions(
                item.get('id'), item['reactions']['total_count'])
            enriched.append(item)
    return enriched
constant[Get issue comments]
variable[comments] assign[=] list[[]]
variable[group_comments] assign[=] call[name[self].client.issue_comments, parameter[name[issue_number]]]
for taget[name[raw_comments]] in starred[name[group_comments]] begin[:]
for taget[name[comment]] in starred[call[name[json].loads, parameter[name[raw_comments]]]] begin[:]
variable[comment_id] assign[=] call[name[comment].get, parameter[constant[id]]]
call[name[comment]][constant[user_data]] assign[=] call[name[self].__get_user, parameter[call[call[name[comment]][constant[user]]][constant[login]]]]
call[name[comment]][constant[reactions_data]] assign[=] call[name[self].__get_issue_comment_reactions, parameter[name[comment_id], call[call[name[comment]][constant[reactions]]][constant[total_count]]]]
call[name[comments].append, parameter[name[comment]]]
return[name[comments]] | keyword[def] identifier[__get_issue_comments] ( identifier[self] , identifier[issue_number] ):
literal[string]
identifier[comments] =[]
identifier[group_comments] = identifier[self] . identifier[client] . identifier[issue_comments] ( identifier[issue_number] )
keyword[for] identifier[raw_comments] keyword[in] identifier[group_comments] :
keyword[for] identifier[comment] keyword[in] identifier[json] . identifier[loads] ( identifier[raw_comments] ):
identifier[comment_id] = identifier[comment] . identifier[get] ( literal[string] )
identifier[comment] [ literal[string] ]= identifier[self] . identifier[__get_user] ( identifier[comment] [ literal[string] ][ literal[string] ])
identifier[comment] [ literal[string] ]= identifier[self] . identifier[__get_issue_comment_reactions] ( identifier[comment_id] , identifier[comment] [ literal[string] ][ literal[string] ])
identifier[comments] . identifier[append] ( identifier[comment] )
keyword[return] identifier[comments] | def __get_issue_comments(self, issue_number):
"""Get issue comments"""
comments = []
group_comments = self.client.issue_comments(issue_number)
for raw_comments in group_comments:
for comment in json.loads(raw_comments):
comment_id = comment.get('id')
comment['user_data'] = self.__get_user(comment['user']['login'])
comment['reactions_data'] = self.__get_issue_comment_reactions(comment_id, comment['reactions']['total_count'])
comments.append(comment) # depends on [control=['for'], data=['comment']] # depends on [control=['for'], data=['raw_comments']]
return comments |
def plotRealImg(sim, cam, rawdata, t: int, odir: Path=None, fg=None):
    """
    Plot one time step of raw imagery from all cameras, side by side.

    Parameters
    ----------
    sim : histfeas/simclass.py instance
        simulation parameters; ``sim.useCamBool`` and ``sim.dpi`` are read here
    cam : sequence of camclass.py instances
        camera descriptors; one subplot is drawn per camera
    rawdata : nframe x ny x nx ndarray
        raw frames, indexed per camera as ``rawdata[i]``
    t : int
        integer frame index to read
    odir : Path, optional
        output directory (where to write results)
    fg : matplotlib figure, optional
        figure handle to reuse (e.g. for an animation writer); a new
        black-background figure is created when None

    Notes
    -----
    Plots both cameras together, and magnetic zenith 1-D cut line
    and 1 degree radar beam red circle centered on magnetic zenith.
    """
    ncols = len(cam)
    # print('using {} cameras'.format(ncols))
    # T holds one timestamp per camera for this frame (filled below)
    T = nans(ncols, dtype=datetime)
    # if asi is not None:
    # ncols=3
    # if isinstance(asi,(tuple,list)):
    # pass
    # elif isinstance(asi,(str,Path)):
    # asi = Path(asi).expanduser()
    # if asi.is_dir():
    # asi=list(asi.glob('*.FITS'))
    if fg is None:
        # no figure supplied: create one and close it after writing
        doclose = True
        fg, axs = subplots(nrows=1, ncols=ncols, figsize=(
            15, 12), dpi=DPI, facecolor='black')
        axs = atleast_1d(axs)  # in case only 1
        # fg.set_size_inches(15,5) #clips off
    else:  # maintain original figure handle for anim.writer
        doclose = False
        fg.clf()
        axs = [fg.add_subplot(1, ncols, i + 1) for i in range(ncols)]
    for i, C in enumerate(cam):
        if C.usecam:  # HiST cameras
            # print('frame {}'.format(t))
            # hold times for all cameras at this time step
            T[i] = updateframe(t, rawdata[i], None, cam[i], axs[i], fg)
        elif C.name == 'asi':  # ASI
            # load the all-sky image nearest the first HiST camera's time
            dasc = dio.load(C.fn, treq=T[sim.useCamBool][0])
            C.tKeo = dasc.time
            updateframe(0, dasc.values, dasc.wavelength, C,
                        axs[i], fg)  # FIXME may need API update
            try:
                overlayrowcol(axs[i], C.hlrows, C.hlcols)
            except AttributeError:
                pass  # az/el were not registered
        else:
            logging.error(f'unknown camera {C.name} index {i}')
        if i == 0:
            # date label on the first subplot only
            # NOTE(review): assumes T[0] is set, i.e. cam[0] has usecam True — confirm
            axs[0].set_ylabel(datetime.strftime(
                T[0], '%x')).set_color('limegreen')
    # NOTE: commented out due to Matplotlib 1.x bugs
    # fg.suptitle(datetime.strftime(T[0],'%x')) #makes giant margins that tight_layout doesn't help, bug
    # fg.text(0.5,0.15,datetime.strftime(T[0],'%x'))#, va='top',ha='center') #bug too
    # fg.tight_layout()
    # fg.subplots_adjust(top=0.95)
    # TODO: T[0] is fastest cam now, but needs generalization
    writeplots(fg, 'rawFrame', T[0], odir=odir,
               dpi=sim.dpi, facecolor='k', doclose=doclose)
constant[
sim: histfeas/simclass.py
cam: camclass.py
rawdata: nframe x ny x nx ndarray
t: integer index to read
odir: output directory (where to write results)
plots both cameras together,
and magnetic zenith 1-D cut line
and 1 degree radar beam red circle centered on magnetic zenith
]
variable[ncols] assign[=] call[name[len], parameter[name[cam]]]
variable[T] assign[=] call[name[nans], parameter[name[ncols]]]
if compare[name[fg] is constant[None]] begin[:]
variable[doclose] assign[=] constant[True]
<ast.Tuple object at 0x7da1b1460430> assign[=] call[name[subplots], parameter[]]
variable[axs] assign[=] call[name[atleast_1d], parameter[name[axs]]]
for taget[tuple[[<ast.Name object at 0x7da1b14630d0>, <ast.Name object at 0x7da1b1461330>]]] in starred[call[name[enumerate], parameter[name[cam]]]] begin[:]
if name[C].usecam begin[:]
call[name[T]][name[i]] assign[=] call[name[updateframe], parameter[name[t], call[name[rawdata]][name[i]], constant[None], call[name[cam]][name[i]], call[name[axs]][name[i]], name[fg]]]
if compare[name[i] equal[==] constant[0]] begin[:]
call[call[call[name[axs]][constant[0]].set_ylabel, parameter[call[name[datetime].strftime, parameter[call[name[T]][constant[0]], constant[%x]]]]].set_color, parameter[constant[limegreen]]]
call[name[writeplots], parameter[name[fg], constant[rawFrame], call[name[T]][constant[0]]]] | keyword[def] identifier[plotRealImg] ( identifier[sim] , identifier[cam] , identifier[rawdata] , identifier[t] : identifier[int] , identifier[odir] : identifier[Path] = keyword[None] , identifier[fg] = keyword[None] ):
literal[string]
identifier[ncols] = identifier[len] ( identifier[cam] )
identifier[T] = identifier[nans] ( identifier[ncols] , identifier[dtype] = identifier[datetime] )
keyword[if] identifier[fg] keyword[is] keyword[None] :
identifier[doclose] = keyword[True]
identifier[fg] , identifier[axs] = identifier[subplots] ( identifier[nrows] = literal[int] , identifier[ncols] = identifier[ncols] , identifier[figsize] =(
literal[int] , literal[int] ), identifier[dpi] = identifier[DPI] , identifier[facecolor] = literal[string] )
identifier[axs] = identifier[atleast_1d] ( identifier[axs] )
keyword[else] :
identifier[doclose] = keyword[False]
identifier[fg] . identifier[clf] ()
identifier[axs] =[ identifier[fg] . identifier[add_subplot] ( literal[int] , identifier[ncols] , identifier[i] + literal[int] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[ncols] )]
keyword[for] identifier[i] , identifier[C] keyword[in] identifier[enumerate] ( identifier[cam] ):
keyword[if] identifier[C] . identifier[usecam] :
identifier[T] [ identifier[i] ]= identifier[updateframe] ( identifier[t] , identifier[rawdata] [ identifier[i] ], keyword[None] , identifier[cam] [ identifier[i] ], identifier[axs] [ identifier[i] ], identifier[fg] )
keyword[elif] identifier[C] . identifier[name] == literal[string] :
identifier[dasc] = identifier[dio] . identifier[load] ( identifier[C] . identifier[fn] , identifier[treq] = identifier[T] [ identifier[sim] . identifier[useCamBool] ][ literal[int] ])
identifier[C] . identifier[tKeo] = identifier[dasc] . identifier[time]
identifier[updateframe] ( literal[int] , identifier[dasc] . identifier[values] , identifier[dasc] . identifier[wavelength] , identifier[C] ,
identifier[axs] [ identifier[i] ], identifier[fg] )
keyword[try] :
identifier[overlayrowcol] ( identifier[axs] [ identifier[i] ], identifier[C] . identifier[hlrows] , identifier[C] . identifier[hlcols] )
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[else] :
identifier[logging] . identifier[error] ( literal[string] )
keyword[if] identifier[i] == literal[int] :
identifier[axs] [ literal[int] ]. identifier[set_ylabel] ( identifier[datetime] . identifier[strftime] (
identifier[T] [ literal[int] ], literal[string] )). identifier[set_color] ( literal[string] )
identifier[writeplots] ( identifier[fg] , literal[string] , identifier[T] [ literal[int] ], identifier[odir] = identifier[odir] ,
identifier[dpi] = identifier[sim] . identifier[dpi] , identifier[facecolor] = literal[string] , identifier[doclose] = identifier[doclose] ) | def plotRealImg(sim, cam, rawdata, t: int, odir: Path=None, fg=None):
"""
sim: histfeas/simclass.py
cam: camclass.py
rawdata: nframe x ny x nx ndarray
t: integer index to read
odir: output directory (where to write results)
plots both cameras together,
and magnetic zenith 1-D cut line
and 1 degree radar beam red circle centered on magnetic zenith
"""
ncols = len(cam)
# print('using {} cameras'.format(ncols))
T = nans(ncols, dtype=datetime)
# if asi is not None:
# ncols=3
# if isinstance(asi,(tuple,list)):
# pass
# elif isinstance(asi,(str,Path)):
# asi = Path(asi).expanduser()
# if asi.is_dir():
# asi=list(asi.glob('*.FITS'))
if fg is None:
doclose = True
(fg, axs) = subplots(nrows=1, ncols=ncols, figsize=(15, 12), dpi=DPI, facecolor='black')
axs = atleast_1d(axs) # in case only 1 # depends on [control=['if'], data=['fg']]
else:
# fg.set_size_inches(15,5) #clips off
# maintain original figure handle for anim.writer
doclose = False
fg.clf()
axs = [fg.add_subplot(1, ncols, i + 1) for i in range(ncols)]
for (i, C) in enumerate(cam):
if C.usecam: # HiST cameras
# print('frame {}'.format(t))
# hold times for all cameras at this time step
T[i] = updateframe(t, rawdata[i], None, cam[i], axs[i], fg) # depends on [control=['if'], data=[]]
elif C.name == 'asi': # ASI
dasc = dio.load(C.fn, treq=T[sim.useCamBool][0])
C.tKeo = dasc.time
updateframe(0, dasc.values, dasc.wavelength, C, axs[i], fg) # FIXME may need API update
try:
overlayrowcol(axs[i], C.hlrows, C.hlcols) # depends on [control=['try'], data=[]]
except AttributeError:
pass # az/el were not registered # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
logging.error(f'unknown camera {C.name} index {i}')
if i == 0:
axs[0].set_ylabel(datetime.strftime(T[0], '%x')).set_color('limegreen') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# NOTE: commented out due to Matplotlib 1.x bugs
# fg.suptitle(datetime.strftime(T[0],'%x')) #makes giant margins that tight_layout doesn't help, bug
# fg.text(0.5,0.15,datetime.strftime(T[0],'%x'))#, va='top',ha='center') #bug too
# fg.tight_layout()
# fg.subplots_adjust(top=0.95)
# TODO: T[0] is fastest cam now, but needs generalization
writeplots(fg, 'rawFrame', T[0], odir=odir, dpi=sim.dpi, facecolor='k', doclose=doclose) |
def rollback(using=None, sid=None):
    """
    Roll back the current transaction, also inside Django atomic blocks.

    When a savepoint id is supplied (required for Django < 1.8), roll back to
    that savepoint. Otherwise attempt a plain rollback and, if that is not
    permitted inside the current atomic block, mark the connection for
    rollback instead (newer Django versions).
    """
    transaction = django.db.transaction
    if sid:
        transaction.savepoint_rollback(sid)
        return
    try:
        transaction.rollback(using)
    except transaction.TransactionManagementError:
        transaction.set_rollback(True, using)
constant[
Possibility of calling transaction.rollback() in new Django versions (in atomic block).
Important: transaction savepoint (sid) is required for Django < 1.8
]
if name[sid] begin[:]
call[name[django].db.transaction.savepoint_rollback, parameter[name[sid]]] | keyword[def] identifier[rollback] ( identifier[using] = keyword[None] , identifier[sid] = keyword[None] ):
literal[string]
keyword[if] identifier[sid] :
identifier[django] . identifier[db] . identifier[transaction] . identifier[savepoint_rollback] ( identifier[sid] )
keyword[else] :
keyword[try] :
identifier[django] . identifier[db] . identifier[transaction] . identifier[rollback] ( identifier[using] )
keyword[except] identifier[django] . identifier[db] . identifier[transaction] . identifier[TransactionManagementError] :
identifier[django] . identifier[db] . identifier[transaction] . identifier[set_rollback] ( keyword[True] , identifier[using] ) | def rollback(using=None, sid=None):
"""
Possibility of calling transaction.rollback() in new Django versions (in atomic block).
Important: transaction savepoint (sid) is required for Django < 1.8
"""
if sid:
django.db.transaction.savepoint_rollback(sid) # depends on [control=['if'], data=[]]
else:
try:
django.db.transaction.rollback(using) # depends on [control=['try'], data=[]]
except django.db.transaction.TransactionManagementError:
django.db.transaction.set_rollback(True, using) # depends on [control=['except'], data=[]] |
def _validate_write(self, address):
    """Raise unless the address is allowed to be set in this context.

    The address is permitted when it is listed fully as one of the txn
    outputs, or when some declared output is a namespace prefix of it.

    Args:
        address (str): The address to be validated. The context manager
            validates the address correctness (70 hex characters).

    Returns:
        None

    Raises:
        AuthorizationException: if no declared output covers the address.
    """
    for namespace in self._write_list:
        if address.startswith(namespace):
            return
    raise AuthorizationException(address=address)
constant[Raises an exception if the address is not allowed to be set
in this context, based on txn outputs.
Notes:
Checks that the address is either listed fully as one of the
outputs, or some portion of the address is listed as a namespace
in the outputs of the txn.
Args:
address (str): The address to be validated. The context manager
validates the address correctness (70 hex characters).
Returns:
None
Raises:
AuthorizationException
]
if <ast.UnaryOp object at 0x7da18f09c8e0> begin[:]
<ast.Raise object at 0x7da18f09cc10> | keyword[def] identifier[_validate_write] ( identifier[self] , identifier[address] ):
literal[string]
keyword[if] keyword[not] identifier[any] ( identifier[address] . identifier[startswith] ( identifier[ns] ) keyword[for] identifier[ns] keyword[in] identifier[self] . identifier[_write_list] ):
keyword[raise] identifier[AuthorizationException] ( identifier[address] = identifier[address] ) | def _validate_write(self, address):
"""Raises an exception if the address is not allowed to be set
in this context, based on txn outputs.
Notes:
Checks that the address is either listed fully as one of the
outputs, or some portion of the address is listed as a namespace
in the outputs of the txn.
Args:
address (str): The address to be validated. The context manager
validates the address correctness (70 hex characters).
Returns:
None
Raises:
AuthorizationException
"""
if not any((address.startswith(ns) for ns in self._write_list)):
raise AuthorizationException(address=address) # depends on [control=['if'], data=[]] |
def DiffDoArrays(self, oldObj, newObj, isElementLinks):
    """Compare two DataObject arrays element by element; True when equal."""
    if len(oldObj) != len(newObj):
        __Log__.debug('DiffDoArrays: Array lengths do not match %d != %d'
                      % (len(oldObj), len(newObj)))
        return False
    for old, new in zip(oldObj, newObj):
        if isElementLinks:
            # Element links are compared by key only.
            if old.GetKey() == new.GetKey():
                continue
            __Log__.debug('DiffDoArrays: Keys do not match %s != %s'
                          % (old.GetKey(), new.GetKey()))
            return False
        # Full data objects are compared recursively.
        if not self.DiffDataObjects(old, new):
            __Log__.debug(
                'DiffDoArrays: one of the elements do not match')
            return False
    return True
return True | def function[DiffDoArrays, parameter[self, oldObj, newObj, isElementLinks]]:
constant[Diff two DataObject arrays]
if compare[call[name[len], parameter[name[oldObj]]] not_equal[!=] call[name[len], parameter[name[newObj]]]] begin[:]
call[name[__Log__].debug, parameter[binary_operation[constant[DiffDoArrays: Array lengths do not match %d != %d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da20e962e90>, <ast.Call object at 0x7da20e961c30>]]]]]
return[constant[False]]
for taget[tuple[[<ast.Name object at 0x7da20e960a90>, <ast.Name object at 0x7da20e961690>]]] in starred[call[name[zip], parameter[name[oldObj], name[newObj]]]] begin[:]
if name[isElementLinks] begin[:]
if compare[call[name[i].GetKey, parameter[]] not_equal[!=] call[name[j].GetKey, parameter[]]] begin[:]
call[name[__Log__].debug, parameter[binary_operation[constant[DiffDoArrays: Keys do not match %s != %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da20e961660>, <ast.Call object at 0x7da20e9638e0>]]]]]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[DiffDoArrays] ( identifier[self] , identifier[oldObj] , identifier[newObj] , identifier[isElementLinks] ):
literal[string]
keyword[if] identifier[len] ( identifier[oldObj] )!= identifier[len] ( identifier[newObj] ):
identifier[__Log__] . identifier[debug] ( literal[string]
%( identifier[len] ( identifier[oldObj] ), identifier[len] ( identifier[newObj] )))
keyword[return] keyword[False]
keyword[for] identifier[i] , identifier[j] keyword[in] identifier[zip] ( identifier[oldObj] , identifier[newObj] ):
keyword[if] identifier[isElementLinks] :
keyword[if] identifier[i] . identifier[GetKey] ()!= identifier[j] . identifier[GetKey] ():
identifier[__Log__] . identifier[debug] ( literal[string]
%( identifier[i] . identifier[GetKey] (), identifier[j] . identifier[GetKey] ()))
keyword[return] keyword[False]
keyword[else] :
keyword[if] keyword[not] identifier[self] . identifier[DiffDataObjects] ( identifier[i] , identifier[j] ):
identifier[__Log__] . identifier[debug] (
literal[string] )
keyword[return] keyword[False]
keyword[return] keyword[True] | def DiffDoArrays(self, oldObj, newObj, isElementLinks):
"""Diff two DataObject arrays"""
if len(oldObj) != len(newObj):
__Log__.debug('DiffDoArrays: Array lengths do not match %d != %d' % (len(oldObj), len(newObj)))
return False # depends on [control=['if'], data=[]]
for (i, j) in zip(oldObj, newObj):
if isElementLinks:
if i.GetKey() != j.GetKey():
__Log__.debug('DiffDoArrays: Keys do not match %s != %s' % (i.GetKey(), j.GetKey()))
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif not self.DiffDataObjects(i, j):
__Log__.debug('DiffDoArrays: one of the elements do not match')
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return True |
def gsea(data, gene_sets, cls, outdir='GSEA_', min_size=15, max_size=500, permutation_num=1000,
weighted_score_type=1,permutation_type='gene_set', method='log2_ratio_of_classes',
ascending=False, processes=1, figsize=(6.5,6), format='pdf',
graph_num=20, no_plot=False, seed=None, verbose=False):
""" Run Gene Set Enrichment Analysis.
:param data: Gene expression data table, Pandas DataFrame, gct file.
:param gene_sets: Enrichr Library name or .gmt gene sets file or dict of gene sets. Same input with GSEA.
:param cls: A list or a .cls file format required for GSEA.
:param str outdir: Results output directory.
:param int permutation_num: Number of permutations for significance computation. Default: 1000.
:param str permutation_type: Permutation type, "phenotype" for phenotypes, "gene_set" for genes.
:param int min_size: Minimum allowed number of genes from gene set also the data set. Default: 15.
:param int max_size: Maximum allowed number of genes from gene set also the data set. Default: 500.
:param float weighted_score_type: Refer to :func:`algorithm.enrichment_score`. Default:1.
:param method: The method used to calculate a correlation or ranking. Default: 'log2_ratio_of_classes'.
Others methods are:
1. 'signal_to_noise'
You must have at least three samples for each phenotype to use this metric.
The larger the signal-to-noise ratio, the larger the differences of the means (scaled by the standard deviations);
that is, the more distinct the gene expression is in each phenotype and the more the gene acts as a “class marker.”
2. 't_test'
Uses the difference of means scaled by the standard deviation and number of samples.
Note: You must have at least three samples for each phenotype to use this metric.
The larger the tTest ratio, the more distinct the gene expression is in each phenotype
and the more the gene acts as a “class marker.”
3. 'ratio_of_classes' (also referred to as fold change).
Uses the ratio of class means to calculate fold change for natural scale data.
4. 'diff_of_classes'
Uses the difference of class means to calculate fold change for nature scale data
5. 'log2_ratio_of_classes'
Uses the log2 ratio of class means to calculate fold change for natural scale data.
This is the recommended statistic for calculating fold change for log scale data.
:param bool ascending: Sorting order of rankings. Default: False.
:param int processes: Number of Processes you are going to use. Default: 1.
:param list figsize: Matplotlib figsize, accept a tuple or list, e.g. [width,height]. Default: [6.5,6].
:param str format: Matplotlib figure format. Default: 'pdf'.
:param int graph_num: Plot graphs for top sets of each phenotype.
:param bool no_plot: If equals to True, no figure will be drawn. Default: False.
:param seed: Random seed. expect an integer. Default:None.
:param bool verbose: Bool, increase output verbosity, print out progress of your job, Default: False.
:return: Return a GSEA obj. All results store to a dictionary, obj.results,
where contains::
| {es: enrichment score,
| nes: normalized enrichment score,
| p: P-value,
| fdr: FDR,
| size: gene set size,
| matched_size: genes matched to the data,
| genes: gene names from the data set
| ledge_genes: leading edge genes}
"""
gs = GSEA(data, gene_sets, cls, outdir, min_size, max_size, permutation_num,
weighted_score_type, permutation_type, method, ascending, processes,
figsize, format, graph_num, no_plot, seed, verbose)
gs.run()
return gs | def function[gsea, parameter[data, gene_sets, cls, outdir, min_size, max_size, permutation_num, weighted_score_type, permutation_type, method, ascending, processes, figsize, format, graph_num, no_plot, seed, verbose]]:
constant[ Run Gene Set Enrichment Analysis.
:param data: Gene expression data table, Pandas DataFrame, gct file.
:param gene_sets: Enrichr Library name or .gmt gene sets file or dict of gene sets. Same input with GSEA.
:param cls: A list or a .cls file format required for GSEA.
:param str outdir: Results output directory.
:param int permutation_num: Number of permutations for significance computation. Default: 1000.
:param str permutation_type: Permutation type, "phenotype" for phenotypes, "gene_set" for genes.
:param int min_size: Minimum allowed number of genes from gene set also the data set. Default: 15.
:param int max_size: Maximum allowed number of genes from gene set also the data set. Default: 500.
:param float weighted_score_type: Refer to :func:`algorithm.enrichment_score`. Default:1.
:param method: The method used to calculate a correlation or ranking. Default: 'log2_ratio_of_classes'.
Others methods are:
1. 'signal_to_noise'
You must have at least three samples for each phenotype to use this metric.
The larger the signal-to-noise ratio, the larger the differences of the means (scaled by the standard deviations);
that is, the more distinct the gene expression is in each phenotype and the more the gene acts as a “class marker.”
2. 't_test'
Uses the difference of means scaled by the standard deviation and number of samples.
Note: You must have at least three samples for each phenotype to use this metric.
The larger the tTest ratio, the more distinct the gene expression is in each phenotype
and the more the gene acts as a “class marker.”
3. 'ratio_of_classes' (also referred to as fold change).
Uses the ratio of class means to calculate fold change for natural scale data.
4. 'diff_of_classes'
Uses the difference of class means to calculate fold change for nature scale data
5. 'log2_ratio_of_classes'
Uses the log2 ratio of class means to calculate fold change for natural scale data.
This is the recommended statistic for calculating fold change for log scale data.
:param bool ascending: Sorting order of rankings. Default: False.
:param int processes: Number of Processes you are going to use. Default: 1.
:param list figsize: Matplotlib figsize, accept a tuple or list, e.g. [width,height]. Default: [6.5,6].
:param str format: Matplotlib figure format. Default: 'pdf'.
:param int graph_num: Plot graphs for top sets of each phenotype.
:param bool no_plot: If equals to True, no figure will be drawn. Default: False.
:param seed: Random seed. expect an integer. Default:None.
:param bool verbose: Bool, increase output verbosity, print out progress of your job, Default: False.
:return: Return a GSEA obj. All results store to a dictionary, obj.results,
where contains::
| {es: enrichment score,
| nes: normalized enrichment score,
| p: P-value,
| fdr: FDR,
| size: gene set size,
| matched_size: genes matched to the data,
| genes: gene names from the data set
| ledge_genes: leading edge genes}
]
variable[gs] assign[=] call[name[GSEA], parameter[name[data], name[gene_sets], name[cls], name[outdir], name[min_size], name[max_size], name[permutation_num], name[weighted_score_type], name[permutation_type], name[method], name[ascending], name[processes], name[figsize], name[format], name[graph_num], name[no_plot], name[seed], name[verbose]]]
call[name[gs].run, parameter[]]
return[name[gs]] | keyword[def] identifier[gsea] ( identifier[data] , identifier[gene_sets] , identifier[cls] , identifier[outdir] = literal[string] , identifier[min_size] = literal[int] , identifier[max_size] = literal[int] , identifier[permutation_num] = literal[int] ,
identifier[weighted_score_type] = literal[int] , identifier[permutation_type] = literal[string] , identifier[method] = literal[string] ,
identifier[ascending] = keyword[False] , identifier[processes] = literal[int] , identifier[figsize] =( literal[int] , literal[int] ), identifier[format] = literal[string] ,
identifier[graph_num] = literal[int] , identifier[no_plot] = keyword[False] , identifier[seed] = keyword[None] , identifier[verbose] = keyword[False] ):
literal[string]
identifier[gs] = identifier[GSEA] ( identifier[data] , identifier[gene_sets] , identifier[cls] , identifier[outdir] , identifier[min_size] , identifier[max_size] , identifier[permutation_num] ,
identifier[weighted_score_type] , identifier[permutation_type] , identifier[method] , identifier[ascending] , identifier[processes] ,
identifier[figsize] , identifier[format] , identifier[graph_num] , identifier[no_plot] , identifier[seed] , identifier[verbose] )
identifier[gs] . identifier[run] ()
keyword[return] identifier[gs] | def gsea(data, gene_sets, cls, outdir='GSEA_', min_size=15, max_size=500, permutation_num=1000, weighted_score_type=1, permutation_type='gene_set', method='log2_ratio_of_classes', ascending=False, processes=1, figsize=(6.5, 6), format='pdf', graph_num=20, no_plot=False, seed=None, verbose=False):
""" Run Gene Set Enrichment Analysis.
:param data: Gene expression data table, Pandas DataFrame, gct file.
:param gene_sets: Enrichr Library name or .gmt gene sets file or dict of gene sets. Same input with GSEA.
:param cls: A list or a .cls file format required for GSEA.
:param str outdir: Results output directory.
:param int permutation_num: Number of permutations for significance computation. Default: 1000.
:param str permutation_type: Permutation type, "phenotype" for phenotypes, "gene_set" for genes.
:param int min_size: Minimum allowed number of genes from gene set also the data set. Default: 15.
:param int max_size: Maximum allowed number of genes from gene set also the data set. Default: 500.
:param float weighted_score_type: Refer to :func:`algorithm.enrichment_score`. Default:1.
:param method: The method used to calculate a correlation or ranking. Default: 'log2_ratio_of_classes'.
Others methods are:
1. 'signal_to_noise'
You must have at least three samples for each phenotype to use this metric.
The larger the signal-to-noise ratio, the larger the differences of the means (scaled by the standard deviations);
that is, the more distinct the gene expression is in each phenotype and the more the gene acts as a “class marker.”
2. 't_test'
Uses the difference of means scaled by the standard deviation and number of samples.
Note: You must have at least three samples for each phenotype to use this metric.
The larger the tTest ratio, the more distinct the gene expression is in each phenotype
and the more the gene acts as a “class marker.”
3. 'ratio_of_classes' (also referred to as fold change).
Uses the ratio of class means to calculate fold change for natural scale data.
4. 'diff_of_classes'
Uses the difference of class means to calculate fold change for nature scale data
5. 'log2_ratio_of_classes'
Uses the log2 ratio of class means to calculate fold change for natural scale data.
This is the recommended statistic for calculating fold change for log scale data.
:param bool ascending: Sorting order of rankings. Default: False.
:param int processes: Number of Processes you are going to use. Default: 1.
:param list figsize: Matplotlib figsize, accept a tuple or list, e.g. [width,height]. Default: [6.5,6].
:param str format: Matplotlib figure format. Default: 'pdf'.
:param int graph_num: Plot graphs for top sets of each phenotype.
:param bool no_plot: If equals to True, no figure will be drawn. Default: False.
:param seed: Random seed. expect an integer. Default:None.
:param bool verbose: Bool, increase output verbosity, print out progress of your job, Default: False.
:return: Return a GSEA obj. All results store to a dictionary, obj.results,
where contains::
| {es: enrichment score,
| nes: normalized enrichment score,
| p: P-value,
| fdr: FDR,
| size: gene set size,
| matched_size: genes matched to the data,
| genes: gene names from the data set
| ledge_genes: leading edge genes}
"""
gs = GSEA(data, gene_sets, cls, outdir, min_size, max_size, permutation_num, weighted_score_type, permutation_type, method, ascending, processes, figsize, format, graph_num, no_plot, seed, verbose)
gs.run()
return gs |
def reload_handler(self, c, e):
"""This handles reloads."""
cmd = self.is_reload(e)
cmdchar = self.config['core']['cmdchar']
if cmd is not None:
# If we're in a minimal reload state, only the owner can do stuff, as we can't rely on the db working.
if self.reload_event.set():
admins = [self.config['auth']['owner']]
else:
with self.handler.db.session_scope() as session:
admins = [x.nick for x in session.query(orm.Permissions).all()]
if e.source.nick not in admins:
c.privmsg(self.get_target(e), "Nope, not gonna do it.")
return
importlib.reload(reloader)
self.reload_event.set()
cmdargs = cmd[len('%sreload' % cmdchar) + 1:]
try:
if reloader.do_reload(self, e, cmdargs):
if self.config.getboolean('feature', 'server'):
self.server = server.init_server(self)
self.reload_event.clear()
logging.info("Successfully reloaded")
except Exception as ex:
backtrace.handle_traceback(ex, c, self.get_target(e), self.config) | def function[reload_handler, parameter[self, c, e]]:
constant[This handles reloads.]
variable[cmd] assign[=] call[name[self].is_reload, parameter[name[e]]]
variable[cmdchar] assign[=] call[call[name[self].config][constant[core]]][constant[cmdchar]]
if compare[name[cmd] is_not constant[None]] begin[:]
if call[name[self].reload_event.set, parameter[]] begin[:]
variable[admins] assign[=] list[[<ast.Subscript object at 0x7da1b1e208b0>]]
if compare[name[e].source.nick <ast.NotIn object at 0x7da2590d7190> name[admins]] begin[:]
call[name[c].privmsg, parameter[call[name[self].get_target, parameter[name[e]]], constant[Nope, not gonna do it.]]]
return[None]
call[name[importlib].reload, parameter[name[reloader]]]
call[name[self].reload_event.set, parameter[]]
variable[cmdargs] assign[=] call[name[cmd]][<ast.Slice object at 0x7da1b1e43c70>]
<ast.Try object at 0x7da1b1e43af0> | keyword[def] identifier[reload_handler] ( identifier[self] , identifier[c] , identifier[e] ):
literal[string]
identifier[cmd] = identifier[self] . identifier[is_reload] ( identifier[e] )
identifier[cmdchar] = identifier[self] . identifier[config] [ literal[string] ][ literal[string] ]
keyword[if] identifier[cmd] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[reload_event] . identifier[set] ():
identifier[admins] =[ identifier[self] . identifier[config] [ literal[string] ][ literal[string] ]]
keyword[else] :
keyword[with] identifier[self] . identifier[handler] . identifier[db] . identifier[session_scope] () keyword[as] identifier[session] :
identifier[admins] =[ identifier[x] . identifier[nick] keyword[for] identifier[x] keyword[in] identifier[session] . identifier[query] ( identifier[orm] . identifier[Permissions] ). identifier[all] ()]
keyword[if] identifier[e] . identifier[source] . identifier[nick] keyword[not] keyword[in] identifier[admins] :
identifier[c] . identifier[privmsg] ( identifier[self] . identifier[get_target] ( identifier[e] ), literal[string] )
keyword[return]
identifier[importlib] . identifier[reload] ( identifier[reloader] )
identifier[self] . identifier[reload_event] . identifier[set] ()
identifier[cmdargs] = identifier[cmd] [ identifier[len] ( literal[string] % identifier[cmdchar] )+ literal[int] :]
keyword[try] :
keyword[if] identifier[reloader] . identifier[do_reload] ( identifier[self] , identifier[e] , identifier[cmdargs] ):
keyword[if] identifier[self] . identifier[config] . identifier[getboolean] ( literal[string] , literal[string] ):
identifier[self] . identifier[server] = identifier[server] . identifier[init_server] ( identifier[self] )
identifier[self] . identifier[reload_event] . identifier[clear] ()
identifier[logging] . identifier[info] ( literal[string] )
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
identifier[backtrace] . identifier[handle_traceback] ( identifier[ex] , identifier[c] , identifier[self] . identifier[get_target] ( identifier[e] ), identifier[self] . identifier[config] ) | def reload_handler(self, c, e):
"""This handles reloads."""
cmd = self.is_reload(e)
cmdchar = self.config['core']['cmdchar']
if cmd is not None:
# If we're in a minimal reload state, only the owner can do stuff, as we can't rely on the db working.
if self.reload_event.set():
admins = [self.config['auth']['owner']] # depends on [control=['if'], data=[]]
else:
with self.handler.db.session_scope() as session:
admins = [x.nick for x in session.query(orm.Permissions).all()] # depends on [control=['with'], data=['session']]
if e.source.nick not in admins:
c.privmsg(self.get_target(e), 'Nope, not gonna do it.')
return # depends on [control=['if'], data=[]]
importlib.reload(reloader)
self.reload_event.set()
cmdargs = cmd[len('%sreload' % cmdchar) + 1:]
try:
if reloader.do_reload(self, e, cmdargs):
if self.config.getboolean('feature', 'server'):
self.server = server.init_server(self) # depends on [control=['if'], data=[]]
self.reload_event.clear() # depends on [control=['if'], data=[]]
logging.info('Successfully reloaded') # depends on [control=['try'], data=[]]
except Exception as ex:
backtrace.handle_traceback(ex, c, self.get_target(e), self.config) # depends on [control=['except'], data=['ex']] # depends on [control=['if'], data=['cmd']] |
def referenced_tables(self):
"""Return referenced tables from job statistics, if present.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.referencedTables
:rtype: list of dict
:returns: mappings describing the query plan, or an empty list
if the query has not yet completed.
"""
tables = []
datasets_by_project_name = {}
for table in self._job_statistics().get("referencedTables", ()):
t_project = table["projectId"]
ds_id = table["datasetId"]
t_dataset = datasets_by_project_name.get((t_project, ds_id))
if t_dataset is None:
t_dataset = DatasetReference(t_project, ds_id)
datasets_by_project_name[(t_project, ds_id)] = t_dataset
t_name = table["tableId"]
tables.append(t_dataset.table(t_name))
return tables | def function[referenced_tables, parameter[self]]:
constant[Return referenced tables from job statistics, if present.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.referencedTables
:rtype: list of dict
:returns: mappings describing the query plan, or an empty list
if the query has not yet completed.
]
variable[tables] assign[=] list[[]]
variable[datasets_by_project_name] assign[=] dictionary[[], []]
for taget[name[table]] in starred[call[call[name[self]._job_statistics, parameter[]].get, parameter[constant[referencedTables], tuple[[]]]]] begin[:]
variable[t_project] assign[=] call[name[table]][constant[projectId]]
variable[ds_id] assign[=] call[name[table]][constant[datasetId]]
variable[t_dataset] assign[=] call[name[datasets_by_project_name].get, parameter[tuple[[<ast.Name object at 0x7da18ede5630>, <ast.Name object at 0x7da18ede7b20>]]]]
if compare[name[t_dataset] is constant[None]] begin[:]
variable[t_dataset] assign[=] call[name[DatasetReference], parameter[name[t_project], name[ds_id]]]
call[name[datasets_by_project_name]][tuple[[<ast.Name object at 0x7da207f00bb0>, <ast.Name object at 0x7da207f015d0>]]] assign[=] name[t_dataset]
variable[t_name] assign[=] call[name[table]][constant[tableId]]
call[name[tables].append, parameter[call[name[t_dataset].table, parameter[name[t_name]]]]]
return[name[tables]] | keyword[def] identifier[referenced_tables] ( identifier[self] ):
literal[string]
identifier[tables] =[]
identifier[datasets_by_project_name] ={}
keyword[for] identifier[table] keyword[in] identifier[self] . identifier[_job_statistics] (). identifier[get] ( literal[string] ,()):
identifier[t_project] = identifier[table] [ literal[string] ]
identifier[ds_id] = identifier[table] [ literal[string] ]
identifier[t_dataset] = identifier[datasets_by_project_name] . identifier[get] (( identifier[t_project] , identifier[ds_id] ))
keyword[if] identifier[t_dataset] keyword[is] keyword[None] :
identifier[t_dataset] = identifier[DatasetReference] ( identifier[t_project] , identifier[ds_id] )
identifier[datasets_by_project_name] [( identifier[t_project] , identifier[ds_id] )]= identifier[t_dataset]
identifier[t_name] = identifier[table] [ literal[string] ]
identifier[tables] . identifier[append] ( identifier[t_dataset] . identifier[table] ( identifier[t_name] ))
keyword[return] identifier[tables] | def referenced_tables(self):
"""Return referenced tables from job statistics, if present.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.referencedTables
:rtype: list of dict
:returns: mappings describing the query plan, or an empty list
if the query has not yet completed.
"""
tables = []
datasets_by_project_name = {}
for table in self._job_statistics().get('referencedTables', ()):
t_project = table['projectId']
ds_id = table['datasetId']
t_dataset = datasets_by_project_name.get((t_project, ds_id))
if t_dataset is None:
t_dataset = DatasetReference(t_project, ds_id)
datasets_by_project_name[t_project, ds_id] = t_dataset # depends on [control=['if'], data=['t_dataset']]
t_name = table['tableId']
tables.append(t_dataset.table(t_name)) # depends on [control=['for'], data=['table']]
return tables |
def parse(self):
"""
Parses format string looking for substitutions
This method is responsible for returning a list of fields (as strings)
to include in all log messages.
"""
standard_formatters = re.compile(r'\((.+?)\)', re.IGNORECASE)
return standard_formatters.findall(self._fmt) | def function[parse, parameter[self]]:
constant[
Parses format string looking for substitutions
This method is responsible for returning a list of fields (as strings)
to include in all log messages.
]
variable[standard_formatters] assign[=] call[name[re].compile, parameter[constant[\((.+?)\)], name[re].IGNORECASE]]
return[call[name[standard_formatters].findall, parameter[name[self]._fmt]]] | keyword[def] identifier[parse] ( identifier[self] ):
literal[string]
identifier[standard_formatters] = identifier[re] . identifier[compile] ( literal[string] , identifier[re] . identifier[IGNORECASE] )
keyword[return] identifier[standard_formatters] . identifier[findall] ( identifier[self] . identifier[_fmt] ) | def parse(self):
"""
Parses format string looking for substitutions
This method is responsible for returning a list of fields (as strings)
to include in all log messages.
"""
standard_formatters = re.compile('\\((.+?)\\)', re.IGNORECASE)
return standard_formatters.findall(self._fmt) |
def get_user_groups(self, dn, group_search_dn=None, _connection=None):
"""
Gets a list of groups a user at dn is a member of
Args:
dn (str): The dn of the user to find memberships for.
_connection (ldap3.Connection): A connection object to use when
searching. If not given, a temporary connection will be
created, and destroyed after use.
group_search_dn (str): The search dn for groups. Defaults to
``'{LDAP_GROUP_DN},{LDAP_BASE_DN}'``.
Returns:
list: A list of LDAP groups the user is a member of.
"""
connection = _connection
if not connection:
connection = self._make_connection(
bind_user=self.config.get('LDAP_BIND_USER_DN'),
bind_password=self.config.get('LDAP_BIND_USER_PASSWORD')
)
connection.bind()
safe_dn = ldap3.utils.conv.escape_filter_chars(dn)
search_filter = '(&{group_filter}({members_attr}={user_dn}))'.format(
group_filter=self.config.get('LDAP_GROUP_OBJECT_FILTER'),
members_attr=self.config.get('LDAP_GROUP_MEMBERS_ATTR'),
user_dn=safe_dn
)
log.debug(
"Searching for groups for specific user with filter '{0}' "
", base '{1}' and scope '{2}'".format(
search_filter,
group_search_dn or self.full_group_search_dn,
self.config.get('LDAP_GROUP_SEARCH_SCOPE')
))
connection.search(
search_base=group_search_dn or self.full_group_search_dn,
search_filter=search_filter,
attributes=self.config.get('LDAP_GET_GROUP_ATTRIBUTES'),
search_scope=getattr(
ldap3, self.config.get('LDAP_GROUP_SEARCH_SCOPE'))
)
results = []
for item in connection.response:
if 'type' not in item or item.get('type') != 'searchResEntry':
# Issue #13 - Don't return non-entry results.
continue
group_data = item['attributes']
group_data['dn'] = item['dn']
results.append(group_data)
if not _connection:
# We made a connection, so we need to kill it.
self.destroy_connection(connection)
return results | def function[get_user_groups, parameter[self, dn, group_search_dn, _connection]]:
constant[
Gets a list of groups a user at dn is a member of
Args:
dn (str): The dn of the user to find memberships for.
_connection (ldap3.Connection): A connection object to use when
searching. If not given, a temporary connection will be
created, and destroyed after use.
group_search_dn (str): The search dn for groups. Defaults to
``'{LDAP_GROUP_DN},{LDAP_BASE_DN}'``.
Returns:
list: A list of LDAP groups the user is a member of.
]
variable[connection] assign[=] name[_connection]
if <ast.UnaryOp object at 0x7da1b02b9390> begin[:]
variable[connection] assign[=] call[name[self]._make_connection, parameter[]]
call[name[connection].bind, parameter[]]
variable[safe_dn] assign[=] call[name[ldap3].utils.conv.escape_filter_chars, parameter[name[dn]]]
variable[search_filter] assign[=] call[constant[(&{group_filter}({members_attr}={user_dn}))].format, parameter[]]
call[name[log].debug, parameter[call[constant[Searching for groups for specific user with filter '{0}' , base '{1}' and scope '{2}'].format, parameter[name[search_filter], <ast.BoolOp object at 0x7da1b02b8670>, call[name[self].config.get, parameter[constant[LDAP_GROUP_SEARCH_SCOPE]]]]]]]
call[name[connection].search, parameter[]]
variable[results] assign[=] list[[]]
for taget[name[item]] in starred[name[connection].response] begin[:]
if <ast.BoolOp object at 0x7da1b02bb190> begin[:]
continue
variable[group_data] assign[=] call[name[item]][constant[attributes]]
call[name[group_data]][constant[dn]] assign[=] call[name[item]][constant[dn]]
call[name[results].append, parameter[name[group_data]]]
if <ast.UnaryOp object at 0x7da1b02ba350> begin[:]
call[name[self].destroy_connection, parameter[name[connection]]]
return[name[results]] | keyword[def] identifier[get_user_groups] ( identifier[self] , identifier[dn] , identifier[group_search_dn] = keyword[None] , identifier[_connection] = keyword[None] ):
literal[string]
identifier[connection] = identifier[_connection]
keyword[if] keyword[not] identifier[connection] :
identifier[connection] = identifier[self] . identifier[_make_connection] (
identifier[bind_user] = identifier[self] . identifier[config] . identifier[get] ( literal[string] ),
identifier[bind_password] = identifier[self] . identifier[config] . identifier[get] ( literal[string] )
)
identifier[connection] . identifier[bind] ()
identifier[safe_dn] = identifier[ldap3] . identifier[utils] . identifier[conv] . identifier[escape_filter_chars] ( identifier[dn] )
identifier[search_filter] = literal[string] . identifier[format] (
identifier[group_filter] = identifier[self] . identifier[config] . identifier[get] ( literal[string] ),
identifier[members_attr] = identifier[self] . identifier[config] . identifier[get] ( literal[string] ),
identifier[user_dn] = identifier[safe_dn]
)
identifier[log] . identifier[debug] (
literal[string]
literal[string] . identifier[format] (
identifier[search_filter] ,
identifier[group_search_dn] keyword[or] identifier[self] . identifier[full_group_search_dn] ,
identifier[self] . identifier[config] . identifier[get] ( literal[string] )
))
identifier[connection] . identifier[search] (
identifier[search_base] = identifier[group_search_dn] keyword[or] identifier[self] . identifier[full_group_search_dn] ,
identifier[search_filter] = identifier[search_filter] ,
identifier[attributes] = identifier[self] . identifier[config] . identifier[get] ( literal[string] ),
identifier[search_scope] = identifier[getattr] (
identifier[ldap3] , identifier[self] . identifier[config] . identifier[get] ( literal[string] ))
)
identifier[results] =[]
keyword[for] identifier[item] keyword[in] identifier[connection] . identifier[response] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[item] keyword[or] identifier[item] . identifier[get] ( literal[string] )!= literal[string] :
keyword[continue]
identifier[group_data] = identifier[item] [ literal[string] ]
identifier[group_data] [ literal[string] ]= identifier[item] [ literal[string] ]
identifier[results] . identifier[append] ( identifier[group_data] )
keyword[if] keyword[not] identifier[_connection] :
identifier[self] . identifier[destroy_connection] ( identifier[connection] )
keyword[return] identifier[results] | def get_user_groups(self, dn, group_search_dn=None, _connection=None):
"""
Gets a list of groups a user at dn is a member of
Args:
dn (str): The dn of the user to find memberships for.
_connection (ldap3.Connection): A connection object to use when
searching. If not given, a temporary connection will be
created, and destroyed after use.
group_search_dn (str): The search dn for groups. Defaults to
``'{LDAP_GROUP_DN},{LDAP_BASE_DN}'``.
Returns:
list: A list of LDAP groups the user is a member of.
"""
connection = _connection
if not connection:
connection = self._make_connection(bind_user=self.config.get('LDAP_BIND_USER_DN'), bind_password=self.config.get('LDAP_BIND_USER_PASSWORD'))
connection.bind() # depends on [control=['if'], data=[]]
safe_dn = ldap3.utils.conv.escape_filter_chars(dn)
search_filter = '(&{group_filter}({members_attr}={user_dn}))'.format(group_filter=self.config.get('LDAP_GROUP_OBJECT_FILTER'), members_attr=self.config.get('LDAP_GROUP_MEMBERS_ATTR'), user_dn=safe_dn)
log.debug("Searching for groups for specific user with filter '{0}' , base '{1}' and scope '{2}'".format(search_filter, group_search_dn or self.full_group_search_dn, self.config.get('LDAP_GROUP_SEARCH_SCOPE')))
connection.search(search_base=group_search_dn or self.full_group_search_dn, search_filter=search_filter, attributes=self.config.get('LDAP_GET_GROUP_ATTRIBUTES'), search_scope=getattr(ldap3, self.config.get('LDAP_GROUP_SEARCH_SCOPE')))
results = []
for item in connection.response:
if 'type' not in item or item.get('type') != 'searchResEntry':
# Issue #13 - Don't return non-entry results.
continue # depends on [control=['if'], data=[]]
group_data = item['attributes']
group_data['dn'] = item['dn']
results.append(group_data) # depends on [control=['for'], data=['item']]
if not _connection:
# We made a connection, so we need to kill it.
self.destroy_connection(connection) # depends on [control=['if'], data=[]]
return results |
def setGroups(self, *args, **kwargs):
"""Adds the groups to which this client belongs.
The 'groupKeys' field of the client holds a list of the
encodedKeys of the groups to which this client belongs.
Returns the number of requests done to Mambu.
"""
requests = 0
groups = []
try:
for gk in self['groupKeys']:
try:
g = self.mambugroupclass(entid=gk, *args, **kwargs)
except AttributeError as ae:
from .mambugroup import MambuGroup
self.mambugroupclass = MambuGroup
g = self.mambugroupclass(entid=gk, *args, **kwargs)
requests += 1
groups.append(g)
except KeyError:
pass
self['groups'] = groups
return requests | def function[setGroups, parameter[self]]:
constant[Adds the groups to which this client belongs.
The 'groupKeys' field of the client holds a list of the
encodedKeys of the groups to which this client belongs.
Returns the number of requests done to Mambu.
]
variable[requests] assign[=] constant[0]
variable[groups] assign[=] list[[]]
<ast.Try object at 0x7da204566f80>
call[name[self]][constant[groups]] assign[=] name[groups]
return[name[requests]] | keyword[def] identifier[setGroups] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[requests] = literal[int]
identifier[groups] =[]
keyword[try] :
keyword[for] identifier[gk] keyword[in] identifier[self] [ literal[string] ]:
keyword[try] :
identifier[g] = identifier[self] . identifier[mambugroupclass] ( identifier[entid] = identifier[gk] ,* identifier[args] ,** identifier[kwargs] )
keyword[except] identifier[AttributeError] keyword[as] identifier[ae] :
keyword[from] . identifier[mambugroup] keyword[import] identifier[MambuGroup]
identifier[self] . identifier[mambugroupclass] = identifier[MambuGroup]
identifier[g] = identifier[self] . identifier[mambugroupclass] ( identifier[entid] = identifier[gk] ,* identifier[args] ,** identifier[kwargs] )
identifier[requests] += literal[int]
identifier[groups] . identifier[append] ( identifier[g] )
keyword[except] identifier[KeyError] :
keyword[pass]
identifier[self] [ literal[string] ]= identifier[groups]
keyword[return] identifier[requests] | def setGroups(self, *args, **kwargs):
"""Adds the groups to which this client belongs.
The 'groupKeys' field of the client holds a list of the
encodedKeys of the groups to which this client belongs.
Returns the number of requests done to Mambu.
"""
requests = 0
groups = []
try:
for gk in self['groupKeys']:
try:
g = self.mambugroupclass(*args, entid=gk, **kwargs) # depends on [control=['try'], data=[]]
except AttributeError as ae:
from .mambugroup import MambuGroup
self.mambugroupclass = MambuGroup
g = self.mambugroupclass(*args, entid=gk, **kwargs) # depends on [control=['except'], data=[]]
requests += 1
groups.append(g) # depends on [control=['for'], data=['gk']] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
self['groups'] = groups
return requests |
def cast(self, val: str):
    """Convert *val* to the type requested by ``cast_as``.

    A specialised ``cast_as_<typename>`` method takes precedence when
    one is defined on the instance; otherwise ``cast_as`` itself is
    invoked directly on the string.
    """
    specialised = 'cast_as_' + self.cast_as.__name__.lower()
    try:
        return getattr(self, specialised)(val)
    except AttributeError:
        # No dedicated converter method (or it raised AttributeError
        # itself): fall back to calling the target type directly.
        return self.cast_as(val)
constant[converts string to type requested by `cast_as`]
<ast.Try object at 0x7da1b0553820> | keyword[def] identifier[cast] ( identifier[self] , identifier[val] : identifier[str] ):
literal[string]
keyword[try] :
keyword[return] identifier[getattr] ( identifier[self] , literal[string] . identifier[format] (
identifier[self] . identifier[cast_as] . identifier[__name__] . identifier[lower] ()))( identifier[val] )
keyword[except] identifier[AttributeError] :
keyword[return] identifier[self] . identifier[cast_as] ( identifier[val] ) | def cast(self, val: str):
"""converts string to type requested by `cast_as`"""
try:
return getattr(self, 'cast_as_{}'.format(self.cast_as.__name__.lower()))(val) # depends on [control=['try'], data=[]]
except AttributeError:
return self.cast_as(val) # depends on [control=['except'], data=[]] |
def get_value_by_row_col(self, row, col):
    """Get raster value by (row, col).

    Fractional coordinates are rounded to the nearest integer cell
    index *before* the bounds check, so an input such as
    ``nRows - 0.4`` is rejected with ValueError instead of rounding
    past the last row and crashing with IndexError.

    Args:
        row: row number (may be fractional; rounded to the nearest cell).
        col: col number (may be fractional; rounded to the nearest cell).

    Returns:
        Raster value at the cell, or None if the cell holds noDataValue.

    Raises:
        ValueError: if the rounded row or col falls outside the raster.
    """
    r = int(round(row))
    c = int(round(col))
    if r < 0 or r >= self.nRows or c < 0 or c >= self.nCols:
        raise ValueError("The row or col must be >=0 and less than "
                         "nRows (%d) or nCols (%d)!" % (self.nRows, self.nCols))
    value = self.data[r][c]
    if value == self.noDataValue:
        return None
    return value
constant[Get raster value by (row, col).
Args:
row: row number.
col: col number.
Returns:
raster value, None if the input are invalid.
]
if <ast.BoolOp object at 0x7da1b244a0e0> begin[:]
<ast.Raise object at 0x7da1b23722f0> | keyword[def] identifier[get_value_by_row_col] ( identifier[self] , identifier[row] , identifier[col] ):
literal[string]
keyword[if] identifier[row] < literal[int] keyword[or] identifier[row] >= identifier[self] . identifier[nRows] keyword[or] identifier[col] < literal[int] keyword[or] identifier[col] >= identifier[self] . identifier[nCols] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] %( identifier[self] . identifier[nRows] , identifier[self] . identifier[nCols] ))
keyword[else] :
identifier[value] = identifier[self] . identifier[data] [ identifier[int] ( identifier[round] ( identifier[row] ))][ identifier[int] ( identifier[round] ( identifier[col] ))]
keyword[if] identifier[value] == identifier[self] . identifier[noDataValue] :
keyword[return] keyword[None]
keyword[else] :
keyword[return] identifier[value] | def get_value_by_row_col(self, row, col):
"""Get raster value by (row, col).
Args:
row: row number.
col: col number.
Returns:
raster value, None if the input are invalid.
"""
if row < 0 or row >= self.nRows or col < 0 or (col >= self.nCols):
raise ValueError('The row or col must be >=0 and less than nRows (%d) or nCols (%d)!' % (self.nRows, self.nCols)) # depends on [control=['if'], data=[]]
else:
value = self.data[int(round(row))][int(round(col))]
if value == self.noDataValue:
return None # depends on [control=['if'], data=[]]
else:
return value |
def newline(self, node=None, extra=0):
    """Add one or more newlines before the next write."""
    requested = 1 + extra
    # Keep whichever request asks for more blank lines.
    if requested > self._new_lines:
        self._new_lines = requested
    # Record the source line for debug output, but only when it moved.
    if node is not None and node.lineno != self._last_line:
        self._write_debug_info = node.lineno
        self._last_line = node.lineno
name[self]._last_line assign[=] name[node].lineno | keyword[def] identifier[newline] ( identifier[self] , identifier[node] = keyword[None] , identifier[extra] = literal[int] ):
literal[string]
identifier[self] . identifier[_new_lines] = identifier[max] ( identifier[self] . identifier[_new_lines] , literal[int] + identifier[extra] )
keyword[if] identifier[node] keyword[is] keyword[not] keyword[None] keyword[and] identifier[node] . identifier[lineno] != identifier[self] . identifier[_last_line] :
identifier[self] . identifier[_write_debug_info] = identifier[node] . identifier[lineno]
identifier[self] . identifier[_last_line] = identifier[node] . identifier[lineno] | def newline(self, node=None, extra=0):
"""Add one or more newlines before the next write."""
self._new_lines = max(self._new_lines, 1 + extra)
if node is not None and node.lineno != self._last_line:
self._write_debug_info = node.lineno
self._last_line = node.lineno # depends on [control=['if'], data=[]] |
def removecolkeyword(self, columnname, keyword):
    """Remove a column keyword.
    It is similar to :func:`removekeyword`.
    """
    # A string names the keyword directly; anything else is treated as
    # a keyword index and passed through with an empty name.
    if not isinstance(keyword, str):
        self._removekeyword(columnname, '', keyword)
    else:
        self._removekeyword(columnname, keyword, -1)
constant[Remove a column keyword.
It is similar to :func:`removekeyword`.
]
if call[name[isinstance], parameter[name[keyword], name[str]]] begin[:]
call[name[self]._removekeyword, parameter[name[columnname], name[keyword], <ast.UnaryOp object at 0x7da1b0dc33d0>]] | keyword[def] identifier[removecolkeyword] ( identifier[self] , identifier[columnname] , identifier[keyword] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[keyword] , identifier[str] ):
identifier[self] . identifier[_removekeyword] ( identifier[columnname] , identifier[keyword] ,- literal[int] )
keyword[else] :
identifier[self] . identifier[_removekeyword] ( identifier[columnname] , literal[string] , identifier[keyword] ) | def removecolkeyword(self, columnname, keyword):
"""Remove a column keyword.
It is similar to :func:`removekeyword`.
"""
if isinstance(keyword, str):
self._removekeyword(columnname, keyword, -1) # depends on [control=['if'], data=[]]
else:
self._removekeyword(columnname, '', keyword) |
def verify_relayable_signature(public_key, doc, signature):
    """
    Verify the signed XML elements to have confidence that the claimed
    author did actually generate this message.
    """
    key = RSA.importKey(public_key)
    verifier = PKCS1_v1_5.new(key)
    digest = _create_signature_hash(doc)
    return verifier.verify(digest, b64decode(signature))
constant[
Verify the signed XML elements to have confidence that the claimed
author did actually generate this message.
]
variable[sig_hash] assign[=] call[name[_create_signature_hash], parameter[name[doc]]]
variable[cipher] assign[=] call[name[PKCS1_v1_5].new, parameter[call[name[RSA].importKey, parameter[name[public_key]]]]]
return[call[name[cipher].verify, parameter[name[sig_hash], call[name[b64decode], parameter[name[signature]]]]]] | keyword[def] identifier[verify_relayable_signature] ( identifier[public_key] , identifier[doc] , identifier[signature] ):
literal[string]
identifier[sig_hash] = identifier[_create_signature_hash] ( identifier[doc] )
identifier[cipher] = identifier[PKCS1_v1_5] . identifier[new] ( identifier[RSA] . identifier[importKey] ( identifier[public_key] ))
keyword[return] identifier[cipher] . identifier[verify] ( identifier[sig_hash] , identifier[b64decode] ( identifier[signature] )) | def verify_relayable_signature(public_key, doc, signature):
"""
Verify the signed XML elements to have confidence that the claimed
author did actually generate this message.
"""
sig_hash = _create_signature_hash(doc)
cipher = PKCS1_v1_5.new(RSA.importKey(public_key))
return cipher.verify(sig_hash, b64decode(signature)) |
def worker_logstart(self, node, nodeid, location):
    """Emitted when a node calls the pytest_runtest_logstart hook.

    Replays the event on the local ``pytest_runtest_logstart`` hook so
    locally registered plugins/reporters observe the test start.
    """
    # The originating worker ``node`` is not forwarded; only the test
    # identity (nodeid, location) is relayed to the local hook.
    self.config.hook.pytest_runtest_logstart(nodeid=nodeid, location=location)
constant[Emitted when a node calls the pytest_runtest_logstart hook.]
call[name[self].config.hook.pytest_runtest_logstart, parameter[]] | keyword[def] identifier[worker_logstart] ( identifier[self] , identifier[node] , identifier[nodeid] , identifier[location] ):
literal[string]
identifier[self] . identifier[config] . identifier[hook] . identifier[pytest_runtest_logstart] ( identifier[nodeid] = identifier[nodeid] , identifier[location] = identifier[location] ) | def worker_logstart(self, node, nodeid, location):
"""Emitted when a node calls the pytest_runtest_logstart hook."""
self.config.hook.pytest_runtest_logstart(nodeid=nodeid, location=location) |
def unicodeInScripts(uv, scripts):
    """ Check UnicodeData's ScriptExtension property for unicode codepoint
    'uv' and return True if it intersects with the set of 'scripts' provided,
    False if it does not intersect.
    Return None for 'Common' script ('Zyyy').
    """
    extensions = unicodedata.script_extension(unichr(uv))
    # The Common script belongs to everything; callers must decide.
    if "Zyyy" in extensions:
        return None
    return len(extensions.intersection(scripts)) > 0
constant[ Check UnicodeData's ScriptExtension property for unicode codepoint
'uv' and return True if it intersects with the set of 'scripts' provided,
False if it does not intersect.
Return None for 'Common' script ('Zyyy').
]
variable[sx] assign[=] call[name[unicodedata].script_extension, parameter[call[name[unichr], parameter[name[uv]]]]]
if compare[constant[Zyyy] in name[sx]] begin[:]
return[constant[None]]
return[<ast.UnaryOp object at 0x7da20c993940>] | keyword[def] identifier[unicodeInScripts] ( identifier[uv] , identifier[scripts] ):
literal[string]
identifier[sx] = identifier[unicodedata] . identifier[script_extension] ( identifier[unichr] ( identifier[uv] ))
keyword[if] literal[string] keyword[in] identifier[sx] :
keyword[return] keyword[None]
keyword[return] keyword[not] identifier[sx] . identifier[isdisjoint] ( identifier[scripts] ) | def unicodeInScripts(uv, scripts):
""" Check UnicodeData's ScriptExtension property for unicode codepoint
'uv' and return True if it intersects with the set of 'scripts' provided,
False if it does not intersect.
Return None for 'Common' script ('Zyyy').
"""
sx = unicodedata.script_extension(unichr(uv))
if 'Zyyy' in sx:
return None # depends on [control=['if'], data=[]]
return not sx.isdisjoint(scripts) |
def _mq_callback(self, message):
    """Process a unit-of-work status message from a Synergy Worker.

    Deserializes the MQ transmission, loads the referenced unit of work
    and, when it belongs to the currently tracked managed job and is in
    a finished state, forwards it to the process' state machine so the
    Timetable record and the job itself get updated.

    The message is acknowledged in the ``finally`` block, so it is
    consumed exactly once even when processing fails.

    :param message: <MqTransmission> mq message
    """
    try:
        self.logger.info('UowStatusListener {')
        mq_request = MqTransmission.from_json(message.body)
        uow = self.uow_dao.get_one(mq_request.record_db_id)
        # Only managed units of work drive the Timetable; other types
        # are ignored here.
        if uow.unit_of_work_type != unit_of_work.TYPE_MANAGED:
            self.logger.info('Received transmission from non-managed UOW execution: {0}. Ignoring it.'
                             .format(uow.unit_of_work_type))
            return
        tree = self.timetable.get_tree(uow.process_name)
        node = tree.get_node(uow.process_name, uow.timeperiod)
        # The job record points at exactly one current unit of work; a
        # mismatch means this transmission refers to a superseded UOW.
        if uow.db_id != node.job_record.related_unit_of_work:
            self.logger.info('Received transmission is likely outdated. Ignoring it.')
            return
        if not uow.is_finished:
            # rely on Garbage Collector to re-trigger the failing unit_of_work
            self.logger.info('Received transmission from {0}@{1} in non-final state {2}. Ignoring it.'
                             .format(uow.process_name, uow.timeperiod, uow.state))
            return
        state_machine = self.scheduler.state_machine_for(node.process_name)
        self.logger.info('Commencing StateMachine.notify with UOW from {0}@{1} in {2}.'
                         .format(uow.process_name, uow.timeperiod, uow.state))
        state_machine.notify(uow)
    except KeyError:
        self.logger.error('Access error for {0}'.format(message.body), exc_info=True)
    except Exception:
        self.logger.error('Error during StateMachine.notify call {0}'.format(message.body), exc_info=True)
    finally:
        # Always acknowledge so the broker never redelivers this message.
        self.consumer.acknowledge(message.delivery_tag)
        self.logger.info('UowStatusListener }')
constant[ method processes messages from Synergy Worker and updates corresponding Timetable record,
as well as the job itself
:param message: <MqTransmission> mq message ]
<ast.Try object at 0x7da207f9b730> | keyword[def] identifier[_mq_callback] ( identifier[self] , identifier[message] ):
literal[string]
keyword[try] :
identifier[self] . identifier[logger] . identifier[info] ( literal[string] )
identifier[mq_request] = identifier[MqTransmission] . identifier[from_json] ( identifier[message] . identifier[body] )
identifier[uow] = identifier[self] . identifier[uow_dao] . identifier[get_one] ( identifier[mq_request] . identifier[record_db_id] )
keyword[if] identifier[uow] . identifier[unit_of_work_type] != identifier[unit_of_work] . identifier[TYPE_MANAGED] :
identifier[self] . identifier[logger] . identifier[info] ( literal[string]
. identifier[format] ( identifier[uow] . identifier[unit_of_work_type] ))
keyword[return]
identifier[tree] = identifier[self] . identifier[timetable] . identifier[get_tree] ( identifier[uow] . identifier[process_name] )
identifier[node] = identifier[tree] . identifier[get_node] ( identifier[uow] . identifier[process_name] , identifier[uow] . identifier[timeperiod] )
keyword[if] identifier[uow] . identifier[db_id] != identifier[node] . identifier[job_record] . identifier[related_unit_of_work] :
identifier[self] . identifier[logger] . identifier[info] ( literal[string] )
keyword[return]
keyword[if] keyword[not] identifier[uow] . identifier[is_finished] :
identifier[self] . identifier[logger] . identifier[info] ( literal[string]
. identifier[format] ( identifier[uow] . identifier[process_name] , identifier[uow] . identifier[timeperiod] , identifier[uow] . identifier[state] ))
keyword[return]
identifier[state_machine] = identifier[self] . identifier[scheduler] . identifier[state_machine_for] ( identifier[node] . identifier[process_name] )
identifier[self] . identifier[logger] . identifier[info] ( literal[string]
. identifier[format] ( identifier[uow] . identifier[process_name] , identifier[uow] . identifier[timeperiod] , identifier[uow] . identifier[state] ))
identifier[state_machine] . identifier[notify] ( identifier[uow] )
keyword[except] identifier[KeyError] :
identifier[self] . identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[message] . identifier[body] ), identifier[exc_info] = keyword[True] )
keyword[except] identifier[Exception] :
identifier[self] . identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[message] . identifier[body] ), identifier[exc_info] = keyword[True] )
keyword[finally] :
identifier[self] . identifier[consumer] . identifier[acknowledge] ( identifier[message] . identifier[delivery_tag] )
identifier[self] . identifier[logger] . identifier[info] ( literal[string] ) | def _mq_callback(self, message):
""" method processes messages from Synergy Worker and updates corresponding Timetable record,
as well as the job itself
:param message: <MqTransmission> mq message """
try:
self.logger.info('UowStatusListener {')
mq_request = MqTransmission.from_json(message.body)
uow = self.uow_dao.get_one(mq_request.record_db_id)
if uow.unit_of_work_type != unit_of_work.TYPE_MANAGED:
self.logger.info('Received transmission from non-managed UOW execution: {0}. Ignoring it.'.format(uow.unit_of_work_type))
return # depends on [control=['if'], data=[]]
tree = self.timetable.get_tree(uow.process_name)
node = tree.get_node(uow.process_name, uow.timeperiod)
if uow.db_id != node.job_record.related_unit_of_work:
self.logger.info('Received transmission is likely outdated. Ignoring it.')
return # depends on [control=['if'], data=[]]
if not uow.is_finished:
# rely on Garbage Collector to re-trigger the failing unit_of_work
self.logger.info('Received transmission from {0}@{1} in non-final state {2}. Ignoring it.'.format(uow.process_name, uow.timeperiod, uow.state))
return # depends on [control=['if'], data=[]]
state_machine = self.scheduler.state_machine_for(node.process_name)
self.logger.info('Commencing StateMachine.notify with UOW from {0}@{1} in {2}.'.format(uow.process_name, uow.timeperiod, uow.state))
state_machine.notify(uow) # depends on [control=['try'], data=[]]
except KeyError:
self.logger.error('Access error for {0}'.format(message.body), exc_info=True) # depends on [control=['except'], data=[]]
except Exception:
self.logger.error('Error during StateMachine.notify call {0}'.format(message.body), exc_info=True) # depends on [control=['except'], data=[]]
finally:
self.consumer.acknowledge(message.delivery_tag)
self.logger.info('UowStatusListener }') |
def keep_session_alive(self):
    """If the session expired, logs back in."""
    try:
        self.resources()
    except xmlrpclib.Fault as fault:
        # Fault code 5 signals an expired session; anything else is a
        # genuine error and is re-raised.
        if fault.faultCode != 5:
            raise
        self.login()
constant[If the session expired, logs back in.]
<ast.Try object at 0x7da18f720580> | keyword[def] identifier[keep_session_alive] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[self] . identifier[resources] ()
keyword[except] identifier[xmlrpclib] . identifier[Fault] keyword[as] identifier[fault] :
keyword[if] identifier[fault] . identifier[faultCode] == literal[int] :
identifier[self] . identifier[login] ()
keyword[else] :
keyword[raise] | def keep_session_alive(self):
"""If the session expired, logs back in."""
try:
self.resources() # depends on [control=['try'], data=[]]
except xmlrpclib.Fault as fault:
if fault.faultCode == 5:
self.login() # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=['fault']] |
def parse(self):
    """Parse a Supybot IRC stream.

    Yields one dict per meaningful log entry, carrying the date, type,
    nick and body of that entry.

    :returns: iterator of parsed lines

    :raises ParseError: when an invalid line is found parsing the given
        stream
    """
    # Messages matching any of these patterns carry no usable content.
    noise_filters = (self.SUPYBOT_EMPTY_COMMENT_REGEX,
                     self.SUPYBOT_EMPTY_COMMENT_ACTION_REGEX,
                     self.SUPYBOT_EMPTY_BOT_REGEX)
    for raw_line in self.stream:
        entry = raw_line.rstrip('\n')
        self.nline += 1
        if self.SUPYBOT_EMPTY_REGEX.match(entry):
            continue
        ts, msg = self._parse_supybot_timestamp(entry)
        if any(regex.match(msg) for regex in noise_filters):
            continue
        itype, nick, body = self._parse_supybot_msg(msg)
        yield self._build_item(ts, itype, nick, body)
constant[Parse a Supybot IRC stream.
Returns an iterator of dicts. Each dicts contains information
about the date, type, nick and body of a single log entry.
:returns: iterator of parsed lines
:raises ParseError: when an invalid line is found parsing the given
stream
]
for taget[name[line]] in starred[name[self].stream] begin[:]
variable[line] assign[=] call[name[line].rstrip, parameter[constant[
]]]
<ast.AugAssign object at 0x7da1b0382d40>
if call[name[self].SUPYBOT_EMPTY_REGEX.match, parameter[name[line]]] begin[:]
continue
<ast.Tuple object at 0x7da1b0382590> assign[=] call[name[self]._parse_supybot_timestamp, parameter[name[line]]]
if call[name[self].SUPYBOT_EMPTY_COMMENT_REGEX.match, parameter[name[msg]]] begin[:]
continue
<ast.Tuple object at 0x7da1b0381600> assign[=] call[name[self]._parse_supybot_msg, parameter[name[msg]]]
variable[item] assign[=] call[name[self]._build_item, parameter[name[ts], name[itype], name[nick], name[body]]]
<ast.Yield object at 0x7da1b0383b20> | keyword[def] identifier[parse] ( identifier[self] ):
literal[string]
keyword[for] identifier[line] keyword[in] identifier[self] . identifier[stream] :
identifier[line] = identifier[line] . identifier[rstrip] ( literal[string] )
identifier[self] . identifier[nline] += literal[int]
keyword[if] identifier[self] . identifier[SUPYBOT_EMPTY_REGEX] . identifier[match] ( identifier[line] ):
keyword[continue]
identifier[ts] , identifier[msg] = identifier[self] . identifier[_parse_supybot_timestamp] ( identifier[line] )
keyword[if] identifier[self] . identifier[SUPYBOT_EMPTY_COMMENT_REGEX] . identifier[match] ( identifier[msg] ):
keyword[continue]
keyword[elif] identifier[self] . identifier[SUPYBOT_EMPTY_COMMENT_ACTION_REGEX] . identifier[match] ( identifier[msg] ):
keyword[continue]
keyword[elif] identifier[self] . identifier[SUPYBOT_EMPTY_BOT_REGEX] . identifier[match] ( identifier[msg] ):
keyword[continue]
identifier[itype] , identifier[nick] , identifier[body] = identifier[self] . identifier[_parse_supybot_msg] ( identifier[msg] )
identifier[item] = identifier[self] . identifier[_build_item] ( identifier[ts] , identifier[itype] , identifier[nick] , identifier[body] )
keyword[yield] identifier[item] | def parse(self):
"""Parse a Supybot IRC stream.
Returns an iterator of dicts. Each dicts contains information
about the date, type, nick and body of a single log entry.
:returns: iterator of parsed lines
:raises ParseError: when an invalid line is found parsing the given
stream
"""
for line in self.stream:
line = line.rstrip('\n')
self.nline += 1
if self.SUPYBOT_EMPTY_REGEX.match(line):
continue # depends on [control=['if'], data=[]]
(ts, msg) = self._parse_supybot_timestamp(line)
if self.SUPYBOT_EMPTY_COMMENT_REGEX.match(msg):
continue # depends on [control=['if'], data=[]]
elif self.SUPYBOT_EMPTY_COMMENT_ACTION_REGEX.match(msg):
continue # depends on [control=['if'], data=[]]
elif self.SUPYBOT_EMPTY_BOT_REGEX.match(msg):
continue # depends on [control=['if'], data=[]]
(itype, nick, body) = self._parse_supybot_msg(msg)
item = self._build_item(ts, itype, nick, body)
yield item # depends on [control=['for'], data=['line']] |
def _get_svc_path(name='*', status=None):
    '''
    Return a list of paths to services with ``name`` that have the specified ``status``
    name
        a glob for service name. default is '*'
    status
        None : all services (no filter, default choice)
        'DISABLED' : available service(s) that is not enabled
        'ENABLED' : enabled service (whether started on boot or not)
    '''
    # Core service-discovery routine used by most functions in this
    # module. A service is the "apparent" folder containing its "run"
    # script; a symlinked folder is an alias for the targeted service,
    # hence readlink() below instead of os.path.realpath().
    if not SERVICE_DIR:
        raise CommandExecutionError('Could not find service directory.')
    # Enabled services live under SERVICE_DIR; resolve one symlink
    # level to keep aliases pointing at the canonical target.
    enabled = set()
    for candidate in glob.glob(os.path.join(SERVICE_DIR, name)):
        if _is_svc(candidate):
            enabled.add(os.readlink(candidate))
            log.trace('found enabled service path: %s', candidate)
    if status == 'ENABLED':
        return sorted(enabled)
    # Available services are collected from every AVAIL_SVR_DIRS entry.
    available = set()
    for svc_dir in AVAIL_SVR_DIRS:
        for candidate in glob.glob(os.path.join(svc_dir, name)):
            if _is_svc(candidate):
                available.add(candidate)
                log.trace('found available service path: %s', candidate)
    if status == 'DISABLED':
        # service available but not enabled
        result = available - enabled
    else:
        # default: return available services
        result = available | enabled
    return sorted(result)
constant[
Return a list of paths to services with ``name`` that have the specified ``status``
name
a glob for service name. default is '*'
status
None : all services (no filter, default choice)
'DISABLED' : available service(s) that is not enabled
'ENABLED' : enabled service (whether started on boot or not)
]
if <ast.UnaryOp object at 0x7da1b2037c40> begin[:]
<ast.Raise object at 0x7da1b2037ac0>
variable[ena] assign[=] call[name[set], parameter[]]
for taget[name[el]] in starred[call[name[glob].glob, parameter[call[name[os].path.join, parameter[name[SERVICE_DIR], name[name]]]]]] begin[:]
if call[name[_is_svc], parameter[name[el]]] begin[:]
call[name[ena].add, parameter[call[name[os].readlink, parameter[name[el]]]]]
call[name[log].trace, parameter[constant[found enabled service path: %s], name[el]]]
if compare[name[status] equal[==] constant[ENABLED]] begin[:]
return[call[name[sorted], parameter[name[ena]]]]
variable[ava] assign[=] call[name[set], parameter[]]
for taget[name[d]] in starred[name[AVAIL_SVR_DIRS]] begin[:]
for taget[name[el]] in starred[call[name[glob].glob, parameter[call[name[os].path.join, parameter[name[d], name[name]]]]]] begin[:]
if call[name[_is_svc], parameter[name[el]]] begin[:]
call[name[ava].add, parameter[name[el]]]
call[name[log].trace, parameter[constant[found available service path: %s], name[el]]]
if compare[name[status] equal[==] constant[DISABLED]] begin[:]
variable[ret] assign[=] call[name[ava].difference, parameter[name[ena]]]
return[call[name[sorted], parameter[name[ret]]]] | keyword[def] identifier[_get_svc_path] ( identifier[name] = literal[string] , identifier[status] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[SERVICE_DIR] :
keyword[raise] identifier[CommandExecutionError] ( literal[string] )
identifier[ena] = identifier[set] ()
keyword[for] identifier[el] keyword[in] identifier[glob] . identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[SERVICE_DIR] , identifier[name] )):
keyword[if] identifier[_is_svc] ( identifier[el] ):
identifier[ena] . identifier[add] ( identifier[os] . identifier[readlink] ( identifier[el] ))
identifier[log] . identifier[trace] ( literal[string] , identifier[el] )
keyword[if] identifier[status] == literal[string] :
keyword[return] identifier[sorted] ( identifier[ena] )
identifier[ava] = identifier[set] ()
keyword[for] identifier[d] keyword[in] identifier[AVAIL_SVR_DIRS] :
keyword[for] identifier[el] keyword[in] identifier[glob] . identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[d] , identifier[name] )):
keyword[if] identifier[_is_svc] ( identifier[el] ):
identifier[ava] . identifier[add] ( identifier[el] )
identifier[log] . identifier[trace] ( literal[string] , identifier[el] )
keyword[if] identifier[status] == literal[string] :
identifier[ret] = identifier[ava] . identifier[difference] ( identifier[ena] )
keyword[else] :
identifier[ret] = identifier[ava] . identifier[union] ( identifier[ena] )
keyword[return] identifier[sorted] ( identifier[ret] ) | def _get_svc_path(name='*', status=None):
"""
Return a list of paths to services with ``name`` that have the specified ``status``
name
a glob for service name. default is '*'
status
None : all services (no filter, default choice)
'DISABLED' : available service(s) that is not enabled
'ENABLED' : enabled service (whether started on boot or not)
"""
# This is the core routine to work with services, called by many
# other functions of this module.
#
# The name of a service is the "apparent" folder's name that contains its
# "run" script. If its "folder" is a symlink, the service is an "alias" of
# the targeted service.
if not SERVICE_DIR:
raise CommandExecutionError('Could not find service directory.') # depends on [control=['if'], data=[]]
# path list of enabled services as /AVAIL_SVR_DIRS/$service,
# taking care of any service aliases (do not use os.path.realpath()).
ena = set()
for el in glob.glob(os.path.join(SERVICE_DIR, name)):
if _is_svc(el):
ena.add(os.readlink(el))
log.trace('found enabled service path: %s', el) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['el']]
if status == 'ENABLED':
return sorted(ena) # depends on [control=['if'], data=[]]
# path list of available services as /AVAIL_SVR_DIRS/$service
ava = set()
for d in AVAIL_SVR_DIRS:
for el in glob.glob(os.path.join(d, name)):
if _is_svc(el):
ava.add(el)
log.trace('found available service path: %s', el) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['el']] # depends on [control=['for'], data=['d']]
if status == 'DISABLED':
# service available but not enabled
ret = ava.difference(ena) # depends on [control=['if'], data=[]]
else:
# default: return available services
ret = ava.union(ena)
return sorted(ret) |
def codify(combination):
    """
    Gets escape-codes for flag combinations.
    Arguments:
        combination (int): Either a single integer-convertible flag
                           or an OR'd flag-combination.
    Returns:
        A semi-colon-delimited string of appropriate escape sequences.
    Raises:
        errors.FlagError if the combination is out-of-range.
    """
    if isinstance(combination, int):
        if combination < 0 or combination >= LIMIT:
            raise errors.FlagError("Out-of-range flag-combination!")
    # Collect each set flag's escape code, preserving enum order.
    codes = [str(flag)
             for enum in (Style, Color, Fill)
             for flag in enum
             if combination & flag]
    return ";".join(codes)
constant[
Gets escape-codes for flag combinations.
Arguments:
combination (int): Either a single integer-convertible flag
or an OR'd flag-combination.
Returns:
A semi-colon-delimited string of appropriate escape sequences.
Raises:
errors.FlagError if the combination is out-of-range.
]
if <ast.BoolOp object at 0x7da18dc05420> begin[:]
<ast.Raise object at 0x7da18dc07100>
variable[codes] assign[=] list[[]]
for taget[name[enum]] in starred[tuple[[<ast.Name object at 0x7da18dc04c40>, <ast.Name object at 0x7da18dc06050>, <ast.Name object at 0x7da18dc04820>]]] begin[:]
for taget[name[flag]] in starred[name[enum]] begin[:]
if binary_operation[name[combination] <ast.BitAnd object at 0x7da2590d6b60> name[flag]] begin[:]
call[name[codes].append, parameter[call[name[str], parameter[name[flag]]]]]
return[call[constant[;].join, parameter[name[codes]]]] | keyword[def] identifier[codify] ( identifier[combination] ):
literal[string]
keyword[if] ( identifier[isinstance] ( identifier[combination] , identifier[int] ) keyword[and]
( identifier[combination] < literal[int] keyword[or] identifier[combination] >= identifier[LIMIT] )):
keyword[raise] identifier[errors] . identifier[FlagError] ( literal[string] )
identifier[codes] =[]
keyword[for] identifier[enum] keyword[in] ( identifier[Style] , identifier[Color] , identifier[Fill] ):
keyword[for] identifier[flag] keyword[in] identifier[enum] :
keyword[if] identifier[combination] & identifier[flag] :
identifier[codes] . identifier[append] ( identifier[str] ( identifier[flag] ))
keyword[return] literal[string] . identifier[join] ( identifier[codes] ) | def codify(combination):
"""
Gets escape-codes for flag combinations.
Arguments:
combination (int): Either a single integer-convertible flag
or an OR'd flag-combination.
Returns:
A semi-colon-delimited string of appropriate escape sequences.
Raises:
errors.FlagError if the combination is out-of-range.
"""
if isinstance(combination, int) and (combination < 0 or combination >= LIMIT):
raise errors.FlagError('Out-of-range flag-combination!') # depends on [control=['if'], data=[]]
codes = []
for enum in (Style, Color, Fill):
for flag in enum:
if combination & flag:
codes.append(str(flag)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['flag']] # depends on [control=['for'], data=['enum']]
return ';'.join(codes) |
def send(self, msg, error_check=False):
        """
        Send a raw string with the CR-LF appended to it.
        Required arguments:
        * msg - Message to send.
        Optional arguments:
        * error_check=False - Check for errors.
        If an error is found the relevant exception will be raised.
        """
        # Serialise writes on the connection lock so concurrent callers
        # cannot interleave partial lines on the socket.
        with self.lock:
            # Escape literal CR/LF already inside the message, then
            # terminate the line with the connection's CR-LF sequence.
            msg = msg.replace('\r', '\\r').replace('\n', '\\n') + self._crlf
            try:
                data = msg.encode(self.encoding)
            except UnicodeEncodeError:
                # Primary encoding cannot represent the text; retry with
                # the configured fallback encoding.
                data = msg.encode(self.fallback_encoding)
            if len(data) > 512:
                # 512 bytes including CR-LF — presumably the classic IRC
                # line-length limit (library name suggests IRC); confirm.
                raise self.MessageTooLong("LurklibError: MessageTooLong")
            self._socket.send(data)
            if error_check and self.readable():
                # NOTE(review): _recv() appears to consume the server's
                # reply so protocol errors surface as exceptions, and
                # stepback() to un-read it for later consumers — confirm.
                self._recv()
                self.stepback() | def function[send, parameter[self, msg, error_check]]:
constant[
Send a raw string with the CR-LF appended to it.
Required arguments:
* msg - Message to send.
Optional arguments:
* error_check=False - Check for errors.
If an error is found the relevant exception will be raised.
]
with name[self].lock begin[:]
variable[msg] assign[=] binary_operation[call[call[name[msg].replace, parameter[constant[
], constant[\r]]].replace, parameter[constant[
], constant[\n]]] + name[self]._crlf]
<ast.Try object at 0x7da20cabd7b0>
if compare[call[name[len], parameter[name[data]]] greater[>] constant[512]] begin[:]
<ast.Raise object at 0x7da2054a6ec0>
call[name[self]._socket.send, parameter[name[data]]]
if <ast.BoolOp object at 0x7da2054a6bf0> begin[:]
call[name[self]._recv, parameter[]]
call[name[self].stepback, parameter[]] | keyword[def] identifier[send] ( identifier[self] , identifier[msg] , identifier[error_check] = keyword[False] ):
literal[string]
keyword[with] identifier[self] . identifier[lock] :
identifier[msg] = identifier[msg] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )+ identifier[self] . identifier[_crlf]
keyword[try] :
identifier[data] = identifier[msg] . identifier[encode] ( identifier[self] . identifier[encoding] )
keyword[except] identifier[UnicodeEncodeError] :
identifier[data] = identifier[msg] . identifier[encode] ( identifier[self] . identifier[fallback_encoding] )
keyword[if] identifier[len] ( identifier[data] )> literal[int] :
keyword[raise] identifier[self] . identifier[MessageTooLong] ( literal[string] )
identifier[self] . identifier[_socket] . identifier[send] ( identifier[data] )
keyword[if] identifier[error_check] keyword[and] identifier[self] . identifier[readable] ():
identifier[self] . identifier[_recv] ()
identifier[self] . identifier[stepback] () | def send(self, msg, error_check=False):
"""
Send a raw string with the CR-LF appended to it.
Required arguments:
* msg - Message to send.
Optional arguments:
* error_check=False - Check for errors.
If an error is found the relevant exception will be raised.
"""
with self.lock:
msg = msg.replace('\r', '\\r').replace('\n', '\\n') + self._crlf
try:
data = msg.encode(self.encoding) # depends on [control=['try'], data=[]]
except UnicodeEncodeError:
data = msg.encode(self.fallback_encoding) # depends on [control=['except'], data=[]]
if len(data) > 512:
raise self.MessageTooLong('LurklibError: MessageTooLong') # depends on [control=['if'], data=[]]
self._socket.send(data)
if error_check and self.readable():
self._recv()
self.stepback() # depends on [control=['if'], data=[]] # depends on [control=['with'], data=[]] |
def read_price_data(files, name_func=None):
    """Read pricing data from csv files into one MultiIndexed DataFrame.

    Parameters
    ----------
    files: list
        Strings referring to csv files to read; the first column of each
        file must contain dates.
    name_func: func
        Function applied to each file string to infer the instrument name
        used in the second index level.  Defaults to the bare file name
        without path or extension, e.g. /path/to/file/name.csv -> name.

    Returns
    -------
    pandas.DataFrame indexed by a MultiIndex of (date, contract), where
    dates are pandas.Timestamps and contracts are instrument names.
    Columns come straight from the csv files.
    """
    def _default_name(fname):
        # /path/to/file/name.csv -> name
        base = os.path.split(fname)[1]
        return base.split(".")[0]

    namer = name_func if name_func is not None else _default_name

    def _load(fname):
        frame = pd.read_csv(fname, index_col=0, parse_dates=True)
        frame.sort_index(inplace=True)
        frame.index = pd.MultiIndex.from_product(
            [frame.index, [namer(fname)]], names=["date", "contract"])
        return frame

    frames = [_load(fname) for fname in files]
    return pd.concat(frames, axis=0, sort=False).sort_index()
constant[
Convenience function for reading in pricing data from csv files
Parameters
----------
files: list
List of strings refering to csv files to read data in from, first
column should be dates
name_func: func
A function to apply to the file strings to infer the instrument name,
used in the second level of the MultiIndex index. Default is the file
name excluding the pathname and file ending,
e.g. /path/to/file/name.csv -> name
Returns
-------
A pandas.DataFrame with a pandas.MultiIndex where the top level is
pandas.Timestamps and the second level is instrument names. Columns are
given by the csv file columns.
]
if compare[name[name_func] is constant[None]] begin[:]
def function[name_func, parameter[x]]:
return[call[call[call[call[name[os].path.split, parameter[name[x]]]][constant[1]].split, parameter[constant[.]]]][constant[0]]]
variable[dfs] assign[=] list[[]]
for taget[name[f]] in starred[name[files]] begin[:]
variable[name] assign[=] call[name[name_func], parameter[name[f]]]
variable[df] assign[=] call[name[pd].read_csv, parameter[name[f]]]
call[name[df].sort_index, parameter[]]
name[df].index assign[=] call[name[pd].MultiIndex.from_product, parameter[list[[<ast.Attribute object at 0x7da1b1c7fa90>, <ast.List object at 0x7da1b1c7e230>]]]]
call[name[dfs].append, parameter[name[df]]]
return[call[call[name[pd].concat, parameter[name[dfs]]].sort_index, parameter[]]] | keyword[def] identifier[read_price_data] ( identifier[files] , identifier[name_func] = keyword[None] ):
literal[string]
keyword[if] identifier[name_func] keyword[is] keyword[None] :
keyword[def] identifier[name_func] ( identifier[x] ):
keyword[return] identifier[os] . identifier[path] . identifier[split] ( identifier[x] )[ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]
identifier[dfs] =[]
keyword[for] identifier[f] keyword[in] identifier[files] :
identifier[name] = identifier[name_func] ( identifier[f] )
identifier[df] = identifier[pd] . identifier[read_csv] ( identifier[f] , identifier[index_col] = literal[int] , identifier[parse_dates] = keyword[True] )
identifier[df] . identifier[sort_index] ( identifier[inplace] = keyword[True] )
identifier[df] . identifier[index] = identifier[pd] . identifier[MultiIndex] . identifier[from_product] ([ identifier[df] . identifier[index] ,[ identifier[name] ]],
identifier[names] =[ literal[string] , literal[string] ])
identifier[dfs] . identifier[append] ( identifier[df] )
keyword[return] identifier[pd] . identifier[concat] ( identifier[dfs] , identifier[axis] = literal[int] , identifier[sort] = keyword[False] ). identifier[sort_index] () | def read_price_data(files, name_func=None):
"""
Convenience function for reading in pricing data from csv files
Parameters
----------
files: list
List of strings refering to csv files to read data in from, first
column should be dates
name_func: func
A function to apply to the file strings to infer the instrument name,
used in the second level of the MultiIndex index. Default is the file
name excluding the pathname and file ending,
e.g. /path/to/file/name.csv -> name
Returns
-------
A pandas.DataFrame with a pandas.MultiIndex where the top level is
pandas.Timestamps and the second level is instrument names. Columns are
given by the csv file columns.
"""
if name_func is None:
def name_func(x):
return os.path.split(x)[1].split('.')[0] # depends on [control=['if'], data=[]]
dfs = []
for f in files:
name = name_func(f)
df = pd.read_csv(f, index_col=0, parse_dates=True)
df.sort_index(inplace=True)
df.index = pd.MultiIndex.from_product([df.index, [name]], names=['date', 'contract'])
dfs.append(df) # depends on [control=['for'], data=['f']]
return pd.concat(dfs, axis=0, sort=False).sort_index() |
async def insert(**data):
    """
    RPC method for inserting data to the table.

    :param data: keyword arguments describing the insert; ``table`` names
        the target table and the remaining items form the row values.
    :return: a human-readable status string on success or on a recognised
        ClickHouse error code; implicitly ``None`` for any other error.
    """
    table = data.get('table')
    # NOTE(review): 'table' is looked up but not removed from `data`, so it
    # is forwarded to insert_into_table along with the row values — confirm
    # that this is intended.
    try:
        clickhouse_queries.insert_into_table(table, data)
        return 'Data was successfully inserted into table'
    except ServerException as e:
        # Assumes the exception text embeds the numeric ClickHouse error
        # code at character positions 5-8 (e.g. "Code: 60, ...") — TODO
        # confirm this format holds across driver versions.
        exception_code = int(str(e)[5:8].strip())
        if exception_code == 60:
            # Code 60 is reported to the caller as a missing table.
            return 'Table does not exists'
        elif exception_code == 50:
            # Code 50 is reported to the caller as invalid parameters.
            return 'Invalid params' | <ast.AsyncFunctionDef object at 0x7da1b0aa6830>
literal[string]
identifier[table] = identifier[data] . identifier[get] ( literal[string] )
keyword[try] :
identifier[clickhouse_queries] . identifier[insert_into_table] ( identifier[table] , identifier[data] )
keyword[return] literal[string]
keyword[except] identifier[ServerException] keyword[as] identifier[e] :
identifier[exception_code] = identifier[int] ( identifier[str] ( identifier[e] )[ literal[int] : literal[int] ]. identifier[strip] ())
keyword[if] identifier[exception_code] == literal[int] :
keyword[return] literal[string]
keyword[elif] identifier[exception_code] == literal[int] :
keyword[return] literal[string] | async def insert(**data):
"""
RPC method for inserting data to the table
:return: None
"""
table = data.get('table')
try:
clickhouse_queries.insert_into_table(table, data)
return 'Data was successfully inserted into table' # depends on [control=['try'], data=[]]
except ServerException as e:
exception_code = int(str(e)[5:8].strip())
if exception_code == 60:
return 'Table does not exists' # depends on [control=['if'], data=[]]
elif exception_code == 50:
return 'Invalid params' # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] |
def update(self):
        """Update disk I/O stats using the input method.

        Computes per-disk read/write deltas since the previous call and
        stores the resulting list of dicts in ``self.stats`` (also
        returned).  On the first call only a baseline snapshot is
        recorded, so the freshly initialised (presumably empty) stats
        value is returned.
        """
        # Init new stats
        stats = self.get_init_value()
        if self.input_method == 'local':
            # Update stats using the standard system lib
            # Grab the stat using the psutil disk_io_counters method
            # read_count: number of reads
            # write_count: number of writes
            # read_bytes: number of bytes read
            # write_bytes: number of bytes written
            # read_time: time spent reading from disk (in milliseconds)
            # write_time: time spent writing to disk (in milliseconds)
            try:
                diskiocounters = psutil.disk_io_counters(perdisk=True)
            except Exception:
                # psutil can fail (e.g. unsupported platform); bail out
                # with the initialised stats value.
                return stats
            # Previous disk IO stats are stored in the diskio_old variable
            if not hasattr(self, 'diskio_old'):
                # First call, we init the diskio_old var
                try:
                    self.diskio_old = diskiocounters
                except (IOError, UnboundLocalError):
                    # NOTE(review): UnboundLocalError cannot trigger here —
                    # diskiocounters is always bound in this branch (the
                    # except above returns).  Dead guard?
                    pass
            else:
                # By storing time data we enable Rx/s and Tx/s calculations in the
                # XML/RPC API, which would otherwise be overly difficult work
                # for users of the API
                time_since_update = getTimeSinceLastUpdate('disk')
                diskio_new = diskiocounters
                for disk in diskio_new:
                    # By default, RamFS is not displayed (issue #714)
                    if self.args is not None and not self.args.diskio_show_ramfs and disk.startswith('ram'):
                        continue
                    # Do not take hide disk into account
                    if self.is_hide(disk):
                        continue
                    # Compute count and bit rate
                    # Deltas against the previous snapshot give counters
                    # for the elapsed interval.
                    try:
                        read_count = (diskio_new[disk].read_count -
                                      self.diskio_old[disk].read_count)
                        write_count = (diskio_new[disk].write_count -
                                       self.diskio_old[disk].write_count)
                        read_bytes = (diskio_new[disk].read_bytes -
                                      self.diskio_old[disk].read_bytes)
                        write_bytes = (diskio_new[disk].write_bytes -
                                       self.diskio_old[disk].write_bytes)
                        # n(): presumably normalises the device key to a
                        # native string — confirm against helper.
                        diskstat = {
                            'time_since_update': time_since_update,
                            'disk_name': n(disk),
                            'read_count': read_count,
                            'write_count': write_count,
                            'read_bytes': read_bytes,
                            'write_bytes': write_bytes}
                        # Add alias if exist (define in the configuration file)
                        if self.has_alias(disk) is not None:
                            diskstat['alias'] = self.has_alias(disk)
                    except KeyError:
                        # Disk present now but absent from the previous
                        # snapshot — no delta possible, skip it.
                        continue
                    else:
                        diskstat['key'] = self.get_key()
                        stats.append(diskstat)
                # Save stats to compute next bitrate
                self.diskio_old = diskio_new
        elif self.input_method == 'snmp':
            # Update stats using SNMP
            # No standard way for the moment...
            pass
        # Update the stats
        self.stats = stats
        return self.stats | def function[update, parameter[self]]:
constant[Update disk I/O stats using the input method.]
variable[stats] assign[=] call[name[self].get_init_value, parameter[]]
if compare[name[self].input_method equal[==] constant[local]] begin[:]
<ast.Try object at 0x7da18f09eb60>
if <ast.UnaryOp object at 0x7da18f09e7a0> begin[:]
<ast.Try object at 0x7da18f09f670>
name[self].stats assign[=] name[stats]
return[name[self].stats] | keyword[def] identifier[update] ( identifier[self] ):
literal[string]
identifier[stats] = identifier[self] . identifier[get_init_value] ()
keyword[if] identifier[self] . identifier[input_method] == literal[string] :
keyword[try] :
identifier[diskiocounters] = identifier[psutil] . identifier[disk_io_counters] ( identifier[perdisk] = keyword[True] )
keyword[except] identifier[Exception] :
keyword[return] identifier[stats]
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
keyword[try] :
identifier[self] . identifier[diskio_old] = identifier[diskiocounters]
keyword[except] ( identifier[IOError] , identifier[UnboundLocalError] ):
keyword[pass]
keyword[else] :
identifier[time_since_update] = identifier[getTimeSinceLastUpdate] ( literal[string] )
identifier[diskio_new] = identifier[diskiocounters]
keyword[for] identifier[disk] keyword[in] identifier[diskio_new] :
keyword[if] identifier[self] . identifier[args] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[self] . identifier[args] . identifier[diskio_show_ramfs] keyword[and] identifier[disk] . identifier[startswith] ( literal[string] ):
keyword[continue]
keyword[if] identifier[self] . identifier[is_hide] ( identifier[disk] ):
keyword[continue]
keyword[try] :
identifier[read_count] =( identifier[diskio_new] [ identifier[disk] ]. identifier[read_count] -
identifier[self] . identifier[diskio_old] [ identifier[disk] ]. identifier[read_count] )
identifier[write_count] =( identifier[diskio_new] [ identifier[disk] ]. identifier[write_count] -
identifier[self] . identifier[diskio_old] [ identifier[disk] ]. identifier[write_count] )
identifier[read_bytes] =( identifier[diskio_new] [ identifier[disk] ]. identifier[read_bytes] -
identifier[self] . identifier[diskio_old] [ identifier[disk] ]. identifier[read_bytes] )
identifier[write_bytes] =( identifier[diskio_new] [ identifier[disk] ]. identifier[write_bytes] -
identifier[self] . identifier[diskio_old] [ identifier[disk] ]. identifier[write_bytes] )
identifier[diskstat] ={
literal[string] : identifier[time_since_update] ,
literal[string] : identifier[n] ( identifier[disk] ),
literal[string] : identifier[read_count] ,
literal[string] : identifier[write_count] ,
literal[string] : identifier[read_bytes] ,
literal[string] : identifier[write_bytes] }
keyword[if] identifier[self] . identifier[has_alias] ( identifier[disk] ) keyword[is] keyword[not] keyword[None] :
identifier[diskstat] [ literal[string] ]= identifier[self] . identifier[has_alias] ( identifier[disk] )
keyword[except] identifier[KeyError] :
keyword[continue]
keyword[else] :
identifier[diskstat] [ literal[string] ]= identifier[self] . identifier[get_key] ()
identifier[stats] . identifier[append] ( identifier[diskstat] )
identifier[self] . identifier[diskio_old] = identifier[diskio_new]
keyword[elif] identifier[self] . identifier[input_method] == literal[string] :
keyword[pass]
identifier[self] . identifier[stats] = identifier[stats]
keyword[return] identifier[self] . identifier[stats] | def update(self):
"""Update disk I/O stats using the input method."""
# Init new stats
stats = self.get_init_value()
if self.input_method == 'local':
# Update stats using the standard system lib
# Grab the stat using the psutil disk_io_counters method
# read_count: number of reads
# write_count: number of writes
# read_bytes: number of bytes read
# write_bytes: number of bytes written
# read_time: time spent reading from disk (in milliseconds)
# write_time: time spent writing to disk (in milliseconds)
try:
diskiocounters = psutil.disk_io_counters(perdisk=True) # depends on [control=['try'], data=[]]
except Exception:
return stats # depends on [control=['except'], data=[]]
# Previous disk IO stats are stored in the diskio_old variable
if not hasattr(self, 'diskio_old'):
# First call, we init the diskio_old var
try:
self.diskio_old = diskiocounters # depends on [control=['try'], data=[]]
except (IOError, UnboundLocalError):
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
# By storing time data we enable Rx/s and Tx/s calculations in the
# XML/RPC API, which would otherwise be overly difficult work
# for users of the API
time_since_update = getTimeSinceLastUpdate('disk')
diskio_new = diskiocounters
for disk in diskio_new:
# By default, RamFS is not displayed (issue #714)
if self.args is not None and (not self.args.diskio_show_ramfs) and disk.startswith('ram'):
continue # depends on [control=['if'], data=[]]
# Do not take hide disk into account
if self.is_hide(disk):
continue # depends on [control=['if'], data=[]]
# Compute count and bit rate
try:
read_count = diskio_new[disk].read_count - self.diskio_old[disk].read_count
write_count = diskio_new[disk].write_count - self.diskio_old[disk].write_count
read_bytes = diskio_new[disk].read_bytes - self.diskio_old[disk].read_bytes
write_bytes = diskio_new[disk].write_bytes - self.diskio_old[disk].write_bytes
diskstat = {'time_since_update': time_since_update, 'disk_name': n(disk), 'read_count': read_count, 'write_count': write_count, 'read_bytes': read_bytes, 'write_bytes': write_bytes}
# Add alias if exist (define in the configuration file)
if self.has_alias(disk) is not None:
diskstat['alias'] = self.has_alias(disk) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except KeyError:
continue # depends on [control=['except'], data=[]]
else:
diskstat['key'] = self.get_key()
stats.append(diskstat) # depends on [control=['for'], data=['disk']]
# Save stats to compute next bitrate
self.diskio_old = diskio_new # depends on [control=['if'], data=[]]
elif self.input_method == 'snmp':
# Update stats using SNMP
# No standard way for the moment...
pass # depends on [control=['if'], data=[]]
# Update the stats
self.stats = stats
return self.stats |
def hash_codeblocks(text, hashes):
    """Replace codeblocks (<pre> elements) in *text* with hash tokens.

    A codeblock is a (non-list) run of lines indented at least 4 spaces;
    exactly 4 leading spaces are stripped from each line and any further
    indentation is preserved.  Blocks separated only by blank lines are
    treated as one block (intermediate newlines included).  HTML-sensitive
    characters (&, <, >, ", ') are always escaped inside the block.

    The rendered ``<pre><code>...</code></pre>`` markup (the Markdown
    convention, expected by highlighters such as highlight.js) is stored
    in *hashes* under the token that replaces it in the returned text.
    """
    def _render(match):
        body = match.group(1).rstrip('\n')
        body = re.sub(r'(?:(?<=\n)|(?<=\A)) {4}', '', body)
        markup = '<pre><code>{}</code></pre>'.format(escape(body))
        token = hash_text(markup, 'pre')
        hashes[token] = markup
        return '\n\n' + token + '\n\n'
    return re_codeblock.sub(_render, text)
constant[Hashes codeblocks (<pre> elements).
Codeblocks are strictly defined to be (non-list) lines that are
indented at least 4 spaces from the newline. Exactly 4 spaces will
be stripped from the beginning of the line -- any leading
whitespace after that is preserved.
Codeblock lines that are separated only by blank lines will be
included in the same codeblock (as will the intermediate newlines).
Certain HTML entities (&, <, >, ", ') will always be escaped inside
code blocks.
Markdown defines code blocks to be <pre><code>, not just <pre>.
Certain highlighting packages (like highlight.js) are designed
to accomodate (and even look) for this type of conversion.
]
def function[sub, parameter[match]]:
variable[block] assign[=] call[call[name[match].group, parameter[constant[1]]].rstrip, parameter[constant[
]]]
variable[block] assign[=] call[name[re].sub, parameter[constant[(?:(?<=\n)|(?<=\A)) {4}], constant[], name[block]]]
variable[block] assign[=] call[name[escape], parameter[name[block]]]
variable[block] assign[=] call[constant[<pre><code>{}</code></pre>].format, parameter[name[block]]]
variable[hashed] assign[=] call[name[hash_text], parameter[name[block], constant[pre]]]
call[name[hashes]][name[hashed]] assign[=] name[block]
return[binary_operation[binary_operation[constant[
] + name[hashed]] + constant[
]]]
return[call[name[re_codeblock].sub, parameter[name[sub], name[text]]]] | keyword[def] identifier[hash_codeblocks] ( identifier[text] , identifier[hashes] ):
literal[string]
keyword[def] identifier[sub] ( identifier[match] ):
identifier[block] = identifier[match] . identifier[group] ( literal[int] ). identifier[rstrip] ( literal[string] )
identifier[block] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[block] )
identifier[block] = identifier[escape] ( identifier[block] )
identifier[block] = literal[string] . identifier[format] ( identifier[block] )
identifier[hashed] = identifier[hash_text] ( identifier[block] , literal[string] )
identifier[hashes] [ identifier[hashed] ]= identifier[block]
keyword[return] literal[string] + identifier[hashed] + literal[string]
keyword[return] identifier[re_codeblock] . identifier[sub] ( identifier[sub] , identifier[text] ) | def hash_codeblocks(text, hashes):
"""Hashes codeblocks (<pre> elements).
Codeblocks are strictly defined to be (non-list) lines that are
indented at least 4 spaces from the newline. Exactly 4 spaces will
be stripped from the beginning of the line -- any leading
whitespace after that is preserved.
Codeblock lines that are separated only by blank lines will be
included in the same codeblock (as will the intermediate newlines).
Certain HTML entities (&, <, >, ", ') will always be escaped inside
code blocks.
Markdown defines code blocks to be <pre><code>, not just <pre>.
Certain highlighting packages (like highlight.js) are designed
to accomodate (and even look) for this type of conversion.
"""
def sub(match):
block = match.group(1).rstrip('\n')
block = re.sub('(?:(?<=\\n)|(?<=\\A)) {4}', '', block)
block = escape(block)
block = '<pre><code>{}</code></pre>'.format(block)
hashed = hash_text(block, 'pre')
hashes[hashed] = block
return '\n\n' + hashed + '\n\n'
return re_codeblock.sub(sub, text) |
def get_dependencies(self, version=None):
        """Return the recorded dependencies for *version*.

        Parameters
        ----------
        version: str
            string representing version number whose dependencies you are
            looking up

        Raises
        ------
        ValueError
            if the requested version does not appear in the history
        """
        target = _process_version(self, version)
        for entry in reversed(self.get_history()):
            if BumpableVersion(entry['version']) == target:
                return entry['dependencies']
        raise ValueError('Version {} not found'.format(target))
constant[
Parameters
----------
version: str
string representing version number whose dependencies you are
looking up
]
variable[version] assign[=] call[name[_process_version], parameter[name[self], name[version]]]
variable[history] assign[=] call[name[self].get_history, parameter[]]
for taget[name[v]] in starred[call[name[reversed], parameter[name[history]]]] begin[:]
if compare[call[name[BumpableVersion], parameter[call[name[v]][constant[version]]]] equal[==] name[version]] begin[:]
return[call[name[v]][constant[dependencies]]]
<ast.Raise object at 0x7da1b0bceef0> | keyword[def] identifier[get_dependencies] ( identifier[self] , identifier[version] = keyword[None] ):
literal[string]
identifier[version] = identifier[_process_version] ( identifier[self] , identifier[version] )
identifier[history] = identifier[self] . identifier[get_history] ()
keyword[for] identifier[v] keyword[in] identifier[reversed] ( identifier[history] ):
keyword[if] identifier[BumpableVersion] ( identifier[v] [ literal[string] ])== identifier[version] :
keyword[return] identifier[v] [ literal[string] ]
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[version] )) | def get_dependencies(self, version=None):
"""
Parameters
----------
version: str
string representing version number whose dependencies you are
looking up
"""
version = _process_version(self, version)
history = self.get_history()
for v in reversed(history):
if BumpableVersion(v['version']) == version:
return v['dependencies'] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['v']]
raise ValueError('Version {} not found'.format(version)) |
def register_processor(self, processor):
        """Add *processor* to the pipeline.

        Processors are invoked in the order in which they were registered;
        *processor* must therefore be a callable.
        """
        if not callable(processor):
            raise ValueError('Processor %s is not callable.' % processor.__class__.__name__)
        self.processors.append(processor)
constant[
Register a new processor.
Note that processors are called in the order that they are registered.
]
if <ast.UnaryOp object at 0x7da207f031c0> begin[:]
<ast.Raise object at 0x7da207f03700> | keyword[def] identifier[register_processor] ( identifier[self] , identifier[processor] ):
literal[string]
keyword[if] keyword[not] identifier[callable] ( identifier[processor] ):
keyword[raise] identifier[ValueError] ( literal[string] % identifier[processor] . identifier[__class__] . identifier[__name__] )
keyword[else] :
identifier[self] . identifier[processors] . identifier[append] ( identifier[processor] ) | def register_processor(self, processor):
"""
Register a new processor.
Note that processors are called in the order that they are registered.
"""
if not callable(processor):
raise ValueError('Processor %s is not callable.' % processor.__class__.__name__) # depends on [control=['if'], data=[]]
else:
self.processors.append(processor) |
def fields(self, fields):
        """Set ``sysparm_fields`` by joining the given list of *fields*.

        :param fields: List of fields to include in the response
        :raise:
            :InvalidUsage: if fields is of an unexpected type
        """
        if isinstance(fields, list):
            self._sysparms['sysparm_fields'] = ",".join(fields)
        else:
            raise InvalidUsage('fields must be of type `list`')
constant[Sets `sysparm_fields` after joining the given list of `fields`
:param fields: List of fields to include in the response
:raise:
:InvalidUsage: if fields is of an unexpected type
]
if <ast.UnaryOp object at 0x7da1b07ae800> begin[:]
<ast.Raise object at 0x7da1b07acd00>
call[name[self]._sysparms][constant[sysparm_fields]] assign[=] call[constant[,].join, parameter[name[fields]]] | keyword[def] identifier[fields] ( identifier[self] , identifier[fields] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[fields] , identifier[list] ):
keyword[raise] identifier[InvalidUsage] ( literal[string] )
identifier[self] . identifier[_sysparms] [ literal[string] ]= literal[string] . identifier[join] ( identifier[fields] ) | def fields(self, fields):
"""Sets `sysparm_fields` after joining the given list of `fields`
:param fields: List of fields to include in the response
:raise:
:InvalidUsage: if fields is of an unexpected type
"""
if not isinstance(fields, list):
raise InvalidUsage('fields must be of type `list`') # depends on [control=['if'], data=[]]
self._sysparms['sysparm_fields'] = ','.join(fields) |
def startCc(CallControlCapabilities_presence=0):
    """START CC Section 9.3.23a

    Build a START CC message: TP protocol discriminator 0x3 layered with
    message type 0x9.  When ``CallControlCapabilities_presence == 1`` the
    optional Call Control Capabilities IE (IEI 0x15) is appended.

    :param CallControlCapabilities_presence: 1 to include the optional IE
    :return: the assembled packet
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x9)  # 00001001
    packet = a / b
    # Fix: use equality instead of identity (`is 1` relies on CPython's
    # small-int caching and warns on modern interpreters).
    if CallControlCapabilities_presence == 1:
        c = CallControlCapabilitiesHdr(ieiCCC=0x15, eightBitCCC=0x0)
        # Fix: original read `packet = paclet / c` — the misspelled name
        # raised NameError whenever the optional IE was requested.
        packet = packet / c
    return packet
constant[START CC Section 9.3.23a]
variable[a] assign[=] call[name[TpPd], parameter[]]
variable[b] assign[=] call[name[MessageType], parameter[]]
variable[packet] assign[=] binary_operation[name[a] / name[b]]
if compare[name[CallControlCapabilities_presence] is constant[1]] begin[:]
variable[c] assign[=] call[name[CallControlCapabilitiesHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[paclet] / name[c]]
return[name[packet]] | keyword[def] identifier[startCc] ( identifier[CallControlCapabilities_presence] = literal[int] ):
literal[string]
identifier[a] = identifier[TpPd] ( identifier[pd] = literal[int] )
identifier[b] = identifier[MessageType] ( identifier[mesType] = literal[int] )
identifier[packet] = identifier[a] / identifier[b]
keyword[if] identifier[CallControlCapabilities_presence] keyword[is] literal[int] :
identifier[c] = identifier[CallControlCapabilitiesHdr] ( identifier[ieiCCC] = literal[int] , identifier[eightBitCCC] = literal[int] )
identifier[packet] = identifier[paclet] / identifier[c]
keyword[return] identifier[packet] | def startCc(CallControlCapabilities_presence=0):
"""START CC Section 9.3.23a"""
a = TpPd(pd=3)
b = MessageType(mesType=9) # 00001001
packet = a / b
if CallControlCapabilities_presence is 1:
c = CallControlCapabilitiesHdr(ieiCCC=21, eightBitCCC=0)
packet = paclet / c # depends on [control=['if'], data=[]]
return packet |
def load_sensor_composites(self, sensor_name):
        """Load every compositor config available for *sensor_name*.

        Looks for ``composites/<sensor_name>.yaml`` along the configured
        search paths and feeds any hits to ``_load_config``; returns
        silently (after a debug log) when no config file exists.
        """
        config_filename = sensor_name + ".yaml"
        LOG.debug("Looking for composites config file %s", config_filename)
        found = config_search_paths(
            os.path.join("composites", config_filename),
            self.ppp_config_dir, check_exists=True)
        if found:
            self._load_config(found)
        else:
            LOG.debug("No composite config found called {}".format(config_filename))
constant[Load all compositor configs for the provided sensor.]
variable[config_filename] assign[=] binary_operation[name[sensor_name] + constant[.yaml]]
call[name[LOG].debug, parameter[constant[Looking for composites config file %s], name[config_filename]]]
variable[composite_configs] assign[=] call[name[config_search_paths], parameter[call[name[os].path.join, parameter[constant[composites], name[config_filename]]], name[self].ppp_config_dir]]
if <ast.UnaryOp object at 0x7da1b22ac970> begin[:]
call[name[LOG].debug, parameter[call[constant[No composite config found called {}].format, parameter[name[config_filename]]]]]
return[None]
call[name[self]._load_config, parameter[name[composite_configs]]] | keyword[def] identifier[load_sensor_composites] ( identifier[self] , identifier[sensor_name] ):
literal[string]
identifier[config_filename] = identifier[sensor_name] + literal[string]
identifier[LOG] . identifier[debug] ( literal[string] , identifier[config_filename] )
identifier[composite_configs] = identifier[config_search_paths] (
identifier[os] . identifier[path] . identifier[join] ( literal[string] , identifier[config_filename] ),
identifier[self] . identifier[ppp_config_dir] , identifier[check_exists] = keyword[True] )
keyword[if] keyword[not] identifier[composite_configs] :
identifier[LOG] . identifier[debug] ( literal[string] . identifier[format] (
identifier[config_filename] ))
keyword[return]
identifier[self] . identifier[_load_config] ( identifier[composite_configs] ) | def load_sensor_composites(self, sensor_name):
"""Load all compositor configs for the provided sensor."""
config_filename = sensor_name + '.yaml'
LOG.debug('Looking for composites config file %s', config_filename)
composite_configs = config_search_paths(os.path.join('composites', config_filename), self.ppp_config_dir, check_exists=True)
if not composite_configs:
LOG.debug('No composite config found called {}'.format(config_filename))
return # depends on [control=['if'], data=[]]
self._load_config(composite_configs) |
def get_tab(self, tab_name, allow_disabled=False):
    """Return a single tab from this tab group by name.

    Tabs that are not allowed, or not enabled, yield ``None`` instead.
    Pass ``allow_disabled=True`` to retrieve a tab even when it is
    disabled (it must still be allowed).
    """
    candidate = self._tabs.get(tab_name)
    if not candidate:
        return None
    if not candidate._allowed:
        return None
    if candidate._enabled or allow_disabled:
        return candidate
    return None
constant[Returns a specific tab from this tab group.
If the tab is not allowed or not enabled this method returns ``None``.
If the tab is disabled but you wish to return it anyway, you can pass
``True`` to the allow_disabled argument.
]
variable[tab] assign[=] call[name[self]._tabs.get, parameter[name[tab_name], constant[None]]]
if <ast.BoolOp object at 0x7da1b18bdf90> begin[:]
return[name[tab]]
return[constant[None]] | keyword[def] identifier[get_tab] ( identifier[self] , identifier[tab_name] , identifier[allow_disabled] = keyword[False] ):
literal[string]
identifier[tab] = identifier[self] . identifier[_tabs] . identifier[get] ( identifier[tab_name] , keyword[None] )
keyword[if] identifier[tab] keyword[and] identifier[tab] . identifier[_allowed] keyword[and] ( identifier[tab] . identifier[_enabled] keyword[or] identifier[allow_disabled] ):
keyword[return] identifier[tab]
keyword[return] keyword[None] | def get_tab(self, tab_name, allow_disabled=False):
"""Returns a specific tab from this tab group.
If the tab is not allowed or not enabled this method returns ``None``.
If the tab is disabled but you wish to return it anyway, you can pass
``True`` to the allow_disabled argument.
"""
tab = self._tabs.get(tab_name, None)
if tab and tab._allowed and (tab._enabled or allow_disabled):
return tab # depends on [control=['if'], data=[]]
return None |
def connectRelay(self):
    """Build the target protocol and attach it to this relay transport.

    The protocol is created through our connector's factory and then
    wired up by calling ``makeConnection`` with ``self`` as transport.
    """
    proto = self.connector.buildProtocol(None)
    self.protocol = proto
    self.connected = True
    proto.makeConnection(self)
constant[Builds the target protocol and connects it to the relay transport.
]
name[self].protocol assign[=] call[name[self].connector.buildProtocol, parameter[constant[None]]]
name[self].connected assign[=] constant[True]
call[name[self].protocol.makeConnection, parameter[name[self]]] | keyword[def] identifier[connectRelay] ( identifier[self] ):
literal[string]
identifier[self] . identifier[protocol] = identifier[self] . identifier[connector] . identifier[buildProtocol] ( keyword[None] )
identifier[self] . identifier[connected] = keyword[True]
identifier[self] . identifier[protocol] . identifier[makeConnection] ( identifier[self] ) | def connectRelay(self):
"""Builds the target protocol and connects it to the relay transport.
"""
self.protocol = self.connector.buildProtocol(None)
self.connected = True
self.protocol.makeConnection(self) |
def prompt_unset_inputs(self, force=False):
    """Prompt the user for every input value that is still unset.

    With ``force=True`` every input is prompted for, set or not.
    """
    for name, value in self._inputs.items():
        needs_prompt = force or value.is_empty(False)
        if needs_prompt:
            self.get_input(name, force=force)
constant[ Prompt for unset input values ]
for taget[tuple[[<ast.Name object at 0x7da20c7c92a0>, <ast.Name object at 0x7da20c7ca980>]]] in starred[call[name[self]._inputs.items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da20c7cbd00> begin[:]
call[name[self].get_input, parameter[name[k]]] | keyword[def] identifier[prompt_unset_inputs] ( identifier[self] , identifier[force] = keyword[False] ):
literal[string]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[_inputs] . identifier[items] ():
keyword[if] identifier[force] keyword[or] identifier[v] . identifier[is_empty] ( keyword[False] ):
identifier[self] . identifier[get_input] ( identifier[k] , identifier[force] = identifier[force] ) | def prompt_unset_inputs(self, force=False):
""" Prompt for unset input values """
for (k, v) in self._inputs.items():
if force or v.is_empty(False):
self.get_input(k, force=force) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def hook_key(key, callback, suppress=False):
    """
    Hook both key-down and key-up events for a single key, returning the
    handler that undoes the hook. A hooked key can also be removed with
    `unhook_key(key)` or `unhook_key(handler)`.

    Note: this function shares state with hotkeys, so `clear_all_hotkeys`
    affects it aswell.
    """
    _listener.start_if_necessary()
    if suppress:
        table = _listener.blocking_keys
    else:
        table = _listener.nonblocking_keys
    codes = key_to_scan_codes(key)
    for code in codes:
        table[code].append(callback)

    def remove_():
        # Drop every registration made above so the hook is fully undone.
        del _hooks[callback]
        del _hooks[key]
        del _hooks[remove_]
        for code in codes:
            table[code].remove(callback)

    # Allow removal by callback, by key, or by the handler itself.
    _hooks[callback] = _hooks[key] = _hooks[remove_] = remove_
    return remove_
constant[
Hooks key up and key down events for a single key. Returns the event handler
created. To remove a hooked key use `unhook_key(key)` or
`unhook_key(handler)`.
Note: this function shares state with hotkeys, so `clear_all_hotkeys`
affects it aswell.
]
call[name[_listener].start_if_necessary, parameter[]]
variable[store] assign[=] <ast.IfExp object at 0x7da1b1bfb430>
variable[scan_codes] assign[=] call[name[key_to_scan_codes], parameter[name[key]]]
for taget[name[scan_code]] in starred[name[scan_codes]] begin[:]
call[call[name[store]][name[scan_code]].append, parameter[name[callback]]]
def function[remove_, parameter[]]:
<ast.Delete object at 0x7da1b1b1aa40>
<ast.Delete object at 0x7da1b1b1a3e0>
<ast.Delete object at 0x7da1b1b1ab60>
for taget[name[scan_code]] in starred[name[scan_codes]] begin[:]
call[call[name[store]][name[scan_code]].remove, parameter[name[callback]]]
call[name[_hooks]][name[callback]] assign[=] name[remove_]
return[name[remove_]] | keyword[def] identifier[hook_key] ( identifier[key] , identifier[callback] , identifier[suppress] = keyword[False] ):
literal[string]
identifier[_listener] . identifier[start_if_necessary] ()
identifier[store] = identifier[_listener] . identifier[blocking_keys] keyword[if] identifier[suppress] keyword[else] identifier[_listener] . identifier[nonblocking_keys]
identifier[scan_codes] = identifier[key_to_scan_codes] ( identifier[key] )
keyword[for] identifier[scan_code] keyword[in] identifier[scan_codes] :
identifier[store] [ identifier[scan_code] ]. identifier[append] ( identifier[callback] )
keyword[def] identifier[remove_] ():
keyword[del] identifier[_hooks] [ identifier[callback] ]
keyword[del] identifier[_hooks] [ identifier[key] ]
keyword[del] identifier[_hooks] [ identifier[remove_] ]
keyword[for] identifier[scan_code] keyword[in] identifier[scan_codes] :
identifier[store] [ identifier[scan_code] ]. identifier[remove] ( identifier[callback] )
identifier[_hooks] [ identifier[callback] ]= identifier[_hooks] [ identifier[key] ]= identifier[_hooks] [ identifier[remove_] ]= identifier[remove_]
keyword[return] identifier[remove_] | def hook_key(key, callback, suppress=False):
"""
Hooks key up and key down events for a single key. Returns the event handler
created. To remove a hooked key use `unhook_key(key)` or
`unhook_key(handler)`.
Note: this function shares state with hotkeys, so `clear_all_hotkeys`
affects it aswell.
"""
_listener.start_if_necessary()
store = _listener.blocking_keys if suppress else _listener.nonblocking_keys
scan_codes = key_to_scan_codes(key)
for scan_code in scan_codes:
store[scan_code].append(callback) # depends on [control=['for'], data=['scan_code']]
def remove_():
del _hooks[callback]
del _hooks[key]
del _hooks[remove_]
for scan_code in scan_codes:
store[scan_code].remove(callback) # depends on [control=['for'], data=['scan_code']]
_hooks[callback] = _hooks[key] = _hooks[remove_] = remove_
return remove_ |
def get_medium_attachments_of_controller(self, name):
    """Return the medium attachments bound to the named storage controller.

    in name of type str

    return medium_attachments of type :class:`IMediumAttachment`

    raises :class:`VBoxErrorObjectNotFound`
        A storage controller with given name doesn't exist.
    """
    if not isinstance(name, basestring):
        raise TypeError("name can only be an instance of type basestring")
    raw_attachments = self._call("getMediumAttachmentsOfController",
                                 in_p=[name])
    return [IMediumAttachment(item) for item in raw_attachments]
constant[Returns an array of medium attachments which are attached to the
the controller with the given name.
in name of type str
return medium_attachments of type :class:`IMediumAttachment`
raises :class:`VBoxErrorObjectNotFound`
A storage controller with given name doesn't exist.
]
if <ast.UnaryOp object at 0x7da2047e8910> begin[:]
<ast.Raise object at 0x7da2047ea350>
variable[medium_attachments] assign[=] call[name[self]._call, parameter[constant[getMediumAttachmentsOfController]]]
variable[medium_attachments] assign[=] <ast.ListComp object at 0x7da20c6c7b20>
return[name[medium_attachments]] | keyword[def] identifier[get_medium_attachments_of_controller] ( identifier[self] , identifier[name] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[name] , identifier[basestring] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[medium_attachments] = identifier[self] . identifier[_call] ( literal[string] ,
identifier[in_p] =[ identifier[name] ])
identifier[medium_attachments] =[ identifier[IMediumAttachment] ( identifier[a] ) keyword[for] identifier[a] keyword[in] identifier[medium_attachments] ]
keyword[return] identifier[medium_attachments] | def get_medium_attachments_of_controller(self, name):
"""Returns an array of medium attachments which are attached to the
the controller with the given name.
in name of type str
return medium_attachments of type :class:`IMediumAttachment`
raises :class:`VBoxErrorObjectNotFound`
A storage controller with given name doesn't exist.
"""
if not isinstance(name, basestring):
raise TypeError('name can only be an instance of type basestring') # depends on [control=['if'], data=[]]
medium_attachments = self._call('getMediumAttachmentsOfController', in_p=[name])
medium_attachments = [IMediumAttachment(a) for a in medium_attachments]
return medium_attachments |
def get_current_instruction(self) -> Dict:
    """Return the instruction at the current program counter.

    :return: the entry of the environment's instruction list indexed by
        ``self.mstate.pc``
    """
    pc = self.mstate.pc
    return self.environment.code.instruction_list[pc]
constant[Gets the current instruction for this GlobalState.
:return:
]
variable[instructions] assign[=] name[self].environment.code.instruction_list
return[call[name[instructions]][name[self].mstate.pc]] | keyword[def] identifier[get_current_instruction] ( identifier[self] )-> identifier[Dict] :
literal[string]
identifier[instructions] = identifier[self] . identifier[environment] . identifier[code] . identifier[instruction_list]
keyword[return] identifier[instructions] [ identifier[self] . identifier[mstate] . identifier[pc] ] | def get_current_instruction(self) -> Dict:
"""Gets the current instruction for this GlobalState.
:return:
"""
instructions = self.environment.code.instruction_list
return instructions[self.mstate.pc] |
def create_WCSname(wcsname):
    """Validate the supplied WCSNAME, generating a dated default if blank.

    A blank *wcsname* is replaced by ``"User_"`` followed by the current
    date reported by ``fileutil.getDate()``.
    """
    if not util.is_blank(wcsname):
        return wcsname
    return "User_" + fileutil.getDate()
constant[ Verify that a valid WCSNAME has been provided, and if not, create a
default WCSNAME based on current date.
]
if call[name[util].is_blank, parameter[name[wcsname]]] begin[:]
variable[ptime] assign[=] call[name[fileutil].getDate, parameter[]]
variable[wcsname] assign[=] binary_operation[constant[User_] + name[ptime]]
return[name[wcsname]] | keyword[def] identifier[create_WCSname] ( identifier[wcsname] ):
literal[string]
keyword[if] identifier[util] . identifier[is_blank] ( identifier[wcsname] ):
identifier[ptime] = identifier[fileutil] . identifier[getDate] ()
identifier[wcsname] = literal[string] + identifier[ptime]
keyword[return] identifier[wcsname] | def create_WCSname(wcsname):
""" Verify that a valid WCSNAME has been provided, and if not, create a
default WCSNAME based on current date.
"""
if util.is_blank(wcsname):
ptime = fileutil.getDate()
wcsname = 'User_' + ptime # depends on [control=['if'], data=[]]
return wcsname |
def write_iocs(self, directory=None, source=None):
    """
    Serializes IOCs to a directory.

    :param directory: Directory to write IOCs to. If not provided, the current working directory is used.
    :param source: Dictionary containing iocid -> IOC mapping. Defaults to self.iocs_10. This is not normally modified by a user for this class.
    :return: True if at least one IOC was written out, False otherwise.
    """
    if not source:
        source = self.iocs_10
    if len(source) < 1:
        log.error('no iocs available to write out')
        return False
    if not directory:
        directory = os.getcwd()
    if os.path.isfile(directory):
        # Fixed grammar of the original "cannot writes iocs" message.
        log.error('cannot write iocs to a directory')
        return False
    # Drop IOCs that were pruned (or null-pruned) during processing.
    source_iocs = set(source.keys())
    source_iocs = source_iocs.difference(self.pruned_11_iocs)
    source_iocs = source_iocs.difference(self.null_pruned_iocs)
    if not source_iocs:
        log.error('no iocs available to write out after removing pruned/null iocs')
        return False
    utils.safe_makedirs(directory)
    output_dir = os.path.abspath(directory)
    # Lazy %-style args: the message is only formatted if INFO is enabled.
    log.info('Writing IOCs to %s', output_dir)
    # serialize the iocs
    for iocid in source_iocs:
        ioc_obj = source[iocid]
        ioc_obj.write_ioc_to_file(output_dir=output_dir, force=True)
    return True
constant[
Serializes IOCs to a directory.
:param directory: Directory to write IOCs to. If not provided, the current working directory is used.
:param source: Dictionary contianing iocid -> IOC mapping. Defaults to self.iocs_10. This is not normally modifed by a user for this class.
:return:
]
constant[
if directory is None, write the iocs to the current working directory
source: allows specifying a different dictionry of elmentTree ioc objects
]
if <ast.UnaryOp object at 0x7da1b1034700> begin[:]
variable[source] assign[=] name[self].iocs_10
if compare[call[name[len], parameter[name[source]]] less[<] constant[1]] begin[:]
call[name[log].error, parameter[constant[no iocs available to write out]]]
return[constant[False]]
if <ast.UnaryOp object at 0x7da1b1024ac0> begin[:]
variable[directory] assign[=] call[name[os].getcwd, parameter[]]
if call[name[os].path.isfile, parameter[name[directory]]] begin[:]
call[name[log].error, parameter[constant[cannot writes iocs to a directory]]]
return[constant[False]]
variable[source_iocs] assign[=] call[name[set], parameter[call[name[source].keys, parameter[]]]]
variable[source_iocs] assign[=] call[name[source_iocs].difference, parameter[name[self].pruned_11_iocs]]
variable[source_iocs] assign[=] call[name[source_iocs].difference, parameter[name[self].null_pruned_iocs]]
if <ast.UnaryOp object at 0x7da1b10cdab0> begin[:]
call[name[log].error, parameter[constant[no iocs available to write out after removing pruned/null iocs]]]
return[constant[False]]
call[name[utils].safe_makedirs, parameter[name[directory]]]
variable[output_dir] assign[=] call[name[os].path.abspath, parameter[name[directory]]]
call[name[log].info, parameter[binary_operation[constant[Writing IOCs to %s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[output_dir]]]]]]
for taget[name[iocid]] in starred[name[source_iocs]] begin[:]
variable[ioc_obj] assign[=] call[name[source]][name[iocid]]
call[name[ioc_obj].write_ioc_to_file, parameter[]]
return[constant[True]] | keyword[def] identifier[write_iocs] ( identifier[self] , identifier[directory] = keyword[None] , identifier[source] = keyword[None] ):
literal[string]
literal[string]
keyword[if] keyword[not] identifier[source] :
identifier[source] = identifier[self] . identifier[iocs_10]
keyword[if] identifier[len] ( identifier[source] )< literal[int] :
identifier[log] . identifier[error] ( literal[string] )
keyword[return] keyword[False]
keyword[if] keyword[not] identifier[directory] :
identifier[directory] = identifier[os] . identifier[getcwd] ()
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[directory] ):
identifier[log] . identifier[error] ( literal[string] )
keyword[return] keyword[False]
identifier[source_iocs] = identifier[set] ( identifier[source] . identifier[keys] ())
identifier[source_iocs] = identifier[source_iocs] . identifier[difference] ( identifier[self] . identifier[pruned_11_iocs] )
identifier[source_iocs] = identifier[source_iocs] . identifier[difference] ( identifier[self] . identifier[null_pruned_iocs] )
keyword[if] keyword[not] identifier[source_iocs] :
identifier[log] . identifier[error] ( literal[string] )
keyword[return] keyword[False]
identifier[utils] . identifier[safe_makedirs] ( identifier[directory] )
identifier[output_dir] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[directory] )
identifier[log] . identifier[info] ( literal[string] %( identifier[str] ( identifier[output_dir] )))
keyword[for] identifier[iocid] keyword[in] identifier[source_iocs] :
identifier[ioc_obj] = identifier[source] [ identifier[iocid] ]
identifier[ioc_obj] . identifier[write_ioc_to_file] ( identifier[output_dir] = identifier[output_dir] , identifier[force] = keyword[True] )
keyword[return] keyword[True] | def write_iocs(self, directory=None, source=None):
"""
Serializes IOCs to a directory.
:param directory: Directory to write IOCs to. If not provided, the current working directory is used.
:param source: Dictionary contianing iocid -> IOC mapping. Defaults to self.iocs_10. This is not normally modifed by a user for this class.
:return:
"""
'\n\n\n if directory is None, write the iocs to the current working directory\n source: allows specifying a different dictionry of elmentTree ioc objects\n '
if not source:
source = self.iocs_10 # depends on [control=['if'], data=[]]
if len(source) < 1:
log.error('no iocs available to write out')
return False # depends on [control=['if'], data=[]]
if not directory:
directory = os.getcwd() # depends on [control=['if'], data=[]]
if os.path.isfile(directory):
log.error('cannot writes iocs to a directory')
return False # depends on [control=['if'], data=[]]
source_iocs = set(source.keys())
source_iocs = source_iocs.difference(self.pruned_11_iocs)
source_iocs = source_iocs.difference(self.null_pruned_iocs)
if not source_iocs:
log.error('no iocs available to write out after removing pruned/null iocs')
return False # depends on [control=['if'], data=[]]
utils.safe_makedirs(directory)
output_dir = os.path.abspath(directory)
log.info('Writing IOCs to %s' % str(output_dir))
# serialize the iocs
for iocid in source_iocs:
ioc_obj = source[iocid]
ioc_obj.write_ioc_to_file(output_dir=output_dir, force=True) # depends on [control=['for'], data=['iocid']]
return True |
def reassign_ids(doc, verbose = False):
	"""
	Assign new IDs to all rows in all LSC tables in doc so that there
	are no collisions when the LIGO_LW elements are merged.
	"""
	# Can't simply run reassign_ids() on doc because we need to
	# construct a fresh old --> new mapping within each LIGO_LW block.
	for n, elem in enumerate(doc.childNodes):
		if verbose:
			# Progress line: the "\r" plus the trailing comma (Python 2
			# print syntax, no newline) overwrite the line each pass.
			print >>sys.stderr, "reassigning row IDs: %.1f%%\r" % (100.0 * (n + 1) / len(doc.childNodes)),
		if elem.tagName == ligolw.LIGO_LW.tagName:
			# Only LIGO_LW blocks carry tables whose row IDs may collide.
			table.reassign_ids(elem)
	if verbose:
		print >>sys.stderr, "reassigning row IDs: 100.0%"
	return doc
constant[
Assign new IDs to all rows in all LSC tables in doc so that there
are no collisions when the LIGO_LW elements are merged.
]
for taget[tuple[[<ast.Name object at 0x7da18dc06e00>, <ast.Name object at 0x7da18dc07b80>]]] in starred[call[name[enumerate], parameter[name[doc].childNodes]]] begin[:]
if name[verbose] begin[:]
tuple[[<ast.BinOp object at 0x7da18dc07850>, <ast.BinOp object at 0x7da1b0b567a0>]]
if compare[name[elem].tagName equal[==] name[ligolw].LIGO_LW.tagName] begin[:]
call[name[table].reassign_ids, parameter[name[elem]]]
if name[verbose] begin[:]
tuple[[<ast.BinOp object at 0x7da1b0b56590>, <ast.Constant object at 0x7da1b0b56320>]]
return[name[doc]] | keyword[def] identifier[reassign_ids] ( identifier[doc] , identifier[verbose] = keyword[False] ):
literal[string]
keyword[for] identifier[n] , identifier[elem] keyword[in] identifier[enumerate] ( identifier[doc] . identifier[childNodes] ):
keyword[if] identifier[verbose] :
identifier[print] >> identifier[sys] . identifier[stderr] , literal[string] %( literal[int] *( identifier[n] + literal[int] )/ identifier[len] ( identifier[doc] . identifier[childNodes] )),
keyword[if] identifier[elem] . identifier[tagName] == identifier[ligolw] . identifier[LIGO_LW] . identifier[tagName] :
identifier[table] . identifier[reassign_ids] ( identifier[elem] )
keyword[if] identifier[verbose] :
identifier[print] >> identifier[sys] . identifier[stderr] , literal[string]
keyword[return] identifier[doc] | def reassign_ids(doc, verbose=False):
"""
Assign new IDs to all rows in all LSC tables in doc so that there
are no collisions when the LIGO_LW elements are merged.
""" # Can't simply run reassign_ids() on doc because we need to
# construct a fresh old --> new mapping within each LIGO_LW block.
for (n, elem) in enumerate(doc.childNodes):
if verbose:
(print >> sys.stderr, 'reassigning row IDs: %.1f%%\r' % (100.0 * (n + 1) / len(doc.childNodes))) # depends on [control=['if'], data=[]]
if elem.tagName == ligolw.LIGO_LW.tagName:
table.reassign_ids(elem) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if verbose:
(print >> sys.stderr, 'reassigning row IDs: 100.0%') # depends on [control=['if'], data=[]]
return doc |
def averaged(*values):
    """
    Returns the mean of all supplied values. One or more *values* can be
    specified. For example, to light a :class:`~gpiozero.PWMLED` as the average
    of several potentiometers connected to an :class:`~gpiozero.MCP3008` ADC::

        from gpiozero import MCP3008, PWMLED
        from gpiozero.tools import averaged
        from signal import pause

        pot1 = MCP3008(channel=0)
        pot2 = MCP3008(channel=1)
        pot3 = MCP3008(channel=2)
        led = PWMLED(4)
        led.source = averaged(pot1, pot2, pot3)
        pause()
    """
    # Normalize each source once, then average them in lockstep.
    normalized = [_normalize(source) for source in values]
    for sample in zip(*normalized):
        yield mean(sample)
constant[
Returns the mean of all supplied values. One or more *values* can be
specified. For example, to light a :class:`~gpiozero.PWMLED` as the average
of several potentiometers connected to an :class:`~gpiozero.MCP3008` ADC::
from gpiozero import MCP3008, PWMLED
from gpiozero.tools import averaged
from signal import pause
pot1 = MCP3008(channel=0)
pot2 = MCP3008(channel=1)
pot3 = MCP3008(channel=2)
led = PWMLED(4)
led.source = averaged(pot1, pot2, pot3)
pause()
]
variable[values] assign[=] <ast.ListComp object at 0x7da18f09c850>
for taget[name[v]] in starred[call[name[zip], parameter[<ast.Starred object at 0x7da18f09cb50>]]] begin[:]
<ast.Yield object at 0x7da18f09e4a0> | keyword[def] identifier[averaged] (* identifier[values] ):
literal[string]
identifier[values] =[ identifier[_normalize] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[values] ]
keyword[for] identifier[v] keyword[in] identifier[zip] (* identifier[values] ):
keyword[yield] identifier[mean] ( identifier[v] ) | def averaged(*values):
"""
Returns the mean of all supplied values. One or more *values* can be
specified. For example, to light a :class:`~gpiozero.PWMLED` as the average
of several potentiometers connected to an :class:`~gpiozero.MCP3008` ADC::
from gpiozero import MCP3008, PWMLED
from gpiozero.tools import averaged
from signal import pause
pot1 = MCP3008(channel=0)
pot2 = MCP3008(channel=1)
pot3 = MCP3008(channel=2)
led = PWMLED(4)
led.source = averaged(pot1, pot2, pot3)
pause()
"""
values = [_normalize(v) for v in values]
for v in zip(*values):
yield mean(v) # depends on [control=['for'], data=['v']] |
def sendline(self, data, linesep=os.linesep):
    '''
    Send *data* to the terminal followed by a line separator
    (the platform default unless overridden).
    '''
    payload = '{0}{1}'.format(data, linesep)
    return self.send(payload)
constant[
Send the provided data to the terminal appending a line feed.
]
return[call[name[self].send, parameter[call[constant[{0}{1}].format, parameter[name[data], name[linesep]]]]]] | keyword[def] identifier[sendline] ( identifier[self] , identifier[data] , identifier[linesep] = identifier[os] . identifier[linesep] ):
literal[string]
keyword[return] identifier[self] . identifier[send] ( literal[string] . identifier[format] ( identifier[data] , identifier[linesep] )) | def sendline(self, data, linesep=os.linesep):
"""
Send the provided data to the terminal appending a line feed.
"""
return self.send('{0}{1}'.format(data, linesep)) |
def parse_spec(self):
    """Parse the recruiter specification into (name, count) pairs.

    Example: recruiters = bots: 5, mturk: 1
    """
    spec = get_config().get("recruiters")
    pairs = []
    for match in self.SPEC_RE.finditer(spec):
        recruiter_name = match.group(1)
        recruiter_count = int(match.group(2))
        pairs.append((recruiter_name, recruiter_count))
    return pairs
constant[Parse the specification of how to recruit participants.
Example: recruiters = bots: 5, mturk: 1
]
variable[recruiters] assign[=] list[[]]
variable[spec] assign[=] call[call[name[get_config], parameter[]].get, parameter[constant[recruiters]]]
for taget[name[match]] in starred[call[name[self].SPEC_RE.finditer, parameter[name[spec]]]] begin[:]
variable[name] assign[=] call[name[match].group, parameter[constant[1]]]
variable[count] assign[=] call[name[int], parameter[call[name[match].group, parameter[constant[2]]]]]
call[name[recruiters].append, parameter[tuple[[<ast.Name object at 0x7da1b0395a50>, <ast.Name object at 0x7da1b03973a0>]]]]
return[name[recruiters]] | keyword[def] identifier[parse_spec] ( identifier[self] ):
literal[string]
identifier[recruiters] =[]
identifier[spec] = identifier[get_config] (). identifier[get] ( literal[string] )
keyword[for] identifier[match] keyword[in] identifier[self] . identifier[SPEC_RE] . identifier[finditer] ( identifier[spec] ):
identifier[name] = identifier[match] . identifier[group] ( literal[int] )
identifier[count] = identifier[int] ( identifier[match] . identifier[group] ( literal[int] ))
identifier[recruiters] . identifier[append] (( identifier[name] , identifier[count] ))
keyword[return] identifier[recruiters] | def parse_spec(self):
"""Parse the specification of how to recruit participants.
Example: recruiters = bots: 5, mturk: 1
"""
recruiters = []
spec = get_config().get('recruiters')
for match in self.SPEC_RE.finditer(spec):
name = match.group(1)
count = int(match.group(2))
recruiters.append((name, count)) # depends on [control=['for'], data=['match']]
return recruiters |
def _build_kernel_function_declaration(self, name='kernel'):
    """Build and return the kernel function declaration AST node.

    :param name: name given to the declared function (default ``'kernel'``)
    :return: a :class:`c_ast.FuncDecl` whose parameter list is all array,
        scalar and constant declarations and whose return type is ``void``
    """
    # Only the declarations are needed here; the dimensions that
    # _build_array_declarations also returns are intentionally discarded.
    array_declarations, _ = self._build_array_declarations(with_init=False)
    scalar_declarations = self._build_scalar_declarations(with_init=False)
    const_declarations = self._build_const_declartions(with_init=False)
    params = array_declarations + scalar_declarations + const_declarations
    return c_ast.FuncDecl(
        args=c_ast.ParamList(params=params),
        type=c_ast.TypeDecl(declname=name,
                            quals=[],
                            type=c_ast.IdentifierType(names=['void'])))
constant[Build and return kernel function declaration]
<ast.Tuple object at 0x7da20c6c59f0> assign[=] call[name[self]._build_array_declarations, parameter[]]
variable[scalar_declarations] assign[=] call[name[self]._build_scalar_declarations, parameter[]]
variable[const_declarations] assign[=] call[name[self]._build_const_declartions, parameter[]]
return[call[name[c_ast].FuncDecl, parameter[]]] | keyword[def] identifier[_build_kernel_function_declaration] ( identifier[self] , identifier[name] = literal[string] ):
literal[string]
identifier[array_declarations] , identifier[array_dimensions] = identifier[self] . identifier[_build_array_declarations] ( identifier[with_init] = keyword[False] )
identifier[scalar_declarations] = identifier[self] . identifier[_build_scalar_declarations] ( identifier[with_init] = keyword[False] )
identifier[const_declarations] = identifier[self] . identifier[_build_const_declartions] ( identifier[with_init] = keyword[False] )
keyword[return] identifier[c_ast] . identifier[FuncDecl] ( identifier[args] = identifier[c_ast] . identifier[ParamList] ( identifier[params] = identifier[array_declarations] + identifier[scalar_declarations] +
identifier[const_declarations] ),
identifier[type] = identifier[c_ast] . identifier[TypeDecl] ( identifier[declname] = identifier[name] ,
identifier[quals] =[],
identifier[type] = identifier[c_ast] . identifier[IdentifierType] ( identifier[names] =[ literal[string] ]))) | def _build_kernel_function_declaration(self, name='kernel'):
"""Build and return kernel function declaration"""
(array_declarations, array_dimensions) = self._build_array_declarations(with_init=False)
scalar_declarations = self._build_scalar_declarations(with_init=False)
const_declarations = self._build_const_declartions(with_init=False)
return c_ast.FuncDecl(args=c_ast.ParamList(params=array_declarations + scalar_declarations + const_declarations), type=c_ast.TypeDecl(declname=name, quals=[], type=c_ast.IdentifierType(names=['void']))) |
def _set_auto_fields(self, model_obj):
"""Set the values of the auto field using counter"""
for field_name, field_obj in \
self.entity_cls.meta_.auto_fields:
counter_key = f'{self.schema_name}_{field_name}'
if not (field_name in model_obj and model_obj[field_name] is not None):
# Increment the counter and it should start from 1
counter = next(self.conn['counters'][counter_key])
if not counter:
counter = next(self.conn['counters'][counter_key])
model_obj[field_name] = counter
return model_obj | def function[_set_auto_fields, parameter[self, model_obj]]:
constant[Set the values of the auto field using counter]
for taget[tuple[[<ast.Name object at 0x7da1b1bba410>, <ast.Name object at 0x7da1b1bbb7c0>]]] in starred[name[self].entity_cls.meta_.auto_fields] begin[:]
variable[counter_key] assign[=] <ast.JoinedStr object at 0x7da1b1bbbfd0>
if <ast.UnaryOp object at 0x7da1b1bba890> begin[:]
variable[counter] assign[=] call[name[next], parameter[call[call[name[self].conn][constant[counters]]][name[counter_key]]]]
if <ast.UnaryOp object at 0x7da1b1bbbe50> begin[:]
variable[counter] assign[=] call[name[next], parameter[call[call[name[self].conn][constant[counters]]][name[counter_key]]]]
call[name[model_obj]][name[field_name]] assign[=] name[counter]
return[name[model_obj]] | keyword[def] identifier[_set_auto_fields] ( identifier[self] , identifier[model_obj] ):
literal[string]
keyword[for] identifier[field_name] , identifier[field_obj] keyword[in] identifier[self] . identifier[entity_cls] . identifier[meta_] . identifier[auto_fields] :
identifier[counter_key] = literal[string]
keyword[if] keyword[not] ( identifier[field_name] keyword[in] identifier[model_obj] keyword[and] identifier[model_obj] [ identifier[field_name] ] keyword[is] keyword[not] keyword[None] ):
identifier[counter] = identifier[next] ( identifier[self] . identifier[conn] [ literal[string] ][ identifier[counter_key] ])
keyword[if] keyword[not] identifier[counter] :
identifier[counter] = identifier[next] ( identifier[self] . identifier[conn] [ literal[string] ][ identifier[counter_key] ])
identifier[model_obj] [ identifier[field_name] ]= identifier[counter]
keyword[return] identifier[model_obj] | def _set_auto_fields(self, model_obj):
"""Set the values of the auto field using counter"""
for (field_name, field_obj) in self.entity_cls.meta_.auto_fields:
counter_key = f'{self.schema_name}_{field_name}'
if not (field_name in model_obj and model_obj[field_name] is not None):
# Increment the counter and it should start from 1
counter = next(self.conn['counters'][counter_key])
if not counter:
counter = next(self.conn['counters'][counter_key]) # depends on [control=['if'], data=[]]
model_obj[field_name] = counter # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return model_obj |
def add_wic_ports(self, wic_slot):
    """
    Append the ports provided by one WIC to the node['ports'] list.
    :param str wic_slot: WIC Slot (wic0)
    """
    # The slot index is the digit in the slot label, e.g. "wic0" -> 0.
    slot_index = int(wic_slot[3])
    wic_adapter = self.node['properties'][wic_slot]
    port_count = ADAPTER_MATRIX[wic_adapter]['ports']
    media_type = ADAPTER_MATRIX[wic_adapter]['type']
    # Dynamips numbers WIC ports starting at a multiple of 16.
    first_number = 16 * (slot_index + 1)
    # WICs always live in adapter slot 0.
    adapter_slot = 0
    new_ports = []
    for offset in range(port_count):
        physical_number = offset + self.port_numbering[media_type]
        new_ports.append({'name': PORT_TYPES[media_type] + '%s/%s' % (adapter_slot,
                                                                      physical_number),
                          'id': self.port_id,
                          'port_number': first_number + offset,
                          'slot_number': adapter_slot})
        self.port_id += 1
    self.port_numbering[media_type] += port_count
    self.node['ports'].extend(new_ports)
constant[
Add the ports for a specific WIC to the node['ports'] dictionary
:param str wic_slot: WIC Slot (wic0)
]
variable[wic_slot_number] assign[=] call[name[int], parameter[call[name[wic_slot]][constant[3]]]]
variable[wic_adapter] assign[=] call[call[name[self].node][constant[properties]]][name[wic_slot]]
variable[num_ports] assign[=] call[call[name[ADAPTER_MATRIX]][name[wic_adapter]]][constant[ports]]
variable[port_type] assign[=] call[call[name[ADAPTER_MATRIX]][name[wic_adapter]]][constant[type]]
variable[ports] assign[=] list[[]]
variable[base] assign[=] binary_operation[constant[16] * binary_operation[name[wic_slot_number] + constant[1]]]
variable[slot] assign[=] constant[0]
for taget[name[port_number]] in starred[call[name[range], parameter[name[num_ports]]]] begin[:]
variable[phy_port_number] assign[=] binary_operation[name[port_number] + call[name[self].port_numbering][name[port_type]]]
variable[port_name] assign[=] binary_operation[call[name[PORT_TYPES]][name[port_type]] + binary_operation[constant[%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b28f37f0>, <ast.Name object at 0x7da1b28f0580>]]]]
variable[port_temp] assign[=] dictionary[[<ast.Constant object at 0x7da1b28f1090>, <ast.Constant object at 0x7da1b28f35e0>, <ast.Constant object at 0x7da1b28f1870>, <ast.Constant object at 0x7da1b28f2bf0>], [<ast.Name object at 0x7da1b28f17e0>, <ast.Attribute object at 0x7da1b28f1510>, <ast.BinOp object at 0x7da1b28f2620>, <ast.Name object at 0x7da1b28f1420>]]
call[name[ports].append, parameter[name[port_temp]]]
<ast.AugAssign object at 0x7da1b28f0ac0>
<ast.AugAssign object at 0x7da1b28f1a50>
call[call[name[self].node][constant[ports]].extend, parameter[name[ports]]] | keyword[def] identifier[add_wic_ports] ( identifier[self] , identifier[wic_slot] ):
literal[string]
identifier[wic_slot_number] = identifier[int] ( identifier[wic_slot] [ literal[int] ])
identifier[wic_adapter] = identifier[self] . identifier[node] [ literal[string] ][ identifier[wic_slot] ]
identifier[num_ports] = identifier[ADAPTER_MATRIX] [ identifier[wic_adapter] ][ literal[string] ]
identifier[port_type] = identifier[ADAPTER_MATRIX] [ identifier[wic_adapter] ][ literal[string] ]
identifier[ports] =[]
identifier[base] = literal[int] *( identifier[wic_slot_number] + literal[int] )
identifier[slot] = literal[int]
keyword[for] identifier[port_number] keyword[in] identifier[range] ( identifier[num_ports] ):
identifier[phy_port_number] = identifier[port_number] + identifier[self] . identifier[port_numbering] [ identifier[port_type] ]
identifier[port_name] = identifier[PORT_TYPES] [ identifier[port_type] ]+ literal[string] %( identifier[slot] ,
identifier[phy_port_number] )
identifier[port_temp] ={ literal[string] : identifier[port_name] ,
literal[string] : identifier[self] . identifier[port_id] ,
literal[string] : identifier[base] + identifier[port_number] ,
literal[string] : identifier[slot] }
identifier[ports] . identifier[append] ( identifier[port_temp] )
identifier[self] . identifier[port_id] += literal[int]
identifier[self] . identifier[port_numbering] [ identifier[port_type] ]+= identifier[num_ports]
identifier[self] . identifier[node] [ literal[string] ]. identifier[extend] ( identifier[ports] ) | def add_wic_ports(self, wic_slot):
"""
Add the ports for a specific WIC to the node['ports'] dictionary
:param str wic_slot: WIC Slot (wic0)
"""
wic_slot_number = int(wic_slot[3])
wic_adapter = self.node['properties'][wic_slot]
num_ports = ADAPTER_MATRIX[wic_adapter]['ports']
port_type = ADAPTER_MATRIX[wic_adapter]['type']
ports = []
# Dynamips WICs port number start on a multiple of 16.
base = 16 * (wic_slot_number + 1)
# WICs are always in adapter slot 0.
slot = 0
for port_number in range(num_ports):
phy_port_number = port_number + self.port_numbering[port_type]
port_name = PORT_TYPES[port_type] + '%s/%s' % (slot, phy_port_number)
port_temp = {'name': port_name, 'id': self.port_id, 'port_number': base + port_number, 'slot_number': slot}
ports.append(port_temp)
self.port_id += 1 # depends on [control=['for'], data=['port_number']]
self.port_numbering[port_type] += num_ports
self.node['ports'].extend(ports) |
def button_clicked(self, button):
    """Action when a dialog button was clicked.

    On OK, export the selected event types to the chosen format (CSV or
    Brain Vision) and accept the dialog; on Cancel, reject the dialog.

    Parameters
    ----------
    button : instance of QPushButton
        which button was pressed
    """
    if button is self.idx_ok:
        fn = Path(self.filename)
        xp_format = self.xp_format.get_value()
        # Either export every known event type, or only the types the
        # user highlighted in the selection list.
        if self.all_types.get_value():
            evt_type = self.event_types
        else:
            evt_type = [
                x.text() for x in self.idx_evt_type.selectedItems()]
        if 'CSV' == xp_format:
            self.parent.notes.annot.export_events(fn, evt_type)
        elif 'Brain Vision' == xp_format:
            # Collect the events of every requested type from the annotations.
            events = []
            for et in evt_type:
                events.extend(self.parent.notes.annot.get_events(name=et))
            if not events:
                # Nothing to export: tell the user and keep the dialog open.
                self.parent.statusBar.showMessage('No events found.')
                return
            # Sort events chronologically by onset before writing markers.
            events = sorted(events, key=lambda x: x['start'])
            dataset = self.parent.info.dataset
            # Minimal container passed to _write_vmrk: only the recording
            # start time and sampling frequency are filled in here.
            data = ChanTime()
            data.start_time = dataset.header['start_time']
            data.s_freq = int(dataset.header['s_freq'])
            # Brain Vision markers go in a .vmrk file next to the chosen name.
            with fn.with_suffix('.vmrk').open('w') as f:
                lg.info('Writing to ' + str(fn) + '.vmrk')
                f.write(_write_vmrk(data, fn, events))
        self.accept()
    if button is self.idx_cancel:
        self.reject()
constant[Action when button was clicked.
Parameters
----------
button : instance of QPushButton
which button was pressed
]
if compare[name[button] is name[self].idx_ok] begin[:]
variable[fn] assign[=] call[name[Path], parameter[name[self].filename]]
variable[xp_format] assign[=] call[name[self].xp_format.get_value, parameter[]]
if call[name[self].all_types.get_value, parameter[]] begin[:]
variable[evt_type] assign[=] name[self].event_types
if compare[constant[CSV] equal[==] name[xp_format]] begin[:]
call[name[self].parent.notes.annot.export_events, parameter[name[fn], name[evt_type]]]
call[name[self].accept, parameter[]]
if compare[name[button] is name[self].idx_cancel] begin[:]
call[name[self].reject, parameter[]] | keyword[def] identifier[button_clicked] ( identifier[self] , identifier[button] ):
literal[string]
keyword[if] identifier[button] keyword[is] identifier[self] . identifier[idx_ok] :
identifier[fn] = identifier[Path] ( identifier[self] . identifier[filename] )
identifier[xp_format] = identifier[self] . identifier[xp_format] . identifier[get_value] ()
keyword[if] identifier[self] . identifier[all_types] . identifier[get_value] ():
identifier[evt_type] = identifier[self] . identifier[event_types]
keyword[else] :
identifier[evt_type] =[
identifier[x] . identifier[text] () keyword[for] identifier[x] keyword[in] identifier[self] . identifier[idx_evt_type] . identifier[selectedItems] ()]
keyword[if] literal[string] == identifier[xp_format] :
identifier[self] . identifier[parent] . identifier[notes] . identifier[annot] . identifier[export_events] ( identifier[fn] , identifier[evt_type] )
keyword[elif] literal[string] == identifier[xp_format] :
identifier[events] =[]
keyword[for] identifier[et] keyword[in] identifier[evt_type] :
identifier[events] . identifier[extend] ( identifier[self] . identifier[parent] . identifier[notes] . identifier[annot] . identifier[get_events] ( identifier[name] = identifier[et] ))
keyword[if] keyword[not] identifier[events] :
identifier[self] . identifier[parent] . identifier[statusBar] . identifier[showMessage] ( literal[string] )
keyword[return]
identifier[events] = identifier[sorted] ( identifier[events] , identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[string] ])
identifier[dataset] = identifier[self] . identifier[parent] . identifier[info] . identifier[dataset]
identifier[data] = identifier[ChanTime] ()
identifier[data] . identifier[start_time] = identifier[dataset] . identifier[header] [ literal[string] ]
identifier[data] . identifier[s_freq] = identifier[int] ( identifier[dataset] . identifier[header] [ literal[string] ])
keyword[with] identifier[fn] . identifier[with_suffix] ( literal[string] ). identifier[open] ( literal[string] ) keyword[as] identifier[f] :
identifier[lg] . identifier[info] ( literal[string] + identifier[str] ( identifier[fn] )+ literal[string] )
identifier[f] . identifier[write] ( identifier[_write_vmrk] ( identifier[data] , identifier[fn] , identifier[events] ))
identifier[self] . identifier[accept] ()
keyword[if] identifier[button] keyword[is] identifier[self] . identifier[idx_cancel] :
identifier[self] . identifier[reject] () | def button_clicked(self, button):
"""Action when button was clicked.
Parameters
----------
button : instance of QPushButton
which button was pressed
"""
if button is self.idx_ok:
fn = Path(self.filename)
xp_format = self.xp_format.get_value()
if self.all_types.get_value():
evt_type = self.event_types # depends on [control=['if'], data=[]]
else:
evt_type = [x.text() for x in self.idx_evt_type.selectedItems()]
if 'CSV' == xp_format:
self.parent.notes.annot.export_events(fn, evt_type) # depends on [control=['if'], data=[]]
elif 'Brain Vision' == xp_format:
events = []
for et in evt_type:
events.extend(self.parent.notes.annot.get_events(name=et)) # depends on [control=['for'], data=['et']]
if not events:
self.parent.statusBar.showMessage('No events found.')
return # depends on [control=['if'], data=[]]
events = sorted(events, key=lambda x: x['start'])
dataset = self.parent.info.dataset
data = ChanTime()
data.start_time = dataset.header['start_time']
data.s_freq = int(dataset.header['s_freq'])
with fn.with_suffix('.vmrk').open('w') as f:
lg.info('Writing to ' + str(fn) + '.vmrk')
f.write(_write_vmrk(data, fn, events)) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]]
self.accept() # depends on [control=['if'], data=[]]
if button is self.idx_cancel:
self.reject() # depends on [control=['if'], data=[]] |
def _keys(self, pattern):
"""Execute the KEYS command on all Redis shards.
Args:
pattern: The KEYS pattern to query.
Returns:
The concatenated list of results from all shards.
"""
result = []
for client in self.redis_clients:
result.extend(list(client.scan_iter(match=pattern)))
return result | def function[_keys, parameter[self, pattern]]:
constant[Execute the KEYS command on all Redis shards.
Args:
pattern: The KEYS pattern to query.
Returns:
The concatenated list of results from all shards.
]
variable[result] assign[=] list[[]]
for taget[name[client]] in starred[name[self].redis_clients] begin[:]
call[name[result].extend, parameter[call[name[list], parameter[call[name[client].scan_iter, parameter[]]]]]]
return[name[result]] | keyword[def] identifier[_keys] ( identifier[self] , identifier[pattern] ):
literal[string]
identifier[result] =[]
keyword[for] identifier[client] keyword[in] identifier[self] . identifier[redis_clients] :
identifier[result] . identifier[extend] ( identifier[list] ( identifier[client] . identifier[scan_iter] ( identifier[match] = identifier[pattern] )))
keyword[return] identifier[result] | def _keys(self, pattern):
"""Execute the KEYS command on all Redis shards.
Args:
pattern: The KEYS pattern to query.
Returns:
The concatenated list of results from all shards.
"""
result = []
for client in self.redis_clients:
result.extend(list(client.scan_iter(match=pattern))) # depends on [control=['for'], data=['client']]
return result |
def riemann_metric(Y, laplacian, n_dim=None, invert_h=False, mode_inv='svd'):
    """
    Estimate the dual Riemannian metric H (and optionally the metric G).
    Parameters
    ----------
    Y: array-like, shape = (n_samples, mdimY )
        The embedding coordinates of the points
    laplacian: array-like, shape = (n_samples, n_samples)
        The Laplacian of the data. It is recommended to use the "geometric"
        Laplacian (default) option from geometry.graph_laplacian()
    n_dim : integer, optional
        Use only the first n_dim <= mdimY dimensions. All dimensions
        n_dim:mdimY are ignored. Defaults to all mdimY dimensions.
    invert_h: boolean, optional
        if False, only the "dual Riemannian metric" is computed
        if True, the dual metric matrices are inverted to obtain the
        Riemannian metric G.
    mode_inv: string, optional
        How to compute the inverses of h_dual_metric, if invert_h
        "inv", use numpy.inv()
        "svd" (default), use numpy.linalg.svd(), then invert the eigenvalues
        (possibly a more numerically stable method with H is symmetric and
        ill conditioned)
    Returns
    -------
    h_dual_metric : array, shape=(n_samples, n_dim, n_dim)
    Optionally :
    g_riemann_metric : array, shape=(n_samples, n_dim, n_dim )
    Hvv : singular vectors of H, transposed, shape = ( n_samples, n_dim, n_dim )
    Hsvals : singular values of H, shape = ( n_samples, n_dim )
    Gsvals : singular values of G, shape = ( n_samples, n_dim )
    Notes
    -----
    References
    ----------
    "Non-linear dimensionality reduction: Riemannian metric estimation and
    the problem of geometric discovery",
    Dominique Perraul-Joncas, Marina Meila, arXiv:1305.7255
    """
    n_samples = laplacian.shape[0]
    # Bug fix: n_dim is documented as optional, but the original code passed
    # None straight into np.zeros/np.arange and crashed. Default to all of
    # Y's embedding dimensions.
    if n_dim is None:
        n_dim = Y.shape[1]
    h_dual_metric = np.zeros((n_samples, n_dim, n_dim))
    # Fill the upper triangle: h_ij = 0.5 * (L(y_i y_j) - y_j L(y_i) - y_i L(y_j))
    for i in range(n_dim):
        for j in range(i, n_dim):
            yij = Y[:, i] * Y[:, j]
            h_dual_metric[:, i, j] = 0.5 * (laplacian.dot(yij)
                                            - Y[:, j] * laplacian.dot(Y[:, i])
                                            - Y[:, i] * laplacian.dot(Y[:, j]))
    # Mirror to the lower triangle: H is symmetric by construction.
    for j in range(n_dim - 1):
        for i in range(j + 1, n_dim):
            h_dual_metric[:, i, j] = h_dual_metric[:, j, i]
    # Invert H pointwise to obtain the Riemannian metric G, if requested.
    if invert_h:
        # NOTE(review): mode_inv is documented but currently ignored;
        # compute_G_from_H chooses the inversion method itself.
        g_riemann_metric, Hvv, Hsvals, Gsvals = compute_G_from_H(h_dual_metric)
    else:
        g_riemann_metric = Hvv = Hsvals = Gsvals = None
    return h_dual_metric, g_riemann_metric, Hvv, Hsvals, Gsvals
constant[
Parameters
----------
Y: array-like, shape = (n_samples, mdimY )
The embedding coordinates of the points
laplacian: array-like, shape = (n_samples, n_samples)
The Laplacian of the data. It is recommended to use the "geometric"
Laplacian (default) option from geometry.graph_laplacian()
n_dim : integer, optional
Use only the first n_dim <= mdimY dimensions.All dimensions
n_dim:mdimY are ignored.
invert_h: boolean, optional
if False, only the "dual Riemannian metric" is computed
if True, the dual metric matrices are inverted to obtain the
Riemannian metric G.
mode_inv: string, optional
How to compute the inverses of h_dual_metric, if invert_h
"inv", use numpy.inv()
"svd" (default), use numpy.linalg.svd(), then invert the eigenvalues
(possibly a more numerically stable method with H is symmetric and
ill conditioned)
Returns
-------
h_dual_metric : array, shape=(n_samples, n_dim, n_dim)
Optionally :
g_riemann_metric : array, shape=(n_samples, n_dim, n_dim )
Hvv : singular vectors of H, transposed, shape = ( n_samples, n_dim, n_dim )
Hsvals : singular values of H, shape = ( n_samples, n_dim )
Gsvals : singular values of G, shape = ( n_samples, n_dim )
Notes
-----
References
----------
"Non-linear dimensionality reduction: Riemannian metric estimation and
the problem of geometric discovery",
Dominique Perraul-Joncas, Marina Meila, arXiv:1305.7255
]
variable[n_samples] assign[=] call[name[laplacian].shape][constant[0]]
variable[h_dual_metric] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da2044c3640>, <ast.Name object at 0x7da2044c1600>, <ast.Name object at 0x7da2044c2fe0>]]]]
for taget[name[i]] in starred[call[name[np].arange, parameter[name[n_dim]]]] begin[:]
for taget[name[j]] in starred[call[name[np].arange, parameter[name[i], name[n_dim]]]] begin[:]
variable[yij] assign[=] binary_operation[call[name[Y]][tuple[[<ast.Slice object at 0x7da2044c1e70>, <ast.Name object at 0x7da2044c3cd0>]]] * call[name[Y]][tuple[[<ast.Slice object at 0x7da2044c1f60>, <ast.Name object at 0x7da2044c32b0>]]]]
call[name[h_dual_metric]][tuple[[<ast.Slice object at 0x7da2044c12d0>, <ast.Name object at 0x7da2044c2710>, <ast.Name object at 0x7da2044c1660>]]] assign[=] binary_operation[constant[0.5] * binary_operation[binary_operation[call[name[laplacian].dot, parameter[name[yij]]] - binary_operation[call[name[Y]][tuple[[<ast.Slice object at 0x7da1b12f38e0>, <ast.Name object at 0x7da1b12f29b0>]]] * call[name[laplacian].dot, parameter[call[name[Y]][tuple[[<ast.Slice object at 0x7da1b12f3400>, <ast.Name object at 0x7da1b12f32b0>]]]]]]] - binary_operation[call[name[Y]][tuple[[<ast.Slice object at 0x7da1b12f24a0>, <ast.Name object at 0x7da1b12f30d0>]]] * call[name[laplacian].dot, parameter[call[name[Y]][tuple[[<ast.Slice object at 0x7da1b12f2e90>, <ast.Name object at 0x7da1b12f2770>]]]]]]]]
for taget[name[j]] in starred[call[name[np].arange, parameter[binary_operation[name[n_dim] - constant[1]]]]] begin[:]
for taget[name[i]] in starred[call[name[np].arange, parameter[binary_operation[name[j] + constant[1]], name[n_dim]]]] begin[:]
call[name[h_dual_metric]][tuple[[<ast.Slice object at 0x7da1b12f3040>, <ast.Name object at 0x7da1b12f3010>, <ast.Name object at 0x7da1b12f20b0>]]] assign[=] call[name[h_dual_metric]][tuple[[<ast.Slice object at 0x7da1b12f25c0>, <ast.Name object at 0x7da1b12f2470>, <ast.Name object at 0x7da1b12f2da0>]]]
if name[invert_h] begin[:]
<ast.Tuple object at 0x7da1b12f2860> assign[=] call[name[compute_G_from_H], parameter[name[h_dual_metric]]]
return[tuple[[<ast.Name object at 0x7da1b12f2410>, <ast.Name object at 0x7da1b12f23e0>, <ast.Name object at 0x7da1b12f23b0>, <ast.Name object at 0x7da1b12f3a30>, <ast.Name object at 0x7da1b12f3220>]]] | keyword[def] identifier[riemann_metric] ( identifier[Y] , identifier[laplacian] , identifier[n_dim] = keyword[None] , identifier[invert_h] = keyword[False] , identifier[mode_inv] = literal[string] ):
literal[string]
identifier[n_samples] = identifier[laplacian] . identifier[shape] [ literal[int] ]
identifier[h_dual_metric] = identifier[np] . identifier[zeros] (( identifier[n_samples] , identifier[n_dim] , identifier[n_dim] ))
keyword[for] identifier[i] keyword[in] identifier[np] . identifier[arange] ( identifier[n_dim] ):
keyword[for] identifier[j] keyword[in] identifier[np] . identifier[arange] ( identifier[i] , identifier[n_dim] ):
identifier[yij] = identifier[Y] [:, identifier[i] ]* identifier[Y] [:, identifier[j] ]
identifier[h_dual_metric] [:, identifier[i] , identifier[j] ]= literal[int] *( identifier[laplacian] . identifier[dot] ( identifier[yij] )- identifier[Y] [:, identifier[j] ]* identifier[laplacian] . identifier[dot] ( identifier[Y] [:, identifier[i] ])- identifier[Y] [:, identifier[i] ]* identifier[laplacian] . identifier[dot] ( identifier[Y] [:, identifier[j] ]))
keyword[for] identifier[j] keyword[in] identifier[np] . identifier[arange] ( identifier[n_dim] - literal[int] ):
keyword[for] identifier[i] keyword[in] identifier[np] . identifier[arange] ( identifier[j] + literal[int] , identifier[n_dim] ):
identifier[h_dual_metric] [:, identifier[i] , identifier[j] ]= identifier[h_dual_metric] [:, identifier[j] , identifier[i] ]
keyword[if] ( identifier[invert_h] ):
identifier[riemann_metric] , identifier[Hvv] , identifier[Hsvals] , identifier[Gsvals] = identifier[compute_G_from_H] ( identifier[h_dual_metric] )
keyword[else] :
identifier[riemann_metric] = identifier[Hvv] = identifier[Hsvals] = identifier[Gsvals] = keyword[None]
keyword[return] identifier[h_dual_metric] , identifier[riemann_metric] , identifier[Hvv] , identifier[Hsvals] , identifier[Gsvals] | def riemann_metric(Y, laplacian, n_dim=None, invert_h=False, mode_inv='svd'):
"""
Parameters
----------
Y: array-like, shape = (n_samples, mdimY )
The embedding coordinates of the points
laplacian: array-like, shape = (n_samples, n_samples)
The Laplacian of the data. It is recommended to use the "geometric"
Laplacian (default) option from geometry.graph_laplacian()
n_dim : integer, optional
Use only the first n_dim <= mdimY dimensions.All dimensions
n_dim:mdimY are ignored.
invert_h: boolean, optional
if False, only the "dual Riemannian metric" is computed
if True, the dual metric matrices are inverted to obtain the
Riemannian metric G.
mode_inv: string, optional
How to compute the inverses of h_dual_metric, if invert_h
"inv", use numpy.inv()
"svd" (default), use numpy.linalg.svd(), then invert the eigenvalues
(possibly a more numerically stable method with H is symmetric and
ill conditioned)
Returns
-------
h_dual_metric : array, shape=(n_samples, n_dim, n_dim)
Optionally :
g_riemann_metric : array, shape=(n_samples, n_dim, n_dim )
Hvv : singular vectors of H, transposed, shape = ( n_samples, n_dim, n_dim )
Hsvals : singular values of H, shape = ( n_samples, n_dim )
Gsvals : singular values of G, shape = ( n_samples, n_dim )
Notes
-----
References
----------
"Non-linear dimensionality reduction: Riemannian metric estimation and
the problem of geometric discovery",
Dominique Perraul-Joncas, Marina Meila, arXiv:1305.7255
"""
n_samples = laplacian.shape[0]
h_dual_metric = np.zeros((n_samples, n_dim, n_dim))
for i in np.arange(n_dim):
for j in np.arange(i, n_dim):
yij = Y[:, i] * Y[:, j]
h_dual_metric[:, i, j] = 0.5 * (laplacian.dot(yij) - Y[:, j] * laplacian.dot(Y[:, i]) - Y[:, i] * laplacian.dot(Y[:, j])) # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
for j in np.arange(n_dim - 1):
for i in np.arange(j + 1, n_dim):
h_dual_metric[:, i, j] = h_dual_metric[:, j, i] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['j']]
# compute rmetric if requested
if invert_h:
(riemann_metric, Hvv, Hsvals, Gsvals) = compute_G_from_H(h_dual_metric) # depends on [control=['if'], data=[]]
else:
riemann_metric = Hvv = Hsvals = Gsvals = None
return (h_dual_metric, riemann_metric, Hvv, Hsvals, Gsvals) |
def marker_for_line(self, line):
    """
    Return the checker messages attached to the given line, if any.
    :param line: The marker line (block number in the document).
    :return: list of messages stored on the block's user data, or an
        empty list when the block carries no user data.
    """
    document = self.editor.document()
    target_block = document.findBlockByNumber(line)
    try:
        return target_block.userData().messages
    except AttributeError:
        # No user data on this block (userData() returned None).
        return []
constant[
Returns the marker that is displayed at the specified line number if
any.
:param line: The marker line.
:return: Marker of None
:rtype: pyqode.core.Marker
]
variable[block] assign[=] call[call[name[self].editor.document, parameter[]].findBlockByNumber, parameter[name[line]]]
<ast.Try object at 0x7da20c76ef80> | keyword[def] identifier[marker_for_line] ( identifier[self] , identifier[line] ):
literal[string]
identifier[block] = identifier[self] . identifier[editor] . identifier[document] (). identifier[findBlockByNumber] ( identifier[line] )
keyword[try] :
keyword[return] identifier[block] . identifier[userData] (). identifier[messages]
keyword[except] identifier[AttributeError] :
keyword[return] [] | def marker_for_line(self, line):
"""
Returns the marker that is displayed at the specified line number if
any.
:param line: The marker line.
:return: Marker of None
:rtype: pyqode.core.Marker
"""
block = self.editor.document().findBlockByNumber(line)
try:
return block.userData().messages # depends on [control=['try'], data=[]]
except AttributeError:
return [] # depends on [control=['except'], data=[]] |
def get_attribute(self, attribute_name, no_cache=False):
    """ Gets the passed attribute of this group.
    :param attribute_name: The name of the attribute to get.
    :type attribute_name: str
    :param no_cache (optional): Set to True to pull the attribute directly from an LDAP search instead of
    from the cache. Default False.
    :type no_cache: boolean
    :returns: The attribute requested or None if the attribute is not set.
    """
    attributes = self.get_attributes(no_cache)
    if attribute_name in attributes:
        value = attributes[attribute_name]
        # Single-valued attributes are unwrapped from their one-item list.
        if len(value) == 1:
            return value[0]
        return value
    logger.debug("ADGroup {group_dn} does not have the attribute "
                 "'{attribute}'.".format(group_dn=self.group_dn, attribute=attribute_name))
    return None
constant[ Gets the passed attribute of this group.
:param attribute_name: The name of the attribute to get.
:type attribute_name: str
:param no_cache (optional): Set to True to pull the attribute directly from an LDAP search instead of
from the cache. Default False.
:type no_cache: boolean
:returns: The attribute requested or None if the attribute is not set.
]
variable[attributes] assign[=] call[name[self].get_attributes, parameter[name[no_cache]]]
if compare[name[attribute_name] <ast.NotIn object at 0x7da2590d7190> name[attributes]] begin[:]
call[name[logger].debug, parameter[call[constant[ADGroup {group_dn} does not have the attribute '{attribute}'.].format, parameter[]]]]
return[constant[None]] | keyword[def] identifier[get_attribute] ( identifier[self] , identifier[attribute_name] , identifier[no_cache] = keyword[False] ):
literal[string]
identifier[attributes] = identifier[self] . identifier[get_attributes] ( identifier[no_cache] )
keyword[if] identifier[attribute_name] keyword[not] keyword[in] identifier[attributes] :
identifier[logger] . identifier[debug] ( literal[string]
literal[string] . identifier[format] ( identifier[group_dn] = identifier[self] . identifier[group_dn] , identifier[attribute] = identifier[attribute_name] ))
keyword[return] keyword[None]
keyword[else] :
identifier[raw_attribute] = identifier[attributes] [ identifier[attribute_name] ]
keyword[if] identifier[len] ( identifier[raw_attribute] )== literal[int] :
identifier[raw_attribute] = identifier[raw_attribute] [ literal[int] ]
keyword[return] identifier[raw_attribute] | def get_attribute(self, attribute_name, no_cache=False):
""" Gets the passed attribute of this group.
:param attribute_name: The name of the attribute to get.
:type attribute_name: str
:param no_cache (optional): Set to True to pull the attribute directly from an LDAP search instead of
from the cache. Default False.
:type no_cache: boolean
:returns: The attribute requested or None if the attribute is not set.
"""
attributes = self.get_attributes(no_cache)
if attribute_name not in attributes:
logger.debug("ADGroup {group_dn} does not have the attribute '{attribute}'.".format(group_dn=self.group_dn, attribute=attribute_name))
return None # depends on [control=['if'], data=['attribute_name']]
else:
raw_attribute = attributes[attribute_name] # Pop one-item lists
if len(raw_attribute) == 1:
raw_attribute = raw_attribute[0] # depends on [control=['if'], data=[]]
return raw_attribute |
def select_io(hash):
    """
    Return the recorded i/o for a method whose call is characterized by the hash.
    :param hash: The hash for the CallDescriptor
    :rtype list(tuple( hash, stack, methodname, returnval, args, packet_num )):
    """
    load_cache(True)
    global CACHE_
    record_used('cache', hash)
    results = []
    # NOTE(review): entries are unpickled from the local cache; pickle is
    # only safe if the cache file is trusted -- confirm it is never loaded
    # from an untrusted source.
    for blob in CACHE_['cache'].get(hash, {}).values():
        call = pickle.loads(blob)
        results.append((call['hash'], call['stack'], call['methodname'],
                        call['returnval'], call['args'], call['packet_num']))
    return results
constant[
Returns the relevant i/o for a method whose call is characterized by the hash
:param hash: The hash for the CallDescriptor
:rtype list(tuple( hash, stack, methodname, returnval, args, packet_num )):
]
call[name[load_cache], parameter[constant[True]]]
<ast.Global object at 0x7da1b09ba200>
variable[res] assign[=] list[[]]
call[name[record_used], parameter[constant[cache], name[hash]]]
for taget[name[d]] in starred[call[call[call[name[CACHE_]][constant[cache]].get, parameter[name[hash], dictionary[[], []]]].values, parameter[]]] begin[:]
variable[d] assign[=] call[name[pickle].loads, parameter[name[d]]]
<ast.AugAssign object at 0x7da1b09ba5f0>
return[name[res]] | keyword[def] identifier[select_io] ( identifier[hash] ):
literal[string]
identifier[load_cache] ( keyword[True] )
keyword[global] identifier[CACHE_]
identifier[res] =[]
identifier[record_used] ( literal[string] , identifier[hash] )
keyword[for] identifier[d] keyword[in] identifier[CACHE_] [ literal[string] ]. identifier[get] ( identifier[hash] ,{}). identifier[values] ():
identifier[d] = identifier[pickle] . identifier[loads] ( identifier[d] )
identifier[res] +=[( identifier[d] [ literal[string] ], identifier[d] [ literal[string] ], identifier[d] [ literal[string] ], identifier[d] [ literal[string] ], identifier[d] [ literal[string] ], identifier[d] [ literal[string] ])]
keyword[return] identifier[res] | def select_io(hash):
"""
Returns the relevant i/o for a method whose call is characterized by the hash
:param hash: The hash for the CallDescriptor
:rtype list(tuple( hash, stack, methodname, returnval, args, packet_num )):
"""
load_cache(True)
global CACHE_
res = []
record_used('cache', hash)
for d in CACHE_['cache'].get(hash, {}).values():
d = pickle.loads(d)
res += [(d['hash'], d['stack'], d['methodname'], d['returnval'], d['args'], d['packet_num'])] # depends on [control=['for'], data=['d']]
return res |
def symmetric_difference(self, that):
    """
    Return a new set with elements in either *self* or *that* but not both.
    """
    # Delegate to the backing set, then rebuild a sorted set of the same kind.
    return self._fromset(self._set.symmetric_difference(that), key=self._key)
constant[
Return a new set with elements in either *self* or *that* but not both.
]
variable[diff] assign[=] call[name[self]._set.symmetric_difference, parameter[name[that]]]
return[call[name[self]._fromset, parameter[name[diff]]]] | keyword[def] identifier[symmetric_difference] ( identifier[self] , identifier[that] ):
literal[string]
identifier[diff] = identifier[self] . identifier[_set] . identifier[symmetric_difference] ( identifier[that] )
keyword[return] identifier[self] . identifier[_fromset] ( identifier[diff] , identifier[key] = identifier[self] . identifier[_key] ) | def symmetric_difference(self, that):
"""
Return a new set with elements in either *self* or *that* but not both.
"""
diff = self._set.symmetric_difference(that)
return self._fromset(diff, key=self._key) |
def Max(self, k):
    """Computes the CDF of the maximum of k selections from this dist.
    k: int
    returns: new Cdf
    """
    result = self.MakeCdf()
    # P(max of k draws <= x) = P(one draw <= x) ** k
    raised = []
    for prob in result.ps:
        raised.append(prob ** k)
    result.ps = raised
    return result
constant[Computes the CDF of the maximum of k selections from this dist.
k: int
returns: new Cdf
]
variable[cdf] assign[=] call[name[self].MakeCdf, parameter[]]
name[cdf].ps assign[=] <ast.ListComp object at 0x7da1b03828c0>
return[name[cdf]] | keyword[def] identifier[Max] ( identifier[self] , identifier[k] ):
literal[string]
identifier[cdf] = identifier[self] . identifier[MakeCdf] ()
identifier[cdf] . identifier[ps] =[ identifier[p] ** identifier[k] keyword[for] identifier[p] keyword[in] identifier[cdf] . identifier[ps] ]
keyword[return] identifier[cdf] | def Max(self, k):
"""Computes the CDF of the maximum of k selections from this dist.
k: int
returns: new Cdf
"""
cdf = self.MakeCdf()
cdf.ps = [p ** k for p in cdf.ps]
return cdf |
def _raise_from_invalid_response(error):
    """Re-wrap and raise an ``InvalidResponse`` exception.

    :type error: :exc:`google.resumable_media.InvalidResponse`
    :param error: A caught exception from the ``google-resumable-media``
                  library.

    :raises: :class:`~google.cloud.exceptions.GoogleCloudError` corresponding
             to the failed status code
    """
    response = error.response
    # Build a message of the form "<METHOD> <URL>: <original error text>".
    message = u"{method} {url}: {error}".format(
        method=response.request.method,
        url=response.request.url,
        error=str(error),
    )
    raise exceptions.from_http_status(response.status_code, message, response=response)
constant[Re-wrap and raise an ``InvalidResponse`` exception.
:type error: :exc:`google.resumable_media.InvalidResponse`
:param error: A caught exception from the ``google-resumable-media``
library.
:raises: :class:`~google.cloud.exceptions.GoogleCloudError` corresponding
to the failed status code
]
variable[response] assign[=] name[error].response
variable[error_message] assign[=] call[name[str], parameter[name[error]]]
variable[message] assign[=] call[constant[{method} {url}: {error}].format, parameter[]]
<ast.Raise object at 0x7da1b2346020> | keyword[def] identifier[_raise_from_invalid_response] ( identifier[error] ):
literal[string]
identifier[response] = identifier[error] . identifier[response]
identifier[error_message] = identifier[str] ( identifier[error] )
identifier[message] = literal[string] . identifier[format] (
identifier[method] = identifier[response] . identifier[request] . identifier[method] , identifier[url] = identifier[response] . identifier[request] . identifier[url] , identifier[error] = identifier[error_message]
)
keyword[raise] identifier[exceptions] . identifier[from_http_status] ( identifier[response] . identifier[status_code] , identifier[message] , identifier[response] = identifier[response] ) | def _raise_from_invalid_response(error):
"""Re-wrap and raise an ``InvalidResponse`` exception.
:type error: :exc:`google.resumable_media.InvalidResponse`
:param error: A caught exception from the ``google-resumable-media``
library.
:raises: :class:`~google.cloud.exceptions.GoogleCloudError` corresponding
to the failed status code
"""
response = error.response
error_message = str(error)
message = u'{method} {url}: {error}'.format(method=response.request.method, url=response.request.url, error=error_message)
raise exceptions.from_http_status(response.status_code, message, response=response) |
def run(self, force=False):
        """
        Build a MBTile file.

        Computes the tile list from the registered bounding boxes, gathers
        every tile into the temporary directory, writes MBTiles metadata and
        finally packages everything into ``self.filepath``.

        force -- overwrite if MBTiles file already exists.
        """
        if os.path.exists(self.filepath):
            if force:
                logger.warn(_("%s already exists. Overwrite.") % self.filepath)
                os.remove(self.filepath)
            else:
                # Already built, do not do anything.
                logger.info(_("%s already exists. Nothing to do.") % self.filepath)
                return
        # Clean leftovers from previous (possibly interrupted) runs.
        self._clean_gather()
        # If no coverage was added explicitly, fall back to the bounds and
        # zoom range advertised by the bottom layer's own metadata.
        if len(self._bboxes) == 0 and len(self._layers) > 0:
            bottomlayer = self._layers[0]
            metadata = bottomlayer.reader.metadata()
            if 'bounds' in metadata:
                logger.debug(_("Use bounds of bottom layer %s") % bottomlayer)
                # NOTE(review): under Python 3 ``map`` returns a lazy iterator,
                # not a list -- confirm add_coverage() materializes it once.
                bbox = map(float, metadata.get('bounds', '').split(','))
                zoomlevels = range(int(metadata.get('minzoom', 0)), int(metadata.get('maxzoom', 0)))
                self.add_coverage(bbox=bbox, zoomlevels=zoomlevels)
        # Compute the full list of (z, x, y) tiles to package.
        tileslist = set()
        for bbox, levels in self._bboxes:
            logger.debug(_("Compute list of tiles for bbox %s on zooms %s.") % (bbox, levels))
            bboxlist = self.tileslist(bbox, levels)
            logger.debug(_("Add %s tiles.") % len(bboxlist))
            # Set union keeps tiles unique across overlapping coverages.
            tileslist = tileslist.union(bboxlist)
            logger.debug(_("%s tiles in total.") % len(tileslist))
        self.nbtiles = len(tileslist)
        if not self.nbtiles:
            raise EmptyCoverageError(_("No tiles are covered by bounding boxes : %s") % self._bboxes)
        logger.debug(_("%s tiles to be packaged.") % self.nbtiles)
        # Go through the whole list of tiles and gather them in tmp_dir;
        # individual failures are only fatal when ignore_errors is False.
        self.rendered = 0
        for (z, x, y) in tileslist:
            try:
                self._gather((z, x, y))
            except Exception as e:
                logger.warn(e)
                if not self.ignore_errors:
                    raise
        logger.debug(_("%s tiles were missing.") % self.rendered)
        # Build the MBTiles metadata: center on the middle zoom level and the
        # midpoint of the covered bounds.
        middlezoom = self.zoomlevels[len(self.zoomlevels) // 2]
        lat = self.bounds[1] + (self.bounds[3] - self.bounds[1])/2
        lon = self.bounds[0] + (self.bounds[2] - self.bounds[0])/2
        metadata = {}
        metadata['name'] = str(uuid.uuid4())
        # Tile extension carries a leading dot; strip it for the format field.
        metadata['format'] = self._tile_extension[1:]
        metadata['minzoom'] = self.zoomlevels[0]
        metadata['maxzoom'] = self.zoomlevels[-1]
        metadata['bounds'] = '%s,%s,%s,%s' % tuple(self.bounds)
        metadata['center'] = '%s,%s,%s' % (lon, lat, middlezoom)
        # Display information from the UTF-Grid fields on hover (mustache
        # template consumed by MBTiles viewers).
        content_to_display = ''
        for field_name in self.grid_fields:
            content_to_display += "{{{ %s }}}<br>" % field_name
        metadata['template'] = '{{#__location__}}{{/__location__}} {{#__teaser__}} \
        %s {{/__teaser__}}{{#__full__}}{{/__full__}}' % content_to_display
        metadatafile = os.path.join(self.tmp_dir, 'metadata.json')
        with open(metadatafile, 'w') as output:
            json.dump(metadata, output)
        # TODO: add UTF-Grid of last layer, if any
        # Package the gathered tile tree into the MBTiles file.
        logger.info(_("Build MBTiles file '%s'.") % self.filepath)
        extension = self.tile_format.split("image/")[-1]
        disk_to_mbtiles(
            self.tmp_dir,
            self.filepath,
            format=extension,
            scheme=self.cache.scheme
        )
        try:
            # Best-effort removal of the sqlite journal created by mbutil.
            os.remove("%s-journal" % self.filepath)  # created by mbutil
        except OSError as e:
            pass
        self._clean_gather()
constant[
Build a MBTile file.
force -- overwrite if MBTiles file already exists.
]
if call[name[os].path.exists, parameter[name[self].filepath]] begin[:]
if name[force] begin[:]
call[name[logger].warn, parameter[binary_operation[call[name[_], parameter[constant[%s already exists. Overwrite.]]] <ast.Mod object at 0x7da2590d6920> name[self].filepath]]]
call[name[os].remove, parameter[name[self].filepath]]
call[name[self]._clean_gather, parameter[]]
if <ast.BoolOp object at 0x7da18f09cbb0> begin[:]
variable[bottomlayer] assign[=] call[name[self]._layers][constant[0]]
variable[metadata] assign[=] call[name[bottomlayer].reader.metadata, parameter[]]
if compare[constant[bounds] in name[metadata]] begin[:]
call[name[logger].debug, parameter[binary_operation[call[name[_], parameter[constant[Use bounds of bottom layer %s]]] <ast.Mod object at 0x7da2590d6920> name[bottomlayer]]]]
variable[bbox] assign[=] call[name[map], parameter[name[float], call[call[name[metadata].get, parameter[constant[bounds], constant[]]].split, parameter[constant[,]]]]]
variable[zoomlevels] assign[=] call[name[range], parameter[call[name[int], parameter[call[name[metadata].get, parameter[constant[minzoom], constant[0]]]]], call[name[int], parameter[call[name[metadata].get, parameter[constant[maxzoom], constant[0]]]]]]]
call[name[self].add_coverage, parameter[]]
variable[tileslist] assign[=] call[name[set], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da207f003a0>, <ast.Name object at 0x7da207f02110>]]] in starred[name[self]._bboxes] begin[:]
call[name[logger].debug, parameter[binary_operation[call[name[_], parameter[constant[Compute list of tiles for bbox %s on zooms %s.]]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da207f03670>, <ast.Name object at 0x7da207f00d90>]]]]]
variable[bboxlist] assign[=] call[name[self].tileslist, parameter[name[bbox], name[levels]]]
call[name[logger].debug, parameter[binary_operation[call[name[_], parameter[constant[Add %s tiles.]]] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[bboxlist]]]]]]
variable[tileslist] assign[=] call[name[tileslist].union, parameter[name[bboxlist]]]
call[name[logger].debug, parameter[binary_operation[call[name[_], parameter[constant[%s tiles in total.]]] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[tileslist]]]]]]
name[self].nbtiles assign[=] call[name[len], parameter[name[tileslist]]]
if <ast.UnaryOp object at 0x7da207f039a0> begin[:]
<ast.Raise object at 0x7da207f00b20>
call[name[logger].debug, parameter[binary_operation[call[name[_], parameter[constant[%s tiles to be packaged.]]] <ast.Mod object at 0x7da2590d6920> name[self].nbtiles]]]
name[self].rendered assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da207f03310>, <ast.Name object at 0x7da207f02d10>, <ast.Name object at 0x7da207f01780>]]] in starred[name[tileslist]] begin[:]
<ast.Try object at 0x7da207f03430>
call[name[logger].debug, parameter[binary_operation[call[name[_], parameter[constant[%s tiles were missing.]]] <ast.Mod object at 0x7da2590d6920> name[self].rendered]]]
variable[middlezoom] assign[=] call[name[self].zoomlevels][binary_operation[call[name[len], parameter[name[self].zoomlevels]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]]]
variable[lat] assign[=] binary_operation[call[name[self].bounds][constant[1]] + binary_operation[binary_operation[call[name[self].bounds][constant[3]] - call[name[self].bounds][constant[1]]] / constant[2]]]
variable[lon] assign[=] binary_operation[call[name[self].bounds][constant[0]] + binary_operation[binary_operation[call[name[self].bounds][constant[2]] - call[name[self].bounds][constant[0]]] / constant[2]]]
variable[metadata] assign[=] dictionary[[], []]
call[name[metadata]][constant[name]] assign[=] call[name[str], parameter[call[name[uuid].uuid4, parameter[]]]]
call[name[metadata]][constant[format]] assign[=] call[name[self]._tile_extension][<ast.Slice object at 0x7da207f01210>]
call[name[metadata]][constant[minzoom]] assign[=] call[name[self].zoomlevels][constant[0]]
call[name[metadata]][constant[maxzoom]] assign[=] call[name[self].zoomlevels][<ast.UnaryOp object at 0x7da207f02ec0>]
call[name[metadata]][constant[bounds]] assign[=] binary_operation[constant[%s,%s,%s,%s] <ast.Mod object at 0x7da2590d6920> call[name[tuple], parameter[name[self].bounds]]]
call[name[metadata]][constant[center]] assign[=] binary_operation[constant[%s,%s,%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da207f031f0>, <ast.Name object at 0x7da207f03a30>, <ast.Name object at 0x7da207f027d0>]]]
variable[content_to_display] assign[=] constant[]
for taget[name[field_name]] in starred[name[self].grid_fields] begin[:]
<ast.AugAssign object at 0x7da207f000a0>
call[name[metadata]][constant[template]] assign[=] binary_operation[constant[{{#__location__}}{{/__location__}} {{#__teaser__}} %s {{/__teaser__}}{{#__full__}}{{/__full__}}] <ast.Mod object at 0x7da2590d6920> name[content_to_display]]
variable[metadatafile] assign[=] call[name[os].path.join, parameter[name[self].tmp_dir, constant[metadata.json]]]
with call[name[open], parameter[name[metadatafile], constant[w]]] begin[:]
call[name[json].dump, parameter[name[metadata], name[output]]]
call[name[logger].info, parameter[binary_operation[call[name[_], parameter[constant[Build MBTiles file '%s'.]]] <ast.Mod object at 0x7da2590d6920> name[self].filepath]]]
variable[extension] assign[=] call[call[name[self].tile_format.split, parameter[constant[image/]]]][<ast.UnaryOp object at 0x7da20c6c6710>]
call[name[disk_to_mbtiles], parameter[name[self].tmp_dir, name[self].filepath]]
<ast.Try object at 0x7da20c6c6ad0>
call[name[self]._clean_gather, parameter[]] | keyword[def] identifier[run] ( identifier[self] , identifier[force] = keyword[False] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[self] . identifier[filepath] ):
keyword[if] identifier[force] :
identifier[logger] . identifier[warn] ( identifier[_] ( literal[string] )% identifier[self] . identifier[filepath] )
identifier[os] . identifier[remove] ( identifier[self] . identifier[filepath] )
keyword[else] :
identifier[logger] . identifier[info] ( identifier[_] ( literal[string] )% identifier[self] . identifier[filepath] )
keyword[return]
identifier[self] . identifier[_clean_gather] ()
keyword[if] identifier[len] ( identifier[self] . identifier[_bboxes] )== literal[int] keyword[and] identifier[len] ( identifier[self] . identifier[_layers] )> literal[int] :
identifier[bottomlayer] = identifier[self] . identifier[_layers] [ literal[int] ]
identifier[metadata] = identifier[bottomlayer] . identifier[reader] . identifier[metadata] ()
keyword[if] literal[string] keyword[in] identifier[metadata] :
identifier[logger] . identifier[debug] ( identifier[_] ( literal[string] )% identifier[bottomlayer] )
identifier[bbox] = identifier[map] ( identifier[float] , identifier[metadata] . identifier[get] ( literal[string] , literal[string] ). identifier[split] ( literal[string] ))
identifier[zoomlevels] = identifier[range] ( identifier[int] ( identifier[metadata] . identifier[get] ( literal[string] , literal[int] )), identifier[int] ( identifier[metadata] . identifier[get] ( literal[string] , literal[int] )))
identifier[self] . identifier[add_coverage] ( identifier[bbox] = identifier[bbox] , identifier[zoomlevels] = identifier[zoomlevels] )
identifier[tileslist] = identifier[set] ()
keyword[for] identifier[bbox] , identifier[levels] keyword[in] identifier[self] . identifier[_bboxes] :
identifier[logger] . identifier[debug] ( identifier[_] ( literal[string] )%( identifier[bbox] , identifier[levels] ))
identifier[bboxlist] = identifier[self] . identifier[tileslist] ( identifier[bbox] , identifier[levels] )
identifier[logger] . identifier[debug] ( identifier[_] ( literal[string] )% identifier[len] ( identifier[bboxlist] ))
identifier[tileslist] = identifier[tileslist] . identifier[union] ( identifier[bboxlist] )
identifier[logger] . identifier[debug] ( identifier[_] ( literal[string] )% identifier[len] ( identifier[tileslist] ))
identifier[self] . identifier[nbtiles] = identifier[len] ( identifier[tileslist] )
keyword[if] keyword[not] identifier[self] . identifier[nbtiles] :
keyword[raise] identifier[EmptyCoverageError] ( identifier[_] ( literal[string] )% identifier[self] . identifier[_bboxes] )
identifier[logger] . identifier[debug] ( identifier[_] ( literal[string] )% identifier[self] . identifier[nbtiles] )
identifier[self] . identifier[rendered] = literal[int]
keyword[for] ( identifier[z] , identifier[x] , identifier[y] ) keyword[in] identifier[tileslist] :
keyword[try] :
identifier[self] . identifier[_gather] (( identifier[z] , identifier[x] , identifier[y] ))
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[warn] ( identifier[e] )
keyword[if] keyword[not] identifier[self] . identifier[ignore_errors] :
keyword[raise]
identifier[logger] . identifier[debug] ( identifier[_] ( literal[string] )% identifier[self] . identifier[rendered] )
identifier[middlezoom] = identifier[self] . identifier[zoomlevels] [ identifier[len] ( identifier[self] . identifier[zoomlevels] )// literal[int] ]
identifier[lat] = identifier[self] . identifier[bounds] [ literal[int] ]+( identifier[self] . identifier[bounds] [ literal[int] ]- identifier[self] . identifier[bounds] [ literal[int] ])/ literal[int]
identifier[lon] = identifier[self] . identifier[bounds] [ literal[int] ]+( identifier[self] . identifier[bounds] [ literal[int] ]- identifier[self] . identifier[bounds] [ literal[int] ])/ literal[int]
identifier[metadata] ={}
identifier[metadata] [ literal[string] ]= identifier[str] ( identifier[uuid] . identifier[uuid4] ())
identifier[metadata] [ literal[string] ]= identifier[self] . identifier[_tile_extension] [ literal[int] :]
identifier[metadata] [ literal[string] ]= identifier[self] . identifier[zoomlevels] [ literal[int] ]
identifier[metadata] [ literal[string] ]= identifier[self] . identifier[zoomlevels] [- literal[int] ]
identifier[metadata] [ literal[string] ]= literal[string] % identifier[tuple] ( identifier[self] . identifier[bounds] )
identifier[metadata] [ literal[string] ]= literal[string] %( identifier[lon] , identifier[lat] , identifier[middlezoom] )
identifier[content_to_display] = literal[string]
keyword[for] identifier[field_name] keyword[in] identifier[self] . identifier[grid_fields] :
identifier[content_to_display] += literal[string] % identifier[field_name]
identifier[metadata] [ literal[string] ]= literal[string] % identifier[content_to_display]
identifier[metadatafile] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[tmp_dir] , literal[string] )
keyword[with] identifier[open] ( identifier[metadatafile] , literal[string] ) keyword[as] identifier[output] :
identifier[json] . identifier[dump] ( identifier[metadata] , identifier[output] )
identifier[logger] . identifier[info] ( identifier[_] ( literal[string] )% identifier[self] . identifier[filepath] )
identifier[extension] = identifier[self] . identifier[tile_format] . identifier[split] ( literal[string] )[- literal[int] ]
identifier[disk_to_mbtiles] (
identifier[self] . identifier[tmp_dir] ,
identifier[self] . identifier[filepath] ,
identifier[format] = identifier[extension] ,
identifier[scheme] = identifier[self] . identifier[cache] . identifier[scheme]
)
keyword[try] :
identifier[os] . identifier[remove] ( literal[string] % identifier[self] . identifier[filepath] )
keyword[except] identifier[OSError] keyword[as] identifier[e] :
keyword[pass]
identifier[self] . identifier[_clean_gather] () | def run(self, force=False):
"""
Build a MBTile file.
force -- overwrite if MBTiles file already exists.
"""
if os.path.exists(self.filepath):
if force:
logger.warn(_('%s already exists. Overwrite.') % self.filepath)
os.remove(self.filepath) # depends on [control=['if'], data=[]]
else:
# Already built, do not do anything.
logger.info(_('%s already exists. Nothing to do.') % self.filepath)
return # depends on [control=['if'], data=[]]
# Clean previous runs
self._clean_gather()
# If no coverage added, use bottom layer metadata
if len(self._bboxes) == 0 and len(self._layers) > 0:
bottomlayer = self._layers[0]
metadata = bottomlayer.reader.metadata()
if 'bounds' in metadata:
logger.debug(_('Use bounds of bottom layer %s') % bottomlayer)
bbox = map(float, metadata.get('bounds', '').split(','))
zoomlevels = range(int(metadata.get('minzoom', 0)), int(metadata.get('maxzoom', 0)))
self.add_coverage(bbox=bbox, zoomlevels=zoomlevels) # depends on [control=['if'], data=['metadata']] # depends on [control=['if'], data=[]]
# Compute list of tiles
tileslist = set()
for (bbox, levels) in self._bboxes:
logger.debug(_('Compute list of tiles for bbox %s on zooms %s.') % (bbox, levels))
bboxlist = self.tileslist(bbox, levels)
logger.debug(_('Add %s tiles.') % len(bboxlist))
tileslist = tileslist.union(bboxlist)
logger.debug(_('%s tiles in total.') % len(tileslist)) # depends on [control=['for'], data=[]]
self.nbtiles = len(tileslist)
if not self.nbtiles:
raise EmptyCoverageError(_('No tiles are covered by bounding boxes : %s') % self._bboxes) # depends on [control=['if'], data=[]]
logger.debug(_('%s tiles to be packaged.') % self.nbtiles)
# Go through whole list of tiles and gather them in tmp_dir
self.rendered = 0
for (z, x, y) in tileslist:
try:
self._gather((z, x, y)) # depends on [control=['try'], data=[]]
except Exception as e:
logger.warn(e)
if not self.ignore_errors:
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=[]]
logger.debug(_('%s tiles were missing.') % self.rendered)
# Some metadata
middlezoom = self.zoomlevels[len(self.zoomlevels) // 2]
lat = self.bounds[1] + (self.bounds[3] - self.bounds[1]) / 2
lon = self.bounds[0] + (self.bounds[2] - self.bounds[0]) / 2
metadata = {}
metadata['name'] = str(uuid.uuid4())
metadata['format'] = self._tile_extension[1:]
metadata['minzoom'] = self.zoomlevels[0]
metadata['maxzoom'] = self.zoomlevels[-1]
metadata['bounds'] = '%s,%s,%s,%s' % tuple(self.bounds)
metadata['center'] = '%s,%s,%s' % (lon, lat, middlezoom)
#display informations from the grids on hover
content_to_display = ''
for field_name in self.grid_fields:
content_to_display += '{{{ %s }}}<br>' % field_name # depends on [control=['for'], data=['field_name']]
metadata['template'] = '{{#__location__}}{{/__location__}} {{#__teaser__}} %s {{/__teaser__}}{{#__full__}}{{/__full__}}' % content_to_display
metadatafile = os.path.join(self.tmp_dir, 'metadata.json')
with open(metadatafile, 'w') as output:
json.dump(metadata, output) # depends on [control=['with'], data=['output']]
# TODO: add UTF-Grid of last layer, if any
# Package it!
logger.info(_("Build MBTiles file '%s'.") % self.filepath)
extension = self.tile_format.split('image/')[-1]
disk_to_mbtiles(self.tmp_dir, self.filepath, format=extension, scheme=self.cache.scheme)
try:
os.remove('%s-journal' % self.filepath) # created by mbutil # depends on [control=['try'], data=[]]
except OSError as e:
pass # depends on [control=['except'], data=[]]
self._clean_gather() |
def shard_filename(path, tag, shard_num, total_shards):
    """Create filename for data shard."""
    # Shard names look like: <prefix>-<encode-tag>-<tag>-00003-of-00100
    basename = "%s-%s-%s-%.5d-of-%.5d" % (
        _PREFIX, _ENCODE_TAG, tag, shard_num, total_shards)
    return os.path.join(path, basename)
constant[Create filename for data shard.]
return[call[name[os].path.join, parameter[name[path], binary_operation[constant[%s-%s-%s-%.5d-of-%.5d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18c4cdea0>, <ast.Name object at 0x7da18c4ced70>, <ast.Name object at 0x7da18c4cc760>, <ast.Name object at 0x7da18c4cf5e0>, <ast.Name object at 0x7da18c4cd3c0>]]]]]] | keyword[def] identifier[shard_filename] ( identifier[path] , identifier[tag] , identifier[shard_num] , identifier[total_shards] ):
literal[string]
keyword[return] identifier[os] . identifier[path] . identifier[join] (
identifier[path] , literal[string] %( identifier[_PREFIX] , identifier[_ENCODE_TAG] , identifier[tag] , identifier[shard_num] , identifier[total_shards] )) | def shard_filename(path, tag, shard_num, total_shards):
"""Create filename for data shard."""
return os.path.join(path, '%s-%s-%s-%.5d-of-%.5d' % (_PREFIX, _ENCODE_TAG, tag, shard_num, total_shards)) |
def get_average_record(self, n):
        """Returns a list of average current numbers, each representing the
        average over the last n data points.

        Args:
            n: Number of data points to average over.

        Returns:
            A list of average current values, one per data point in
            ``self.data_points``, each rounded to ``self.lr`` decimal places.
        """
        # A bounded deque keeps only the newest n points: appending on the
        # left automatically discards the oldest entry from the right end,
        # replacing the manual len()/pop() bookkeeping of a hand-rolled
        # sliding window.
        window = collections.deque(maxlen=n)
        averages = []
        for point in self.data_points:
            window.appendleft(point)
            averages.append(round(sum(window) / len(window), self.lr))
        return averages
constant[Returns a list of average current numbers, each representing the
average over the last n data points.
Args:
n: Number of data points to average over.
Returns:
A list of average current values.
]
variable[history_deque] assign[=] call[name[collections].deque, parameter[]]
variable[averages] assign[=] list[[]]
for taget[name[d]] in starred[name[self].data_points] begin[:]
call[name[history_deque].appendleft, parameter[name[d]]]
if compare[call[name[len], parameter[name[history_deque]]] greater[>] name[n]] begin[:]
call[name[history_deque].pop, parameter[]]
variable[avg] assign[=] binary_operation[call[name[sum], parameter[name[history_deque]]] / call[name[len], parameter[name[history_deque]]]]
call[name[averages].append, parameter[call[name[round], parameter[name[avg], name[self].lr]]]]
return[name[averages]] | keyword[def] identifier[get_average_record] ( identifier[self] , identifier[n] ):
literal[string]
identifier[history_deque] = identifier[collections] . identifier[deque] ()
identifier[averages] =[]
keyword[for] identifier[d] keyword[in] identifier[self] . identifier[data_points] :
identifier[history_deque] . identifier[appendleft] ( identifier[d] )
keyword[if] identifier[len] ( identifier[history_deque] )> identifier[n] :
identifier[history_deque] . identifier[pop] ()
identifier[avg] = identifier[sum] ( identifier[history_deque] )/ identifier[len] ( identifier[history_deque] )
identifier[averages] . identifier[append] ( identifier[round] ( identifier[avg] , identifier[self] . identifier[lr] ))
keyword[return] identifier[averages] | def get_average_record(self, n):
"""Returns a list of average current numbers, each representing the
average over the last n data points.
Args:
n: Number of data points to average over.
Returns:
A list of average current values.
"""
history_deque = collections.deque()
averages = []
for d in self.data_points:
history_deque.appendleft(d)
if len(history_deque) > n:
history_deque.pop() # depends on [control=['if'], data=[]]
avg = sum(history_deque) / len(history_deque)
averages.append(round(avg, self.lr)) # depends on [control=['for'], data=['d']]
return averages |
def _log10_Inorm_extern_planckint(self, Teff):
        """
        Internal function to compute normal passband intensities using
        the external WD machinery that employs blackbody approximation.

        @Teff: effective temperature in K

        Returns: log10(Inorm)
        """
        # Single delegation to the compiled WD blackbody routine.
        return libphoebe.wd_planckint(Teff, self.extern_wd_idx,
                                      self.wd_data["planck_table"])
constant[
Internal function to compute normal passband intensities using
the external WD machinery that employs blackbody approximation.
@Teff: effective temperature in K
Returns: log10(Inorm)
]
variable[log10_Inorm] assign[=] call[name[libphoebe].wd_planckint, parameter[name[Teff], name[self].extern_wd_idx, call[name[self].wd_data][constant[planck_table]]]]
return[name[log10_Inorm]] | keyword[def] identifier[_log10_Inorm_extern_planckint] ( identifier[self] , identifier[Teff] ):
literal[string]
identifier[log10_Inorm] = identifier[libphoebe] . identifier[wd_planckint] ( identifier[Teff] , identifier[self] . identifier[extern_wd_idx] , identifier[self] . identifier[wd_data] [ literal[string] ])
keyword[return] identifier[log10_Inorm] | def _log10_Inorm_extern_planckint(self, Teff):
"""
Internal function to compute normal passband intensities using
the external WD machinery that employs blackbody approximation.
@Teff: effective temperature in K
Returns: log10(Inorm)
"""
log10_Inorm = libphoebe.wd_planckint(Teff, self.extern_wd_idx, self.wd_data['planck_table'])
return log10_Inorm |
def add_group_user(self, group_id, user_id):
        """
        Adds an existing user to a group.

        :param group_id: The unique ID of the group.
        :type group_id: ``str``

        :param user_id: The unique ID of the user.
        :type user_id: ``str``
        """
        # POST the user's id as a JSON payload to the group's users
        # collection endpoint.
        payload = json.dumps({"id": user_id})
        return self._perform_request(
            url='/um/groups/%s/users' % group_id,
            method='POST',
            data=payload)
constant[
Adds an existing user to a group.
:param group_id: The unique ID of the group.
:type group_id: ``str``
:param user_id: The unique ID of the user.
:type user_id: ``str``
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b00fb4c0>], [<ast.Name object at 0x7da1b00fa710>]]
variable[response] assign[=] call[name[self]._perform_request, parameter[]]
return[name[response]] | keyword[def] identifier[add_group_user] ( identifier[self] , identifier[group_id] , identifier[user_id] ):
literal[string]
identifier[data] ={
literal[string] : identifier[user_id]
}
identifier[response] = identifier[self] . identifier[_perform_request] (
identifier[url] = literal[string] % identifier[group_id] ,
identifier[method] = literal[string] ,
identifier[data] = identifier[json] . identifier[dumps] ( identifier[data] ))
keyword[return] identifier[response] | def add_group_user(self, group_id, user_id):
"""
Adds an existing user to a group.
:param group_id: The unique ID of the group.
:type group_id: ``str``
:param user_id: The unique ID of the user.
:type user_id: ``str``
"""
data = {'id': user_id}
response = self._perform_request(url='/um/groups/%s/users' % group_id, method='POST', data=json.dumps(data))
return response |
def remove_dynamodb_tables():
    """Remove the Blockade DynamoDB tables."""
    logger.debug("[#] Removing DynamoDB tables")
    client = boto3.client('dynamodb', region_name=PRIMARY_REGION)
    responses = []
    for label in DYNAMODB_TABLES:
        logger.debug("[*] Removing %s table" % (label))
        try:
            responses.append(client.delete_table(TableName=label))
        except client.exceptions.ResourceNotFoundException:
            # Table is already gone; note it and move on rather than fail.
            logger.info("[!] Table %s already removed" % (label))
            continue
        logger.debug("[*] Removed %s table" % (label))
    logger.info("[#] Successfully removed DynamoDB tables")
    return responses
constant[Remove the Blockade DynamoDB tables.]
call[name[logger].debug, parameter[constant[[#] Removing DynamoDB tables]]]
variable[client] assign[=] call[name[boto3].client, parameter[constant[dynamodb]]]
variable[responses] assign[=] call[name[list], parameter[]]
for taget[name[label]] in starred[name[DYNAMODB_TABLES]] begin[:]
call[name[logger].debug, parameter[binary_operation[constant[[*] Removing %s table] <ast.Mod object at 0x7da2590d6920> name[label]]]]
<ast.Try object at 0x7da2054a6830>
call[name[responses].append, parameter[name[response]]]
call[name[logger].debug, parameter[binary_operation[constant[[*] Removed %s table] <ast.Mod object at 0x7da2590d6920> name[label]]]]
call[name[logger].info, parameter[constant[[#] Successfully removed DynamoDB tables]]]
return[name[responses]] | keyword[def] identifier[remove_dynamodb_tables] ():
literal[string]
identifier[logger] . identifier[debug] ( literal[string] )
identifier[client] = identifier[boto3] . identifier[client] ( literal[string] , identifier[region_name] = identifier[PRIMARY_REGION] )
identifier[responses] = identifier[list] ()
keyword[for] identifier[label] keyword[in] identifier[DYNAMODB_TABLES] :
identifier[logger] . identifier[debug] ( literal[string] %( identifier[label] ))
keyword[try] :
identifier[response] = identifier[client] . identifier[delete_table] (
identifier[TableName] = identifier[label]
)
keyword[except] identifier[client] . identifier[exceptions] . identifier[ResourceNotFoundException] :
identifier[logger] . identifier[info] ( literal[string] %( identifier[label] ))
keyword[continue]
identifier[responses] . identifier[append] ( identifier[response] )
identifier[logger] . identifier[debug] ( literal[string] %( identifier[label] ))
identifier[logger] . identifier[info] ( literal[string] )
keyword[return] identifier[responses] | def remove_dynamodb_tables():
"""Remove the Blockade DynamoDB tables."""
logger.debug('[#] Removing DynamoDB tables')
client = boto3.client('dynamodb', region_name=PRIMARY_REGION)
responses = list()
for label in DYNAMODB_TABLES:
logger.debug('[*] Removing %s table' % label)
try:
response = client.delete_table(TableName=label) # depends on [control=['try'], data=[]]
except client.exceptions.ResourceNotFoundException:
logger.info('[!] Table %s already removed' % label)
continue # depends on [control=['except'], data=[]]
responses.append(response)
logger.debug('[*] Removed %s table' % label) # depends on [control=['for'], data=['label']]
logger.info('[#] Successfully removed DynamoDB tables')
return responses |
def get_server_setting(settings, server=_DEFAULT_SERVER):
    '''
    Get the values of the given settings for an SMTP virtual server.

    :param list settings: The names of the settings to look up.
    :param str server: The SMTP server name.

    :return: A dictionary mapping each requested setting name to its value
        (as a text string). Settings are filled in one at a time, so a WMI
        failure part-way through returns the values gathered so far.
    :rtype: dict

    CLI Example:

    .. code-block:: bash

        salt '*' win_smtp_server.get_server_setting settings="['MaxRecipients']"
    '''
    ret = {}

    if not settings:
        _LOG.warning('No settings provided.')
        return ret

    with salt.utils.winapi.Com():
        try:
            conn = wmi.WMI(namespace=_WMI_NAMESPACE)
            # The first matching IIsSmtpServerSetting instance holds the
            # values for this virtual server.
            settings_obj = conn.IIsSmtpServerSetting(settings, Name=server)[0]
            for name in settings:
                ret[name] = six.text_type(getattr(settings_obj, name))
        except wmi.x_wmi as err:
            _LOG.error('Encountered WMI error: %s', err.com_error)
        except (AttributeError, IndexError) as err:
            _LOG.error('Error getting IIsSmtpServerSetting: %s', err)
    return ret
constant[
Get the value of the setting for the SMTP virtual server.
:param str settings: A list of the setting names.
:param str server: The SMTP server name.
:return: A dictionary of the provided settings and their values.
:rtype: dict
CLI Example:
.. code-block:: bash
salt '*' win_smtp_server.get_server_setting settings="['MaxRecipients']"
]
variable[ret] assign[=] call[name[dict], parameter[]]
if <ast.UnaryOp object at 0x7da18f722530> begin[:]
call[name[_LOG].warning, parameter[constant[No settings provided.]]]
return[name[ret]]
with call[name[salt].utils.winapi.Com, parameter[]] begin[:]
<ast.Try object at 0x7da20e7484f0>
return[name[ret]] | keyword[def] identifier[get_server_setting] ( identifier[settings] , identifier[server] = identifier[_DEFAULT_SERVER] ):
literal[string]
identifier[ret] = identifier[dict] ()
keyword[if] keyword[not] identifier[settings] :
identifier[_LOG] . identifier[warning] ( literal[string] )
keyword[return] identifier[ret]
keyword[with] identifier[salt] . identifier[utils] . identifier[winapi] . identifier[Com] ():
keyword[try] :
identifier[connection] = identifier[wmi] . identifier[WMI] ( identifier[namespace] = identifier[_WMI_NAMESPACE] )
identifier[objs] = identifier[connection] . identifier[IIsSmtpServerSetting] ( identifier[settings] , identifier[Name] = identifier[server] )[ literal[int] ]
keyword[for] identifier[setting] keyword[in] identifier[settings] :
identifier[ret] [ identifier[setting] ]= identifier[six] . identifier[text_type] ( identifier[getattr] ( identifier[objs] , identifier[setting] ))
keyword[except] identifier[wmi] . identifier[x_wmi] keyword[as] identifier[error] :
identifier[_LOG] . identifier[error] ( literal[string] , identifier[error] . identifier[com_error] )
keyword[except] ( identifier[AttributeError] , identifier[IndexError] ) keyword[as] identifier[error] :
identifier[_LOG] . identifier[error] ( literal[string] , identifier[error] )
keyword[return] identifier[ret] | def get_server_setting(settings, server=_DEFAULT_SERVER):
"""
Get the value of the setting for the SMTP virtual server.
:param str settings: A list of the setting names.
:param str server: The SMTP server name.
:return: A dictionary of the provided settings and their values.
:rtype: dict
CLI Example:
.. code-block:: bash
salt '*' win_smtp_server.get_server_setting settings="['MaxRecipients']"
"""
ret = dict()
if not settings:
_LOG.warning('No settings provided.')
return ret # depends on [control=['if'], data=[]]
with salt.utils.winapi.Com():
try:
connection = wmi.WMI(namespace=_WMI_NAMESPACE)
objs = connection.IIsSmtpServerSetting(settings, Name=server)[0]
for setting in settings:
ret[setting] = six.text_type(getattr(objs, setting)) # depends on [control=['for'], data=['setting']] # depends on [control=['try'], data=[]]
except wmi.x_wmi as error:
_LOG.error('Encountered WMI error: %s', error.com_error) # depends on [control=['except'], data=['error']]
except (AttributeError, IndexError) as error:
_LOG.error('Error getting IIsSmtpServerSetting: %s', error) # depends on [control=['except'], data=['error']] # depends on [control=['with'], data=[]]
return ret |
def entry_path(cls, project, location, entry_group, entry):
    """Return a fully-qualified entry string."""
    # Expand the canonical resource-name template with the given IDs.
    template = (
        "projects/{project}/locations/{location}"
        "/entryGroups/{entry_group}/entries/{entry}"
    )
    parts = {
        "project": project,
        "location": location,
        "entry_group": entry_group,
        "entry": entry,
    }
    return google.api_core.path_template.expand(template, **parts)
constant[Return a fully-qualified entry string.]
return[call[name[google].api_core.path_template.expand, parameter[constant[projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}]]]] | keyword[def] identifier[entry_path] ( identifier[cls] , identifier[project] , identifier[location] , identifier[entry_group] , identifier[entry] ):
literal[string]
keyword[return] identifier[google] . identifier[api_core] . identifier[path_template] . identifier[expand] (
literal[string] ,
identifier[project] = identifier[project] ,
identifier[location] = identifier[location] ,
identifier[entry_group] = identifier[entry_group] ,
identifier[entry] = identifier[entry] ,
) | def entry_path(cls, project, location, entry_group, entry):
"""Return a fully-qualified entry string."""
return google.api_core.path_template.expand('projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}', project=project, location=location, entry_group=entry_group, entry=entry) |
def rpc(rtype=None):
    """Decorator marks a method for export.

    :param rtype: Specifies which :py:class:`Type` this method will return.

    The return type (rtype) must be one of:

    - An instance of :py:class:`p4p.Type`
    - None, in which case the method must return a :py:class:`p4p.Value`
    - One of the NT helper classes (eg :py:class:`p4p.nt.NTScalar`).
    - A list or tuple used to construct a :py:class:`p4p.Type`.

    Exported methods raise an :py:class:`Exception` to indicate an error to the remote caller.
    :py:class:`RemoteError` may be raised to send a specific message describing the error condition.

    >>> class Example(object):
        @rpc(NTScalar.buildType('d'))
        def add(self, lhs, rhs):
            return {'value':float(lhs)+float(rhs)}
    """
    wrap = None
    if rtype is None or isinstance(rtype, Type):
        pass
    elif isinstance(rtype, (list, tuple)):
        # BUG FIX: this previously tested the builtin ``type`` instead of
        # ``rtype``, so the documented list/tuple form always fell through
        # to the TypeError branch.
        rtype = Type(rtype)
    elif hasattr(rtype, 'type'):  # eg. one of the NT* helper classes
        wrap = rtype.wrap
        rtype = rtype.type
    else:
        raise TypeError("Not supported")

    def wrapper(fn):
        if wrap is not None:
            orig = fn

            @wraps(orig)
            def wrapper2(*args, **kws):
                # Wrap the raw return value (eg. a dict) into a Value via
                # the NT helper before it is sent back to the caller.
                return wrap(orig(*args, **kws))
            fn = wrapper2
        # Record the reply type so the RPC dispatcher knows how to encode it.
        fn._reply_Type = rtype
        return fn
    return wrapper
constant[Decorator marks a method for export.
:param type: Specifies which :py:class:`Type` this method will return.
The return type (rtype) must be one of:
- An instance of :py:class:`p4p.Type`
- None, in which case the method must return a :py:class:`p4p.Value`
- One of the NT helper classes (eg :py:class:`p4p.nt.NTScalar`).
- A list or tuple used to construct a :py:class:`p4p.Type`.
Exported methods raise an :py:class:`Exception` to indicate an error to the remote caller.
:py:class:`RemoteError` may be raised to send a specific message describing the error condition.
>>> class Example(object):
@rpc(NTScalar.buildType('d'))
def add(self, lhs, rhs):
return {'value':float(lhs)+flost(rhs)}
]
variable[wrap] assign[=] constant[None]
if <ast.BoolOp object at 0x7da18dc9bd00> begin[:]
pass
def function[wrapper, parameter[fn]]:
if compare[name[wrap] is_not constant[None]] begin[:]
variable[orig] assign[=] name[fn]
def function[wrapper2, parameter[]]:
return[call[name[wrap], parameter[call[name[orig], parameter[<ast.Starred object at 0x7da1b04c9360>]]]]]
variable[fn] assign[=] name[wrapper2]
name[fn]._reply_Type assign[=] name[rtype]
return[name[fn]]
return[name[wrapper]] | keyword[def] identifier[rpc] ( identifier[rtype] = keyword[None] ):
literal[string]
identifier[wrap] = keyword[None]
keyword[if] identifier[rtype] keyword[is] keyword[None] keyword[or] identifier[isinstance] ( identifier[rtype] , identifier[Type] ):
keyword[pass]
keyword[elif] identifier[isinstance] ( identifier[type] ,( identifier[list] , identifier[tuple] )):
identifier[rtype] = identifier[Type] ( identifier[rtype] )
keyword[elif] identifier[hasattr] ( identifier[rtype] , literal[string] ):
identifier[wrap] = identifier[rtype] . identifier[wrap]
identifier[rtype] = identifier[rtype] . identifier[type]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[def] identifier[wrapper] ( identifier[fn] ):
keyword[if] identifier[wrap] keyword[is] keyword[not] keyword[None] :
identifier[orig] = identifier[fn]
@ identifier[wraps] ( identifier[orig] )
keyword[def] identifier[wrapper2] (* identifier[args] ,** identifier[kws] ):
keyword[return] identifier[wrap] ( identifier[orig] (* identifier[args] ,** identifier[kws] ))
identifier[fn] = identifier[wrapper2]
identifier[fn] . identifier[_reply_Type] = identifier[rtype]
keyword[return] identifier[fn]
keyword[return] identifier[wrapper] | def rpc(rtype=None):
"""Decorator marks a method for export.
:param type: Specifies which :py:class:`Type` this method will return.
The return type (rtype) must be one of:
- An instance of :py:class:`p4p.Type`
- None, in which case the method must return a :py:class:`p4p.Value`
- One of the NT helper classes (eg :py:class:`p4p.nt.NTScalar`).
- A list or tuple used to construct a :py:class:`p4p.Type`.
Exported methods raise an :py:class:`Exception` to indicate an error to the remote caller.
:py:class:`RemoteError` may be raised to send a specific message describing the error condition.
>>> class Example(object):
@rpc(NTScalar.buildType('d'))
def add(self, lhs, rhs):
return {'value':float(lhs)+flost(rhs)}
"""
wrap = None
if rtype is None or isinstance(rtype, Type):
pass # depends on [control=['if'], data=[]]
elif isinstance(type, (list, tuple)):
rtype = Type(rtype) # depends on [control=['if'], data=[]]
elif hasattr(rtype, 'type'): # eg. one of the NT* helper classes
wrap = rtype.wrap
rtype = rtype.type # depends on [control=['if'], data=[]]
else:
raise TypeError('Not supported')
def wrapper(fn):
if wrap is not None:
orig = fn
@wraps(orig)
def wrapper2(*args, **kws):
return wrap(orig(*args, **kws))
fn = wrapper2 # depends on [control=['if'], data=['wrap']]
fn._reply_Type = rtype
return fn
return wrapper |
def plot(self, flip=False, ax_channels=None, ax=None, *args, **kwargs):
    """
    {_gate_plot_doc}
    """
    # BUG FIX: was ``ax == None``; use an identity test so an overloaded
    # ``__eq__`` on an axes-like object cannot change the outcome.
    if ax is None:
        ax = pl.gca()
    # Gate lines default to black unless the caller overrides the color.
    kwargs.setdefault('color', 'black')
    if ax_channels is not None:
        # Orientation depends on which channel maps to which plot axis.
        flip = self._find_orientation(ax_channels)
    if not flip:
        a1 = ax.axes.axvline(self.vert[0], *args, **kwargs)
        a2 = ax.axes.axhline(self.vert[1], *args, **kwargs)
    else:
        # Flipped: vert[1] goes on the x axis, vert[0] on the y axis.
        a1 = ax.axes.axvline(self.vert[1], *args, **kwargs)
        a2 = ax.axes.axhline(self.vert[0], *args, **kwargs)
    return (a1, a2)
constant[
{_gate_plot_doc}
]
if compare[name[ax] equal[==] constant[None]] begin[:]
variable[ax] assign[=] call[name[pl].gca, parameter[]]
call[name[kwargs].setdefault, parameter[constant[color], constant[black]]]
if compare[name[ax_channels] is_not constant[None]] begin[:]
variable[flip] assign[=] call[name[self]._find_orientation, parameter[name[ax_channels]]]
if <ast.UnaryOp object at 0x7da2041db5b0> begin[:]
variable[a1] assign[=] call[name[ax].axes.axvline, parameter[call[name[self].vert][constant[0]], <ast.Starred object at 0x7da2041d9e10>]]
variable[a2] assign[=] call[name[ax].axes.axhline, parameter[call[name[self].vert][constant[1]], <ast.Starred object at 0x7da2041da680>]]
return[tuple[[<ast.Name object at 0x7da18f58fa30>, <ast.Name object at 0x7da18f58faf0>]]] | keyword[def] identifier[plot] ( identifier[self] , identifier[flip] = keyword[False] , identifier[ax_channels] = keyword[None] , identifier[ax] = keyword[None] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[ax] == keyword[None] :
identifier[ax] = identifier[pl] . identifier[gca] ()
identifier[kwargs] . identifier[setdefault] ( literal[string] , literal[string] )
keyword[if] identifier[ax_channels] keyword[is] keyword[not] keyword[None] :
identifier[flip] = identifier[self] . identifier[_find_orientation] ( identifier[ax_channels] )
keyword[if] keyword[not] identifier[flip] :
identifier[a1] = identifier[ax] . identifier[axes] . identifier[axvline] ( identifier[self] . identifier[vert] [ literal[int] ],* identifier[args] ,** identifier[kwargs] )
identifier[a2] = identifier[ax] . identifier[axes] . identifier[axhline] ( identifier[self] . identifier[vert] [ literal[int] ],* identifier[args] ,** identifier[kwargs] )
keyword[else] :
identifier[a1] = identifier[ax] . identifier[axes] . identifier[axvline] ( identifier[self] . identifier[vert] [ literal[int] ],* identifier[args] ,** identifier[kwargs] )
identifier[a2] = identifier[ax] . identifier[axes] . identifier[axhline] ( identifier[self] . identifier[vert] [ literal[int] ],* identifier[args] ,** identifier[kwargs] )
keyword[return] ( identifier[a1] , identifier[a2] ) | def plot(self, flip=False, ax_channels=None, ax=None, *args, **kwargs):
"""
{_gate_plot_doc}
"""
if ax == None:
ax = pl.gca() # depends on [control=['if'], data=['ax']]
kwargs.setdefault('color', 'black')
if ax_channels is not None:
flip = self._find_orientation(ax_channels) # depends on [control=['if'], data=['ax_channels']]
if not flip:
a1 = ax.axes.axvline(self.vert[0], *args, **kwargs)
a2 = ax.axes.axhline(self.vert[1], *args, **kwargs) # depends on [control=['if'], data=[]]
else:
a1 = ax.axes.axvline(self.vert[1], *args, **kwargs)
a2 = ax.axes.axhline(self.vert[0], *args, **kwargs)
return (a1, a2) |
def prt_hier_down(self, goid, prt=sys.stdout):
    """Write the hierarchy below the given GO ID to *prt*.

    Returns the writer's ``items_list`` attribute.
    """
    cfg = self._get_wrhiercfg()
    writer = WrHierPrt(self.gosubdag.go2obj, self.gosubdag.go2nt, cfg, prt)
    writer.prt_hier_rec(goid)
    return writer.items_list
constant[Write hierarchy for all GO IDs below GO ID in arg, goid.]
variable[wrhiercfg] assign[=] call[name[self]._get_wrhiercfg, parameter[]]
variable[obj] assign[=] call[name[WrHierPrt], parameter[name[self].gosubdag.go2obj, name[self].gosubdag.go2nt, name[wrhiercfg], name[prt]]]
call[name[obj].prt_hier_rec, parameter[name[goid]]]
return[name[obj].items_list] | keyword[def] identifier[prt_hier_down] ( identifier[self] , identifier[goid] , identifier[prt] = identifier[sys] . identifier[stdout] ):
literal[string]
identifier[wrhiercfg] = identifier[self] . identifier[_get_wrhiercfg] ()
identifier[obj] = identifier[WrHierPrt] ( identifier[self] . identifier[gosubdag] . identifier[go2obj] , identifier[self] . identifier[gosubdag] . identifier[go2nt] , identifier[wrhiercfg] , identifier[prt] )
identifier[obj] . identifier[prt_hier_rec] ( identifier[goid] )
keyword[return] identifier[obj] . identifier[items_list] | def prt_hier_down(self, goid, prt=sys.stdout):
"""Write hierarchy for all GO IDs below GO ID in arg, goid."""
wrhiercfg = self._get_wrhiercfg()
obj = WrHierPrt(self.gosubdag.go2obj, self.gosubdag.go2nt, wrhiercfg, prt)
obj.prt_hier_rec(goid)
return obj.items_list |
def _rc_sinterstore(self, dst, src, *args):
    """
    Store the intersection of sets ``src``, ``args`` into a new
    set named ``dst``.  Returns the number of keys in the new set
    (0 when the intersection is empty).
    """
    args = list_or_args(src, args)
    result = self.sinter(*args)
    # BUG FIX: the original guard was ``result is not set([])`` — an
    # identity comparison against a freshly created set, which is always
    # True.  An empty intersection would then call SADD with no members
    # (an error in redis).  A plain truthiness test does what was meant.
    # (Docstring also corrected: SINTER computes the intersection, not
    # the difference.)
    if result:
        return self.sadd(dst, *list(result))
    return 0
constant[
Store the difference of sets ``src``, ``args`` into a new
set named ``dest``. Returns the number of keys in the new set.
]
variable[args] assign[=] call[name[list_or_args], parameter[name[src], name[args]]]
variable[result] assign[=] call[name[self].sinter, parameter[<ast.Starred object at 0x7da2047eb070>]]
if compare[name[result] is_not call[name[set], parameter[list[[]]]]] begin[:]
return[call[name[self].sadd, parameter[name[dst], <ast.Starred object at 0x7da2047eb700>]]]
return[constant[0]] | keyword[def] identifier[_rc_sinterstore] ( identifier[self] , identifier[dst] , identifier[src] ,* identifier[args] ):
literal[string]
identifier[args] = identifier[list_or_args] ( identifier[src] , identifier[args] )
identifier[result] = identifier[self] . identifier[sinter] (* identifier[args] )
keyword[if] identifier[result] keyword[is] keyword[not] identifier[set] ([]):
keyword[return] identifier[self] . identifier[sadd] ( identifier[dst] ,* identifier[list] ( identifier[result] ))
keyword[return] literal[int] | def _rc_sinterstore(self, dst, src, *args):
"""
Store the difference of sets ``src``, ``args`` into a new
set named ``dest``. Returns the number of keys in the new set.
"""
args = list_or_args(src, args)
result = self.sinter(*args)
if result is not set([]):
return self.sadd(dst, *list(result)) # depends on [control=['if'], data=['result']]
return 0 |
def add_record_check(self, actions, table, func):
    """Register a per-record permission check (emitted after the query).

    ``table`` is a plain table name string; by contrast, a column is
    referenced as a ``('table_name', 'column_name')`` tuple elsewhere.

    The callback has the signature::

        def func(ability, user, action, record: DataRecord, available_columns: list):
            pass
    """
    assert isinstance(table, str), '`table` must be table name'
    for action in actions:
        # QUERY/CREATE happen before a record is available, so a
        # record-level check for them would have nothing to inspect.
        assert action not in (A.QUERY, A.CREATE), "meaningless action check with record: [%s]" % action
    self.record_checks.append([table, actions, func])
assert[call[name[isinstance], parameter[name[table], name[str]]]]
for taget[name[i]] in starred[name[actions]] begin[:]
assert[compare[name[i] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Attribute object at 0x7da1b00674f0>, <ast.Attribute object at 0x7da1b0065480>]]]]
call[name[self].record_checks.append, parameter[list[[<ast.Name object at 0x7da1b0065660>, <ast.Name object at 0x7da1b0066650>, <ast.Name object at 0x7da1b0065420>]]]]
constant[def func(ability, user, action, record: DataRecord, available_columns: list):
pass
] | keyword[def] identifier[add_record_check] ( identifier[self] , identifier[actions] , identifier[table] , identifier[func] ):
keyword[assert] identifier[isinstance] ( identifier[table] , identifier[str] ), literal[string]
keyword[for] identifier[i] keyword[in] identifier[actions] :
keyword[assert] identifier[i] keyword[not] keyword[in] ( identifier[A] . identifier[QUERY] , identifier[A] . identifier[CREATE] ), literal[string] % identifier[i]
identifier[self] . identifier[record_checks] . identifier[append] ([ identifier[table] , identifier[actions] , identifier[func] ])
literal[string] | def add_record_check(self, actions, table, func):
# emitted after query
# table: 'table_name'
# column: ('table_name', 'column_name')
assert isinstance(table, str), '`table` must be table name'
for i in actions:
assert i not in (A.QUERY, A.CREATE), 'meaningless action check with record: [%s]' % i # depends on [control=['for'], data=['i']]
self.record_checks.append([table, actions, func])
'def func(ability, user, action, record: DataRecord, available_columns: list):\n pass\n ' |
def set_precision(cls, precision):
    """Set the number of decimal places used to report percentages."""
    assert 0 <= precision < 10
    near_zero = 1.0 / 10 ** precision
    cls._precision = precision
    # Thresholds for treating a percentage as "effectively 0" / "100".
    cls._near0 = near_zero
    cls._near100 = 100.0 - near_zero
constant[Set the number of decimal places used to report percentages.]
assert[compare[constant[0] less_or_equal[<=] name[precision]]]
name[cls]._precision assign[=] name[precision]
name[cls]._near0 assign[=] binary_operation[constant[1.0] / binary_operation[constant[10] ** name[precision]]]
name[cls]._near100 assign[=] binary_operation[constant[100.0] - name[cls]._near0] | keyword[def] identifier[set_precision] ( identifier[cls] , identifier[precision] ):
literal[string]
keyword[assert] literal[int] <= identifier[precision] < literal[int]
identifier[cls] . identifier[_precision] = identifier[precision]
identifier[cls] . identifier[_near0] = literal[int] / literal[int] ** identifier[precision]
identifier[cls] . identifier[_near100] = literal[int] - identifier[cls] . identifier[_near0] | def set_precision(cls, precision):
"""Set the number of decimal places used to report percentages."""
assert 0 <= precision < 10
cls._precision = precision
cls._near0 = 1.0 / 10 ** precision
cls._near100 = 100.0 - cls._near0 |
def admin_required(obj):
    """
    Requires that the user be logged in AND be set as a superuser.
    """
    def _user_is_superuser(request, *args, **kwargs):
        return request.user.is_superuser

    return wrap_object(obj, request_passes_test(_user_is_superuser))
constant[
Requires that the user be logged AND be set as a superuser
]
variable[decorator] assign[=] call[name[request_passes_test], parameter[<ast.Lambda object at 0x7da1b27a71c0>]]
return[call[name[wrap_object], parameter[name[obj], name[decorator]]]] | keyword[def] identifier[admin_required] ( identifier[obj] ):
literal[string]
identifier[decorator] = identifier[request_passes_test] ( keyword[lambda] identifier[r] ,* identifier[args] ,** identifier[kwargs] : identifier[r] . identifier[user] . identifier[is_superuser] )
keyword[return] identifier[wrap_object] ( identifier[obj] , identifier[decorator] ) | def admin_required(obj):
"""
Requires that the user be logged AND be set as a superuser
"""
decorator = request_passes_test(lambda r, *args, **kwargs: r.user.is_superuser)
return wrap_object(obj, decorator) |
def begin(self, frame):
    """
    Handle the BEGIN command: start a new, empty transaction.
    """
    tx = frame.transaction
    if not tx:
        raise ProtocolError("Missing transaction for BEGIN command.")
    self.engine.transactions[tx] = []
constant[
Handles BEGING command: Starts a new transaction.
]
if <ast.UnaryOp object at 0x7da1b1942f80> begin[:]
<ast.Raise object at 0x7da1b1943760>
call[name[self].engine.transactions][name[frame].transaction] assign[=] list[[]] | keyword[def] identifier[begin] ( identifier[self] , identifier[frame] ):
literal[string]
keyword[if] keyword[not] identifier[frame] . identifier[transaction] :
keyword[raise] identifier[ProtocolError] ( literal[string] )
identifier[self] . identifier[engine] . identifier[transactions] [ identifier[frame] . identifier[transaction] ]=[] | def begin(self, frame):
"""
Handles BEGING command: Starts a new transaction.
"""
if not frame.transaction:
raise ProtocolError('Missing transaction for BEGIN command.') # depends on [control=['if'], data=[]]
self.engine.transactions[frame.transaction] = [] |
def process(cls, post, render=True):
    """
    Prepare raw post data for display; ``post`` is mutated in place.

    Adds a URL slug, an ``editable`` flag for the current user, the
    canonical URL, and a fixed ``priority`` value, then optionally
    renders the post text and collects its images.

    :param post: dict of post data; must contain at least ``title``
        (and ``meta`` when ``render`` is True).
    :param render: when True, also render the text and extract images.
    :return: None -- ``post`` is updated in place.
    """
    post["slug"] = cls.create_slug(post["title"])
    # Whether the current user may edit this post (authorship check).
    post["editable"] = cls.is_author(post, current_user)
    post["url"] = cls.construct_url(post)
    # Fixed priority for every post — presumably a sitemap priority; confirm.
    post["priority"] = 0.8
    if render:
        cls.render_text(post)
        post["meta"]["images"] = cls.extract_images(post)
constant[
This method takes the post data and renders it
:param post:
:param render:
:return:
]
call[name[post]][constant[slug]] assign[=] call[name[cls].create_slug, parameter[call[name[post]][constant[title]]]]
call[name[post]][constant[editable]] assign[=] call[name[cls].is_author, parameter[name[post], name[current_user]]]
call[name[post]][constant[url]] assign[=] call[name[cls].construct_url, parameter[name[post]]]
call[name[post]][constant[priority]] assign[=] constant[0.8]
if name[render] begin[:]
call[name[cls].render_text, parameter[name[post]]]
call[call[name[post]][constant[meta]]][constant[images]] assign[=] call[name[cls].extract_images, parameter[name[post]]] | keyword[def] identifier[process] ( identifier[cls] , identifier[post] , identifier[render] = keyword[True] ):
literal[string]
identifier[post] [ literal[string] ]= identifier[cls] . identifier[create_slug] ( identifier[post] [ literal[string] ])
identifier[post] [ literal[string] ]= identifier[cls] . identifier[is_author] ( identifier[post] , identifier[current_user] )
identifier[post] [ literal[string] ]= identifier[cls] . identifier[construct_url] ( identifier[post] )
identifier[post] [ literal[string] ]= literal[int]
keyword[if] identifier[render] :
identifier[cls] . identifier[render_text] ( identifier[post] )
identifier[post] [ literal[string] ][ literal[string] ]= identifier[cls] . identifier[extract_images] ( identifier[post] ) | def process(cls, post, render=True):
"""
This method takes the post data and renders it
:param post:
:param render:
:return:
"""
post['slug'] = cls.create_slug(post['title'])
post['editable'] = cls.is_author(post, current_user)
post['url'] = cls.construct_url(post)
post['priority'] = 0.8
if render:
cls.render_text(post)
post['meta']['images'] = cls.extract_images(post) # depends on [control=['if'], data=[]] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.