code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def union(self, i):
'''If intervals intersect, returns their union, otherwise returns None'''
if self.intersects(i) or self.end + 1 == i.start or i.end + 1 == self.start:
return Interval(min(self.start, i.start), max(self.end, i.end))
else:
return None
|
def function[union, parameter[self, i]]:
constant[If intervals intersect, returns their union, otherwise returns None]
if <ast.BoolOp object at 0x7da1aff1f520> begin[:]
return[call[name[Interval], parameter[call[name[min], parameter[name[self].start, name[i].start]], call[name[max], parameter[name[self].end, name[i].end]]]]]
|
keyword[def] identifier[union] ( identifier[self] , identifier[i] ):
literal[string]
keyword[if] identifier[self] . identifier[intersects] ( identifier[i] ) keyword[or] identifier[self] . identifier[end] + literal[int] == identifier[i] . identifier[start] keyword[or] identifier[i] . identifier[end] + literal[int] == identifier[self] . identifier[start] :
keyword[return] identifier[Interval] ( identifier[min] ( identifier[self] . identifier[start] , identifier[i] . identifier[start] ), identifier[max] ( identifier[self] . identifier[end] , identifier[i] . identifier[end] ))
keyword[else] :
keyword[return] keyword[None]
|
def union(self, i):
"""If intervals intersect, returns their union, otherwise returns None"""
if self.intersects(i) or self.end + 1 == i.start or i.end + 1 == self.start:
return Interval(min(self.start, i.start), max(self.end, i.end)) # depends on [control=['if'], data=[]]
else:
return None
|
def asset_create_asset(self, *args, **kwargs):
"""Create a new asset
:returns: None
:rtype: None
:raises: None
"""
if not self.cur_asset:
return
asset = self.create_asset(project=self.cur_asset.project, asset=self.cur_asset)
if not asset:
return
atypes = {}
for c in self.asset_asset_model.root.childItems:
atypes[c.internal_data()] = c
atypeitem = atypes.get(asset.atype)
if not atypeitem:
atypedata = djitemdata.AtypeItemData(asset.atype)
atypeitem = treemodel.TreeItem(atypedata, self.asset_asset_model.root)
atypes[asset.atype] = atypeitem
assetdata = djitemdata.AssetItemData(asset)
treemodel.TreeItem(assetdata, atypeitem)
|
def function[asset_create_asset, parameter[self]]:
constant[Create a new asset
:returns: None
:rtype: None
:raises: None
]
if <ast.UnaryOp object at 0x7da1b1434940> begin[:]
return[None]
variable[asset] assign[=] call[name[self].create_asset, parameter[]]
if <ast.UnaryOp object at 0x7da1b1434190> begin[:]
return[None]
variable[atypes] assign[=] dictionary[[], []]
for taget[name[c]] in starred[name[self].asset_asset_model.root.childItems] begin[:]
call[name[atypes]][call[name[c].internal_data, parameter[]]] assign[=] name[c]
variable[atypeitem] assign[=] call[name[atypes].get, parameter[name[asset].atype]]
if <ast.UnaryOp object at 0x7da1b1435540> begin[:]
variable[atypedata] assign[=] call[name[djitemdata].AtypeItemData, parameter[name[asset].atype]]
variable[atypeitem] assign[=] call[name[treemodel].TreeItem, parameter[name[atypedata], name[self].asset_asset_model.root]]
call[name[atypes]][name[asset].atype] assign[=] name[atypeitem]
variable[assetdata] assign[=] call[name[djitemdata].AssetItemData, parameter[name[asset]]]
call[name[treemodel].TreeItem, parameter[name[assetdata], name[atypeitem]]]
|
keyword[def] identifier[asset_create_asset] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[cur_asset] :
keyword[return]
identifier[asset] = identifier[self] . identifier[create_asset] ( identifier[project] = identifier[self] . identifier[cur_asset] . identifier[project] , identifier[asset] = identifier[self] . identifier[cur_asset] )
keyword[if] keyword[not] identifier[asset] :
keyword[return]
identifier[atypes] ={}
keyword[for] identifier[c] keyword[in] identifier[self] . identifier[asset_asset_model] . identifier[root] . identifier[childItems] :
identifier[atypes] [ identifier[c] . identifier[internal_data] ()]= identifier[c]
identifier[atypeitem] = identifier[atypes] . identifier[get] ( identifier[asset] . identifier[atype] )
keyword[if] keyword[not] identifier[atypeitem] :
identifier[atypedata] = identifier[djitemdata] . identifier[AtypeItemData] ( identifier[asset] . identifier[atype] )
identifier[atypeitem] = identifier[treemodel] . identifier[TreeItem] ( identifier[atypedata] , identifier[self] . identifier[asset_asset_model] . identifier[root] )
identifier[atypes] [ identifier[asset] . identifier[atype] ]= identifier[atypeitem]
identifier[assetdata] = identifier[djitemdata] . identifier[AssetItemData] ( identifier[asset] )
identifier[treemodel] . identifier[TreeItem] ( identifier[assetdata] , identifier[atypeitem] )
|
def asset_create_asset(self, *args, **kwargs):
"""Create a new asset
:returns: None
:rtype: None
:raises: None
"""
if not self.cur_asset:
return # depends on [control=['if'], data=[]]
asset = self.create_asset(project=self.cur_asset.project, asset=self.cur_asset)
if not asset:
return # depends on [control=['if'], data=[]]
atypes = {}
for c in self.asset_asset_model.root.childItems:
atypes[c.internal_data()] = c # depends on [control=['for'], data=['c']]
atypeitem = atypes.get(asset.atype)
if not atypeitem:
atypedata = djitemdata.AtypeItemData(asset.atype)
atypeitem = treemodel.TreeItem(atypedata, self.asset_asset_model.root)
atypes[asset.atype] = atypeitem # depends on [control=['if'], data=[]]
assetdata = djitemdata.AssetItemData(asset)
treemodel.TreeItem(assetdata, atypeitem)
|
def unionIntoArray(self, inputVector, outputVector, forceOutput=False):
"""
Create a union of the inputVector and copy the result into the outputVector
Parameters:
----------------------------
@param inputVector: The inputVector can be either a full numpy array
containing 0's and 1's, or a list of non-zero entry
indices
@param outputVector: A numpy array that matches the length of the
union pooler.
@param forceOutput: if True, a union will be created without regard to
minHistory
"""
if isinstance(inputVector, numpy.ndarray):
if inputVector.size == self._numInputs:
activeBits = numpy.where(inputVector)[0]
else:
raise ValueError(
"Input vector dimensions don't match. Expecting %s but got %s" % (
self._numInputs, inputVector.size))
elif isinstance(inputVector, list):
if len(inputVector) > 0:
if max(inputVector) >= self._numInputs:
raise ValueError(
"Non-zero entry indices exceed input dimension of union pooler. "
"Expecting %s but got %s" % (self._numInputs, max(inputVector)))
activeBits = inputVector
else:
raise TypeError("Unsuported input types")
if len(outputVector) != self._numInputs:
raise ValueError(
"Output vector dimension does match dimension of union pooler "
"Expecting %s but got %s" % (self._numInputs, len(outputVector)))
unionSDR = self.updateHistory(activeBits, forceOutput)
numpy.copyto(outputVector, unionSDR, casting="unsafe")
|
def function[unionIntoArray, parameter[self, inputVector, outputVector, forceOutput]]:
constant[
Create a union of the inputVector and copy the result into the outputVector
Parameters:
----------------------------
@param inputVector: The inputVector can be either a full numpy array
containing 0's and 1's, or a list of non-zero entry
indices
@param outputVector: A numpy array that matches the length of the
union pooler.
@param forceOutput: if True, a union will be created without regard to
minHistory
]
if call[name[isinstance], parameter[name[inputVector], name[numpy].ndarray]] begin[:]
if compare[name[inputVector].size equal[==] name[self]._numInputs] begin[:]
variable[activeBits] assign[=] call[call[name[numpy].where, parameter[name[inputVector]]]][constant[0]]
if compare[call[name[len], parameter[name[outputVector]]] not_equal[!=] name[self]._numInputs] begin[:]
<ast.Raise object at 0x7da1b08c5ab0>
variable[unionSDR] assign[=] call[name[self].updateHistory, parameter[name[activeBits], name[forceOutput]]]
call[name[numpy].copyto, parameter[name[outputVector], name[unionSDR]]]
|
keyword[def] identifier[unionIntoArray] ( identifier[self] , identifier[inputVector] , identifier[outputVector] , identifier[forceOutput] = keyword[False] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[inputVector] , identifier[numpy] . identifier[ndarray] ):
keyword[if] identifier[inputVector] . identifier[size] == identifier[self] . identifier[_numInputs] :
identifier[activeBits] = identifier[numpy] . identifier[where] ( identifier[inputVector] )[ literal[int] ]
keyword[else] :
keyword[raise] identifier[ValueError] (
literal[string] %(
identifier[self] . identifier[_numInputs] , identifier[inputVector] . identifier[size] ))
keyword[elif] identifier[isinstance] ( identifier[inputVector] , identifier[list] ):
keyword[if] identifier[len] ( identifier[inputVector] )> literal[int] :
keyword[if] identifier[max] ( identifier[inputVector] )>= identifier[self] . identifier[_numInputs] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] %( identifier[self] . identifier[_numInputs] , identifier[max] ( identifier[inputVector] )))
identifier[activeBits] = identifier[inputVector]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[len] ( identifier[outputVector] )!= identifier[self] . identifier[_numInputs] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] %( identifier[self] . identifier[_numInputs] , identifier[len] ( identifier[outputVector] )))
identifier[unionSDR] = identifier[self] . identifier[updateHistory] ( identifier[activeBits] , identifier[forceOutput] )
identifier[numpy] . identifier[copyto] ( identifier[outputVector] , identifier[unionSDR] , identifier[casting] = literal[string] )
|
def unionIntoArray(self, inputVector, outputVector, forceOutput=False):
"""
Create a union of the inputVector and copy the result into the outputVector
Parameters:
----------------------------
@param inputVector: The inputVector can be either a full numpy array
containing 0's and 1's, or a list of non-zero entry
indices
@param outputVector: A numpy array that matches the length of the
union pooler.
@param forceOutput: if True, a union will be created without regard to
minHistory
"""
if isinstance(inputVector, numpy.ndarray):
if inputVector.size == self._numInputs:
activeBits = numpy.where(inputVector)[0] # depends on [control=['if'], data=[]]
else:
raise ValueError("Input vector dimensions don't match. Expecting %s but got %s" % (self._numInputs, inputVector.size)) # depends on [control=['if'], data=[]]
elif isinstance(inputVector, list):
if len(inputVector) > 0:
if max(inputVector) >= self._numInputs:
raise ValueError('Non-zero entry indices exceed input dimension of union pooler. Expecting %s but got %s' % (self._numInputs, max(inputVector))) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
activeBits = inputVector # depends on [control=['if'], data=[]]
else:
raise TypeError('Unsuported input types')
if len(outputVector) != self._numInputs:
raise ValueError('Output vector dimension does match dimension of union pooler Expecting %s but got %s' % (self._numInputs, len(outputVector))) # depends on [control=['if'], data=[]]
unionSDR = self.updateHistory(activeBits, forceOutput)
numpy.copyto(outputVector, unionSDR, casting='unsafe')
|
def F_oneway(*lists):
"""
Performs a 1-way ANOVA, returning an F-value and probability given
any number of groups. From Heiman, pp.394-7.
Usage: F_oneway(*lists) where *lists is any number of lists, one per
treatment group
Returns: F value, one-tailed p-value
"""
a = len(lists) # ANOVA on 'a' groups, each in it's own list
means = [0] * a
vars = [0] * a
ns = [0] * a
alldata = []
tmp = lists
means = map(mean, tmp)
vars = map(var, tmp)
ns = map(len, lists)
for i in range(len(lists)):
alldata = alldata + lists[i]
bign = len(alldata)
sstot = ss(alldata) - (square_of_sums(alldata) / float(bign))
ssbn = 0
for list in lists:
ssbn = ssbn + square_of_sums(list) / float(len(list))
ssbn = ssbn - (square_of_sums(alldata) / float(bign))
sswn = sstot - ssbn
dfbn = a - 1
dfwn = bign - a
msb = ssbn / float(dfbn)
msw = sswn / float(dfwn)
f = msb / msw
prob = fprob(dfbn, dfwn, f)
return f, prob
|
def function[F_oneway, parameter[]]:
constant[
Performs a 1-way ANOVA, returning an F-value and probability given
any number of groups. From Heiman, pp.394-7.
Usage: F_oneway(*lists) where *lists is any number of lists, one per
treatment group
Returns: F value, one-tailed p-value
]
variable[a] assign[=] call[name[len], parameter[name[lists]]]
variable[means] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b2347430>]] * name[a]]
variable[vars] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b2346290>]] * name[a]]
variable[ns] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b2347070>]] * name[a]]
variable[alldata] assign[=] list[[]]
variable[tmp] assign[=] name[lists]
variable[means] assign[=] call[name[map], parameter[name[mean], name[tmp]]]
variable[vars] assign[=] call[name[map], parameter[name[var], name[tmp]]]
variable[ns] assign[=] call[name[map], parameter[name[len], name[lists]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[lists]]]]]] begin[:]
variable[alldata] assign[=] binary_operation[name[alldata] + call[name[lists]][name[i]]]
variable[bign] assign[=] call[name[len], parameter[name[alldata]]]
variable[sstot] assign[=] binary_operation[call[name[ss], parameter[name[alldata]]] - binary_operation[call[name[square_of_sums], parameter[name[alldata]]] / call[name[float], parameter[name[bign]]]]]
variable[ssbn] assign[=] constant[0]
for taget[name[list]] in starred[name[lists]] begin[:]
variable[ssbn] assign[=] binary_operation[name[ssbn] + binary_operation[call[name[square_of_sums], parameter[name[list]]] / call[name[float], parameter[call[name[len], parameter[name[list]]]]]]]
variable[ssbn] assign[=] binary_operation[name[ssbn] - binary_operation[call[name[square_of_sums], parameter[name[alldata]]] / call[name[float], parameter[name[bign]]]]]
variable[sswn] assign[=] binary_operation[name[sstot] - name[ssbn]]
variable[dfbn] assign[=] binary_operation[name[a] - constant[1]]
variable[dfwn] assign[=] binary_operation[name[bign] - name[a]]
variable[msb] assign[=] binary_operation[name[ssbn] / call[name[float], parameter[name[dfbn]]]]
variable[msw] assign[=] binary_operation[name[sswn] / call[name[float], parameter[name[dfwn]]]]
variable[f] assign[=] binary_operation[name[msb] / name[msw]]
variable[prob] assign[=] call[name[fprob], parameter[name[dfbn], name[dfwn], name[f]]]
return[tuple[[<ast.Name object at 0x7da1b0b1b970>, <ast.Name object at 0x7da1b0b19ea0>]]]
|
keyword[def] identifier[F_oneway] (* identifier[lists] ):
literal[string]
identifier[a] = identifier[len] ( identifier[lists] )
identifier[means] =[ literal[int] ]* identifier[a]
identifier[vars] =[ literal[int] ]* identifier[a]
identifier[ns] =[ literal[int] ]* identifier[a]
identifier[alldata] =[]
identifier[tmp] = identifier[lists]
identifier[means] = identifier[map] ( identifier[mean] , identifier[tmp] )
identifier[vars] = identifier[map] ( identifier[var] , identifier[tmp] )
identifier[ns] = identifier[map] ( identifier[len] , identifier[lists] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[lists] )):
identifier[alldata] = identifier[alldata] + identifier[lists] [ identifier[i] ]
identifier[bign] = identifier[len] ( identifier[alldata] )
identifier[sstot] = identifier[ss] ( identifier[alldata] )-( identifier[square_of_sums] ( identifier[alldata] )/ identifier[float] ( identifier[bign] ))
identifier[ssbn] = literal[int]
keyword[for] identifier[list] keyword[in] identifier[lists] :
identifier[ssbn] = identifier[ssbn] + identifier[square_of_sums] ( identifier[list] )/ identifier[float] ( identifier[len] ( identifier[list] ))
identifier[ssbn] = identifier[ssbn] -( identifier[square_of_sums] ( identifier[alldata] )/ identifier[float] ( identifier[bign] ))
identifier[sswn] = identifier[sstot] - identifier[ssbn]
identifier[dfbn] = identifier[a] - literal[int]
identifier[dfwn] = identifier[bign] - identifier[a]
identifier[msb] = identifier[ssbn] / identifier[float] ( identifier[dfbn] )
identifier[msw] = identifier[sswn] / identifier[float] ( identifier[dfwn] )
identifier[f] = identifier[msb] / identifier[msw]
identifier[prob] = identifier[fprob] ( identifier[dfbn] , identifier[dfwn] , identifier[f] )
keyword[return] identifier[f] , identifier[prob]
|
def F_oneway(*lists):
"""
Performs a 1-way ANOVA, returning an F-value and probability given
any number of groups. From Heiman, pp.394-7.
Usage: F_oneway(*lists) where *lists is any number of lists, one per
treatment group
Returns: F value, one-tailed p-value
"""
a = len(lists) # ANOVA on 'a' groups, each in it's own list
means = [0] * a
vars = [0] * a
ns = [0] * a
alldata = []
tmp = lists
means = map(mean, tmp)
vars = map(var, tmp)
ns = map(len, lists)
for i in range(len(lists)):
alldata = alldata + lists[i] # depends on [control=['for'], data=['i']]
bign = len(alldata)
sstot = ss(alldata) - square_of_sums(alldata) / float(bign)
ssbn = 0
for list in lists:
ssbn = ssbn + square_of_sums(list) / float(len(list)) # depends on [control=['for'], data=['list']]
ssbn = ssbn - square_of_sums(alldata) / float(bign)
sswn = sstot - ssbn
dfbn = a - 1
dfwn = bign - a
msb = ssbn / float(dfbn)
msw = sswn / float(dfwn)
f = msb / msw
prob = fprob(dfbn, dfwn, f)
return (f, prob)
|
def shortInterestDF(symbol, date=None, token='', version=''):
'''The consolidated market short interest positions in all IEX-listed securities are included in the IEX Short Interest Report.
The report data will be published daily at 4:00pm ET.
https://iexcloud.io/docs/api/#listed-short-interest-list-in-dev
Args:
symbol (string); Ticker to request
date (datetime); Effective Datetime
token (string); Access token
version (string); API version
Returns:
DataFrame: result
'''
df = pd.DataFrame(shortInterest(symbol, date, token, version))
_toDatetime(df)
return df
|
def function[shortInterestDF, parameter[symbol, date, token, version]]:
constant[The consolidated market short interest positions in all IEX-listed securities are included in the IEX Short Interest Report.
The report data will be published daily at 4:00pm ET.
https://iexcloud.io/docs/api/#listed-short-interest-list-in-dev
Args:
symbol (string); Ticker to request
date (datetime); Effective Datetime
token (string); Access token
version (string); API version
Returns:
DataFrame: result
]
variable[df] assign[=] call[name[pd].DataFrame, parameter[call[name[shortInterest], parameter[name[symbol], name[date], name[token], name[version]]]]]
call[name[_toDatetime], parameter[name[df]]]
return[name[df]]
|
keyword[def] identifier[shortInterestDF] ( identifier[symbol] , identifier[date] = keyword[None] , identifier[token] = literal[string] , identifier[version] = literal[string] ):
literal[string]
identifier[df] = identifier[pd] . identifier[DataFrame] ( identifier[shortInterest] ( identifier[symbol] , identifier[date] , identifier[token] , identifier[version] ))
identifier[_toDatetime] ( identifier[df] )
keyword[return] identifier[df]
|
def shortInterestDF(symbol, date=None, token='', version=''):
"""The consolidated market short interest positions in all IEX-listed securities are included in the IEX Short Interest Report.
The report data will be published daily at 4:00pm ET.
https://iexcloud.io/docs/api/#listed-short-interest-list-in-dev
Args:
symbol (string); Ticker to request
date (datetime); Effective Datetime
token (string); Access token
version (string); API version
Returns:
DataFrame: result
"""
df = pd.DataFrame(shortInterest(symbol, date, token, version))
_toDatetime(df)
return df
|
def _get_shade_hdrgos(**kws):
"""If no hdrgo_prt specified, and these conditions are present -> hdrgo_prt=F."""
# KWS: shade_hdrgos hdrgo_prt section_sortby top_n
if 'shade_hdrgos' in kws:
return kws['shade_hdrgos']
# Return user-sepcified hdrgo_prt, if provided
if 'hdrgo_prt' in kws:
return kws['hdrgo_prt']
# If no hdrgo_prt provided, set hdrgo_prt to False if:
# * section_sortby == True
# * section_sortby = user_sort
# * top_n == N
if 'section_sortby' in kws and kws['section_sortby']:
return False
if 'top_n' in kws and isinstance(kws['top_n'], int):
return False
return True
|
def function[_get_shade_hdrgos, parameter[]]:
constant[If no hdrgo_prt specified, and these conditions are present -> hdrgo_prt=F.]
if compare[constant[shade_hdrgos] in name[kws]] begin[:]
return[call[name[kws]][constant[shade_hdrgos]]]
if compare[constant[hdrgo_prt] in name[kws]] begin[:]
return[call[name[kws]][constant[hdrgo_prt]]]
if <ast.BoolOp object at 0x7da18bcc8ac0> begin[:]
return[constant[False]]
if <ast.BoolOp object at 0x7da18bcc84c0> begin[:]
return[constant[False]]
return[constant[True]]
|
keyword[def] identifier[_get_shade_hdrgos] (** identifier[kws] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[kws] :
keyword[return] identifier[kws] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[kws] :
keyword[return] identifier[kws] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[kws] keyword[and] identifier[kws] [ literal[string] ]:
keyword[return] keyword[False]
keyword[if] literal[string] keyword[in] identifier[kws] keyword[and] identifier[isinstance] ( identifier[kws] [ literal[string] ], identifier[int] ):
keyword[return] keyword[False]
keyword[return] keyword[True]
|
def _get_shade_hdrgos(**kws):
"""If no hdrgo_prt specified, and these conditions are present -> hdrgo_prt=F."""
# KWS: shade_hdrgos hdrgo_prt section_sortby top_n
if 'shade_hdrgos' in kws:
return kws['shade_hdrgos'] # depends on [control=['if'], data=['kws']]
# Return user-sepcified hdrgo_prt, if provided
if 'hdrgo_prt' in kws:
return kws['hdrgo_prt'] # depends on [control=['if'], data=['kws']]
# If no hdrgo_prt provided, set hdrgo_prt to False if:
# * section_sortby == True
# * section_sortby = user_sort
# * top_n == N
if 'section_sortby' in kws and kws['section_sortby']:
return False # depends on [control=['if'], data=[]]
if 'top_n' in kws and isinstance(kws['top_n'], int):
return False # depends on [control=['if'], data=[]]
return True
|
def disordered_formula(disordered_struct, symbols=('x', 'y', 'z'), fmt='plain'):
"""
Returns a formula of a form like AxB1-x (x=0.5)
for disordered structures. Will only return a
formula for disordered structures with one
kind of disordered site at present.
Args:
disordered_struct: a disordered structure
symbols: a tuple of characters to use for
subscripts, by default this is ('x', 'y', 'z')
but if you have more than three disordered
species more symbols will need to be added
fmt (str): 'plain', 'HTML' or 'LaTeX'
Returns (str): a disordered formula string
"""
# this is in string utils and not in
# Composition because we need to have access
# to site occupancies to calculate this, so
# have to pass the full structure as an argument
# (alternatively this could be made a method on
# Structure)
from pymatgen.core.composition import Composition
from pymatgen.core.periodic_table import get_el_sp
if disordered_struct.is_ordered:
raise ValueError("Structure is not disordered, "
"so disordered formula not defined.")
disordered_site_compositions = {site.species
for site in disordered_struct if not site.is_ordered}
if len(disordered_site_compositions) > 1:
# this probably won't happen too often
raise ValueError("Ambiguous how to define disordered "
"formula when more than one type of disordered "
"site is present.")
disordered_site_composition = disordered_site_compositions.pop()
disordered_species = {str(sp) for sp, occu in disordered_site_composition.items()}
if len(disordered_species) > len(symbols):
# this probably won't happen too often either
raise ValueError("Not enough symbols to describe disordered composition: "
"{}".format(symbols))
symbols = list(symbols)[0:len(disordered_species) - 1]
comp = disordered_struct.composition.get_el_amt_dict().items()
# sort by electronegativity, as per composition
comp = sorted(comp, key=lambda x: get_el_sp(x[0]).X)
disordered_comp = []
variable_map = {}
total_disordered_occu = sum([occu for sp, occu in comp
if str(sp) in disordered_species])
# composition to get common factor
factor_comp = disordered_struct.composition.as_dict()
factor_comp['X'] = total_disordered_occu
for sp in disordered_species:
del factor_comp[str(sp)]
factor_comp = Composition.from_dict(factor_comp)
factor = factor_comp.get_reduced_formula_and_factor()[1]
total_disordered_occu /= factor
remainder = "{}-{}".format(formula_double_format(total_disordered_occu, ignore_ones=False),
'-'.join(symbols))
for sp, occu in comp:
sp = str(sp)
if sp not in disordered_species:
disordered_comp.append((sp, formula_double_format(occu/factor)))
else:
if len(symbols) > 0:
symbol = symbols.pop(0)
disordered_comp.append((sp, symbol))
variable_map[symbol] = occu / total_disordered_occu / factor
else:
disordered_comp.append((sp, remainder))
if fmt == 'LaTeX':
sub_start = "_{"
sub_end = "}"
elif fmt == 'HTML':
sub_start = "<sub>"
sub_end = "</sub>"
elif fmt != 'plain':
raise ValueError("Unsupported output format, "
"choose from: LaTeX, HTML, plain")
disordered_formula = []
for sp, occu in disordered_comp:
disordered_formula.append(sp)
if occu: # can be empty string if 1
if fmt != 'plain':
disordered_formula.append(sub_start)
disordered_formula.append(occu)
if fmt != 'plain':
disordered_formula.append(sub_end)
disordered_formula.append(" ")
disordered_formula += ["{}={} ".format(k, formula_double_format(v))
for k, v in variable_map.items()]
comp = disordered_struct.composition
return "".join(map(str, disordered_formula))[0:-1]
|
def function[disordered_formula, parameter[disordered_struct, symbols, fmt]]:
constant[
Returns a formula of a form like AxB1-x (x=0.5)
for disordered structures. Will only return a
formula for disordered structures with one
kind of disordered site at present.
Args:
disordered_struct: a disordered structure
symbols: a tuple of characters to use for
subscripts, by default this is ('x', 'y', 'z')
but if you have more than three disordered
species more symbols will need to be added
fmt (str): 'plain', 'HTML' or 'LaTeX'
Returns (str): a disordered formula string
]
from relative_module[pymatgen.core.composition] import module[Composition]
from relative_module[pymatgen.core.periodic_table] import module[get_el_sp]
if name[disordered_struct].is_ordered begin[:]
<ast.Raise object at 0x7da1b1c58a00>
variable[disordered_site_compositions] assign[=] <ast.SetComp object at 0x7da1b1c58b20>
if compare[call[name[len], parameter[name[disordered_site_compositions]]] greater[>] constant[1]] begin[:]
<ast.Raise object at 0x7da1b1c58e20>
variable[disordered_site_composition] assign[=] call[name[disordered_site_compositions].pop, parameter[]]
variable[disordered_species] assign[=] <ast.SetComp object at 0x7da1b1c58490>
if compare[call[name[len], parameter[name[disordered_species]]] greater[>] call[name[len], parameter[name[symbols]]]] begin[:]
<ast.Raise object at 0x7da1b1c580d0>
variable[symbols] assign[=] call[call[name[list], parameter[name[symbols]]]][<ast.Slice object at 0x7da1b1c59090>]
variable[comp] assign[=] call[call[name[disordered_struct].composition.get_el_amt_dict, parameter[]].items, parameter[]]
variable[comp] assign[=] call[name[sorted], parameter[name[comp]]]
variable[disordered_comp] assign[=] list[[]]
variable[variable_map] assign[=] dictionary[[], []]
variable[total_disordered_occu] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da1b1c59810>]]
variable[factor_comp] assign[=] call[name[disordered_struct].composition.as_dict, parameter[]]
call[name[factor_comp]][constant[X]] assign[=] name[total_disordered_occu]
for taget[name[sp]] in starred[name[disordered_species]] begin[:]
<ast.Delete object at 0x7da1b1c59d20>
variable[factor_comp] assign[=] call[name[Composition].from_dict, parameter[name[factor_comp]]]
variable[factor] assign[=] call[call[name[factor_comp].get_reduced_formula_and_factor, parameter[]]][constant[1]]
<ast.AugAssign object at 0x7da1b1c5a0e0>
variable[remainder] assign[=] call[constant[{}-{}].format, parameter[call[name[formula_double_format], parameter[name[total_disordered_occu]]], call[constant[-].join, parameter[name[symbols]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b1c5a470>, <ast.Name object at 0x7da1b1c5a4a0>]]] in starred[name[comp]] begin[:]
variable[sp] assign[=] call[name[str], parameter[name[sp]]]
if compare[name[sp] <ast.NotIn object at 0x7da2590d7190> name[disordered_species]] begin[:]
call[name[disordered_comp].append, parameter[tuple[[<ast.Name object at 0x7da1b1c5a7a0>, <ast.Call object at 0x7da1b1c5a7d0>]]]]
if compare[name[fmt] equal[==] constant[LaTeX]] begin[:]
variable[sub_start] assign[=] constant[_{]
variable[sub_end] assign[=] constant[}]
variable[disordered_formula] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b1c5b8e0>, <ast.Name object at 0x7da1b1c5b8b0>]]] in starred[name[disordered_comp]] begin[:]
call[name[disordered_formula].append, parameter[name[sp]]]
if name[occu] begin[:]
if compare[name[fmt] not_equal[!=] constant[plain]] begin[:]
call[name[disordered_formula].append, parameter[name[sub_start]]]
call[name[disordered_formula].append, parameter[name[occu]]]
if compare[name[fmt] not_equal[!=] constant[plain]] begin[:]
call[name[disordered_formula].append, parameter[name[sub_end]]]
call[name[disordered_formula].append, parameter[constant[ ]]]
<ast.AugAssign object at 0x7da1b1c5b0d0>
variable[comp] assign[=] name[disordered_struct].composition
return[call[call[constant[].join, parameter[call[name[map], parameter[name[str], name[disordered_formula]]]]]][<ast.Slice object at 0x7da2044c0160>]]
|
keyword[def] identifier[disordered_formula] ( identifier[disordered_struct] , identifier[symbols] =( literal[string] , literal[string] , literal[string] ), identifier[fmt] = literal[string] ):
literal[string]
keyword[from] identifier[pymatgen] . identifier[core] . identifier[composition] keyword[import] identifier[Composition]
keyword[from] identifier[pymatgen] . identifier[core] . identifier[periodic_table] keyword[import] identifier[get_el_sp]
keyword[if] identifier[disordered_struct] . identifier[is_ordered] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
identifier[disordered_site_compositions] ={ identifier[site] . identifier[species]
keyword[for] identifier[site] keyword[in] identifier[disordered_struct] keyword[if] keyword[not] identifier[site] . identifier[is_ordered] }
keyword[if] identifier[len] ( identifier[disordered_site_compositions] )> literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string]
literal[string] )
identifier[disordered_site_composition] = identifier[disordered_site_compositions] . identifier[pop] ()
identifier[disordered_species] ={ identifier[str] ( identifier[sp] ) keyword[for] identifier[sp] , identifier[occu] keyword[in] identifier[disordered_site_composition] . identifier[items] ()}
keyword[if] identifier[len] ( identifier[disordered_species] )> identifier[len] ( identifier[symbols] ):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[symbols] ))
identifier[symbols] = identifier[list] ( identifier[symbols] )[ literal[int] : identifier[len] ( identifier[disordered_species] )- literal[int] ]
identifier[comp] = identifier[disordered_struct] . identifier[composition] . identifier[get_el_amt_dict] (). identifier[items] ()
identifier[comp] = identifier[sorted] ( identifier[comp] , identifier[key] = keyword[lambda] identifier[x] : identifier[get_el_sp] ( identifier[x] [ literal[int] ]). identifier[X] )
identifier[disordered_comp] =[]
identifier[variable_map] ={}
identifier[total_disordered_occu] = identifier[sum] ([ identifier[occu] keyword[for] identifier[sp] , identifier[occu] keyword[in] identifier[comp]
keyword[if] identifier[str] ( identifier[sp] ) keyword[in] identifier[disordered_species] ])
identifier[factor_comp] = identifier[disordered_struct] . identifier[composition] . identifier[as_dict] ()
identifier[factor_comp] [ literal[string] ]= identifier[total_disordered_occu]
keyword[for] identifier[sp] keyword[in] identifier[disordered_species] :
keyword[del] identifier[factor_comp] [ identifier[str] ( identifier[sp] )]
identifier[factor_comp] = identifier[Composition] . identifier[from_dict] ( identifier[factor_comp] )
identifier[factor] = identifier[factor_comp] . identifier[get_reduced_formula_and_factor] ()[ literal[int] ]
identifier[total_disordered_occu] /= identifier[factor]
identifier[remainder] = literal[string] . identifier[format] ( identifier[formula_double_format] ( identifier[total_disordered_occu] , identifier[ignore_ones] = keyword[False] ),
literal[string] . identifier[join] ( identifier[symbols] ))
keyword[for] identifier[sp] , identifier[occu] keyword[in] identifier[comp] :
identifier[sp] = identifier[str] ( identifier[sp] )
keyword[if] identifier[sp] keyword[not] keyword[in] identifier[disordered_species] :
identifier[disordered_comp] . identifier[append] (( identifier[sp] , identifier[formula_double_format] ( identifier[occu] / identifier[factor] )))
keyword[else] :
keyword[if] identifier[len] ( identifier[symbols] )> literal[int] :
identifier[symbol] = identifier[symbols] . identifier[pop] ( literal[int] )
identifier[disordered_comp] . identifier[append] (( identifier[sp] , identifier[symbol] ))
identifier[variable_map] [ identifier[symbol] ]= identifier[occu] / identifier[total_disordered_occu] / identifier[factor]
keyword[else] :
identifier[disordered_comp] . identifier[append] (( identifier[sp] , identifier[remainder] ))
keyword[if] identifier[fmt] == literal[string] :
identifier[sub_start] = literal[string]
identifier[sub_end] = literal[string]
keyword[elif] identifier[fmt] == literal[string] :
identifier[sub_start] = literal[string]
identifier[sub_end] = literal[string]
keyword[elif] identifier[fmt] != literal[string] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
identifier[disordered_formula] =[]
keyword[for] identifier[sp] , identifier[occu] keyword[in] identifier[disordered_comp] :
identifier[disordered_formula] . identifier[append] ( identifier[sp] )
keyword[if] identifier[occu] :
keyword[if] identifier[fmt] != literal[string] :
identifier[disordered_formula] . identifier[append] ( identifier[sub_start] )
identifier[disordered_formula] . identifier[append] ( identifier[occu] )
keyword[if] identifier[fmt] != literal[string] :
identifier[disordered_formula] . identifier[append] ( identifier[sub_end] )
identifier[disordered_formula] . identifier[append] ( literal[string] )
identifier[disordered_formula] +=[ literal[string] . identifier[format] ( identifier[k] , identifier[formula_double_format] ( identifier[v] ))
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[variable_map] . identifier[items] ()]
identifier[comp] = identifier[disordered_struct] . identifier[composition]
keyword[return] literal[string] . identifier[join] ( identifier[map] ( identifier[str] , identifier[disordered_formula] ))[ literal[int] :- literal[int] ]
|
def disordered_formula(disordered_struct, symbols=('x', 'y', 'z'), fmt='plain'):
"""
Returns a formula of a form like AxB1-x (x=0.5)
for disordered structures. Will only return a
formula for disordered structures with one
kind of disordered site at present.
Args:
disordered_struct: a disordered structure
symbols: a tuple of characters to use for
subscripts, by default this is ('x', 'y', 'z')
but if you have more than three disordered
species more symbols will need to be added
fmt (str): 'plain', 'HTML' or 'LaTeX'
Returns (str): a disordered formula string
"""
# this is in string utils and not in
# Composition because we need to have access
# to site occupancies to calculate this, so
# have to pass the full structure as an argument
# (alternatively this could be made a method on
# Structure)
from pymatgen.core.composition import Composition
from pymatgen.core.periodic_table import get_el_sp
if disordered_struct.is_ordered:
raise ValueError('Structure is not disordered, so disordered formula not defined.') # depends on [control=['if'], data=[]]
disordered_site_compositions = {site.species for site in disordered_struct if not site.is_ordered}
if len(disordered_site_compositions) > 1:
# this probably won't happen too often
raise ValueError('Ambiguous how to define disordered formula when more than one type of disordered site is present.') # depends on [control=['if'], data=[]]
disordered_site_composition = disordered_site_compositions.pop()
disordered_species = {str(sp) for (sp, occu) in disordered_site_composition.items()}
if len(disordered_species) > len(symbols):
# this probably won't happen too often either
raise ValueError('Not enough symbols to describe disordered composition: {}'.format(symbols)) # depends on [control=['if'], data=[]]
symbols = list(symbols)[0:len(disordered_species) - 1]
comp = disordered_struct.composition.get_el_amt_dict().items()
# sort by electronegativity, as per composition
comp = sorted(comp, key=lambda x: get_el_sp(x[0]).X)
disordered_comp = []
variable_map = {}
total_disordered_occu = sum([occu for (sp, occu) in comp if str(sp) in disordered_species])
# composition to get common factor
factor_comp = disordered_struct.composition.as_dict()
factor_comp['X'] = total_disordered_occu
for sp in disordered_species:
del factor_comp[str(sp)] # depends on [control=['for'], data=['sp']]
factor_comp = Composition.from_dict(factor_comp)
factor = factor_comp.get_reduced_formula_and_factor()[1]
total_disordered_occu /= factor
remainder = '{}-{}'.format(formula_double_format(total_disordered_occu, ignore_ones=False), '-'.join(symbols))
for (sp, occu) in comp:
sp = str(sp)
if sp not in disordered_species:
disordered_comp.append((sp, formula_double_format(occu / factor))) # depends on [control=['if'], data=['sp']]
elif len(symbols) > 0:
symbol = symbols.pop(0)
disordered_comp.append((sp, symbol))
variable_map[symbol] = occu / total_disordered_occu / factor # depends on [control=['if'], data=[]]
else:
disordered_comp.append((sp, remainder)) # depends on [control=['for'], data=[]]
if fmt == 'LaTeX':
sub_start = '_{'
sub_end = '}' # depends on [control=['if'], data=[]]
elif fmt == 'HTML':
sub_start = '<sub>'
sub_end = '</sub>' # depends on [control=['if'], data=[]]
elif fmt != 'plain':
raise ValueError('Unsupported output format, choose from: LaTeX, HTML, plain') # depends on [control=['if'], data=[]]
disordered_formula = []
for (sp, occu) in disordered_comp:
disordered_formula.append(sp)
if occu: # can be empty string if 1
if fmt != 'plain':
disordered_formula.append(sub_start) # depends on [control=['if'], data=[]]
disordered_formula.append(occu)
if fmt != 'plain':
disordered_formula.append(sub_end) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
disordered_formula.append(' ')
disordered_formula += ['{}={} '.format(k, formula_double_format(v)) for (k, v) in variable_map.items()]
comp = disordered_struct.composition
return ''.join(map(str, disordered_formula))[0:-1]
|
def splash(self):
"""
Draw splash screen
"""
dirname = os.path.split(os.path.abspath(__file__))[0]
try:
splash = open(os.path.join(dirname, "splash"), "r").readlines()
except IOError:
return
width = len(max(splash, key=len))
y = int(self.y_grid / 2) - len(splash)
x = int(self.x_grid / 2) - int(width / 2)
if self.x_grid > width:
for i, line in enumerate(splash):
self.win.addstr(y + i, x, line, curses.color_pair(5))
|
def function[splash, parameter[self]]:
constant[
Draw splash screen
]
variable[dirname] assign[=] call[call[name[os].path.split, parameter[call[name[os].path.abspath, parameter[name[__file__]]]]]][constant[0]]
<ast.Try object at 0x7da207f00700>
variable[width] assign[=] call[name[len], parameter[call[name[max], parameter[name[splash]]]]]
variable[y] assign[=] binary_operation[call[name[int], parameter[binary_operation[name[self].y_grid / constant[2]]]] - call[name[len], parameter[name[splash]]]]
variable[x] assign[=] binary_operation[call[name[int], parameter[binary_operation[name[self].x_grid / constant[2]]]] - call[name[int], parameter[binary_operation[name[width] / constant[2]]]]]
if compare[name[self].x_grid greater[>] name[width]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da207f03fd0>, <ast.Name object at 0x7da207f03580>]]] in starred[call[name[enumerate], parameter[name[splash]]]] begin[:]
call[name[self].win.addstr, parameter[binary_operation[name[y] + name[i]], name[x], name[line], call[name[curses].color_pair, parameter[constant[5]]]]]
|
keyword[def] identifier[splash] ( identifier[self] ):
literal[string]
identifier[dirname] = identifier[os] . identifier[path] . identifier[split] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[__file__] ))[ literal[int] ]
keyword[try] :
identifier[splash] = identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dirname] , literal[string] ), literal[string] ). identifier[readlines] ()
keyword[except] identifier[IOError] :
keyword[return]
identifier[width] = identifier[len] ( identifier[max] ( identifier[splash] , identifier[key] = identifier[len] ))
identifier[y] = identifier[int] ( identifier[self] . identifier[y_grid] / literal[int] )- identifier[len] ( identifier[splash] )
identifier[x] = identifier[int] ( identifier[self] . identifier[x_grid] / literal[int] )- identifier[int] ( identifier[width] / literal[int] )
keyword[if] identifier[self] . identifier[x_grid] > identifier[width] :
keyword[for] identifier[i] , identifier[line] keyword[in] identifier[enumerate] ( identifier[splash] ):
identifier[self] . identifier[win] . identifier[addstr] ( identifier[y] + identifier[i] , identifier[x] , identifier[line] , identifier[curses] . identifier[color_pair] ( literal[int] ))
|
def splash(self):
"""
Draw splash screen
"""
dirname = os.path.split(os.path.abspath(__file__))[0]
try:
splash = open(os.path.join(dirname, 'splash'), 'r').readlines() # depends on [control=['try'], data=[]]
except IOError:
return # depends on [control=['except'], data=[]]
width = len(max(splash, key=len))
y = int(self.y_grid / 2) - len(splash)
x = int(self.x_grid / 2) - int(width / 2)
if self.x_grid > width:
for (i, line) in enumerate(splash):
self.win.addstr(y + i, x, line, curses.color_pair(5)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
|
async def _relay(self,
channel: aioamqp.channel.Channel,
body: str,
envelope: aioamqp.envelope.Envelope,
properties: aioamqp.properties.Properties):
"""Relays incoming messages between the queue and the user callback"""
try:
await channel.basic_client_ack(envelope.delivery_tag)
await self.on_message(self, envelope.routing_key, json.loads(body))
except Exception as ex:
LOGGER.error(f'Exception relaying message in {self}: {ex}')
|
<ast.AsyncFunctionDef object at 0x7da204963ee0>
|
keyword[async] keyword[def] identifier[_relay] ( identifier[self] ,
identifier[channel] : identifier[aioamqp] . identifier[channel] . identifier[Channel] ,
identifier[body] : identifier[str] ,
identifier[envelope] : identifier[aioamqp] . identifier[envelope] . identifier[Envelope] ,
identifier[properties] : identifier[aioamqp] . identifier[properties] . identifier[Properties] ):
literal[string]
keyword[try] :
keyword[await] identifier[channel] . identifier[basic_client_ack] ( identifier[envelope] . identifier[delivery_tag] )
keyword[await] identifier[self] . identifier[on_message] ( identifier[self] , identifier[envelope] . identifier[routing_key] , identifier[json] . identifier[loads] ( identifier[body] ))
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
identifier[LOGGER] . identifier[error] ( literal[string] )
|
async def _relay(self, channel: aioamqp.channel.Channel, body: str, envelope: aioamqp.envelope.Envelope, properties: aioamqp.properties.Properties):
"""Relays incoming messages between the queue and the user callback"""
try:
await channel.basic_client_ack(envelope.delivery_tag)
await self.on_message(self, envelope.routing_key, json.loads(body)) # depends on [control=['try'], data=[]]
except Exception as ex:
LOGGER.error(f'Exception relaying message in {self}: {ex}') # depends on [control=['except'], data=['ex']]
|
def change_email(self, email, as_username=False):
"""
Change account email
:param email:
:param as_username
:return: the email provided
"""
email = email.lower()
data = {"email": email}
if self.email != email:
if self.get_by_email(email):
raise exceptions.AuthError("Email exists already")
if as_username:
if self.username != email:
if self.get_by_username(email):
raise exceptions.AuthError("Username exists already")
data["username"] = email
self.update(**data)
|
def function[change_email, parameter[self, email, as_username]]:
constant[
Change account email
:param email:
:param as_username
:return: the email provided
]
variable[email] assign[=] call[name[email].lower, parameter[]]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da207f9b460>], [<ast.Name object at 0x7da207f9bca0>]]
if compare[name[self].email not_equal[!=] name[email]] begin[:]
if call[name[self].get_by_email, parameter[name[email]]] begin[:]
<ast.Raise object at 0x7da207f99ba0>
if name[as_username] begin[:]
if compare[name[self].username not_equal[!=] name[email]] begin[:]
if call[name[self].get_by_username, parameter[name[email]]] begin[:]
<ast.Raise object at 0x7da207f9ae90>
call[name[data]][constant[username]] assign[=] name[email]
call[name[self].update, parameter[]]
|
keyword[def] identifier[change_email] ( identifier[self] , identifier[email] , identifier[as_username] = keyword[False] ):
literal[string]
identifier[email] = identifier[email] . identifier[lower] ()
identifier[data] ={ literal[string] : identifier[email] }
keyword[if] identifier[self] . identifier[email] != identifier[email] :
keyword[if] identifier[self] . identifier[get_by_email] ( identifier[email] ):
keyword[raise] identifier[exceptions] . identifier[AuthError] ( literal[string] )
keyword[if] identifier[as_username] :
keyword[if] identifier[self] . identifier[username] != identifier[email] :
keyword[if] identifier[self] . identifier[get_by_username] ( identifier[email] ):
keyword[raise] identifier[exceptions] . identifier[AuthError] ( literal[string] )
identifier[data] [ literal[string] ]= identifier[email]
identifier[self] . identifier[update] (** identifier[data] )
|
def change_email(self, email, as_username=False):
"""
Change account email
:param email:
:param as_username
:return: the email provided
"""
email = email.lower()
data = {'email': email}
if self.email != email:
if self.get_by_email(email):
raise exceptions.AuthError('Email exists already') # depends on [control=['if'], data=[]]
if as_username:
if self.username != email:
if self.get_by_username(email):
raise exceptions.AuthError('Username exists already') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['email']]
data['username'] = email # depends on [control=['if'], data=[]]
self.update(**data) # depends on [control=['if'], data=['email']]
|
def merge_dicts(*args):
r"""
add / concatenate / union / join / merge / combine dictionaries
Copies the first dictionary given and then repeatedly calls update using
the rest of the dicts given in args. Duplicate keys will receive the last
value specified the list of dictionaries.
Returns:
dict: mergedict_
CommandLine:
python -m utool.util_dict --test-merge_dicts
References:
http://stackoverflow.com/questions/38987/how-can-i-merge-two-python-dictionaries-in-a-single-expression
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_dict import * # NOQA
>>> import utool as ut
>>> x = {'a': 1, 'b': 2}
>>> y = {'b': 3, 'c': 4}
>>> mergedict_ = merge_dicts(x, y)
>>> result = ut.repr4(mergedict_, sorted_=True, newlines=False)
>>> print(result)
{'a': 1, 'b': 3, 'c': 4}
"""
iter_ = iter(args)
mergedict_ = six.next(iter_).copy()
for dict_ in iter_:
mergedict_.update(dict_)
return mergedict_
|
def function[merge_dicts, parameter[]]:
constant[
add / concatenate / union / join / merge / combine dictionaries
Copies the first dictionary given and then repeatedly calls update using
the rest of the dicts given in args. Duplicate keys will receive the last
value specified the list of dictionaries.
Returns:
dict: mergedict_
CommandLine:
python -m utool.util_dict --test-merge_dicts
References:
http://stackoverflow.com/questions/38987/how-can-i-merge-two-python-dictionaries-in-a-single-expression
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_dict import * # NOQA
>>> import utool as ut
>>> x = {'a': 1, 'b': 2}
>>> y = {'b': 3, 'c': 4}
>>> mergedict_ = merge_dicts(x, y)
>>> result = ut.repr4(mergedict_, sorted_=True, newlines=False)
>>> print(result)
{'a': 1, 'b': 3, 'c': 4}
]
variable[iter_] assign[=] call[name[iter], parameter[name[args]]]
variable[mergedict_] assign[=] call[call[name[six].next, parameter[name[iter_]]].copy, parameter[]]
for taget[name[dict_]] in starred[name[iter_]] begin[:]
call[name[mergedict_].update, parameter[name[dict_]]]
return[name[mergedict_]]
|
keyword[def] identifier[merge_dicts] (* identifier[args] ):
literal[string]
identifier[iter_] = identifier[iter] ( identifier[args] )
identifier[mergedict_] = identifier[six] . identifier[next] ( identifier[iter_] ). identifier[copy] ()
keyword[for] identifier[dict_] keyword[in] identifier[iter_] :
identifier[mergedict_] . identifier[update] ( identifier[dict_] )
keyword[return] identifier[mergedict_]
|
def merge_dicts(*args):
"""
add / concatenate / union / join / merge / combine dictionaries
Copies the first dictionary given and then repeatedly calls update using
the rest of the dicts given in args. Duplicate keys will receive the last
value specified the list of dictionaries.
Returns:
dict: mergedict_
CommandLine:
python -m utool.util_dict --test-merge_dicts
References:
http://stackoverflow.com/questions/38987/how-can-i-merge-two-python-dictionaries-in-a-single-expression
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_dict import * # NOQA
>>> import utool as ut
>>> x = {'a': 1, 'b': 2}
>>> y = {'b': 3, 'c': 4}
>>> mergedict_ = merge_dicts(x, y)
>>> result = ut.repr4(mergedict_, sorted_=True, newlines=False)
>>> print(result)
{'a': 1, 'b': 3, 'c': 4}
"""
iter_ = iter(args)
mergedict_ = six.next(iter_).copy()
for dict_ in iter_:
mergedict_.update(dict_) # depends on [control=['for'], data=['dict_']]
return mergedict_
|
def read_passwd_file(pass_file):
"""Read password from external file and retrun as string. The file should
contain just single line. Prevents hard-coding password anywhere in this
script. IMPORTANT! Password is stored as plain text! Do NOT use with your
personal account!"
Args:
pass_file (str): /path/to/pass_file
"""
with open(pass_file) as fin:
passwd = fin.read().strip()
return passwd
|
def function[read_passwd_file, parameter[pass_file]]:
constant[Read password from external file and retrun as string. The file should
contain just single line. Prevents hard-coding password anywhere in this
script. IMPORTANT! Password is stored as plain text! Do NOT use with your
personal account!"
Args:
pass_file (str): /path/to/pass_file
]
with call[name[open], parameter[name[pass_file]]] begin[:]
variable[passwd] assign[=] call[call[name[fin].read, parameter[]].strip, parameter[]]
return[name[passwd]]
|
keyword[def] identifier[read_passwd_file] ( identifier[pass_file] ):
literal[string]
keyword[with] identifier[open] ( identifier[pass_file] ) keyword[as] identifier[fin] :
identifier[passwd] = identifier[fin] . identifier[read] (). identifier[strip] ()
keyword[return] identifier[passwd]
|
def read_passwd_file(pass_file):
"""Read password from external file and retrun as string. The file should
contain just single line. Prevents hard-coding password anywhere in this
script. IMPORTANT! Password is stored as plain text! Do NOT use with your
personal account!"
Args:
pass_file (str): /path/to/pass_file
"""
with open(pass_file) as fin:
passwd = fin.read().strip() # depends on [control=['with'], data=['fin']]
return passwd
|
def delete(path, regex=None, recurse=False, test=False):
"""Deletes the file or directory at `path`. If `path` is a directory and
`regex` is provided, matching files will be deleted; `recurse` controls
whether subdirectories are recursed. A list of deleted items is returned.
If `test` is true, nothing will be deleted and a list of items that would
have been deleted is returned.
"""
deleted = []
if op.isfile(path):
if not test: os.remove(path)
else: return [path]
return [] if op.exists(path) else [path]
elif op.isdir(path):
if regex:
for r,ds,fs in os.walk(path):
for i in fs:
if _is_match(regex, i):
deleted += delete(op.join(r,i), test=test)
if not recurse:
break
else:
if not test: shutil.rmtree(path)
else: return [path]
return [] if op.exists(path) else [path]
return deleted
|
def function[delete, parameter[path, regex, recurse, test]]:
constant[Deletes the file or directory at `path`. If `path` is a directory and
`regex` is provided, matching files will be deleted; `recurse` controls
whether subdirectories are recursed. A list of deleted items is returned.
If `test` is true, nothing will be deleted and a list of items that would
have been deleted is returned.
]
variable[deleted] assign[=] list[[]]
if call[name[op].isfile, parameter[name[path]]] begin[:]
if <ast.UnaryOp object at 0x7da18ede7d90> begin[:]
call[name[os].remove, parameter[name[path]]]
return[<ast.IfExp object at 0x7da18ede6ef0>]
return[name[deleted]]
|
keyword[def] identifier[delete] ( identifier[path] , identifier[regex] = keyword[None] , identifier[recurse] = keyword[False] , identifier[test] = keyword[False] ):
literal[string]
identifier[deleted] =[]
keyword[if] identifier[op] . identifier[isfile] ( identifier[path] ):
keyword[if] keyword[not] identifier[test] : identifier[os] . identifier[remove] ( identifier[path] )
keyword[else] : keyword[return] [ identifier[path] ]
keyword[return] [] keyword[if] identifier[op] . identifier[exists] ( identifier[path] ) keyword[else] [ identifier[path] ]
keyword[elif] identifier[op] . identifier[isdir] ( identifier[path] ):
keyword[if] identifier[regex] :
keyword[for] identifier[r] , identifier[ds] , identifier[fs] keyword[in] identifier[os] . identifier[walk] ( identifier[path] ):
keyword[for] identifier[i] keyword[in] identifier[fs] :
keyword[if] identifier[_is_match] ( identifier[regex] , identifier[i] ):
identifier[deleted] += identifier[delete] ( identifier[op] . identifier[join] ( identifier[r] , identifier[i] ), identifier[test] = identifier[test] )
keyword[if] keyword[not] identifier[recurse] :
keyword[break]
keyword[else] :
keyword[if] keyword[not] identifier[test] : identifier[shutil] . identifier[rmtree] ( identifier[path] )
keyword[else] : keyword[return] [ identifier[path] ]
keyword[return] [] keyword[if] identifier[op] . identifier[exists] ( identifier[path] ) keyword[else] [ identifier[path] ]
keyword[return] identifier[deleted]
|
def delete(path, regex=None, recurse=False, test=False):
"""Deletes the file or directory at `path`. If `path` is a directory and
`regex` is provided, matching files will be deleted; `recurse` controls
whether subdirectories are recursed. A list of deleted items is returned.
If `test` is true, nothing will be deleted and a list of items that would
have been deleted is returned.
"""
deleted = []
if op.isfile(path):
if not test:
os.remove(path) # depends on [control=['if'], data=[]]
else:
return [path]
return [] if op.exists(path) else [path] # depends on [control=['if'], data=[]]
elif op.isdir(path):
if regex:
for (r, ds, fs) in os.walk(path):
for i in fs:
if _is_match(regex, i):
deleted += delete(op.join(r, i), test=test) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
if not recurse:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
if not test:
shutil.rmtree(path) # depends on [control=['if'], data=[]]
else:
return [path]
return [] if op.exists(path) else [path] # depends on [control=['if'], data=[]]
return deleted
|
def is_streamable(self):
"""Returns True if the artist is streamable."""
return bool(
_number(
_extract(self._request(self.ws_prefix + ".getInfo", True), "streamable")
)
)
|
def function[is_streamable, parameter[self]]:
constant[Returns True if the artist is streamable.]
return[call[name[bool], parameter[call[name[_number], parameter[call[name[_extract], parameter[call[name[self]._request, parameter[binary_operation[name[self].ws_prefix + constant[.getInfo]], constant[True]]], constant[streamable]]]]]]]]
|
keyword[def] identifier[is_streamable] ( identifier[self] ):
literal[string]
keyword[return] identifier[bool] (
identifier[_number] (
identifier[_extract] ( identifier[self] . identifier[_request] ( identifier[self] . identifier[ws_prefix] + literal[string] , keyword[True] ), literal[string] )
)
)
|
def is_streamable(self):
"""Returns True if the artist is streamable."""
return bool(_number(_extract(self._request(self.ws_prefix + '.getInfo', True), 'streamable')))
|
def create_from_file(cls, filename):
"""Return an Estimator object given the path of the file, relative to the MEDIA_ROOT"""
obj = cls()
obj.object_file = filename
obj.load()
return obj
|
def function[create_from_file, parameter[cls, filename]]:
constant[Return an Estimator object given the path of the file, relative to the MEDIA_ROOT]
variable[obj] assign[=] call[name[cls], parameter[]]
name[obj].object_file assign[=] name[filename]
call[name[obj].load, parameter[]]
return[name[obj]]
|
keyword[def] identifier[create_from_file] ( identifier[cls] , identifier[filename] ):
literal[string]
identifier[obj] = identifier[cls] ()
identifier[obj] . identifier[object_file] = identifier[filename]
identifier[obj] . identifier[load] ()
keyword[return] identifier[obj]
|
def create_from_file(cls, filename):
"""Return an Estimator object given the path of the file, relative to the MEDIA_ROOT"""
obj = cls()
obj.object_file = filename
obj.load()
return obj
|
def xgetattr(obj: object, name: str, default=_sentinel, getitem=False):
"""Get attribute value from object.
:param obj: object
:param name: attribute or key name
:param default: when attribute or key missing, return default; if obj is a
dict and use getitem, default will not be used.
:param getitem: when object is a dict, use getitem or get
:return: attribute or key value, or raise KeyError/AttributeError
"""
if isinstance(obj, dict):
if getitem:
# In tune with `dict.__getitem__` method.
return obj[name]
else:
# In tune with `dict.get` method.
val = obj.get(name, default)
return None if val is _sentinel else val
else:
# If object is not a dict, in tune with `getattr` method.
val = getattr(obj, name, default)
if val is _sentinel:
msg = '%r object has no attribute %r' % (obj.__class__, name)
raise AttributeError(msg)
else:
return val
|
def function[xgetattr, parameter[obj, name, default, getitem]]:
constant[Get attribute value from object.
:param obj: object
:param name: attribute or key name
:param default: when attribute or key missing, return default; if obj is a
dict and use getitem, default will not be used.
:param getitem: when object is a dict, use getitem or get
:return: attribute or key value, or raise KeyError/AttributeError
]
if call[name[isinstance], parameter[name[obj], name[dict]]] begin[:]
if name[getitem] begin[:]
return[call[name[obj]][name[name]]]
|
keyword[def] identifier[xgetattr] ( identifier[obj] : identifier[object] , identifier[name] : identifier[str] , identifier[default] = identifier[_sentinel] , identifier[getitem] = keyword[False] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[dict] ):
keyword[if] identifier[getitem] :
keyword[return] identifier[obj] [ identifier[name] ]
keyword[else] :
identifier[val] = identifier[obj] . identifier[get] ( identifier[name] , identifier[default] )
keyword[return] keyword[None] keyword[if] identifier[val] keyword[is] identifier[_sentinel] keyword[else] identifier[val]
keyword[else] :
identifier[val] = identifier[getattr] ( identifier[obj] , identifier[name] , identifier[default] )
keyword[if] identifier[val] keyword[is] identifier[_sentinel] :
identifier[msg] = literal[string] %( identifier[obj] . identifier[__class__] , identifier[name] )
keyword[raise] identifier[AttributeError] ( identifier[msg] )
keyword[else] :
keyword[return] identifier[val]
|
def xgetattr(obj: object, name: str, default=_sentinel, getitem=False):
"""Get attribute value from object.
:param obj: object
:param name: attribute or key name
:param default: when attribute or key missing, return default; if obj is a
dict and use getitem, default will not be used.
:param getitem: when object is a dict, use getitem or get
:return: attribute or key value, or raise KeyError/AttributeError
"""
if isinstance(obj, dict):
if getitem:
# In tune with `dict.__getitem__` method.
return obj[name] # depends on [control=['if'], data=[]]
else:
# In tune with `dict.get` method.
val = obj.get(name, default)
return None if val is _sentinel else val # depends on [control=['if'], data=[]]
else:
# If object is not a dict, in tune with `getattr` method.
val = getattr(obj, name, default)
if val is _sentinel:
msg = '%r object has no attribute %r' % (obj.__class__, name)
raise AttributeError(msg) # depends on [control=['if'], data=[]]
else:
return val
|
def _wait_for_operation_to_complete(self, operation_name):
"""
Waits for the named operation to complete - checks status of the
asynchronous call.
:param operation_name: The name of the operation.
:type operation_name: str
:return: The response returned by the operation.
:rtype: dict
:exception: AirflowException in case error is returned.
"""
service = self.get_conn()
while True:
operation_response = service.operations().get(
name=operation_name,
).execute(num_retries=self.num_retries)
if operation_response.get("done"):
response = operation_response.get("response")
error = operation_response.get("error")
# Note, according to documentation always either response or error is
# set when "done" == True
if error:
raise AirflowException(str(error))
return response
time.sleep(TIME_TO_SLEEP_IN_SECONDS)
|
def function[_wait_for_operation_to_complete, parameter[self, operation_name]]:
constant[
Waits for the named operation to complete - checks status of the
asynchronous call.
:param operation_name: The name of the operation.
:type operation_name: str
:return: The response returned by the operation.
:rtype: dict
:exception: AirflowException in case error is returned.
]
variable[service] assign[=] call[name[self].get_conn, parameter[]]
while constant[True] begin[:]
variable[operation_response] assign[=] call[call[call[name[service].operations, parameter[]].get, parameter[]].execute, parameter[]]
if call[name[operation_response].get, parameter[constant[done]]] begin[:]
variable[response] assign[=] call[name[operation_response].get, parameter[constant[response]]]
variable[error] assign[=] call[name[operation_response].get, parameter[constant[error]]]
if name[error] begin[:]
<ast.Raise object at 0x7da1b052bd30>
return[name[response]]
call[name[time].sleep, parameter[name[TIME_TO_SLEEP_IN_SECONDS]]]
|
keyword[def] identifier[_wait_for_operation_to_complete] ( identifier[self] , identifier[operation_name] ):
literal[string]
identifier[service] = identifier[self] . identifier[get_conn] ()
keyword[while] keyword[True] :
identifier[operation_response] = identifier[service] . identifier[operations] (). identifier[get] (
identifier[name] = identifier[operation_name] ,
). identifier[execute] ( identifier[num_retries] = identifier[self] . identifier[num_retries] )
keyword[if] identifier[operation_response] . identifier[get] ( literal[string] ):
identifier[response] = identifier[operation_response] . identifier[get] ( literal[string] )
identifier[error] = identifier[operation_response] . identifier[get] ( literal[string] )
keyword[if] identifier[error] :
keyword[raise] identifier[AirflowException] ( identifier[str] ( identifier[error] ))
keyword[return] identifier[response]
identifier[time] . identifier[sleep] ( identifier[TIME_TO_SLEEP_IN_SECONDS] )
|
def _wait_for_operation_to_complete(self, operation_name):
"""
Waits for the named operation to complete - checks status of the
asynchronous call.
:param operation_name: The name of the operation.
:type operation_name: str
:return: The response returned by the operation.
:rtype: dict
:exception: AirflowException in case error is returned.
"""
service = self.get_conn()
while True:
operation_response = service.operations().get(name=operation_name).execute(num_retries=self.num_retries)
if operation_response.get('done'):
response = operation_response.get('response')
error = operation_response.get('error')
# Note, according to documentation always either response or error is
# set when "done" == True
if error:
raise AirflowException(str(error)) # depends on [control=['if'], data=[]]
return response # depends on [control=['if'], data=[]]
time.sleep(TIME_TO_SLEEP_IN_SECONDS) # depends on [control=['while'], data=[]]
|
def __merge(cls, *multicolors):
""" Produces a new :class:`Multicolor` object resulting from gathering information from all supplied :class:`Multicolor` instances.
New :class:`Multicolor` is created and its :attr:`Multicolor.multicolors` attribute is updated with similar attributes of supplied :class:`Multicolor` objects.
Accounts for subclassing.
:param multicolors: variable number of :class:`Multicolor` objects
:type multicolors: :class:`Multicolor`
:return: object containing gathered information from all supplied arguments
:rtype: :class:`Multicolor`
"""
result = cls()
for multicolor in multicolors:
result.multicolors = result.multicolors + multicolor.multicolors
return result
|
def function[__merge, parameter[cls]]:
constant[ Produces a new :class:`Multicolor` object resulting from gathering information from all supplied :class:`Multicolor` instances.
New :class:`Multicolor` is created and its :attr:`Multicolor.multicolors` attribute is updated with similar attributes of supplied :class:`Multicolor` objects.
Accounts for subclassing.
:param multicolors: variable number of :class:`Multicolor` objects
:type multicolors: :class:`Multicolor`
:return: object containing gathered information from all supplied arguments
:rtype: :class:`Multicolor`
]
variable[result] assign[=] call[name[cls], parameter[]]
for taget[name[multicolor]] in starred[name[multicolors]] begin[:]
name[result].multicolors assign[=] binary_operation[name[result].multicolors + name[multicolor].multicolors]
return[name[result]]
|
keyword[def] identifier[__merge] ( identifier[cls] ,* identifier[multicolors] ):
literal[string]
identifier[result] = identifier[cls] ()
keyword[for] identifier[multicolor] keyword[in] identifier[multicolors] :
identifier[result] . identifier[multicolors] = identifier[result] . identifier[multicolors] + identifier[multicolor] . identifier[multicolors]
keyword[return] identifier[result]
|
def __merge(cls, *multicolors):
""" Produces a new :class:`Multicolor` object resulting from gathering information from all supplied :class:`Multicolor` instances.
New :class:`Multicolor` is created and its :attr:`Multicolor.multicolors` attribute is updated with similar attributes of supplied :class:`Multicolor` objects.
Accounts for subclassing.
:param multicolors: variable number of :class:`Multicolor` objects
:type multicolors: :class:`Multicolor`
:return: object containing gathered information from all supplied arguments
:rtype: :class:`Multicolor`
"""
result = cls()
for multicolor in multicolors:
result.multicolors = result.multicolors + multicolor.multicolors # depends on [control=['for'], data=['multicolor']]
return result
|
async def detach(self, discard=True):
'''Remove the underlying :attr:`connection` from the connection
:attr:`pool`.
'''
if discard:
return self.close(True)
else:
self.connection._exit_ = False
return self
|
<ast.AsyncFunctionDef object at 0x7da20c6a84f0>
|
keyword[async] keyword[def] identifier[detach] ( identifier[self] , identifier[discard] = keyword[True] ):
literal[string]
keyword[if] identifier[discard] :
keyword[return] identifier[self] . identifier[close] ( keyword[True] )
keyword[else] :
identifier[self] . identifier[connection] . identifier[_exit_] = keyword[False]
keyword[return] identifier[self]
|
async def detach(self, discard=True):
"""Remove the underlying :attr:`connection` from the connection
:attr:`pool`.
"""
if discard:
return self.close(True) # depends on [control=['if'], data=[]]
else:
self.connection._exit_ = False
return self
|
def name_filter(keywords, names):
'''
Returns the first keyword from the list, unless
that keyword is one of the names in names, in which case
it continues to the next keyword.
Since keywords consists of tuples, it just returns the first
element of the tuple, the keyword. It also adds double
quotes around the keywords, as is appropriate for google queries.
Input Arguments:
keywords -- a list of (keyword, strength) tuples
names -- a list of names to be skipped
'''
name_set = set(name.lower() for name in names)
for key_tuple in keywords:
if not key_tuple[0] in name_set:
return '\"' + key_tuple[0] +'\"'
## returns empty string if we run out, which we shouldn't
return ''
|
def function[name_filter, parameter[keywords, names]]:
constant[
Returns the first keyword from the list, unless
that keyword is one of the names in names, in which case
it continues to the next keyword.
Since keywords consists of tuples, it just returns the first
element of the tuple, the keyword. It also adds double
quotes around the keywords, as is appropriate for google queries.
Input Arguments:
keywords -- a list of (keyword, strength) tuples
names -- a list of names to be skipped
]
variable[name_set] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da20c76edd0>]]
for taget[name[key_tuple]] in starred[name[keywords]] begin[:]
if <ast.UnaryOp object at 0x7da20c76ee60> begin[:]
return[binary_operation[binary_operation[constant["] + call[name[key_tuple]][constant[0]]] + constant["]]]
return[constant[]]
|
keyword[def] identifier[name_filter] ( identifier[keywords] , identifier[names] ):
literal[string]
identifier[name_set] = identifier[set] ( identifier[name] . identifier[lower] () keyword[for] identifier[name] keyword[in] identifier[names] )
keyword[for] identifier[key_tuple] keyword[in] identifier[keywords] :
keyword[if] keyword[not] identifier[key_tuple] [ literal[int] ] keyword[in] identifier[name_set] :
keyword[return] literal[string] + identifier[key_tuple] [ literal[int] ]+ literal[string]
keyword[return] literal[string]
|
def name_filter(keywords, names):
"""
Returns the first keyword from the list, unless
that keyword is one of the names in names, in which case
it continues to the next keyword.
Since keywords consists of tuples, it just returns the first
element of the tuple, the keyword. It also adds double
quotes around the keywords, as is appropriate for google queries.
Input Arguments:
keywords -- a list of (keyword, strength) tuples
names -- a list of names to be skipped
"""
name_set = set((name.lower() for name in names))
for key_tuple in keywords:
if not key_tuple[0] in name_set:
return '"' + key_tuple[0] + '"' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key_tuple']]
## returns empty string if we run out, which we shouldn't
return ''
|
def _handle_response(response, **kwargs) -> XMLResponse:
"""Requests HTTP Response handler. Attaches .html property to
class:`requests.Response <requests.Response>` objects.
"""
if not response.encoding:
response.encoding = DEFAULT_ENCODING
return response
|
def function[_handle_response, parameter[response]]:
constant[Requests HTTP Response handler. Attaches .html property to
class:`requests.Response <requests.Response>` objects.
]
if <ast.UnaryOp object at 0x7da1b26ae290> begin[:]
name[response].encoding assign[=] name[DEFAULT_ENCODING]
return[name[response]]
|
keyword[def] identifier[_handle_response] ( identifier[response] ,** identifier[kwargs] )-> identifier[XMLResponse] :
literal[string]
keyword[if] keyword[not] identifier[response] . identifier[encoding] :
identifier[response] . identifier[encoding] = identifier[DEFAULT_ENCODING]
keyword[return] identifier[response]
|
def _handle_response(response, **kwargs) -> XMLResponse:
"""Requests HTTP Response handler. Attaches .html property to
class:`requests.Response <requests.Response>` objects.
"""
if not response.encoding:
response.encoding = DEFAULT_ENCODING # depends on [control=['if'], data=[]]
return response
|
def allpaths(args):
"""
%prog allpaths folder1 folder2 ...
Run automated ALLPATHS on list of dirs.
"""
p = OptionParser(allpaths.__doc__)
p.add_option("--ploidy", default="1", choices=("1", "2"),
help="Ploidy [default: %default]")
opts, args = p.parse_args(args)
if len(args) == 0:
sys.exit(not p.print_help())
folders = args
for pf in folders:
if not op.isdir(pf):
continue
assemble_dir(pf, target=["final.contigs.fasta", "final.assembly.fasta"],
ploidy=opts.ploidy)
|
def function[allpaths, parameter[args]]:
constant[
%prog allpaths folder1 folder2 ...
Run automated ALLPATHS on list of dirs.
]
variable[p] assign[=] call[name[OptionParser], parameter[name[allpaths].__doc__]]
call[name[p].add_option, parameter[constant[--ploidy]]]
<ast.Tuple object at 0x7da18fe92200> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] equal[==] constant[0]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da1b08c9630>]]
variable[folders] assign[=] name[args]
for taget[name[pf]] in starred[name[folders]] begin[:]
if <ast.UnaryOp object at 0x7da1b08c8f70> begin[:]
continue
call[name[assemble_dir], parameter[name[pf]]]
|
keyword[def] identifier[allpaths] ( identifier[args] ):
literal[string]
identifier[p] = identifier[OptionParser] ( identifier[allpaths] . identifier[__doc__] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = literal[string] , identifier[choices] =( literal[string] , literal[string] ),
identifier[help] = literal[string] )
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )== literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[folders] = identifier[args]
keyword[for] identifier[pf] keyword[in] identifier[folders] :
keyword[if] keyword[not] identifier[op] . identifier[isdir] ( identifier[pf] ):
keyword[continue]
identifier[assemble_dir] ( identifier[pf] , identifier[target] =[ literal[string] , literal[string] ],
identifier[ploidy] = identifier[opts] . identifier[ploidy] )
|
def allpaths(args):
"""
%prog allpaths folder1 folder2 ...
Run automated ALLPATHS on list of dirs.
"""
p = OptionParser(allpaths.__doc__)
p.add_option('--ploidy', default='1', choices=('1', '2'), help='Ploidy [default: %default]')
(opts, args) = p.parse_args(args)
if len(args) == 0:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
folders = args
for pf in folders:
if not op.isdir(pf):
continue # depends on [control=['if'], data=[]]
assemble_dir(pf, target=['final.contigs.fasta', 'final.assembly.fasta'], ploidy=opts.ploidy) # depends on [control=['for'], data=['pf']]
|
def parent_after_fork_release():
"""
Call all parent after fork callables, release the lock and print
all prepare and parent callback exceptions.
"""
prepare_exceptions = list(_prepare_call_exceptions)
del _prepare_call_exceptions[:]
exceptions = _call_atfork_list(_parent_call_list)
_fork_lock.release()
_print_exception_list(prepare_exceptions, 'before fork')
_print_exception_list(exceptions, 'after fork from parent')
|
def function[parent_after_fork_release, parameter[]]:
constant[
Call all parent after fork callables, release the lock and print
all prepare and parent callback exceptions.
]
variable[prepare_exceptions] assign[=] call[name[list], parameter[name[_prepare_call_exceptions]]]
<ast.Delete object at 0x7da20c6e56c0>
variable[exceptions] assign[=] call[name[_call_atfork_list], parameter[name[_parent_call_list]]]
call[name[_fork_lock].release, parameter[]]
call[name[_print_exception_list], parameter[name[prepare_exceptions], constant[before fork]]]
call[name[_print_exception_list], parameter[name[exceptions], constant[after fork from parent]]]
|
keyword[def] identifier[parent_after_fork_release] ():
literal[string]
identifier[prepare_exceptions] = identifier[list] ( identifier[_prepare_call_exceptions] )
keyword[del] identifier[_prepare_call_exceptions] [:]
identifier[exceptions] = identifier[_call_atfork_list] ( identifier[_parent_call_list] )
identifier[_fork_lock] . identifier[release] ()
identifier[_print_exception_list] ( identifier[prepare_exceptions] , literal[string] )
identifier[_print_exception_list] ( identifier[exceptions] , literal[string] )
|
def parent_after_fork_release():
"""
Call all parent after fork callables, release the lock and print
all prepare and parent callback exceptions.
"""
prepare_exceptions = list(_prepare_call_exceptions)
del _prepare_call_exceptions[:]
exceptions = _call_atfork_list(_parent_call_list)
_fork_lock.release()
_print_exception_list(prepare_exceptions, 'before fork')
_print_exception_list(exceptions, 'after fork from parent')
|
def segment(self, webvtt, output='', seconds=SECONDS, mpegts=MPEGTS):
"""Segments the captions based on a number of seconds."""
if isinstance(webvtt, str):
# if a string is supplied we parse the file
captions = WebVTT().read(webvtt).captions
elif not self._validate_webvtt(webvtt):
raise InvalidCaptionsError('The captions provided are invalid')
else:
# we expect to have a webvtt object
captions = webvtt.captions
self._total_segments = 0 if not captions else int(ceil(captions[-1].end_in_seconds / seconds))
self._output_folder = output
self._seconds = seconds
self._mpegts = mpegts
output_folder = os.path.join(os.getcwd(), output)
if not os.path.exists(output_folder):
os.makedirs(output_folder)
self._slice_segments(captions)
self._write_segments()
self._write_manifest()
|
def function[segment, parameter[self, webvtt, output, seconds, mpegts]]:
constant[Segments the captions based on a number of seconds.]
if call[name[isinstance], parameter[name[webvtt], name[str]]] begin[:]
variable[captions] assign[=] call[call[name[WebVTT], parameter[]].read, parameter[name[webvtt]]].captions
name[self]._total_segments assign[=] <ast.IfExp object at 0x7da1b0bda8c0>
name[self]._output_folder assign[=] name[output]
name[self]._seconds assign[=] name[seconds]
name[self]._mpegts assign[=] name[mpegts]
variable[output_folder] assign[=] call[name[os].path.join, parameter[call[name[os].getcwd, parameter[]], name[output]]]
if <ast.UnaryOp object at 0x7da1b0b37c70> begin[:]
call[name[os].makedirs, parameter[name[output_folder]]]
call[name[self]._slice_segments, parameter[name[captions]]]
call[name[self]._write_segments, parameter[]]
call[name[self]._write_manifest, parameter[]]
|
keyword[def] identifier[segment] ( identifier[self] , identifier[webvtt] , identifier[output] = literal[string] , identifier[seconds] = identifier[SECONDS] , identifier[mpegts] = identifier[MPEGTS] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[webvtt] , identifier[str] ):
identifier[captions] = identifier[WebVTT] (). identifier[read] ( identifier[webvtt] ). identifier[captions]
keyword[elif] keyword[not] identifier[self] . identifier[_validate_webvtt] ( identifier[webvtt] ):
keyword[raise] identifier[InvalidCaptionsError] ( literal[string] )
keyword[else] :
identifier[captions] = identifier[webvtt] . identifier[captions]
identifier[self] . identifier[_total_segments] = literal[int] keyword[if] keyword[not] identifier[captions] keyword[else] identifier[int] ( identifier[ceil] ( identifier[captions] [- literal[int] ]. identifier[end_in_seconds] / identifier[seconds] ))
identifier[self] . identifier[_output_folder] = identifier[output]
identifier[self] . identifier[_seconds] = identifier[seconds]
identifier[self] . identifier[_mpegts] = identifier[mpegts]
identifier[output_folder] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[getcwd] (), identifier[output] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[output_folder] ):
identifier[os] . identifier[makedirs] ( identifier[output_folder] )
identifier[self] . identifier[_slice_segments] ( identifier[captions] )
identifier[self] . identifier[_write_segments] ()
identifier[self] . identifier[_write_manifest] ()
|
def segment(self, webvtt, output='', seconds=SECONDS, mpegts=MPEGTS):
"""Segments the captions based on a number of seconds."""
if isinstance(webvtt, str):
# if a string is supplied we parse the file
captions = WebVTT().read(webvtt).captions # depends on [control=['if'], data=[]]
elif not self._validate_webvtt(webvtt):
raise InvalidCaptionsError('The captions provided are invalid') # depends on [control=['if'], data=[]]
else:
# we expect to have a webvtt object
captions = webvtt.captions
self._total_segments = 0 if not captions else int(ceil(captions[-1].end_in_seconds / seconds))
self._output_folder = output
self._seconds = seconds
self._mpegts = mpegts
output_folder = os.path.join(os.getcwd(), output)
if not os.path.exists(output_folder):
os.makedirs(output_folder) # depends on [control=['if'], data=[]]
self._slice_segments(captions)
self._write_segments()
self._write_manifest()
|
def set_action(self,action):
"""Set the action of the item.
:Parameters:
- `action`: the new action or `None`.
:Types:
- `action`: `unicode`
"""
if action is None:
if self.xmlnode.hasProp("action"):
self.xmlnode.unsetProp("action")
return
if action not in ("remove","update"):
raise ValueError("Action must be 'update' or 'remove'")
action = unicode(action)
self.xmlnode.setProp("action", action.encode("utf-8"))
|
def function[set_action, parameter[self, action]]:
constant[Set the action of the item.
:Parameters:
- `action`: the new action or `None`.
:Types:
- `action`: `unicode`
]
if compare[name[action] is constant[None]] begin[:]
if call[name[self].xmlnode.hasProp, parameter[constant[action]]] begin[:]
call[name[self].xmlnode.unsetProp, parameter[constant[action]]]
return[None]
if compare[name[action] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da1b00e5b40>, <ast.Constant object at 0x7da1b00e7850>]]] begin[:]
<ast.Raise object at 0x7da1b00e7ee0>
variable[action] assign[=] call[name[unicode], parameter[name[action]]]
call[name[self].xmlnode.setProp, parameter[constant[action], call[name[action].encode, parameter[constant[utf-8]]]]]
|
keyword[def] identifier[set_action] ( identifier[self] , identifier[action] ):
literal[string]
keyword[if] identifier[action] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[xmlnode] . identifier[hasProp] ( literal[string] ):
identifier[self] . identifier[xmlnode] . identifier[unsetProp] ( literal[string] )
keyword[return]
keyword[if] identifier[action] keyword[not] keyword[in] ( literal[string] , literal[string] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[action] = identifier[unicode] ( identifier[action] )
identifier[self] . identifier[xmlnode] . identifier[setProp] ( literal[string] , identifier[action] . identifier[encode] ( literal[string] ))
|
def set_action(self, action):
"""Set the action of the item.
:Parameters:
- `action`: the new action or `None`.
:Types:
- `action`: `unicode`
"""
if action is None:
if self.xmlnode.hasProp('action'):
self.xmlnode.unsetProp('action') # depends on [control=['if'], data=[]]
return # depends on [control=['if'], data=[]]
if action not in ('remove', 'update'):
raise ValueError("Action must be 'update' or 'remove'") # depends on [control=['if'], data=[]]
action = unicode(action)
self.xmlnode.setProp('action', action.encode('utf-8'))
|
def note_to_int(note):
"""Convert notes in the form of C, C#, Cb, C##, etc. to an integer in the
range of 0-11.
Throw a NoteFormatError exception if the note format is not recognised.
"""
if is_valid_note(note):
val = _note_dict[note[0]]
else:
raise NoteFormatError("Unknown note format '%s'" % note)
# Check for '#' and 'b' postfixes
for post in note[1:]:
if post == 'b':
val -= 1
elif post == '#':
val += 1
return val % 12
|
def function[note_to_int, parameter[note]]:
constant[Convert notes in the form of C, C#, Cb, C##, etc. to an integer in the
range of 0-11.
Throw a NoteFormatError exception if the note format is not recognised.
]
if call[name[is_valid_note], parameter[name[note]]] begin[:]
variable[val] assign[=] call[name[_note_dict]][call[name[note]][constant[0]]]
for taget[name[post]] in starred[call[name[note]][<ast.Slice object at 0x7da1b2345b70>]] begin[:]
if compare[name[post] equal[==] constant[b]] begin[:]
<ast.AugAssign object at 0x7da1b2344ca0>
return[binary_operation[name[val] <ast.Mod object at 0x7da2590d6920> constant[12]]]
|
keyword[def] identifier[note_to_int] ( identifier[note] ):
literal[string]
keyword[if] identifier[is_valid_note] ( identifier[note] ):
identifier[val] = identifier[_note_dict] [ identifier[note] [ literal[int] ]]
keyword[else] :
keyword[raise] identifier[NoteFormatError] ( literal[string] % identifier[note] )
keyword[for] identifier[post] keyword[in] identifier[note] [ literal[int] :]:
keyword[if] identifier[post] == literal[string] :
identifier[val] -= literal[int]
keyword[elif] identifier[post] == literal[string] :
identifier[val] += literal[int]
keyword[return] identifier[val] % literal[int]
|
def note_to_int(note):
"""Convert notes in the form of C, C#, Cb, C##, etc. to an integer in the
range of 0-11.
Throw a NoteFormatError exception if the note format is not recognised.
"""
if is_valid_note(note):
val = _note_dict[note[0]] # depends on [control=['if'], data=[]]
else:
raise NoteFormatError("Unknown note format '%s'" % note)
# Check for '#' and 'b' postfixes
for post in note[1:]:
if post == 'b':
val -= 1 # depends on [control=['if'], data=[]]
elif post == '#':
val += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['post']]
return val % 12
|
def DeregisterBlockchain():
"""
Remove the default blockchain instance.
"""
Blockchain.SECONDS_PER_BLOCK = 15
Blockchain.DECREMENT_INTERVAL = 2000000
Blockchain.GENERATION_AMOUNT = [8, 7, 6, 5, 4, 3, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
Blockchain._blockchain = None
Blockchain._validators = []
Blockchain._genesis_block = None
Blockchain._instance = None
Blockchain._blockrequests = set()
Blockchain._paused = False
Blockchain.BlockSearchTries = 0
Blockchain.CACHELIM = 4000
Blockchain.CMISSLIM = 5
Blockchain.LOOPTIME = .1
Blockchain.PersistCompleted = Events()
Blockchain.Notify = Events()
Blockchain._instance = None
|
def function[DeregisterBlockchain, parameter[]]:
constant[
Remove the default blockchain instance.
]
name[Blockchain].SECONDS_PER_BLOCK assign[=] constant[15]
name[Blockchain].DECREMENT_INTERVAL assign[=] constant[2000000]
name[Blockchain].GENERATION_AMOUNT assign[=] list[[<ast.Constant object at 0x7da1b1dd27d0>, <ast.Constant object at 0x7da1b1dd38b0>, <ast.Constant object at 0x7da1b1dd2a40>, <ast.Constant object at 0x7da1b1dd0b80>, <ast.Constant object at 0x7da1b1dd3a30>, <ast.Constant object at 0x7da1b1dd1570>, <ast.Constant object at 0x7da1b1dd3af0>, <ast.Constant object at 0x7da1b1dd39d0>, <ast.Constant object at 0x7da1b1dd15a0>, <ast.Constant object at 0x7da1b1dd0820>, <ast.Constant object at 0x7da1b1dd1210>, <ast.Constant object at 0x7da1b1dd12a0>, <ast.Constant object at 0x7da1b1dd2350>, <ast.Constant object at 0x7da1b1dd3850>, <ast.Constant object at 0x7da1b1dd29e0>, <ast.Constant object at 0x7da1b1dd00d0>, <ast.Constant object at 0x7da1b1dd2320>, <ast.Constant object at 0x7da1b1dd0bb0>, <ast.Constant object at 0x7da1b1dd0b50>, <ast.Constant object at 0x7da1b1dd0100>, <ast.Constant object at 0x7da1b1dd11e0>, <ast.Constant object at 0x7da1b1dd0850>]]
name[Blockchain]._blockchain assign[=] constant[None]
name[Blockchain]._validators assign[=] list[[]]
name[Blockchain]._genesis_block assign[=] constant[None]
name[Blockchain]._instance assign[=] constant[None]
name[Blockchain]._blockrequests assign[=] call[name[set], parameter[]]
name[Blockchain]._paused assign[=] constant[False]
name[Blockchain].BlockSearchTries assign[=] constant[0]
name[Blockchain].CACHELIM assign[=] constant[4000]
name[Blockchain].CMISSLIM assign[=] constant[5]
name[Blockchain].LOOPTIME assign[=] constant[0.1]
name[Blockchain].PersistCompleted assign[=] call[name[Events], parameter[]]
name[Blockchain].Notify assign[=] call[name[Events], parameter[]]
name[Blockchain]._instance assign[=] constant[None]
|
keyword[def] identifier[DeregisterBlockchain] ():
literal[string]
identifier[Blockchain] . identifier[SECONDS_PER_BLOCK] = literal[int]
identifier[Blockchain] . identifier[DECREMENT_INTERVAL] = literal[int]
identifier[Blockchain] . identifier[GENERATION_AMOUNT] =[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]
identifier[Blockchain] . identifier[_blockchain] = keyword[None]
identifier[Blockchain] . identifier[_validators] =[]
identifier[Blockchain] . identifier[_genesis_block] = keyword[None]
identifier[Blockchain] . identifier[_instance] = keyword[None]
identifier[Blockchain] . identifier[_blockrequests] = identifier[set] ()
identifier[Blockchain] . identifier[_paused] = keyword[False]
identifier[Blockchain] . identifier[BlockSearchTries] = literal[int]
identifier[Blockchain] . identifier[CACHELIM] = literal[int]
identifier[Blockchain] . identifier[CMISSLIM] = literal[int]
identifier[Blockchain] . identifier[LOOPTIME] = literal[int]
identifier[Blockchain] . identifier[PersistCompleted] = identifier[Events] ()
identifier[Blockchain] . identifier[Notify] = identifier[Events] ()
identifier[Blockchain] . identifier[_instance] = keyword[None]
|
def DeregisterBlockchain():
"""
Remove the default blockchain instance.
"""
Blockchain.SECONDS_PER_BLOCK = 15
Blockchain.DECREMENT_INTERVAL = 2000000
Blockchain.GENERATION_AMOUNT = [8, 7, 6, 5, 4, 3, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
Blockchain._blockchain = None
Blockchain._validators = []
Blockchain._genesis_block = None
Blockchain._instance = None
Blockchain._blockrequests = set()
Blockchain._paused = False
Blockchain.BlockSearchTries = 0
Blockchain.CACHELIM = 4000
Blockchain.CMISSLIM = 5
Blockchain.LOOPTIME = 0.1
Blockchain.PersistCompleted = Events()
Blockchain.Notify = Events()
Blockchain._instance = None
|
def build():
"""
builds the cloud_init script
"""
try:
cloud_config = CloudConfig()
config_data = cloud_config.config_data('cluster')
cloud_init = CloudInit()
print(cloud_init.build(config_data))
except CloudComposeException as ex:
print(ex)
|
def function[build, parameter[]]:
constant[
builds the cloud_init script
]
<ast.Try object at 0x7da20c6c4340>
|
keyword[def] identifier[build] ():
literal[string]
keyword[try] :
identifier[cloud_config] = identifier[CloudConfig] ()
identifier[config_data] = identifier[cloud_config] . identifier[config_data] ( literal[string] )
identifier[cloud_init] = identifier[CloudInit] ()
identifier[print] ( identifier[cloud_init] . identifier[build] ( identifier[config_data] ))
keyword[except] identifier[CloudComposeException] keyword[as] identifier[ex] :
identifier[print] ( identifier[ex] )
|
def build():
"""
builds the cloud_init script
"""
try:
cloud_config = CloudConfig()
config_data = cloud_config.config_data('cluster')
cloud_init = CloudInit()
print(cloud_init.build(config_data)) # depends on [control=['try'], data=[]]
except CloudComposeException as ex:
print(ex) # depends on [control=['except'], data=['ex']]
|
def track_metric(self, name, value, type=None, count=None, min=None, max=None, std_dev=None, properties=None):
"""Send information about a single metric data point that was captured for the application.
Args:
name (str). the name of the metric that was captured.\n
value (float). the value of the metric that was captured.\n
type (:class:`channel.contracts.DataPointType`). the type of the metric. (defaults to: :func:`channel.contracts.DataPointType.aggregation`)\n
count (int). the number of metrics that were aggregated into this data point. (defaults to: None)\n
min (float). the minimum of all metrics collected that were aggregated into this data point. (defaults to: None)\n
max (float). the maximum of all metrics collected that were aggregated into this data point. (defaults to: None)\n
std_dev (float). the standard deviation of all metrics collected that were aggregated into this data point. (defaults to: None)\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)
"""
dataPoint = channel.contracts.DataPoint()
dataPoint.name = name or NULL_CONSTANT_STRING
dataPoint.value = value or 0
dataPoint.kind = type or channel.contracts.DataPointType.aggregation
dataPoint.count = count
dataPoint.min = min
dataPoint.max = max
dataPoint.std_dev = std_dev
data = channel.contracts.MetricData()
data.metrics.append(dataPoint)
if properties:
data.properties = properties
self.track(data, self._context)
|
def function[track_metric, parameter[self, name, value, type, count, min, max, std_dev, properties]]:
constant[Send information about a single metric data point that was captured for the application.
Args:
name (str). the name of the metric that was captured.
value (float). the value of the metric that was captured.
type (:class:`channel.contracts.DataPointType`). the type of the metric. (defaults to: :func:`channel.contracts.DataPointType.aggregation`)
count (int). the number of metrics that were aggregated into this data point. (defaults to: None)
min (float). the minimum of all metrics collected that were aggregated into this data point. (defaults to: None)
max (float). the maximum of all metrics collected that were aggregated into this data point. (defaults to: None)
std_dev (float). the standard deviation of all metrics collected that were aggregated into this data point. (defaults to: None)
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)
]
variable[dataPoint] assign[=] call[name[channel].contracts.DataPoint, parameter[]]
name[dataPoint].name assign[=] <ast.BoolOp object at 0x7da1b102b910>
name[dataPoint].value assign[=] <ast.BoolOp object at 0x7da1b102b220>
name[dataPoint].kind assign[=] <ast.BoolOp object at 0x7da1b10292d0>
name[dataPoint].count assign[=] name[count]
name[dataPoint].min assign[=] name[min]
name[dataPoint].max assign[=] name[max]
name[dataPoint].std_dev assign[=] name[std_dev]
variable[data] assign[=] call[name[channel].contracts.MetricData, parameter[]]
call[name[data].metrics.append, parameter[name[dataPoint]]]
if name[properties] begin[:]
name[data].properties assign[=] name[properties]
call[name[self].track, parameter[name[data], name[self]._context]]
|
keyword[def] identifier[track_metric] ( identifier[self] , identifier[name] , identifier[value] , identifier[type] = keyword[None] , identifier[count] = keyword[None] , identifier[min] = keyword[None] , identifier[max] = keyword[None] , identifier[std_dev] = keyword[None] , identifier[properties] = keyword[None] ):
literal[string]
identifier[dataPoint] = identifier[channel] . identifier[contracts] . identifier[DataPoint] ()
identifier[dataPoint] . identifier[name] = identifier[name] keyword[or] identifier[NULL_CONSTANT_STRING]
identifier[dataPoint] . identifier[value] = identifier[value] keyword[or] literal[int]
identifier[dataPoint] . identifier[kind] = identifier[type] keyword[or] identifier[channel] . identifier[contracts] . identifier[DataPointType] . identifier[aggregation]
identifier[dataPoint] . identifier[count] = identifier[count]
identifier[dataPoint] . identifier[min] = identifier[min]
identifier[dataPoint] . identifier[max] = identifier[max]
identifier[dataPoint] . identifier[std_dev] = identifier[std_dev]
identifier[data] = identifier[channel] . identifier[contracts] . identifier[MetricData] ()
identifier[data] . identifier[metrics] . identifier[append] ( identifier[dataPoint] )
keyword[if] identifier[properties] :
identifier[data] . identifier[properties] = identifier[properties]
identifier[self] . identifier[track] ( identifier[data] , identifier[self] . identifier[_context] )
|
def track_metric(self, name, value, type=None, count=None, min=None, max=None, std_dev=None, properties=None):
"""Send information about a single metric data point that was captured for the application.
Args:
name (str). the name of the metric that was captured.
value (float). the value of the metric that was captured.
type (:class:`channel.contracts.DataPointType`). the type of the metric. (defaults to: :func:`channel.contracts.DataPointType.aggregation`)
count (int). the number of metrics that were aggregated into this data point. (defaults to: None)
min (float). the minimum of all metrics collected that were aggregated into this data point. (defaults to: None)
max (float). the maximum of all metrics collected that were aggregated into this data point. (defaults to: None)
std_dev (float). the standard deviation of all metrics collected that were aggregated into this data point. (defaults to: None)
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)
"""
dataPoint = channel.contracts.DataPoint()
dataPoint.name = name or NULL_CONSTANT_STRING
dataPoint.value = value or 0
dataPoint.kind = type or channel.contracts.DataPointType.aggregation
dataPoint.count = count
dataPoint.min = min
dataPoint.max = max
dataPoint.std_dev = std_dev
data = channel.contracts.MetricData()
data.metrics.append(dataPoint)
if properties:
data.properties = properties # depends on [control=['if'], data=[]]
self.track(data, self._context)
|
def bump(component='patch', exact=None):
# type: (str, str) -> Tuple[str, str]
""" Bump the given version component.
Args:
component (str):
What part of the version should be bumped. Can be one of:
- major
- minor
- patch
exact (str):
The exact version that should be set instead of bumping the current
one.
Returns:
tuple(str, str): A tuple of old and bumped version.
"""
old_ver = current()
if exact is None:
new_ver = _bump_version(old_ver, component)
else:
new_ver = exact
write(new_ver)
return old_ver, new_ver
|
def function[bump, parameter[component, exact]]:
constant[ Bump the given version component.
Args:
component (str):
What part of the version should be bumped. Can be one of:
- major
- minor
- patch
exact (str):
The exact version that should be set instead of bumping the current
one.
Returns:
tuple(str, str): A tuple of old and bumped version.
]
variable[old_ver] assign[=] call[name[current], parameter[]]
if compare[name[exact] is constant[None]] begin[:]
variable[new_ver] assign[=] call[name[_bump_version], parameter[name[old_ver], name[component]]]
call[name[write], parameter[name[new_ver]]]
return[tuple[[<ast.Name object at 0x7da1b10adb40>, <ast.Name object at 0x7da1b10ad960>]]]
|
keyword[def] identifier[bump] ( identifier[component] = literal[string] , identifier[exact] = keyword[None] ):
literal[string]
identifier[old_ver] = identifier[current] ()
keyword[if] identifier[exact] keyword[is] keyword[None] :
identifier[new_ver] = identifier[_bump_version] ( identifier[old_ver] , identifier[component] )
keyword[else] :
identifier[new_ver] = identifier[exact]
identifier[write] ( identifier[new_ver] )
keyword[return] identifier[old_ver] , identifier[new_ver]
|
def bump(component='patch', exact=None):
# type: (str, str) -> Tuple[str, str]
' Bump the given version component.\n\n Args:\n component (str):\n What part of the version should be bumped. Can be one of:\n\n - major\n - minor\n - patch\n\n exact (str):\n The exact version that should be set instead of bumping the current\n one.\n\n Returns:\n tuple(str, str): A tuple of old and bumped version.\n '
old_ver = current()
if exact is None:
new_ver = _bump_version(old_ver, component) # depends on [control=['if'], data=[]]
else:
new_ver = exact
write(new_ver)
return (old_ver, new_ver)
|
def global_step(device=''):
"""Returns the global step variable.
Args:
device: Optional device to place the variable. It can be an string or a
function that is called to get the device for the variable.
Returns:
the tensor representing the global step variable.
"""
global_step_ref = tf.get_collection(tf.GraphKeys.GLOBAL_STEP)
if global_step_ref:
return global_step_ref[0]
else:
collections = [
VARIABLES_TO_RESTORE,
tf.GraphKeys.GLOBAL_VARIABLES,
tf.GraphKeys.GLOBAL_STEP,
]
# Get the device for the variable.
with tf.device(variable_device(device, 'global_step')):
return tf.get_variable('global_step', shape=[], dtype=tf.int64,
initializer=tf.zeros_initializer(),
trainable=False, collections=collections)
|
def function[global_step, parameter[device]]:
constant[Returns the global step variable.
Args:
device: Optional device to place the variable. It can be an string or a
function that is called to get the device for the variable.
Returns:
the tensor representing the global step variable.
]
variable[global_step_ref] assign[=] call[name[tf].get_collection, parameter[name[tf].GraphKeys.GLOBAL_STEP]]
if name[global_step_ref] begin[:]
return[call[name[global_step_ref]][constant[0]]]
|
keyword[def] identifier[global_step] ( identifier[device] = literal[string] ):
literal[string]
identifier[global_step_ref] = identifier[tf] . identifier[get_collection] ( identifier[tf] . identifier[GraphKeys] . identifier[GLOBAL_STEP] )
keyword[if] identifier[global_step_ref] :
keyword[return] identifier[global_step_ref] [ literal[int] ]
keyword[else] :
identifier[collections] =[
identifier[VARIABLES_TO_RESTORE] ,
identifier[tf] . identifier[GraphKeys] . identifier[GLOBAL_VARIABLES] ,
identifier[tf] . identifier[GraphKeys] . identifier[GLOBAL_STEP] ,
]
keyword[with] identifier[tf] . identifier[device] ( identifier[variable_device] ( identifier[device] , literal[string] )):
keyword[return] identifier[tf] . identifier[get_variable] ( literal[string] , identifier[shape] =[], identifier[dtype] = identifier[tf] . identifier[int64] ,
identifier[initializer] = identifier[tf] . identifier[zeros_initializer] (),
identifier[trainable] = keyword[False] , identifier[collections] = identifier[collections] )
|
def global_step(device=''):
"""Returns the global step variable.
Args:
device: Optional device to place the variable. It can be an string or a
function that is called to get the device for the variable.
Returns:
the tensor representing the global step variable.
"""
global_step_ref = tf.get_collection(tf.GraphKeys.GLOBAL_STEP)
if global_step_ref:
return global_step_ref[0] # depends on [control=['if'], data=[]]
else:
collections = [VARIABLES_TO_RESTORE, tf.GraphKeys.GLOBAL_VARIABLES, tf.GraphKeys.GLOBAL_STEP]
# Get the device for the variable.
with tf.device(variable_device(device, 'global_step')):
return tf.get_variable('global_step', shape=[], dtype=tf.int64, initializer=tf.zeros_initializer(), trainable=False, collections=collections) # depends on [control=['with'], data=[]]
|
def is_manifestation_model(instance, attribute, value):
"""Must include a ``manifestationOfWork`` key."""
instance_name = instance.__class__.__name__
is_creation_model(instance, attribute, value)
manifestation_of = value.get('manifestationOfWork')
if not isinstance(manifestation_of, str):
err_str = ("'manifestationOfWork' must be given as a string in the "
"'{attr}' parameter of a '{cls}'. Given "
"'{value}'").format(attr=attribute.name,
cls=instance_name,
value=manifestation_of)
print(err_str)
|
def function[is_manifestation_model, parameter[instance, attribute, value]]:
constant[Must include a ``manifestationOfWork`` key.]
variable[instance_name] assign[=] name[instance].__class__.__name__
call[name[is_creation_model], parameter[name[instance], name[attribute], name[value]]]
variable[manifestation_of] assign[=] call[name[value].get, parameter[constant[manifestationOfWork]]]
if <ast.UnaryOp object at 0x7da1b26addb0> begin[:]
variable[err_str] assign[=] call[constant['manifestationOfWork' must be given as a string in the '{attr}' parameter of a '{cls}'. Given '{value}'].format, parameter[]]
call[name[print], parameter[name[err_str]]]
|
keyword[def] identifier[is_manifestation_model] ( identifier[instance] , identifier[attribute] , identifier[value] ):
literal[string]
identifier[instance_name] = identifier[instance] . identifier[__class__] . identifier[__name__]
identifier[is_creation_model] ( identifier[instance] , identifier[attribute] , identifier[value] )
identifier[manifestation_of] = identifier[value] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[manifestation_of] , identifier[str] ):
identifier[err_str] =( literal[string]
literal[string]
literal[string] ). identifier[format] ( identifier[attr] = identifier[attribute] . identifier[name] ,
identifier[cls] = identifier[instance_name] ,
identifier[value] = identifier[manifestation_of] )
identifier[print] ( identifier[err_str] )
|
def is_manifestation_model(instance, attribute, value):
"""Must include a ``manifestationOfWork`` key."""
instance_name = instance.__class__.__name__
is_creation_model(instance, attribute, value)
manifestation_of = value.get('manifestationOfWork')
if not isinstance(manifestation_of, str):
err_str = "'manifestationOfWork' must be given as a string in the '{attr}' parameter of a '{cls}'. Given '{value}'".format(attr=attribute.name, cls=instance_name, value=manifestation_of)
print(err_str) # depends on [control=['if'], data=[]]
|
def _sync_children(self, task_specs, state=MAYBE):
"""
This method syncs up the task's children with the given list of task
specs. In other words::
- Add one child for each given TaskSpec, unless that child already
exists.
- Remove all children for which there is no spec in the given list,
unless it is a "triggered" task.
.. note::
It is an error if the task has a non-predicted child that is
not given in the TaskSpecs.
:type task_specs: list(TaskSpec)
:param task_specs: The list of task specs that may become children.
:type state: integer
:param state: The bitmask of states for the new children.
"""
LOG.debug("Updating children for %s" % self.get_name())
if task_specs is None:
raise ValueError('"task_specs" argument is None')
add = task_specs[:]
# Create a list of all children that are no longer needed.
remove = []
for child in self.children:
# Triggered tasks are never removed.
if child.triggered:
continue
# Check whether the task needs to be removed.
if child.task_spec in add:
add.remove(child.task_spec)
continue
# Non-predicted tasks must not be removed, so they HAVE to be in
# the given task spec list.
if child._is_definite():
raise WorkflowException(self.task_spec,
'removal of non-predicted child %s' %
repr(child))
remove.append(child)
# Remove and add the children accordingly.
for child in remove:
self.children.remove(child)
for task_spec in add:
self._add_child(task_spec, state)
|
def function[_sync_children, parameter[self, task_specs, state]]:
constant[
This method syncs up the task's children with the given list of task
specs. In other words::
- Add one child for each given TaskSpec, unless that child already
exists.
- Remove all children for which there is no spec in the given list,
unless it is a "triggered" task.
.. note::
It is an error if the task has a non-predicted child that is
not given in the TaskSpecs.
:type task_specs: list(TaskSpec)
:param task_specs: The list of task specs that may become children.
:type state: integer
:param state: The bitmask of states for the new children.
]
call[name[LOG].debug, parameter[binary_operation[constant[Updating children for %s] <ast.Mod object at 0x7da2590d6920> call[name[self].get_name, parameter[]]]]]
if compare[name[task_specs] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b01c2920>
variable[add] assign[=] call[name[task_specs]][<ast.Slice object at 0x7da1b01c3e50>]
variable[remove] assign[=] list[[]]
for taget[name[child]] in starred[name[self].children] begin[:]
if name[child].triggered begin[:]
continue
if compare[name[child].task_spec in name[add]] begin[:]
call[name[add].remove, parameter[name[child].task_spec]]
continue
if call[name[child]._is_definite, parameter[]] begin[:]
<ast.Raise object at 0x7da1b01c0460>
call[name[remove].append, parameter[name[child]]]
for taget[name[child]] in starred[name[remove]] begin[:]
call[name[self].children.remove, parameter[name[child]]]
for taget[name[task_spec]] in starred[name[add]] begin[:]
call[name[self]._add_child, parameter[name[task_spec], name[state]]]
|
keyword[def] identifier[_sync_children] ( identifier[self] , identifier[task_specs] , identifier[state] = identifier[MAYBE] ):
literal[string]
identifier[LOG] . identifier[debug] ( literal[string] % identifier[self] . identifier[get_name] ())
keyword[if] identifier[task_specs] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[add] = identifier[task_specs] [:]
identifier[remove] =[]
keyword[for] identifier[child] keyword[in] identifier[self] . identifier[children] :
keyword[if] identifier[child] . identifier[triggered] :
keyword[continue]
keyword[if] identifier[child] . identifier[task_spec] keyword[in] identifier[add] :
identifier[add] . identifier[remove] ( identifier[child] . identifier[task_spec] )
keyword[continue]
keyword[if] identifier[child] . identifier[_is_definite] ():
keyword[raise] identifier[WorkflowException] ( identifier[self] . identifier[task_spec] ,
literal[string] %
identifier[repr] ( identifier[child] ))
identifier[remove] . identifier[append] ( identifier[child] )
keyword[for] identifier[child] keyword[in] identifier[remove] :
identifier[self] . identifier[children] . identifier[remove] ( identifier[child] )
keyword[for] identifier[task_spec] keyword[in] identifier[add] :
identifier[self] . identifier[_add_child] ( identifier[task_spec] , identifier[state] )
|
def _sync_children(self, task_specs, state=MAYBE):
"""
This method syncs up the task's children with the given list of task
specs. In other words::
- Add one child for each given TaskSpec, unless that child already
exists.
- Remove all children for which there is no spec in the given list,
unless it is a "triggered" task.
.. note::
It is an error if the task has a non-predicted child that is
not given in the TaskSpecs.
:type task_specs: list(TaskSpec)
:param task_specs: The list of task specs that may become children.
:type state: integer
:param state: The bitmask of states for the new children.
"""
LOG.debug('Updating children for %s' % self.get_name())
if task_specs is None:
raise ValueError('"task_specs" argument is None') # depends on [control=['if'], data=[]]
add = task_specs[:]
# Create a list of all children that are no longer needed.
remove = []
for child in self.children:
# Triggered tasks are never removed.
if child.triggered:
continue # depends on [control=['if'], data=[]]
# Check whether the task needs to be removed.
if child.task_spec in add:
add.remove(child.task_spec)
continue # depends on [control=['if'], data=['add']]
# Non-predicted tasks must not be removed, so they HAVE to be in
# the given task spec list.
if child._is_definite():
raise WorkflowException(self.task_spec, 'removal of non-predicted child %s' % repr(child)) # depends on [control=['if'], data=[]]
remove.append(child) # depends on [control=['for'], data=['child']]
# Remove and add the children accordingly.
for child in remove:
self.children.remove(child) # depends on [control=['for'], data=['child']]
for task_spec in add:
self._add_child(task_spec, state) # depends on [control=['for'], data=['task_spec']]
|
def handleMatch(self, m):
username = self.unescape(m.group(2))
"""Makesure `username` is registered and actived."""
if MARTOR_ENABLE_CONFIGS['mention'] == 'true':
if username in [u.username for u in User.objects.exclude(is_active=False)]:
url = '{0}{1}/'.format(MARTOR_MARKDOWN_BASE_MENTION_URL, username)
el = markdown.util.etree.Element('a')
el.set('href', url)
el.set('class', 'direct-mention-link')
el.text = markdown.util.AtomicString('@' + username)
return el
|
def function[handleMatch, parameter[self, m]]:
variable[username] assign[=] call[name[self].unescape, parameter[call[name[m].group, parameter[constant[2]]]]]
constant[Makesure `username` is registered and actived.]
if compare[call[name[MARTOR_ENABLE_CONFIGS]][constant[mention]] equal[==] constant[true]] begin[:]
if compare[name[username] in <ast.ListComp object at 0x7da207f9be50>] begin[:]
variable[url] assign[=] call[constant[{0}{1}/].format, parameter[name[MARTOR_MARKDOWN_BASE_MENTION_URL], name[username]]]
variable[el] assign[=] call[name[markdown].util.etree.Element, parameter[constant[a]]]
call[name[el].set, parameter[constant[href], name[url]]]
call[name[el].set, parameter[constant[class], constant[direct-mention-link]]]
name[el].text assign[=] call[name[markdown].util.AtomicString, parameter[binary_operation[constant[@] + name[username]]]]
return[name[el]]
|
keyword[def] identifier[handleMatch] ( identifier[self] , identifier[m] ):
identifier[username] = identifier[self] . identifier[unescape] ( identifier[m] . identifier[group] ( literal[int] ))
literal[string]
keyword[if] identifier[MARTOR_ENABLE_CONFIGS] [ literal[string] ]== literal[string] :
keyword[if] identifier[username] keyword[in] [ identifier[u] . identifier[username] keyword[for] identifier[u] keyword[in] identifier[User] . identifier[objects] . identifier[exclude] ( identifier[is_active] = keyword[False] )]:
identifier[url] = literal[string] . identifier[format] ( identifier[MARTOR_MARKDOWN_BASE_MENTION_URL] , identifier[username] )
identifier[el] = identifier[markdown] . identifier[util] . identifier[etree] . identifier[Element] ( literal[string] )
identifier[el] . identifier[set] ( literal[string] , identifier[url] )
identifier[el] . identifier[set] ( literal[string] , literal[string] )
identifier[el] . identifier[text] = identifier[markdown] . identifier[util] . identifier[AtomicString] ( literal[string] + identifier[username] )
keyword[return] identifier[el]
|
def handleMatch(self, m):
username = self.unescape(m.group(2))
'Makesure `username` is registered and actived.'
if MARTOR_ENABLE_CONFIGS['mention'] == 'true':
if username in [u.username for u in User.objects.exclude(is_active=False)]:
url = '{0}{1}/'.format(MARTOR_MARKDOWN_BASE_MENTION_URL, username)
el = markdown.util.etree.Element('a')
el.set('href', url)
el.set('class', 'direct-mention-link')
el.text = markdown.util.AtomicString('@' + username)
return el # depends on [control=['if'], data=['username']] # depends on [control=['if'], data=[]]
|
def adjustSize(self):
"""
Adjusts the size of this popup to best fit the new widget size.
"""
widget = self.centralWidget()
if widget is None:
super(XPopupWidget, self).adjustSize()
return
widget.adjustSize()
hint = widget.minimumSizeHint()
size = widget.minimumSize()
width = max(size.width(), hint.width())
height = max(size.height(), hint.height())
width += 20
height += 20
if self._buttonBoxVisible:
height += self.buttonBox().height() + 10
if self._titleBarVisible:
height += max(self._dialogButton.height(),
self._closeButton.height()) + 10
curr_w = self.width()
curr_h = self.height()
# determine if we need to move based on our anchor
anchor = self.anchor()
if anchor & (self.Anchor.LeftBottom | self.Anchor.RightBottom | \
self.Anchor.BottomLeft | self.Anchor.BottomCenter | \
self.Anchor.BottomRight):
delta_y = height - curr_h
elif anchor & (self.Anchor.LeftCenter | self.Anchor.RightCenter):
delta_y = (height - curr_h) / 2
else:
delta_y = 0
if anchor & (self.Anchor.RightTop | self.Anchor.RightCenter | \
self.Anchor.RightTop | self.Anchor.TopRight):
delta_x = width - curr_w
elif anchor & (self.Anchor.TopCenter | self.Anchor.BottomCenter):
delta_x = (width - curr_w) / 2
else:
delta_x = 0
self.setMinimumSize(width, height)
self.resize(width, height)
pos = self.pos()
pos.setX(pos.x() - delta_x)
pos.setY(pos.y() - delta_y)
self.move(pos)
|
def function[adjustSize, parameter[self]]:
constant[
Adjusts the size of this popup to best fit the new widget size.
]
variable[widget] assign[=] call[name[self].centralWidget, parameter[]]
if compare[name[widget] is constant[None]] begin[:]
call[call[name[super], parameter[name[XPopupWidget], name[self]]].adjustSize, parameter[]]
return[None]
call[name[widget].adjustSize, parameter[]]
variable[hint] assign[=] call[name[widget].minimumSizeHint, parameter[]]
variable[size] assign[=] call[name[widget].minimumSize, parameter[]]
variable[width] assign[=] call[name[max], parameter[call[name[size].width, parameter[]], call[name[hint].width, parameter[]]]]
variable[height] assign[=] call[name[max], parameter[call[name[size].height, parameter[]], call[name[hint].height, parameter[]]]]
<ast.AugAssign object at 0x7da20cabf700>
<ast.AugAssign object at 0x7da20cabe0e0>
if name[self]._buttonBoxVisible begin[:]
<ast.AugAssign object at 0x7da20cabdf90>
if name[self]._titleBarVisible begin[:]
<ast.AugAssign object at 0x7da20cabe5f0>
variable[curr_w] assign[=] call[name[self].width, parameter[]]
variable[curr_h] assign[=] call[name[self].height, parameter[]]
variable[anchor] assign[=] call[name[self].anchor, parameter[]]
if binary_operation[name[anchor] <ast.BitAnd object at 0x7da2590d6b60> binary_operation[binary_operation[binary_operation[binary_operation[name[self].Anchor.LeftBottom <ast.BitOr object at 0x7da2590d6aa0> name[self].Anchor.RightBottom] <ast.BitOr object at 0x7da2590d6aa0> name[self].Anchor.BottomLeft] <ast.BitOr object at 0x7da2590d6aa0> name[self].Anchor.BottomCenter] <ast.BitOr object at 0x7da2590d6aa0> name[self].Anchor.BottomRight]] begin[:]
variable[delta_y] assign[=] binary_operation[name[height] - name[curr_h]]
if binary_operation[name[anchor] <ast.BitAnd object at 0x7da2590d6b60> binary_operation[binary_operation[binary_operation[name[self].Anchor.RightTop <ast.BitOr object at 0x7da2590d6aa0> name[self].Anchor.RightCenter] <ast.BitOr object at 0x7da2590d6aa0> name[self].Anchor.RightTop] <ast.BitOr object at 0x7da2590d6aa0> name[self].Anchor.TopRight]] begin[:]
variable[delta_x] assign[=] binary_operation[name[width] - name[curr_w]]
call[name[self].setMinimumSize, parameter[name[width], name[height]]]
call[name[self].resize, parameter[name[width], name[height]]]
variable[pos] assign[=] call[name[self].pos, parameter[]]
call[name[pos].setX, parameter[binary_operation[call[name[pos].x, parameter[]] - name[delta_x]]]]
call[name[pos].setY, parameter[binary_operation[call[name[pos].y, parameter[]] - name[delta_y]]]]
call[name[self].move, parameter[name[pos]]]
|
keyword[def] identifier[adjustSize] ( identifier[self] ):
literal[string]
identifier[widget] = identifier[self] . identifier[centralWidget] ()
keyword[if] identifier[widget] keyword[is] keyword[None] :
identifier[super] ( identifier[XPopupWidget] , identifier[self] ). identifier[adjustSize] ()
keyword[return]
identifier[widget] . identifier[adjustSize] ()
identifier[hint] = identifier[widget] . identifier[minimumSizeHint] ()
identifier[size] = identifier[widget] . identifier[minimumSize] ()
identifier[width] = identifier[max] ( identifier[size] . identifier[width] (), identifier[hint] . identifier[width] ())
identifier[height] = identifier[max] ( identifier[size] . identifier[height] (), identifier[hint] . identifier[height] ())
identifier[width] += literal[int]
identifier[height] += literal[int]
keyword[if] identifier[self] . identifier[_buttonBoxVisible] :
identifier[height] += identifier[self] . identifier[buttonBox] (). identifier[height] ()+ literal[int]
keyword[if] identifier[self] . identifier[_titleBarVisible] :
identifier[height] += identifier[max] ( identifier[self] . identifier[_dialogButton] . identifier[height] (),
identifier[self] . identifier[_closeButton] . identifier[height] ())+ literal[int]
identifier[curr_w] = identifier[self] . identifier[width] ()
identifier[curr_h] = identifier[self] . identifier[height] ()
identifier[anchor] = identifier[self] . identifier[anchor] ()
keyword[if] identifier[anchor] &( identifier[self] . identifier[Anchor] . identifier[LeftBottom] | identifier[self] . identifier[Anchor] . identifier[RightBottom] | identifier[self] . identifier[Anchor] . identifier[BottomLeft] | identifier[self] . identifier[Anchor] . identifier[BottomCenter] | identifier[self] . identifier[Anchor] . identifier[BottomRight] ):
identifier[delta_y] = identifier[height] - identifier[curr_h]
keyword[elif] identifier[anchor] &( identifier[self] . identifier[Anchor] . identifier[LeftCenter] | identifier[self] . identifier[Anchor] . identifier[RightCenter] ):
identifier[delta_y] =( identifier[height] - identifier[curr_h] )/ literal[int]
keyword[else] :
identifier[delta_y] = literal[int]
keyword[if] identifier[anchor] &( identifier[self] . identifier[Anchor] . identifier[RightTop] | identifier[self] . identifier[Anchor] . identifier[RightCenter] | identifier[self] . identifier[Anchor] . identifier[RightTop] | identifier[self] . identifier[Anchor] . identifier[TopRight] ):
identifier[delta_x] = identifier[width] - identifier[curr_w]
keyword[elif] identifier[anchor] &( identifier[self] . identifier[Anchor] . identifier[TopCenter] | identifier[self] . identifier[Anchor] . identifier[BottomCenter] ):
identifier[delta_x] =( identifier[width] - identifier[curr_w] )/ literal[int]
keyword[else] :
identifier[delta_x] = literal[int]
identifier[self] . identifier[setMinimumSize] ( identifier[width] , identifier[height] )
identifier[self] . identifier[resize] ( identifier[width] , identifier[height] )
identifier[pos] = identifier[self] . identifier[pos] ()
identifier[pos] . identifier[setX] ( identifier[pos] . identifier[x] ()- identifier[delta_x] )
identifier[pos] . identifier[setY] ( identifier[pos] . identifier[y] ()- identifier[delta_y] )
identifier[self] . identifier[move] ( identifier[pos] )
|
def adjustSize(self):
"""
Adjusts the size of this popup to best fit the new widget size.
"""
widget = self.centralWidget()
if widget is None:
super(XPopupWidget, self).adjustSize()
return # depends on [control=['if'], data=[]]
widget.adjustSize()
hint = widget.minimumSizeHint()
size = widget.minimumSize()
width = max(size.width(), hint.width())
height = max(size.height(), hint.height())
width += 20
height += 20
if self._buttonBoxVisible:
height += self.buttonBox().height() + 10 # depends on [control=['if'], data=[]]
if self._titleBarVisible:
height += max(self._dialogButton.height(), self._closeButton.height()) + 10 # depends on [control=['if'], data=[]]
curr_w = self.width()
curr_h = self.height() # determine if we need to move based on our anchor
anchor = self.anchor()
if anchor & (self.Anchor.LeftBottom | self.Anchor.RightBottom | self.Anchor.BottomLeft | self.Anchor.BottomCenter | self.Anchor.BottomRight):
delta_y = height - curr_h # depends on [control=['if'], data=[]]
elif anchor & (self.Anchor.LeftCenter | self.Anchor.RightCenter):
delta_y = (height - curr_h) / 2 # depends on [control=['if'], data=[]]
else:
delta_y = 0
if anchor & (self.Anchor.RightTop | self.Anchor.RightCenter | self.Anchor.RightTop | self.Anchor.TopRight):
delta_x = width - curr_w # depends on [control=['if'], data=[]]
elif anchor & (self.Anchor.TopCenter | self.Anchor.BottomCenter):
delta_x = (width - curr_w) / 2 # depends on [control=['if'], data=[]]
else:
delta_x = 0
self.setMinimumSize(width, height)
self.resize(width, height)
pos = self.pos()
pos.setX(pos.x() - delta_x)
pos.setY(pos.y() - delta_y)
self.move(pos)
|
def DeregisterPlugin(cls, plugin_class):
"""Deregisters an preprocess plugin class.
Args:
plugin_class (type): preprocess plugin class.
Raises:
KeyError: if plugin class is not set for the corresponding name.
TypeError: if the source type of the plugin class is not supported.
"""
name = getattr(
plugin_class, 'ARTIFACT_DEFINITION_NAME', plugin_class.__name__)
name = name.lower()
if name not in cls._plugins:
raise KeyError(
'Artifact plugin class not set for name: {0:s}.'.format(name))
del cls._plugins[name]
if name in cls._file_system_plugins:
del cls._file_system_plugins[name]
if name in cls._knowledge_base_plugins:
del cls._knowledge_base_plugins[name]
if name in cls._windows_registry_plugins:
del cls._windows_registry_plugins[name]
|
def function[DeregisterPlugin, parameter[cls, plugin_class]]:
constant[Deregisters an preprocess plugin class.
Args:
plugin_class (type): preprocess plugin class.
Raises:
KeyError: if plugin class is not set for the corresponding name.
TypeError: if the source type of the plugin class is not supported.
]
variable[name] assign[=] call[name[getattr], parameter[name[plugin_class], constant[ARTIFACT_DEFINITION_NAME], name[plugin_class].__name__]]
variable[name] assign[=] call[name[name].lower, parameter[]]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[cls]._plugins] begin[:]
<ast.Raise object at 0x7da2044c02b0>
<ast.Delete object at 0x7da2044c2a40>
if compare[name[name] in name[cls]._file_system_plugins] begin[:]
<ast.Delete object at 0x7da2044c2620>
if compare[name[name] in name[cls]._knowledge_base_plugins] begin[:]
<ast.Delete object at 0x7da2044c11b0>
if compare[name[name] in name[cls]._windows_registry_plugins] begin[:]
<ast.Delete object at 0x7da2044c25f0>
|
keyword[def] identifier[DeregisterPlugin] ( identifier[cls] , identifier[plugin_class] ):
literal[string]
identifier[name] = identifier[getattr] (
identifier[plugin_class] , literal[string] , identifier[plugin_class] . identifier[__name__] )
identifier[name] = identifier[name] . identifier[lower] ()
keyword[if] identifier[name] keyword[not] keyword[in] identifier[cls] . identifier[_plugins] :
keyword[raise] identifier[KeyError] (
literal[string] . identifier[format] ( identifier[name] ))
keyword[del] identifier[cls] . identifier[_plugins] [ identifier[name] ]
keyword[if] identifier[name] keyword[in] identifier[cls] . identifier[_file_system_plugins] :
keyword[del] identifier[cls] . identifier[_file_system_plugins] [ identifier[name] ]
keyword[if] identifier[name] keyword[in] identifier[cls] . identifier[_knowledge_base_plugins] :
keyword[del] identifier[cls] . identifier[_knowledge_base_plugins] [ identifier[name] ]
keyword[if] identifier[name] keyword[in] identifier[cls] . identifier[_windows_registry_plugins] :
keyword[del] identifier[cls] . identifier[_windows_registry_plugins] [ identifier[name] ]
|
def DeregisterPlugin(cls, plugin_class):
"""Deregisters an preprocess plugin class.
Args:
plugin_class (type): preprocess plugin class.
Raises:
KeyError: if plugin class is not set for the corresponding name.
TypeError: if the source type of the plugin class is not supported.
"""
name = getattr(plugin_class, 'ARTIFACT_DEFINITION_NAME', plugin_class.__name__)
name = name.lower()
if name not in cls._plugins:
raise KeyError('Artifact plugin class not set for name: {0:s}.'.format(name)) # depends on [control=['if'], data=['name']]
del cls._plugins[name]
if name in cls._file_system_plugins:
del cls._file_system_plugins[name] # depends on [control=['if'], data=['name']]
if name in cls._knowledge_base_plugins:
del cls._knowledge_base_plugins[name] # depends on [control=['if'], data=['name']]
if name in cls._windows_registry_plugins:
del cls._windows_registry_plugins[name] # depends on [control=['if'], data=['name']]
|
def indices(self):
"""Returns dict {group name -> group indices}."""
self._prep_pandas_groupby()
def extract_group_indices(frame):
return (frame[0], frame[1].index)
return self._mergedRDD.map(extract_group_indices).collectAsMap()
|
def function[indices, parameter[self]]:
constant[Returns dict {group name -> group indices}.]
call[name[self]._prep_pandas_groupby, parameter[]]
def function[extract_group_indices, parameter[frame]]:
return[tuple[[<ast.Subscript object at 0x7da2047eaa40>, <ast.Attribute object at 0x7da2047ea050>]]]
return[call[call[name[self]._mergedRDD.map, parameter[name[extract_group_indices]]].collectAsMap, parameter[]]]
|
keyword[def] identifier[indices] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_prep_pandas_groupby] ()
keyword[def] identifier[extract_group_indices] ( identifier[frame] ):
keyword[return] ( identifier[frame] [ literal[int] ], identifier[frame] [ literal[int] ]. identifier[index] )
keyword[return] identifier[self] . identifier[_mergedRDD] . identifier[map] ( identifier[extract_group_indices] ). identifier[collectAsMap] ()
|
def indices(self):
"""Returns dict {group name -> group indices}."""
self._prep_pandas_groupby()
def extract_group_indices(frame):
return (frame[0], frame[1].index)
return self._mergedRDD.map(extract_group_indices).collectAsMap()
|
def create_big_url(name):
""" If name looks like a url, with an http, add an entry for it in BIG_URLS """
# BIG side effect
global BIG_URLS
filemeta = get_url_filemeta(name)
if not filemeta:
return None
filename = filemeta['filename']
remote_size = filemeta['remote_size']
url = filemeta['url']
name = filename.split('.')
name = (name[0] if name[0] not in ('', '.') else name[1]).replace(' ', '-')
name = name.lower().strip()
BIG_URLS[name] = (url, int(remote_size or -1), filename)
return name
|
def function[create_big_url, parameter[name]]:
constant[ If name looks like a url, with an http, add an entry for it in BIG_URLS ]
<ast.Global object at 0x7da2047ea3e0>
variable[filemeta] assign[=] call[name[get_url_filemeta], parameter[name[name]]]
if <ast.UnaryOp object at 0x7da2047e84f0> begin[:]
return[constant[None]]
variable[filename] assign[=] call[name[filemeta]][constant[filename]]
variable[remote_size] assign[=] call[name[filemeta]][constant[remote_size]]
variable[url] assign[=] call[name[filemeta]][constant[url]]
variable[name] assign[=] call[name[filename].split, parameter[constant[.]]]
variable[name] assign[=] call[<ast.IfExp object at 0x7da2047e8d30>.replace, parameter[constant[ ], constant[-]]]
variable[name] assign[=] call[call[name[name].lower, parameter[]].strip, parameter[]]
call[name[BIG_URLS]][name[name]] assign[=] tuple[[<ast.Name object at 0x7da2047e8d90>, <ast.Call object at 0x7da2047e8400>, <ast.Name object at 0x7da2047eb1c0>]]
return[name[name]]
|
keyword[def] identifier[create_big_url] ( identifier[name] ):
literal[string]
keyword[global] identifier[BIG_URLS]
identifier[filemeta] = identifier[get_url_filemeta] ( identifier[name] )
keyword[if] keyword[not] identifier[filemeta] :
keyword[return] keyword[None]
identifier[filename] = identifier[filemeta] [ literal[string] ]
identifier[remote_size] = identifier[filemeta] [ literal[string] ]
identifier[url] = identifier[filemeta] [ literal[string] ]
identifier[name] = identifier[filename] . identifier[split] ( literal[string] )
identifier[name] =( identifier[name] [ literal[int] ] keyword[if] identifier[name] [ literal[int] ] keyword[not] keyword[in] ( literal[string] , literal[string] ) keyword[else] identifier[name] [ literal[int] ]). identifier[replace] ( literal[string] , literal[string] )
identifier[name] = identifier[name] . identifier[lower] (). identifier[strip] ()
identifier[BIG_URLS] [ identifier[name] ]=( identifier[url] , identifier[int] ( identifier[remote_size] keyword[or] - literal[int] ), identifier[filename] )
keyword[return] identifier[name]
|
def create_big_url(name):
""" If name looks like a url, with an http, add an entry for it in BIG_URLS """
# BIG side effect
global BIG_URLS
filemeta = get_url_filemeta(name)
if not filemeta:
return None # depends on [control=['if'], data=[]]
filename = filemeta['filename']
remote_size = filemeta['remote_size']
url = filemeta['url']
name = filename.split('.')
name = (name[0] if name[0] not in ('', '.') else name[1]).replace(' ', '-')
name = name.lower().strip()
BIG_URLS[name] = (url, int(remote_size or -1), filename)
return name
|
def extract_metatile(io, fmt, offset=None):
"""
Extract the tile at the given offset (defaults to 0/0/0) and format from
the metatile in the file-like object io.
"""
ext = fmt.extension
if offset is None:
tile_name = '0/0/0.%s' % ext
else:
tile_name = '%d/%d/%d.%s' % (offset.zoom, offset.column, offset.row,
ext)
with zipfile.ZipFile(io, mode='r') as zf:
if tile_name in zf.namelist():
return zf.read(tile_name)
else:
return None
|
def function[extract_metatile, parameter[io, fmt, offset]]:
constant[
Extract the tile at the given offset (defaults to 0/0/0) and format from
the metatile in the file-like object io.
]
variable[ext] assign[=] name[fmt].extension
if compare[name[offset] is constant[None]] begin[:]
variable[tile_name] assign[=] binary_operation[constant[0/0/0.%s] <ast.Mod object at 0x7da2590d6920> name[ext]]
with call[name[zipfile].ZipFile, parameter[name[io]]] begin[:]
if compare[name[tile_name] in call[name[zf].namelist, parameter[]]] begin[:]
return[call[name[zf].read, parameter[name[tile_name]]]]
|
keyword[def] identifier[extract_metatile] ( identifier[io] , identifier[fmt] , identifier[offset] = keyword[None] ):
literal[string]
identifier[ext] = identifier[fmt] . identifier[extension]
keyword[if] identifier[offset] keyword[is] keyword[None] :
identifier[tile_name] = literal[string] % identifier[ext]
keyword[else] :
identifier[tile_name] = literal[string] %( identifier[offset] . identifier[zoom] , identifier[offset] . identifier[column] , identifier[offset] . identifier[row] ,
identifier[ext] )
keyword[with] identifier[zipfile] . identifier[ZipFile] ( identifier[io] , identifier[mode] = literal[string] ) keyword[as] identifier[zf] :
keyword[if] identifier[tile_name] keyword[in] identifier[zf] . identifier[namelist] ():
keyword[return] identifier[zf] . identifier[read] ( identifier[tile_name] )
keyword[else] :
keyword[return] keyword[None]
|
def extract_metatile(io, fmt, offset=None):
"""
Extract the tile at the given offset (defaults to 0/0/0) and format from
the metatile in the file-like object io.
"""
ext = fmt.extension
if offset is None:
tile_name = '0/0/0.%s' % ext # depends on [control=['if'], data=[]]
else:
tile_name = '%d/%d/%d.%s' % (offset.zoom, offset.column, offset.row, ext)
with zipfile.ZipFile(io, mode='r') as zf:
if tile_name in zf.namelist():
return zf.read(tile_name) # depends on [control=['if'], data=['tile_name']]
else:
return None # depends on [control=['with'], data=['zf']]
|
def handle_update(self, action, params):
"""Handle the specified action on this component."""
_LOGGER.debug('Keypad: "%s" %s Action: %s Params: %s"' % (
self._keypad.name, self, action, params))
ev_map = {
Button._ACTION_PRESS: Button.Event.PRESSED,
Button._ACTION_RELEASE: Button.Event.RELEASED
}
if action not in ev_map:
_LOGGER.debug("Unknown action %d for button %d in keypad %d" % (
action, self.number, self.keypad.name))
return False
self._dispatch_event(ev_map[action], {})
return True
|
def function[handle_update, parameter[self, action, params]]:
constant[Handle the specified action on this component.]
call[name[_LOGGER].debug, parameter[binary_operation[constant[Keypad: "%s" %s Action: %s Params: %s"] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b054a980>, <ast.Name object at 0x7da1b0548f40>, <ast.Name object at 0x7da1b054b250>, <ast.Name object at 0x7da1b05481f0>]]]]]
variable[ev_map] assign[=] dictionary[[<ast.Attribute object at 0x7da1b054a1a0>, <ast.Attribute object at 0x7da1b054b7f0>], [<ast.Attribute object at 0x7da1b05bd1e0>, <ast.Attribute object at 0x7da1b05bc430>]]
if compare[name[action] <ast.NotIn object at 0x7da2590d7190> name[ev_map]] begin[:]
call[name[_LOGGER].debug, parameter[binary_operation[constant[Unknown action %d for button %d in keypad %d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b05bd2a0>, <ast.Attribute object at 0x7da1b05bd4b0>, <ast.Attribute object at 0x7da1b05bffa0>]]]]]
return[constant[False]]
call[name[self]._dispatch_event, parameter[call[name[ev_map]][name[action]], dictionary[[], []]]]
return[constant[True]]
|
keyword[def] identifier[handle_update] ( identifier[self] , identifier[action] , identifier[params] ):
literal[string]
identifier[_LOGGER] . identifier[debug] ( literal[string] %(
identifier[self] . identifier[_keypad] . identifier[name] , identifier[self] , identifier[action] , identifier[params] ))
identifier[ev_map] ={
identifier[Button] . identifier[_ACTION_PRESS] : identifier[Button] . identifier[Event] . identifier[PRESSED] ,
identifier[Button] . identifier[_ACTION_RELEASE] : identifier[Button] . identifier[Event] . identifier[RELEASED]
}
keyword[if] identifier[action] keyword[not] keyword[in] identifier[ev_map] :
identifier[_LOGGER] . identifier[debug] ( literal[string] %(
identifier[action] , identifier[self] . identifier[number] , identifier[self] . identifier[keypad] . identifier[name] ))
keyword[return] keyword[False]
identifier[self] . identifier[_dispatch_event] ( identifier[ev_map] [ identifier[action] ],{})
keyword[return] keyword[True]
|
def handle_update(self, action, params):
"""Handle the specified action on this component."""
_LOGGER.debug('Keypad: "%s" %s Action: %s Params: %s"' % (self._keypad.name, self, action, params))
ev_map = {Button._ACTION_PRESS: Button.Event.PRESSED, Button._ACTION_RELEASE: Button.Event.RELEASED}
if action not in ev_map:
_LOGGER.debug('Unknown action %d for button %d in keypad %d' % (action, self.number, self.keypad.name))
return False # depends on [control=['if'], data=['action']]
self._dispatch_event(ev_map[action], {})
return True
|
def get_resource_group(access_token, subscription_id, rgname):
'''Get details about the named resource group.
Args:
access_token (str): A valid Azure authentication token.
subscription_id (str): Azure subscription id.
rgname (str): Azure resource group name.
Returns:
HTTP response. JSON body.
'''
endpoint = ''.join([get_rm_endpoint(),
'/subscriptions/', subscription_id,
'/resourceGroups/', rgname,
'?api-version=', RESOURCE_API])
return do_get(endpoint, access_token)
|
def function[get_resource_group, parameter[access_token, subscription_id, rgname]]:
constant[Get details about the named resource group.
Args:
access_token (str): A valid Azure authentication token.
subscription_id (str): Azure subscription id.
rgname (str): Azure resource group name.
Returns:
HTTP response. JSON body.
]
variable[endpoint] assign[=] call[constant[].join, parameter[list[[<ast.Call object at 0x7da1b0475390>, <ast.Constant object at 0x7da1b0477f70>, <ast.Name object at 0x7da1b0477e80>, <ast.Constant object at 0x7da1b04758a0>, <ast.Name object at 0x7da1b0475570>, <ast.Constant object at 0x7da1b0474310>, <ast.Name object at 0x7da1b04755d0>]]]]
return[call[name[do_get], parameter[name[endpoint], name[access_token]]]]
|
keyword[def] identifier[get_resource_group] ( identifier[access_token] , identifier[subscription_id] , identifier[rgname] ):
literal[string]
identifier[endpoint] = literal[string] . identifier[join] ([ identifier[get_rm_endpoint] (),
literal[string] , identifier[subscription_id] ,
literal[string] , identifier[rgname] ,
literal[string] , identifier[RESOURCE_API] ])
keyword[return] identifier[do_get] ( identifier[endpoint] , identifier[access_token] )
|
def get_resource_group(access_token, subscription_id, rgname):
"""Get details about the named resource group.
Args:
access_token (str): A valid Azure authentication token.
subscription_id (str): Azure subscription id.
rgname (str): Azure resource group name.
Returns:
HTTP response. JSON body.
"""
endpoint = ''.join([get_rm_endpoint(), '/subscriptions/', subscription_id, '/resourceGroups/', rgname, '?api-version=', RESOURCE_API])
return do_get(endpoint, access_token)
|
def create(cls, extension_name=None, extension_tag=None,
extension_type=None):
"""
Construct an ExtensionInformation object from provided extension
values.
Args:
extension_name (str): The name of the extension. Optional,
defaults to None.
extension_tag (int): The tag number of the extension. Optional,
defaults to None.
extension_type (int): The type index of the extension. Optional,
defaults to None.
Returns:
ExtensionInformation: The newly created set of extension
information.
Example:
>>> x = ExtensionInformation.create('extension', 1, 1)
>>> x.extension_name.value
ExtensionName(value='extension')
>>> x.extension_tag.value
ExtensionTag(value=1)
>>> x.extension_type.value
ExtensionType(value=1)
"""
extension_name = ExtensionName(extension_name)
extension_tag = ExtensionTag(extension_tag)
extension_type = ExtensionType(extension_type)
return ExtensionInformation(
extension_name=extension_name,
extension_tag=extension_tag,
extension_type=extension_type)
|
def function[create, parameter[cls, extension_name, extension_tag, extension_type]]:
constant[
Construct an ExtensionInformation object from provided extension
values.
Args:
extension_name (str): The name of the extension. Optional,
defaults to None.
extension_tag (int): The tag number of the extension. Optional,
defaults to None.
extension_type (int): The type index of the extension. Optional,
defaults to None.
Returns:
ExtensionInformation: The newly created set of extension
information.
Example:
>>> x = ExtensionInformation.create('extension', 1, 1)
>>> x.extension_name.value
ExtensionName(value='extension')
>>> x.extension_tag.value
ExtensionTag(value=1)
>>> x.extension_type.value
ExtensionType(value=1)
]
variable[extension_name] assign[=] call[name[ExtensionName], parameter[name[extension_name]]]
variable[extension_tag] assign[=] call[name[ExtensionTag], parameter[name[extension_tag]]]
variable[extension_type] assign[=] call[name[ExtensionType], parameter[name[extension_type]]]
return[call[name[ExtensionInformation], parameter[]]]
|
keyword[def] identifier[create] ( identifier[cls] , identifier[extension_name] = keyword[None] , identifier[extension_tag] = keyword[None] ,
identifier[extension_type] = keyword[None] ):
literal[string]
identifier[extension_name] = identifier[ExtensionName] ( identifier[extension_name] )
identifier[extension_tag] = identifier[ExtensionTag] ( identifier[extension_tag] )
identifier[extension_type] = identifier[ExtensionType] ( identifier[extension_type] )
keyword[return] identifier[ExtensionInformation] (
identifier[extension_name] = identifier[extension_name] ,
identifier[extension_tag] = identifier[extension_tag] ,
identifier[extension_type] = identifier[extension_type] )
|
def create(cls, extension_name=None, extension_tag=None, extension_type=None):
"""
Construct an ExtensionInformation object from provided extension
values.
Args:
extension_name (str): The name of the extension. Optional,
defaults to None.
extension_tag (int): The tag number of the extension. Optional,
defaults to None.
extension_type (int): The type index of the extension. Optional,
defaults to None.
Returns:
ExtensionInformation: The newly created set of extension
information.
Example:
>>> x = ExtensionInformation.create('extension', 1, 1)
>>> x.extension_name.value
ExtensionName(value='extension')
>>> x.extension_tag.value
ExtensionTag(value=1)
>>> x.extension_type.value
ExtensionType(value=1)
"""
extension_name = ExtensionName(extension_name)
extension_tag = ExtensionTag(extension_tag)
extension_type = ExtensionType(extension_type)
return ExtensionInformation(extension_name=extension_name, extension_tag=extension_tag, extension_type=extension_type)
|
def get_template(self):
"""
读取一个Excel模板,将此Excel的所有行读出来,并且识别特殊的标记进行记录
:return: 返回读取后的模板,结果类似:
[
{'cols': #各列,与subs不会同时生效
'subs':[ #子模板
{'cols':#各列,
'subs': #子模板
'field': #对应数据中字段名称
},
...
]
'field': #对应数据中字段名称
},
...
]
子模板的判断根据第一列是否为 {{for field}} 来判断,结束使用 {{end}}
"""
rows = []
stack = []
stack.append(rows)
#top用来记录当前栈
top = rows
for i in range(1, self.sheet.max_row+1):
cell = self.sheet.cell(row=i, column=1)
#是否子模板开始
if (isinstance(cell.value, (str, unicode)) and
cell.value.startswith('{{for ') and
cell.value.endswith('}}')):
row = {'field':cell.value[6:-2].strip(), 'cols':[], 'subs':[]}
top.append(row)
top = row['subs']
stack.append(top)
if self.begin == 1:
self.begin = i
#是否子模板结束
elif (isinstance(cell.value, (str, unicode)) and
cell.value == '{{end}}'):
stack.pop()
top = stack[-1]
else:
row = {'cols':[], 'subs':[]}
cols = row['cols']
for j in range(1, self.sheet.max_column+1):
cell = self.sheet.cell(row=i, column=j)
v = self.process_cell(i, j, cell)
if v:
cols.append(v)
if row['cols'] or row['subs']:
top.append(row)
# pprint(rows)
return rows
|
def function[get_template, parameter[self]]:
constant[
读取一个Excel模板,将此Excel的所有行读出来,并且识别特殊的标记进行记录
:return: 返回读取后的模板,结果类似:
[
{'cols': #各列,与subs不会同时生效
'subs':[ #子模板
{'cols':#各列,
'subs': #子模板
'field': #对应数据中字段名称
},
...
]
'field': #对应数据中字段名称
},
...
]
子模板的判断根据第一列是否为 {{for field}} 来判断,结束使用 {{end}}
]
variable[rows] assign[=] list[[]]
variable[stack] assign[=] list[[]]
call[name[stack].append, parameter[name[rows]]]
variable[top] assign[=] name[rows]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], binary_operation[name[self].sheet.max_row + constant[1]]]]] begin[:]
variable[cell] assign[=] call[name[self].sheet.cell, parameter[]]
if <ast.BoolOp object at 0x7da1b26af520> begin[:]
variable[row] assign[=] dictionary[[<ast.Constant object at 0x7da1b26ae5f0>, <ast.Constant object at 0x7da1b26ae290>, <ast.Constant object at 0x7da1b26afb80>], [<ast.Call object at 0x7da1b26ac640>, <ast.List object at 0x7da1b26aed40>, <ast.List object at 0x7da1b26aef50>]]
call[name[top].append, parameter[name[row]]]
variable[top] assign[=] call[name[row]][constant[subs]]
call[name[stack].append, parameter[name[top]]]
if compare[name[self].begin equal[==] constant[1]] begin[:]
name[self].begin assign[=] name[i]
return[name[rows]]
|
keyword[def] identifier[get_template] ( identifier[self] ):
literal[string]
identifier[rows] =[]
identifier[stack] =[]
identifier[stack] . identifier[append] ( identifier[rows] )
identifier[top] = identifier[rows]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[sheet] . identifier[max_row] + literal[int] ):
identifier[cell] = identifier[self] . identifier[sheet] . identifier[cell] ( identifier[row] = identifier[i] , identifier[column] = literal[int] )
keyword[if] ( identifier[isinstance] ( identifier[cell] . identifier[value] ,( identifier[str] , identifier[unicode] )) keyword[and]
identifier[cell] . identifier[value] . identifier[startswith] ( literal[string] ) keyword[and]
identifier[cell] . identifier[value] . identifier[endswith] ( literal[string] )):
identifier[row] ={ literal[string] : identifier[cell] . identifier[value] [ literal[int] :- literal[int] ]. identifier[strip] (), literal[string] :[], literal[string] :[]}
identifier[top] . identifier[append] ( identifier[row] )
identifier[top] = identifier[row] [ literal[string] ]
identifier[stack] . identifier[append] ( identifier[top] )
keyword[if] identifier[self] . identifier[begin] == literal[int] :
identifier[self] . identifier[begin] = identifier[i]
keyword[elif] ( identifier[isinstance] ( identifier[cell] . identifier[value] ,( identifier[str] , identifier[unicode] )) keyword[and]
identifier[cell] . identifier[value] == literal[string] ):
identifier[stack] . identifier[pop] ()
identifier[top] = identifier[stack] [- literal[int] ]
keyword[else] :
identifier[row] ={ literal[string] :[], literal[string] :[]}
identifier[cols] = identifier[row] [ literal[string] ]
keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[sheet] . identifier[max_column] + literal[int] ):
identifier[cell] = identifier[self] . identifier[sheet] . identifier[cell] ( identifier[row] = identifier[i] , identifier[column] = identifier[j] )
identifier[v] = identifier[self] . identifier[process_cell] ( identifier[i] , identifier[j] , identifier[cell] )
keyword[if] identifier[v] :
identifier[cols] . identifier[append] ( identifier[v] )
keyword[if] identifier[row] [ literal[string] ] keyword[or] identifier[row] [ literal[string] ]:
identifier[top] . identifier[append] ( identifier[row] )
keyword[return] identifier[rows]
|
def get_template(self):
"""
读取一个Excel模板,将此Excel的所有行读出来,并且识别特殊的标记进行记录
:return: 返回读取后的模板,结果类似:
[
{'cols': #各列,与subs不会同时生效
'subs':[ #子模板
{'cols':#各列,
'subs': #子模板
'field': #对应数据中字段名称
},
...
]
'field': #对应数据中字段名称
},
...
]
子模板的判断根据第一列是否为 {{for field}} 来判断,结束使用 {{end}}
"""
rows = []
stack = []
stack.append(rows)
#top用来记录当前栈
top = rows
for i in range(1, self.sheet.max_row + 1):
cell = self.sheet.cell(row=i, column=1)
#是否子模板开始
if isinstance(cell.value, (str, unicode)) and cell.value.startswith('{{for ') and cell.value.endswith('}}'):
row = {'field': cell.value[6:-2].strip(), 'cols': [], 'subs': []}
top.append(row)
top = row['subs']
stack.append(top)
if self.begin == 1:
self.begin = i # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
#是否子模板结束
elif isinstance(cell.value, (str, unicode)) and cell.value == '{{end}}':
stack.pop()
top = stack[-1] # depends on [control=['if'], data=[]]
else:
row = {'cols': [], 'subs': []}
cols = row['cols']
for j in range(1, self.sheet.max_column + 1):
cell = self.sheet.cell(row=i, column=j)
v = self.process_cell(i, j, cell)
if v:
cols.append(v) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']]
if row['cols'] or row['subs']:
top.append(row) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
# pprint(rows)
return rows
|
def eventReminder(self, thread_id, time, title, location="", location_id=""):
"""
Deprecated. Use :func:`fbchat.Client.createPlan` instead
"""
plan = Plan(time=time, title=title, location=location, location_id=location_id)
self.createPlan(plan=plan, thread_id=thread_id)
|
def function[eventReminder, parameter[self, thread_id, time, title, location, location_id]]:
constant[
Deprecated. Use :func:`fbchat.Client.createPlan` instead
]
variable[plan] assign[=] call[name[Plan], parameter[]]
call[name[self].createPlan, parameter[]]
|
keyword[def] identifier[eventReminder] ( identifier[self] , identifier[thread_id] , identifier[time] , identifier[title] , identifier[location] = literal[string] , identifier[location_id] = literal[string] ):
literal[string]
identifier[plan] = identifier[Plan] ( identifier[time] = identifier[time] , identifier[title] = identifier[title] , identifier[location] = identifier[location] , identifier[location_id] = identifier[location_id] )
identifier[self] . identifier[createPlan] ( identifier[plan] = identifier[plan] , identifier[thread_id] = identifier[thread_id] )
|
def eventReminder(self, thread_id, time, title, location='', location_id=''):
"""
Deprecated. Use :func:`fbchat.Client.createPlan` instead
"""
plan = Plan(time=time, title=title, location=location, location_id=location_id)
self.createPlan(plan=plan, thread_id=thread_id)
|
def moment_magnitude_scalar(moment):
'''
Uses Hanks & Kanamori formula for calculating moment magnitude from
a scalar moment (Nm)
'''
if isinstance(moment, np.ndarray):
return (2. / 3.) * (np.log10(moment) - 9.05)
else:
return (2. / 3.) * (log10(moment) - 9.05)
|
def function[moment_magnitude_scalar, parameter[moment]]:
constant[
Uses Hanks & Kanamori formula for calculating moment magnitude from
a scalar moment (Nm)
]
if call[name[isinstance], parameter[name[moment], name[np].ndarray]] begin[:]
return[binary_operation[binary_operation[constant[2.0] / constant[3.0]] * binary_operation[call[name[np].log10, parameter[name[moment]]] - constant[9.05]]]]
|
keyword[def] identifier[moment_magnitude_scalar] ( identifier[moment] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[moment] , identifier[np] . identifier[ndarray] ):
keyword[return] ( literal[int] / literal[int] )*( identifier[np] . identifier[log10] ( identifier[moment] )- literal[int] )
keyword[else] :
keyword[return] ( literal[int] / literal[int] )*( identifier[log10] ( identifier[moment] )- literal[int] )
|
def moment_magnitude_scalar(moment):
"""
Uses Hanks & Kanamori formula for calculating moment magnitude from
a scalar moment (Nm)
"""
if isinstance(moment, np.ndarray):
return 2.0 / 3.0 * (np.log10(moment) - 9.05) # depends on [control=['if'], data=[]]
else:
return 2.0 / 3.0 * (log10(moment) - 9.05)
|
def summary_pb(self):
"""Create a top-level experiment summary describing this experiment.
The resulting summary should be written to a log directory that
encloses all the individual sessions' log directories.
Analogous to the low-level `experiment_pb` function in the
`hparams.summary` module.
"""
hparam_infos = []
for hparam in self._hparams:
info = api_pb2.HParamInfo(
name=hparam.name,
description=hparam.description,
display_name=hparam.display_name,
)
domain = hparam.domain
if domain is not None:
domain.update_hparam_info(info)
hparam_infos.append(info)
metric_infos = [metric.as_proto() for metric in self._metrics]
return summary.experiment_pb(
hparam_infos=hparam_infos,
metric_infos=metric_infos,
user=self._user,
description=self._description,
time_created_secs=self._time_created_secs,
)
|
def function[summary_pb, parameter[self]]:
constant[Create a top-level experiment summary describing this experiment.
The resulting summary should be written to a log directory that
encloses all the individual sessions' log directories.
Analogous to the low-level `experiment_pb` function in the
`hparams.summary` module.
]
variable[hparam_infos] assign[=] list[[]]
for taget[name[hparam]] in starred[name[self]._hparams] begin[:]
variable[info] assign[=] call[name[api_pb2].HParamInfo, parameter[]]
variable[domain] assign[=] name[hparam].domain
if compare[name[domain] is_not constant[None]] begin[:]
call[name[domain].update_hparam_info, parameter[name[info]]]
call[name[hparam_infos].append, parameter[name[info]]]
variable[metric_infos] assign[=] <ast.ListComp object at 0x7da1b21cf4c0>
return[call[name[summary].experiment_pb, parameter[]]]
|
keyword[def] identifier[summary_pb] ( identifier[self] ):
literal[string]
identifier[hparam_infos] =[]
keyword[for] identifier[hparam] keyword[in] identifier[self] . identifier[_hparams] :
identifier[info] = identifier[api_pb2] . identifier[HParamInfo] (
identifier[name] = identifier[hparam] . identifier[name] ,
identifier[description] = identifier[hparam] . identifier[description] ,
identifier[display_name] = identifier[hparam] . identifier[display_name] ,
)
identifier[domain] = identifier[hparam] . identifier[domain]
keyword[if] identifier[domain] keyword[is] keyword[not] keyword[None] :
identifier[domain] . identifier[update_hparam_info] ( identifier[info] )
identifier[hparam_infos] . identifier[append] ( identifier[info] )
identifier[metric_infos] =[ identifier[metric] . identifier[as_proto] () keyword[for] identifier[metric] keyword[in] identifier[self] . identifier[_metrics] ]
keyword[return] identifier[summary] . identifier[experiment_pb] (
identifier[hparam_infos] = identifier[hparam_infos] ,
identifier[metric_infos] = identifier[metric_infos] ,
identifier[user] = identifier[self] . identifier[_user] ,
identifier[description] = identifier[self] . identifier[_description] ,
identifier[time_created_secs] = identifier[self] . identifier[_time_created_secs] ,
)
|
def summary_pb(self):
"""Create a top-level experiment summary describing this experiment.
The resulting summary should be written to a log directory that
encloses all the individual sessions' log directories.
Analogous to the low-level `experiment_pb` function in the
`hparams.summary` module.
"""
hparam_infos = []
for hparam in self._hparams:
info = api_pb2.HParamInfo(name=hparam.name, description=hparam.description, display_name=hparam.display_name)
domain = hparam.domain
if domain is not None:
domain.update_hparam_info(info) # depends on [control=['if'], data=['domain']]
hparam_infos.append(info) # depends on [control=['for'], data=['hparam']]
metric_infos = [metric.as_proto() for metric in self._metrics]
return summary.experiment_pb(hparam_infos=hparam_infos, metric_infos=metric_infos, user=self._user, description=self._description, time_created_secs=self._time_created_secs)
|
def describe_vpcs(vpc_id=None, name=None, cidr=None, tags=None,
region=None, key=None, keyid=None, profile=None):
'''
Describe all VPCs, matching the filter criteria if provided.
Returns a list of dictionaries with interesting properties.
.. versionadded:: 2015.8.0
CLI Example:
.. code-block:: bash
salt myminion boto_vpc.describe_vpcs
'''
keys = ('id',
'cidr_block',
'is_default',
'state',
'tags',
'dhcp_options_id',
'instance_tenancy')
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
filter_parameters = {'filters': {}}
if vpc_id:
filter_parameters['vpc_ids'] = [vpc_id]
if cidr:
filter_parameters['filters']['cidr'] = cidr
if name:
filter_parameters['filters']['tag:Name'] = name
if tags:
for tag_name, tag_value in six.iteritems(tags):
filter_parameters['filters']['tag:{0}'.format(tag_name)] = tag_value
vpcs = conn.get_all_vpcs(**filter_parameters)
if vpcs:
ret = []
for vpc in vpcs:
_r = dict([(k, getattr(vpc, k)) for k in keys])
_r.update({'region': getattr(vpc, 'region').name})
ret.append(_r)
return {'vpcs': ret}
else:
return {'vpcs': []}
except BotoServerError as e:
return {'error': __utils__['boto.get_error'](e)}
|
def function[describe_vpcs, parameter[vpc_id, name, cidr, tags, region, key, keyid, profile]]:
constant[
Describe all VPCs, matching the filter criteria if provided.
Returns a list of dictionaries with interesting properties.
.. versionadded:: 2015.8.0
CLI Example:
.. code-block:: bash
salt myminion boto_vpc.describe_vpcs
]
variable[keys] assign[=] tuple[[<ast.Constant object at 0x7da1b21a55a0>, <ast.Constant object at 0x7da1b21a7640>, <ast.Constant object at 0x7da1b21a7a60>, <ast.Constant object at 0x7da1b21a61d0>, <ast.Constant object at 0x7da1b21a6380>, <ast.Constant object at 0x7da1b21a6bf0>, <ast.Constant object at 0x7da1b21a59f0>]]
<ast.Try object at 0x7da1b21a7a30>
|
keyword[def] identifier[describe_vpcs] ( identifier[vpc_id] = keyword[None] , identifier[name] = keyword[None] , identifier[cidr] = keyword[None] , identifier[tags] = keyword[None] ,
identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ):
literal[string]
identifier[keys] =( literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] )
keyword[try] :
identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
identifier[filter_parameters] ={ literal[string] :{}}
keyword[if] identifier[vpc_id] :
identifier[filter_parameters] [ literal[string] ]=[ identifier[vpc_id] ]
keyword[if] identifier[cidr] :
identifier[filter_parameters] [ literal[string] ][ literal[string] ]= identifier[cidr]
keyword[if] identifier[name] :
identifier[filter_parameters] [ literal[string] ][ literal[string] ]= identifier[name]
keyword[if] identifier[tags] :
keyword[for] identifier[tag_name] , identifier[tag_value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[tags] ):
identifier[filter_parameters] [ literal[string] ][ literal[string] . identifier[format] ( identifier[tag_name] )]= identifier[tag_value]
identifier[vpcs] = identifier[conn] . identifier[get_all_vpcs] (** identifier[filter_parameters] )
keyword[if] identifier[vpcs] :
identifier[ret] =[]
keyword[for] identifier[vpc] keyword[in] identifier[vpcs] :
identifier[_r] = identifier[dict] ([( identifier[k] , identifier[getattr] ( identifier[vpc] , identifier[k] )) keyword[for] identifier[k] keyword[in] identifier[keys] ])
identifier[_r] . identifier[update] ({ literal[string] : identifier[getattr] ( identifier[vpc] , literal[string] ). identifier[name] })
identifier[ret] . identifier[append] ( identifier[_r] )
keyword[return] { literal[string] : identifier[ret] }
keyword[else] :
keyword[return] { literal[string] :[]}
keyword[except] identifier[BotoServerError] keyword[as] identifier[e] :
keyword[return] { literal[string] : identifier[__utils__] [ literal[string] ]( identifier[e] )}
|
def describe_vpcs(vpc_id=None, name=None, cidr=None, tags=None, region=None, key=None, keyid=None, profile=None):
"""
Describe all VPCs, matching the filter criteria if provided.
Returns a list of dictionaries with interesting properties.
.. versionadded:: 2015.8.0
CLI Example:
.. code-block:: bash
salt myminion boto_vpc.describe_vpcs
"""
keys = ('id', 'cidr_block', 'is_default', 'state', 'tags', 'dhcp_options_id', 'instance_tenancy')
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
filter_parameters = {'filters': {}}
if vpc_id:
filter_parameters['vpc_ids'] = [vpc_id] # depends on [control=['if'], data=[]]
if cidr:
filter_parameters['filters']['cidr'] = cidr # depends on [control=['if'], data=[]]
if name:
filter_parameters['filters']['tag:Name'] = name # depends on [control=['if'], data=[]]
if tags:
for (tag_name, tag_value) in six.iteritems(tags):
filter_parameters['filters']['tag:{0}'.format(tag_name)] = tag_value # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
vpcs = conn.get_all_vpcs(**filter_parameters)
if vpcs:
ret = []
for vpc in vpcs:
_r = dict([(k, getattr(vpc, k)) for k in keys])
_r.update({'region': getattr(vpc, 'region').name})
ret.append(_r) # depends on [control=['for'], data=['vpc']]
return {'vpcs': ret} # depends on [control=['if'], data=[]]
else:
return {'vpcs': []} # depends on [control=['try'], data=[]]
except BotoServerError as e:
return {'error': __utils__['boto.get_error'](e)} # depends on [control=['except'], data=['e']]
|
def delete(self, endpoint, headers):
"""
Method to delete an item or all items
headers['If-Match'] must contain the _etag identifier of the element to delete
:param endpoint: endpoint (API URL)
:type endpoint: str
:param headers: headers (example: Content-Type)
:type headers: dict
:return: response (deletion information)
:rtype: dict
"""
response = self.get_response(method='DELETE', endpoint=endpoint, headers=headers)
logger.debug("delete, response: %s", response)
if response.status_code != 204: # pragma: no cover - should not happen ...
resp = self.decode(response=response)
resp = {"_status": "OK"}
return resp
|
def function[delete, parameter[self, endpoint, headers]]:
constant[
Method to delete an item or all items
headers['If-Match'] must contain the _etag identifier of the element to delete
:param endpoint: endpoint (API URL)
:type endpoint: str
:param headers: headers (example: Content-Type)
:type headers: dict
:return: response (deletion information)
:rtype: dict
]
variable[response] assign[=] call[name[self].get_response, parameter[]]
call[name[logger].debug, parameter[constant[delete, response: %s], name[response]]]
if compare[name[response].status_code not_equal[!=] constant[204]] begin[:]
variable[resp] assign[=] call[name[self].decode, parameter[]]
variable[resp] assign[=] dictionary[[<ast.Constant object at 0x7da18bc719f0>], [<ast.Constant object at 0x7da18bc70490>]]
return[name[resp]]
|
keyword[def] identifier[delete] ( identifier[self] , identifier[endpoint] , identifier[headers] ):
literal[string]
identifier[response] = identifier[self] . identifier[get_response] ( identifier[method] = literal[string] , identifier[endpoint] = identifier[endpoint] , identifier[headers] = identifier[headers] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[response] )
keyword[if] identifier[response] . identifier[status_code] != literal[int] :
identifier[resp] = identifier[self] . identifier[decode] ( identifier[response] = identifier[response] )
identifier[resp] ={ literal[string] : literal[string] }
keyword[return] identifier[resp]
|
def delete(self, endpoint, headers):
"""
Method to delete an item or all items
headers['If-Match'] must contain the _etag identifier of the element to delete
:param endpoint: endpoint (API URL)
:type endpoint: str
:param headers: headers (example: Content-Type)
:type headers: dict
:return: response (deletion information)
:rtype: dict
"""
response = self.get_response(method='DELETE', endpoint=endpoint, headers=headers)
logger.debug('delete, response: %s', response)
if response.status_code != 204: # pragma: no cover - should not happen ...
resp = self.decode(response=response) # depends on [control=['if'], data=[]]
resp = {'_status': 'OK'}
return resp
|
def import_service_version(self, repository_json, mode='production', service_version='default', service_id=None, **kwargs):
'''
import_service_version(self, repository_json, mode='production', service_version='default', service_id=None, **kwargs)
Imports a service version into Opereto from a remote repository (GIT, SVN, AWS S3, any HTTPS repository)
:Parameters:
* *repository_json* (`object`) -- repository_json
:Example of repository JSON:
.. code-block:: json
#GIT source control
{
"repo_type": "git",
"url": "git@bitbucket.org:my_account_name/my_project.git",
"branch": "master",
"ot_dir": "mydir"
}
#SVN
{
"repo_type": "svn",
"url": "svn://myhost/myrepo",
"username": "OPTIONAL_USERNAME",
"password": "OPTIONAL_PASSWORD",
"ot_dir": "my_service_dir"
}
# Any HTTP based remote storage
{
"repo_type": "http",
"url": "https://www.dropbox.com/s/1234567890/MyFile.zip?dl=0",
"username": "OPTIONAL_PASSWORD",
"ot_dir": "my_service_dir"
}
# AWS S3 Storage
{
"repo_type": "s3",
"bucket": "my_bucket/my_service.zip",
"access_key": "MY_ACCESS_KEY",
"secret_key": "MY_SECRET_KEY",
"ot_dir": "my_service_dir"
}
* *mode* (`string`) -- production/development (default is production)
* *service_version* (`string`) -- Service version
* *service_id* (`string`) -- Service version
:return: status - success/failure
:Example:
.. code-block:: python
# for GIT
repository_json = {
"branch": "master",
"ot_dir": "microservices/hello_world",
"repo_type": "git",
"url": "https://github.com/myCompany/my_services.git"
}
opereto_client.import_service_version(repository_json, mode='production', service_version='default', service_id=self.my_service2)
'''
request_data = {'repository': repository_json, 'mode': mode, 'service_version': service_version, 'id': service_id}
url_suffix = '/services'
if kwargs:
url_suffix=url_suffix+'?'+urlencode(kwargs)
return self._call_rest_api('post', url_suffix, data=request_data, error='Failed to import service')
|
def function[import_service_version, parameter[self, repository_json, mode, service_version, service_id]]:
constant[
import_service_version(self, repository_json, mode='production', service_version='default', service_id=None, **kwargs)
Imports a service version into Opereto from a remote repository (GIT, SVN, AWS S3, any HTTPS repository)
:Parameters:
* *repository_json* (`object`) -- repository_json
:Example of repository JSON:
.. code-block:: json
#GIT source control
{
"repo_type": "git",
"url": "git@bitbucket.org:my_account_name/my_project.git",
"branch": "master",
"ot_dir": "mydir"
}
#SVN
{
"repo_type": "svn",
"url": "svn://myhost/myrepo",
"username": "OPTIONAL_USERNAME",
"password": "OPTIONAL_PASSWORD",
"ot_dir": "my_service_dir"
}
# Any HTTP based remote storage
{
"repo_type": "http",
"url": "https://www.dropbox.com/s/1234567890/MyFile.zip?dl=0",
"username": "OPTIONAL_PASSWORD",
"ot_dir": "my_service_dir"
}
# AWS S3 Storage
{
"repo_type": "s3",
"bucket": "my_bucket/my_service.zip",
"access_key": "MY_ACCESS_KEY",
"secret_key": "MY_SECRET_KEY",
"ot_dir": "my_service_dir"
}
* *mode* (`string`) -- production/development (default is production)
* *service_version* (`string`) -- Service version
* *service_id* (`string`) -- Service version
:return: status - success/failure
:Example:
.. code-block:: python
# for GIT
repository_json = {
"branch": "master",
"ot_dir": "microservices/hello_world",
"repo_type": "git",
"url": "https://github.com/myCompany/my_services.git"
}
opereto_client.import_service_version(repository_json, mode='production', service_version='default', service_id=self.my_service2)
]
variable[request_data] assign[=] dictionary[[<ast.Constant object at 0x7da1b28f67a0>, <ast.Constant object at 0x7da1b28f68f0>, <ast.Constant object at 0x7da1b28f6e90>, <ast.Constant object at 0x7da1b28f4af0>], [<ast.Name object at 0x7da1b28f59c0>, <ast.Name object at 0x7da1b28f5300>, <ast.Name object at 0x7da1b28f5b10>, <ast.Name object at 0x7da1b28f67d0>]]
variable[url_suffix] assign[=] constant[/services]
if name[kwargs] begin[:]
variable[url_suffix] assign[=] binary_operation[binary_operation[name[url_suffix] + constant[?]] + call[name[urlencode], parameter[name[kwargs]]]]
return[call[name[self]._call_rest_api, parameter[constant[post], name[url_suffix]]]]
|
keyword[def] identifier[import_service_version] ( identifier[self] , identifier[repository_json] , identifier[mode] = literal[string] , identifier[service_version] = literal[string] , identifier[service_id] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[request_data] ={ literal[string] : identifier[repository_json] , literal[string] : identifier[mode] , literal[string] : identifier[service_version] , literal[string] : identifier[service_id] }
identifier[url_suffix] = literal[string]
keyword[if] identifier[kwargs] :
identifier[url_suffix] = identifier[url_suffix] + literal[string] + identifier[urlencode] ( identifier[kwargs] )
keyword[return] identifier[self] . identifier[_call_rest_api] ( literal[string] , identifier[url_suffix] , identifier[data] = identifier[request_data] , identifier[error] = literal[string] )
|
def import_service_version(self, repository_json, mode='production', service_version='default', service_id=None, **kwargs):
"""
import_service_version(self, repository_json, mode='production', service_version='default', service_id=None, **kwargs)
Imports a service version into Opereto from a remote repository (GIT, SVN, AWS S3, any HTTPS repository)
:Parameters:
* *repository_json* (`object`) -- repository_json
:Example of repository JSON:
.. code-block:: json
#GIT source control
{
"repo_type": "git",
"url": "git@bitbucket.org:my_account_name/my_project.git",
"branch": "master",
"ot_dir": "mydir"
}
#SVN
{
"repo_type": "svn",
"url": "svn://myhost/myrepo",
"username": "OPTIONAL_USERNAME",
"password": "OPTIONAL_PASSWORD",
"ot_dir": "my_service_dir"
}
# Any HTTP based remote storage
{
"repo_type": "http",
"url": "https://www.dropbox.com/s/1234567890/MyFile.zip?dl=0",
"username": "OPTIONAL_PASSWORD",
"ot_dir": "my_service_dir"
}
# AWS S3 Storage
{
"repo_type": "s3",
"bucket": "my_bucket/my_service.zip",
"access_key": "MY_ACCESS_KEY",
"secret_key": "MY_SECRET_KEY",
"ot_dir": "my_service_dir"
}
* *mode* (`string`) -- production/development (default is production)
* *service_version* (`string`) -- Service version
* *service_id* (`string`) -- Service version
:return: status - success/failure
:Example:
.. code-block:: python
# for GIT
repository_json = {
"branch": "master",
"ot_dir": "microservices/hello_world",
"repo_type": "git",
"url": "https://github.com/myCompany/my_services.git"
}
opereto_client.import_service_version(repository_json, mode='production', service_version='default', service_id=self.my_service2)
"""
request_data = {'repository': repository_json, 'mode': mode, 'service_version': service_version, 'id': service_id}
url_suffix = '/services'
if kwargs:
url_suffix = url_suffix + '?' + urlencode(kwargs) # depends on [control=['if'], data=[]]
return self._call_rest_api('post', url_suffix, data=request_data, error='Failed to import service')
|
def LoadElement(href, only_etag=False):
"""
Return an instance of a element as a ElementCache dict
used as a cache.
:rtype ElementCache
"""
request = SMCRequest(href=href)
request.exception = FetchElementFailed
result = request.read()
if only_etag:
return result.etag
return ElementCache(
result.json, etag=result.etag)
|
def function[LoadElement, parameter[href, only_etag]]:
constant[
Return an instance of a element as a ElementCache dict
used as a cache.
:rtype ElementCache
]
variable[request] assign[=] call[name[SMCRequest], parameter[]]
name[request].exception assign[=] name[FetchElementFailed]
variable[result] assign[=] call[name[request].read, parameter[]]
if name[only_etag] begin[:]
return[name[result].etag]
return[call[name[ElementCache], parameter[name[result].json]]]
|
keyword[def] identifier[LoadElement] ( identifier[href] , identifier[only_etag] = keyword[False] ):
literal[string]
identifier[request] = identifier[SMCRequest] ( identifier[href] = identifier[href] )
identifier[request] . identifier[exception] = identifier[FetchElementFailed]
identifier[result] = identifier[request] . identifier[read] ()
keyword[if] identifier[only_etag] :
keyword[return] identifier[result] . identifier[etag]
keyword[return] identifier[ElementCache] (
identifier[result] . identifier[json] , identifier[etag] = identifier[result] . identifier[etag] )
|
def LoadElement(href, only_etag=False):
"""
Return an instance of a element as a ElementCache dict
used as a cache.
:rtype ElementCache
"""
request = SMCRequest(href=href)
request.exception = FetchElementFailed
result = request.read()
if only_etag:
return result.etag # depends on [control=['if'], data=[]]
return ElementCache(result.json, etag=result.etag)
|
def _deprecated(func):
"""A decorator that warns about deprecation when the passed-in function is
invoked."""
@wraps(func)
def with_warning(*args, **kwargs):
warnings.warn(
('The %s method is deprecated and will be removed in v2.*.*' %
func.__name__),
DeprecationWarning
)
return func(*args, **kwargs)
return with_warning
|
def function[_deprecated, parameter[func]]:
constant[A decorator that warns about deprecation when the passed-in function is
invoked.]
def function[with_warning, parameter[]]:
call[name[warnings].warn, parameter[binary_operation[constant[The %s method is deprecated and will be removed in v2.*.*] <ast.Mod object at 0x7da2590d6920> name[func].__name__], name[DeprecationWarning]]]
return[call[name[func], parameter[<ast.Starred object at 0x7da2044c3670>]]]
return[name[with_warning]]
|
keyword[def] identifier[_deprecated] ( identifier[func] ):
literal[string]
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[with_warning] (* identifier[args] ,** identifier[kwargs] ):
identifier[warnings] . identifier[warn] (
( literal[string] %
identifier[func] . identifier[__name__] ),
identifier[DeprecationWarning]
)
keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[with_warning]
|
def _deprecated(func):
"""A decorator that warns about deprecation when the passed-in function is
invoked."""
@wraps(func)
def with_warning(*args, **kwargs):
warnings.warn('The %s method is deprecated and will be removed in v2.*.*' % func.__name__, DeprecationWarning)
return func(*args, **kwargs)
return with_warning
|
def connection_with_anon(credentials, anon=True):
"""
Connect to S3 with automatic handling for anonymous access.
Parameters
----------
credentials : dict
AWS access key ('access') and secret access key ('secret')
anon : boolean, optional, default = True
Whether to make an anonymous connection if credentials fail to authenticate
"""
from boto.s3.connection import S3Connection
from boto.exception import NoAuthHandlerFound
try:
conn = S3Connection(aws_access_key_id=credentials['access'],
aws_secret_access_key=credentials['secret'])
return conn
except NoAuthHandlerFound:
if anon:
conn = S3Connection(anon=True)
return conn
else:
raise
|
def function[connection_with_anon, parameter[credentials, anon]]:
constant[
Connect to S3 with automatic handling for anonymous access.
Parameters
----------
credentials : dict
AWS access key ('access') and secret access key ('secret')
anon : boolean, optional, default = True
Whether to make an anonymous connection if credentials fail to authenticate
]
from relative_module[boto.s3.connection] import module[S3Connection]
from relative_module[boto.exception] import module[NoAuthHandlerFound]
<ast.Try object at 0x7da18f09cb20>
|
keyword[def] identifier[connection_with_anon] ( identifier[credentials] , identifier[anon] = keyword[True] ):
literal[string]
keyword[from] identifier[boto] . identifier[s3] . identifier[connection] keyword[import] identifier[S3Connection]
keyword[from] identifier[boto] . identifier[exception] keyword[import] identifier[NoAuthHandlerFound]
keyword[try] :
identifier[conn] = identifier[S3Connection] ( identifier[aws_access_key_id] = identifier[credentials] [ literal[string] ],
identifier[aws_secret_access_key] = identifier[credentials] [ literal[string] ])
keyword[return] identifier[conn]
keyword[except] identifier[NoAuthHandlerFound] :
keyword[if] identifier[anon] :
identifier[conn] = identifier[S3Connection] ( identifier[anon] = keyword[True] )
keyword[return] identifier[conn]
keyword[else] :
keyword[raise]
|
def connection_with_anon(credentials, anon=True):
"""
Connect to S3 with automatic handling for anonymous access.
Parameters
----------
credentials : dict
AWS access key ('access') and secret access key ('secret')
anon : boolean, optional, default = True
Whether to make an anonymous connection if credentials fail to authenticate
"""
from boto.s3.connection import S3Connection
from boto.exception import NoAuthHandlerFound
try:
conn = S3Connection(aws_access_key_id=credentials['access'], aws_secret_access_key=credentials['secret'])
return conn # depends on [control=['try'], data=[]]
except NoAuthHandlerFound:
if anon:
conn = S3Connection(anon=True)
return conn # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=[]]
|
def unindent(self):
"""
Unindents the document text under cursor.
:return: Method success.
:rtype: bool
"""
cursor = self.textCursor()
if not cursor.hasSelection():
cursor.movePosition(QTextCursor.StartOfBlock)
line = foundations.strings.to_string(self.document().findBlockByNumber(cursor.blockNumber()).text())
indent_marker = re.match(r"({0})".format(self.__indent_marker), line)
if indent_marker:
foundations.common.repeat(cursor.deleteChar, len(indent_marker.group(1)))
else:
block = self.document().findBlock(cursor.selectionStart())
while True:
block_cursor = self.textCursor()
block_cursor.setPosition(block.position())
indent_marker = re.match(r"({0})".format(self.__indent_marker), block.text())
if indent_marker:
foundations.common.repeat(block_cursor.deleteChar, len(indent_marker.group(1)))
if block.contains(cursor.selectionEnd()):
break
block = block.next()
return True
|
def function[unindent, parameter[self]]:
constant[
Unindents the document text under cursor.
:return: Method success.
:rtype: bool
]
variable[cursor] assign[=] call[name[self].textCursor, parameter[]]
if <ast.UnaryOp object at 0x7da1b09e8640> begin[:]
call[name[cursor].movePosition, parameter[name[QTextCursor].StartOfBlock]]
variable[line] assign[=] call[name[foundations].strings.to_string, parameter[call[call[call[name[self].document, parameter[]].findBlockByNumber, parameter[call[name[cursor].blockNumber, parameter[]]]].text, parameter[]]]]
variable[indent_marker] assign[=] call[name[re].match, parameter[call[constant[({0})].format, parameter[name[self].__indent_marker]], name[line]]]
if name[indent_marker] begin[:]
call[name[foundations].common.repeat, parameter[name[cursor].deleteChar, call[name[len], parameter[call[name[indent_marker].group, parameter[constant[1]]]]]]]
return[constant[True]]
|
keyword[def] identifier[unindent] ( identifier[self] ):
literal[string]
identifier[cursor] = identifier[self] . identifier[textCursor] ()
keyword[if] keyword[not] identifier[cursor] . identifier[hasSelection] ():
identifier[cursor] . identifier[movePosition] ( identifier[QTextCursor] . identifier[StartOfBlock] )
identifier[line] = identifier[foundations] . identifier[strings] . identifier[to_string] ( identifier[self] . identifier[document] (). identifier[findBlockByNumber] ( identifier[cursor] . identifier[blockNumber] ()). identifier[text] ())
identifier[indent_marker] = identifier[re] . identifier[match] ( literal[string] . identifier[format] ( identifier[self] . identifier[__indent_marker] ), identifier[line] )
keyword[if] identifier[indent_marker] :
identifier[foundations] . identifier[common] . identifier[repeat] ( identifier[cursor] . identifier[deleteChar] , identifier[len] ( identifier[indent_marker] . identifier[group] ( literal[int] )))
keyword[else] :
identifier[block] = identifier[self] . identifier[document] (). identifier[findBlock] ( identifier[cursor] . identifier[selectionStart] ())
keyword[while] keyword[True] :
identifier[block_cursor] = identifier[self] . identifier[textCursor] ()
identifier[block_cursor] . identifier[setPosition] ( identifier[block] . identifier[position] ())
identifier[indent_marker] = identifier[re] . identifier[match] ( literal[string] . identifier[format] ( identifier[self] . identifier[__indent_marker] ), identifier[block] . identifier[text] ())
keyword[if] identifier[indent_marker] :
identifier[foundations] . identifier[common] . identifier[repeat] ( identifier[block_cursor] . identifier[deleteChar] , identifier[len] ( identifier[indent_marker] . identifier[group] ( literal[int] )))
keyword[if] identifier[block] . identifier[contains] ( identifier[cursor] . identifier[selectionEnd] ()):
keyword[break]
identifier[block] = identifier[block] . identifier[next] ()
keyword[return] keyword[True]
|
def unindent(self):
"""
Unindents the document text under cursor.
:return: Method success.
:rtype: bool
"""
cursor = self.textCursor()
if not cursor.hasSelection():
cursor.movePosition(QTextCursor.StartOfBlock)
line = foundations.strings.to_string(self.document().findBlockByNumber(cursor.blockNumber()).text())
indent_marker = re.match('({0})'.format(self.__indent_marker), line)
if indent_marker:
foundations.common.repeat(cursor.deleteChar, len(indent_marker.group(1))) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
block = self.document().findBlock(cursor.selectionStart())
while True:
block_cursor = self.textCursor()
block_cursor.setPosition(block.position())
indent_marker = re.match('({0})'.format(self.__indent_marker), block.text())
if indent_marker:
foundations.common.repeat(block_cursor.deleteChar, len(indent_marker.group(1))) # depends on [control=['if'], data=[]]
if block.contains(cursor.selectionEnd()):
break # depends on [control=['if'], data=[]]
block = block.next() # depends on [control=['while'], data=[]]
return True
|
def create_directory(self, path, mode=777):
"""
Creates a directory on the remote system.
:param path: full path to the remote directory to create
:type path: str
:param mode: int representation of octal mode for directory
"""
conn = self.get_conn()
conn.mkdir(path, mode)
|
def function[create_directory, parameter[self, path, mode]]:
constant[
Creates a directory on the remote system.
:param path: full path to the remote directory to create
:type path: str
:param mode: int representation of octal mode for directory
]
variable[conn] assign[=] call[name[self].get_conn, parameter[]]
call[name[conn].mkdir, parameter[name[path], name[mode]]]
|
keyword[def] identifier[create_directory] ( identifier[self] , identifier[path] , identifier[mode] = literal[int] ):
literal[string]
identifier[conn] = identifier[self] . identifier[get_conn] ()
identifier[conn] . identifier[mkdir] ( identifier[path] , identifier[mode] )
|
def create_directory(self, path, mode=777):
"""
Creates a directory on the remote system.
:param path: full path to the remote directory to create
:type path: str
:param mode: int representation of octal mode for directory
"""
conn = self.get_conn()
conn.mkdir(path, mode)
|
def initLogger(obj):
"""
Helper function to create a logger object for the current object with
the standard Numenta prefix.
:param obj: (object) to add a logger to
"""
if inspect.isclass(obj):
myClass = obj
else:
myClass = obj.__class__
logger = logging.getLogger(".".join(
['com.numenta', myClass.__module__, myClass.__name__]))
return logger
|
def function[initLogger, parameter[obj]]:
constant[
Helper function to create a logger object for the current object with
the standard Numenta prefix.
:param obj: (object) to add a logger to
]
if call[name[inspect].isclass, parameter[name[obj]]] begin[:]
variable[myClass] assign[=] name[obj]
variable[logger] assign[=] call[name[logging].getLogger, parameter[call[constant[.].join, parameter[list[[<ast.Constant object at 0x7da20e9b1a20>, <ast.Attribute object at 0x7da20e9b28c0>, <ast.Attribute object at 0x7da20e9b3a30>]]]]]]
return[name[logger]]
|
keyword[def] identifier[initLogger] ( identifier[obj] ):
literal[string]
keyword[if] identifier[inspect] . identifier[isclass] ( identifier[obj] ):
identifier[myClass] = identifier[obj]
keyword[else] :
identifier[myClass] = identifier[obj] . identifier[__class__]
identifier[logger] = identifier[logging] . identifier[getLogger] ( literal[string] . identifier[join] (
[ literal[string] , identifier[myClass] . identifier[__module__] , identifier[myClass] . identifier[__name__] ]))
keyword[return] identifier[logger]
|
def initLogger(obj):
"""
Helper function to create a logger object for the current object with
the standard Numenta prefix.
:param obj: (object) to add a logger to
"""
if inspect.isclass(obj):
myClass = obj # depends on [control=['if'], data=[]]
else:
myClass = obj.__class__
logger = logging.getLogger('.'.join(['com.numenta', myClass.__module__, myClass.__name__]))
return logger
|
def observe_reward_value(self, state_arr, action_arr):
'''
Compute the reward value.
Args:
state_arr: `np.ndarray` of state.
action_arr: `np.ndarray` of action.
Returns:
Reward value.
'''
if self.__check_goal_flag(action_arr) is True:
return 1.0
else:
x, y = np.where(action_arr[-1] == 1)
x, y = x[0], y[0]
goal_x, goal_y = self.__goal_pos
if x == goal_x and y == goal_y:
distance = 0.0
else:
distance = np.sqrt(((x - goal_x) ** 2) + (y - goal_y) ** 2)
if (x, y) in self.__route_long_memory_list:
repeating_penalty = self.__repeating_penalty
else:
repeating_penalty = 0.0
return 1.0 - distance - repeating_penalty
|
def function[observe_reward_value, parameter[self, state_arr, action_arr]]:
constant[
Compute the reward value.
Args:
state_arr: `np.ndarray` of state.
action_arr: `np.ndarray` of action.
Returns:
Reward value.
]
if compare[call[name[self].__check_goal_flag, parameter[name[action_arr]]] is constant[True]] begin[:]
return[constant[1.0]]
|
keyword[def] identifier[observe_reward_value] ( identifier[self] , identifier[state_arr] , identifier[action_arr] ):
literal[string]
keyword[if] identifier[self] . identifier[__check_goal_flag] ( identifier[action_arr] ) keyword[is] keyword[True] :
keyword[return] literal[int]
keyword[else] :
identifier[x] , identifier[y] = identifier[np] . identifier[where] ( identifier[action_arr] [- literal[int] ]== literal[int] )
identifier[x] , identifier[y] = identifier[x] [ literal[int] ], identifier[y] [ literal[int] ]
identifier[goal_x] , identifier[goal_y] = identifier[self] . identifier[__goal_pos]
keyword[if] identifier[x] == identifier[goal_x] keyword[and] identifier[y] == identifier[goal_y] :
identifier[distance] = literal[int]
keyword[else] :
identifier[distance] = identifier[np] . identifier[sqrt] ((( identifier[x] - identifier[goal_x] )** literal[int] )+( identifier[y] - identifier[goal_y] )** literal[int] )
keyword[if] ( identifier[x] , identifier[y] ) keyword[in] identifier[self] . identifier[__route_long_memory_list] :
identifier[repeating_penalty] = identifier[self] . identifier[__repeating_penalty]
keyword[else] :
identifier[repeating_penalty] = literal[int]
keyword[return] literal[int] - identifier[distance] - identifier[repeating_penalty]
|
def observe_reward_value(self, state_arr, action_arr):
"""
Compute the reward value.
Args:
state_arr: `np.ndarray` of state.
action_arr: `np.ndarray` of action.
Returns:
Reward value.
"""
if self.__check_goal_flag(action_arr) is True:
return 1.0 # depends on [control=['if'], data=[]]
else:
(x, y) = np.where(action_arr[-1] == 1)
(x, y) = (x[0], y[0])
(goal_x, goal_y) = self.__goal_pos
if x == goal_x and y == goal_y:
distance = 0.0 # depends on [control=['if'], data=[]]
else:
distance = np.sqrt((x - goal_x) ** 2 + (y - goal_y) ** 2)
if (x, y) in self.__route_long_memory_list:
repeating_penalty = self.__repeating_penalty # depends on [control=['if'], data=[]]
else:
repeating_penalty = 0.0
return 1.0 - distance - repeating_penalty
|
def wrap_cell(entity, json_obj, mapping, table_view=False):
'''
Cell wrappers
for customizing the GUI data table
TODO : must coincide with hierarchy!
TODO : simplify this!
'''
html_class = '' # for GUI javascript
out = ''
#if 'cell_wrapper' in entity: # TODO : this bound type was defined by apps only
# out = entity['cell_wrapper'](json_obj)
#else:
if entity['multiple']:
out = ", ".join( map(lambda x: num2name(x, entity, mapping), json_obj.get(entity['source'], [])) )
elif entity['is_chem_formula']:
out = html_formula(json_obj[ entity['source'] ]) if entity['source'] in json_obj else '—'
elif entity['source'] == 'bandgap':
html_class = ' class=_g'
out = json_obj.get('bandgap')
if out is None: out = '—'
# dynamic determination below:
elif entity['source'] == 'energy':
html_class = ' class=_e'
out = "%6.5f" % json_obj['energy'] if json_obj['energy'] else '—'
elif entity['source'] == 'dims':
out = "%4.2f" % json_obj['dims'] if json_obj['periodicity'] in [2, 3] else '—'
else:
out = num2name(json_obj.get(entity['source']), entity, mapping) or '—'
if table_view:
return '<td rel=' + str(entity['cid']) + html_class + '>' + str(out) + '</td>'
elif html_class:
return '<span' + html_class + '>' + str(out) + '</span>'
return str(out)
|
def function[wrap_cell, parameter[entity, json_obj, mapping, table_view]]:
constant[
Cell wrappers
for customizing the GUI data table
TODO : must coincide with hierarchy!
TODO : simplify this!
]
variable[html_class] assign[=] constant[]
variable[out] assign[=] constant[]
if call[name[entity]][constant[multiple]] begin[:]
variable[out] assign[=] call[constant[, ].join, parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1b190ba60>, call[name[json_obj].get, parameter[call[name[entity]][constant[source]], list[[]]]]]]]]
if name[table_view] begin[:]
return[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[<td rel=] + call[name[str], parameter[call[name[entity]][constant[cid]]]]] + name[html_class]] + constant[>]] + call[name[str], parameter[name[out]]]] + constant[</td>]]]
return[call[name[str], parameter[name[out]]]]
|
keyword[def] identifier[wrap_cell] ( identifier[entity] , identifier[json_obj] , identifier[mapping] , identifier[table_view] = keyword[False] ):
literal[string]
identifier[html_class] = literal[string]
identifier[out] = literal[string]
keyword[if] identifier[entity] [ literal[string] ]:
identifier[out] = literal[string] . identifier[join] ( identifier[map] ( keyword[lambda] identifier[x] : identifier[num2name] ( identifier[x] , identifier[entity] , identifier[mapping] ), identifier[json_obj] . identifier[get] ( identifier[entity] [ literal[string] ],[])))
keyword[elif] identifier[entity] [ literal[string] ]:
identifier[out] = identifier[html_formula] ( identifier[json_obj] [ identifier[entity] [ literal[string] ]]) keyword[if] identifier[entity] [ literal[string] ] keyword[in] identifier[json_obj] keyword[else] literal[string]
keyword[elif] identifier[entity] [ literal[string] ]== literal[string] :
identifier[html_class] = literal[string]
identifier[out] = identifier[json_obj] . identifier[get] ( literal[string] )
keyword[if] identifier[out] keyword[is] keyword[None] : identifier[out] = literal[string]
keyword[elif] identifier[entity] [ literal[string] ]== literal[string] :
identifier[html_class] = literal[string]
identifier[out] = literal[string] % identifier[json_obj] [ literal[string] ] keyword[if] identifier[json_obj] [ literal[string] ] keyword[else] literal[string]
keyword[elif] identifier[entity] [ literal[string] ]== literal[string] :
identifier[out] = literal[string] % identifier[json_obj] [ literal[string] ] keyword[if] identifier[json_obj] [ literal[string] ] keyword[in] [ literal[int] , literal[int] ] keyword[else] literal[string]
keyword[else] :
identifier[out] = identifier[num2name] ( identifier[json_obj] . identifier[get] ( identifier[entity] [ literal[string] ]), identifier[entity] , identifier[mapping] ) keyword[or] literal[string]
keyword[if] identifier[table_view] :
keyword[return] literal[string] + identifier[str] ( identifier[entity] [ literal[string] ])+ identifier[html_class] + literal[string] + identifier[str] ( identifier[out] )+ literal[string]
keyword[elif] identifier[html_class] :
keyword[return] literal[string] + identifier[html_class] + literal[string] + identifier[str] ( identifier[out] )+ literal[string]
keyword[return] identifier[str] ( identifier[out] )
|
def wrap_cell(entity, json_obj, mapping, table_view=False):
"""
Cell wrappers
for customizing the GUI data table
TODO : must coincide with hierarchy!
TODO : simplify this!
"""
html_class = '' # for GUI javascript
out = ''
#if 'cell_wrapper' in entity: # TODO : this bound type was defined by apps only
# out = entity['cell_wrapper'](json_obj)
#else:
if entity['multiple']:
out = ', '.join(map(lambda x: num2name(x, entity, mapping), json_obj.get(entity['source'], []))) # depends on [control=['if'], data=[]]
elif entity['is_chem_formula']:
out = html_formula(json_obj[entity['source']]) if entity['source'] in json_obj else '—' # depends on [control=['if'], data=[]]
elif entity['source'] == 'bandgap':
html_class = ' class=_g'
out = json_obj.get('bandgap')
if out is None:
out = '—' # depends on [control=['if'], data=['out']] # depends on [control=['if'], data=[]]
# dynamic determination below:
elif entity['source'] == 'energy':
html_class = ' class=_e'
out = '%6.5f' % json_obj['energy'] if json_obj['energy'] else '—' # depends on [control=['if'], data=[]]
elif entity['source'] == 'dims':
out = '%4.2f' % json_obj['dims'] if json_obj['periodicity'] in [2, 3] else '—' # depends on [control=['if'], data=[]]
else:
out = num2name(json_obj.get(entity['source']), entity, mapping) or '—'
if table_view:
return '<td rel=' + str(entity['cid']) + html_class + '>' + str(out) + '</td>' # depends on [control=['if'], data=[]]
elif html_class:
return '<span' + html_class + '>' + str(out) + '</span>' # depends on [control=['if'], data=[]]
return str(out)
|
def startswith(self, event_property, value):
"""A starts-with filter chain.
>>> request_time = EventExpression('request', 'elapsed_ms')
>>> filtered = request_time.startswith('path', '/cube')
>>> print(filtered)
request(elapsed_ms).re(path, "^/cube")
"""
c = self.copy()
c.filters.append(filters.RE(event_property, "^{value}".format(
value=value)))
return c
|
def function[startswith, parameter[self, event_property, value]]:
constant[A starts-with filter chain.
>>> request_time = EventExpression('request', 'elapsed_ms')
>>> filtered = request_time.startswith('path', '/cube')
>>> print(filtered)
request(elapsed_ms).re(path, "^/cube")
]
variable[c] assign[=] call[name[self].copy, parameter[]]
call[name[c].filters.append, parameter[call[name[filters].RE, parameter[name[event_property], call[constant[^{value}].format, parameter[]]]]]]
return[name[c]]
|
keyword[def] identifier[startswith] ( identifier[self] , identifier[event_property] , identifier[value] ):
literal[string]
identifier[c] = identifier[self] . identifier[copy] ()
identifier[c] . identifier[filters] . identifier[append] ( identifier[filters] . identifier[RE] ( identifier[event_property] , literal[string] . identifier[format] (
identifier[value] = identifier[value] )))
keyword[return] identifier[c]
|
def startswith(self, event_property, value):
"""A starts-with filter chain.
>>> request_time = EventExpression('request', 'elapsed_ms')
>>> filtered = request_time.startswith('path', '/cube')
>>> print(filtered)
request(elapsed_ms).re(path, "^/cube")
"""
c = self.copy()
c.filters.append(filters.RE(event_property, '^{value}'.format(value=value)))
return c
|
def add_pin_at_xy(self, x, y, text, location='above right',
relative_position=.9, use_arrow=True, style=None):
"""Add pin at x, y location.
:param x: array, list or float, specifying the location of the
pin.
:param y: array, list or float, specifying the location of the
pin.
:param text: the text of the pin label.
:param location: the location of the pin relative to the data
point. Any location accepted by TikZ is allowed.
:param relative_position: location of the data point as a relative
number between 0 and 1.
:param use_arrow: specifies whether to draw an arrow between the
data point and the pin label text.
:type use_arrow: boolean
:param style: optional TikZ styles to apply (e.g. 'red').
If x, y are arrays or lists, relative position is used to pick a
point from the arrays. A relative position of 0.0 will be the
first point from the series, while 1.0 will be the last point.
"""
if relative_position is None:
if location == 'left':
relative_position = 0.
elif location == 'right':
relative_position = 1.
else:
relative_position = .8
x, y = self._calc_position_for_pin(x, y, relative_position)
self.pin_list.append({'x': x, 'y': y, 'text': text,
'location': location,
'use_arrow': use_arrow,
'options': style})
|
def function[add_pin_at_xy, parameter[self, x, y, text, location, relative_position, use_arrow, style]]:
constant[Add pin at x, y location.
:param x: array, list or float, specifying the location of the
pin.
:param y: array, list or float, specifying the location of the
pin.
:param text: the text of the pin label.
:param location: the location of the pin relative to the data
point. Any location accepted by TikZ is allowed.
:param relative_position: location of the data point as a relative
number between 0 and 1.
:param use_arrow: specifies whether to draw an arrow between the
data point and the pin label text.
:type use_arrow: boolean
:param style: optional TikZ styles to apply (e.g. 'red').
If x, y are arrays or lists, relative position is used to pick a
point from the arrays. A relative position of 0.0 will be the
first point from the series, while 1.0 will be the last point.
]
if compare[name[relative_position] is constant[None]] begin[:]
if compare[name[location] equal[==] constant[left]] begin[:]
variable[relative_position] assign[=] constant[0.0]
<ast.Tuple object at 0x7da1b2298580> assign[=] call[name[self]._calc_position_for_pin, parameter[name[x], name[y], name[relative_position]]]
call[name[self].pin_list.append, parameter[dictionary[[<ast.Constant object at 0x7da1b229a5c0>, <ast.Constant object at 0x7da1b229a200>, <ast.Constant object at 0x7da1b229a980>, <ast.Constant object at 0x7da1b229a5f0>, <ast.Constant object at 0x7da1b22997b0>, <ast.Constant object at 0x7da1b229a8f0>], [<ast.Name object at 0x7da1b229a770>, <ast.Name object at 0x7da1b229b550>, <ast.Name object at 0x7da1b2298c70>, <ast.Name object at 0x7da1b22982b0>, <ast.Name object at 0x7da1b229be80>, <ast.Name object at 0x7da1b2298fd0>]]]]
|
keyword[def] identifier[add_pin_at_xy] ( identifier[self] , identifier[x] , identifier[y] , identifier[text] , identifier[location] = literal[string] ,
identifier[relative_position] = literal[int] , identifier[use_arrow] = keyword[True] , identifier[style] = keyword[None] ):
literal[string]
keyword[if] identifier[relative_position] keyword[is] keyword[None] :
keyword[if] identifier[location] == literal[string] :
identifier[relative_position] = literal[int]
keyword[elif] identifier[location] == literal[string] :
identifier[relative_position] = literal[int]
keyword[else] :
identifier[relative_position] = literal[int]
identifier[x] , identifier[y] = identifier[self] . identifier[_calc_position_for_pin] ( identifier[x] , identifier[y] , identifier[relative_position] )
identifier[self] . identifier[pin_list] . identifier[append] ({ literal[string] : identifier[x] , literal[string] : identifier[y] , literal[string] : identifier[text] ,
literal[string] : identifier[location] ,
literal[string] : identifier[use_arrow] ,
literal[string] : identifier[style] })
|
def add_pin_at_xy(self, x, y, text, location='above right', relative_position=0.9, use_arrow=True, style=None):
"""Add pin at x, y location.
:param x: array, list or float, specifying the location of the
pin.
:param y: array, list or float, specifying the location of the
pin.
:param text: the text of the pin label.
:param location: the location of the pin relative to the data
point. Any location accepted by TikZ is allowed.
:param relative_position: location of the data point as a relative
number between 0 and 1.
:param use_arrow: specifies whether to draw an arrow between the
data point and the pin label text.
:type use_arrow: boolean
:param style: optional TikZ styles to apply (e.g. 'red').
If x, y are arrays or lists, relative position is used to pick a
point from the arrays. A relative position of 0.0 will be the
first point from the series, while 1.0 will be the last point.
"""
if relative_position is None:
if location == 'left':
relative_position = 0.0 # depends on [control=['if'], data=[]]
elif location == 'right':
relative_position = 1.0 # depends on [control=['if'], data=[]]
else:
relative_position = 0.8 # depends on [control=['if'], data=['relative_position']]
(x, y) = self._calc_position_for_pin(x, y, relative_position)
self.pin_list.append({'x': x, 'y': y, 'text': text, 'location': location, 'use_arrow': use_arrow, 'options': style})
|
def from_size(cls, data, width, height):
# type: (bytearray, int, int) -> ScreenShot
""" Instantiate a new class given only screen shot's data and size. """
monitor = {"left": 0, "top": 0, "width": width, "height": height}
return cls(data, monitor)
|
def function[from_size, parameter[cls, data, width, height]]:
constant[ Instantiate a new class given only screen shot's data and size. ]
variable[monitor] assign[=] dictionary[[<ast.Constant object at 0x7da1b08330a0>, <ast.Constant object at 0x7da1b0830220>, <ast.Constant object at 0x7da1b0832020>, <ast.Constant object at 0x7da1b07e2bf0>], [<ast.Constant object at 0x7da1b07e0fa0>, <ast.Constant object at 0x7da1b07e33d0>, <ast.Name object at 0x7da1b07e2230>, <ast.Name object at 0x7da1b07e3e80>]]
return[call[name[cls], parameter[name[data], name[monitor]]]]
|
keyword[def] identifier[from_size] ( identifier[cls] , identifier[data] , identifier[width] , identifier[height] ):
literal[string]
identifier[monitor] ={ literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : identifier[width] , literal[string] : identifier[height] }
keyword[return] identifier[cls] ( identifier[data] , identifier[monitor] )
|
def from_size(cls, data, width, height):
# type: (bytearray, int, int) -> ScreenShot
" Instantiate a new class given only screen shot's data and size. "
monitor = {'left': 0, 'top': 0, 'width': width, 'height': height}
return cls(data, monitor)
|
def plotFuncsDer(functions,bottom,top,N=1000,legend_kwds = None):
'''
Plots the first derivative of 1D function(s) over a given range.
Parameters
----------
function : function
A function or list of functions, the derivatives of which are to be plotted.
bottom : float
The lower limit of the domain to be plotted.
top : float
The upper limit of the domain to be plotted.
N : int
Number of points in the domain to evaluate.
legend_kwds: None, or dictionary
If not None, the keyword dictionary to pass to plt.legend
Returns
-------
none
'''
if type(functions)==list:
function_list = functions
else:
function_list = [functions]
step = (top-bottom)/N
for function in function_list:
x = np.arange(bottom,top,step)
y = function.derivative(x)
plt.plot(x,y)
plt.xlim([bottom, top])
if legend_kwds is not None:
plt.legend(**legend_kwds)
plt.show()
|
def function[plotFuncsDer, parameter[functions, bottom, top, N, legend_kwds]]:
constant[
Plots the first derivative of 1D function(s) over a given range.
Parameters
----------
function : function
A function or list of functions, the derivatives of which are to be plotted.
bottom : float
The lower limit of the domain to be plotted.
top : float
The upper limit of the domain to be plotted.
N : int
Number of points in the domain to evaluate.
legend_kwds: None, or dictionary
If not None, the keyword dictionary to pass to plt.legend
Returns
-------
none
]
if compare[call[name[type], parameter[name[functions]]] equal[==] name[list]] begin[:]
variable[function_list] assign[=] name[functions]
variable[step] assign[=] binary_operation[binary_operation[name[top] - name[bottom]] / name[N]]
for taget[name[function]] in starred[name[function_list]] begin[:]
variable[x] assign[=] call[name[np].arange, parameter[name[bottom], name[top], name[step]]]
variable[y] assign[=] call[name[function].derivative, parameter[name[x]]]
call[name[plt].plot, parameter[name[x], name[y]]]
call[name[plt].xlim, parameter[list[[<ast.Name object at 0x7da1b074f310>, <ast.Name object at 0x7da1b074f2e0>]]]]
if compare[name[legend_kwds] is_not constant[None]] begin[:]
call[name[plt].legend, parameter[]]
call[name[plt].show, parameter[]]
|
keyword[def] identifier[plotFuncsDer] ( identifier[functions] , identifier[bottom] , identifier[top] , identifier[N] = literal[int] , identifier[legend_kwds] = keyword[None] ):
literal[string]
keyword[if] identifier[type] ( identifier[functions] )== identifier[list] :
identifier[function_list] = identifier[functions]
keyword[else] :
identifier[function_list] =[ identifier[functions] ]
identifier[step] =( identifier[top] - identifier[bottom] )/ identifier[N]
keyword[for] identifier[function] keyword[in] identifier[function_list] :
identifier[x] = identifier[np] . identifier[arange] ( identifier[bottom] , identifier[top] , identifier[step] )
identifier[y] = identifier[function] . identifier[derivative] ( identifier[x] )
identifier[plt] . identifier[plot] ( identifier[x] , identifier[y] )
identifier[plt] . identifier[xlim] ([ identifier[bottom] , identifier[top] ])
keyword[if] identifier[legend_kwds] keyword[is] keyword[not] keyword[None] :
identifier[plt] . identifier[legend] (** identifier[legend_kwds] )
identifier[plt] . identifier[show] ()
|
def plotFuncsDer(functions, bottom, top, N=1000, legend_kwds=None):
"""
Plots the first derivative of 1D function(s) over a given range.
Parameters
----------
function : function
A function or list of functions, the derivatives of which are to be plotted.
bottom : float
The lower limit of the domain to be plotted.
top : float
The upper limit of the domain to be plotted.
N : int
Number of points in the domain to evaluate.
legend_kwds: None, or dictionary
If not None, the keyword dictionary to pass to plt.legend
Returns
-------
none
"""
if type(functions) == list:
function_list = functions # depends on [control=['if'], data=[]]
else:
function_list = [functions]
step = (top - bottom) / N
for function in function_list:
x = np.arange(bottom, top, step)
y = function.derivative(x)
plt.plot(x, y) # depends on [control=['for'], data=['function']]
plt.xlim([bottom, top])
if legend_kwds is not None:
plt.legend(**legend_kwds) # depends on [control=['if'], data=['legend_kwds']]
plt.show()
|
def configure_flair(self, subreddit, flair_enabled=False,
flair_position='right',
flair_self_assign=False,
link_flair_enabled=False,
link_flair_position='left',
link_flair_self_assign=False):
"""Configure the flair setting for the given subreddit.
:returns: The json response from the server.
"""
flair_enabled = 'on' if flair_enabled else 'off'
flair_self_assign = 'on' if flair_self_assign else 'off'
if not link_flair_enabled:
link_flair_position = ''
link_flair_self_assign = 'on' if link_flair_self_assign else 'off'
data = {'r': six.text_type(subreddit),
'flair_enabled': flair_enabled,
'flair_position': flair_position,
'flair_self_assign_enabled': flair_self_assign,
'link_flair_position': link_flair_position,
'link_flair_self_assign_enabled': link_flair_self_assign}
return self.request_json(self.config['flairconfig'], data=data)
|
def function[configure_flair, parameter[self, subreddit, flair_enabled, flair_position, flair_self_assign, link_flair_enabled, link_flair_position, link_flair_self_assign]]:
constant[Configure the flair setting for the given subreddit.
:returns: The json response from the server.
]
variable[flair_enabled] assign[=] <ast.IfExp object at 0x7da18f58ea40>
variable[flair_self_assign] assign[=] <ast.IfExp object at 0x7da18f58f520>
if <ast.UnaryOp object at 0x7da18f58c760> begin[:]
variable[link_flair_position] assign[=] constant[]
variable[link_flair_self_assign] assign[=] <ast.IfExp object at 0x7da18f58f6a0>
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da18f58ff70>, <ast.Constant object at 0x7da18f58dab0>, <ast.Constant object at 0x7da18f58c850>, <ast.Constant object at 0x7da18f58c7c0>, <ast.Constant object at 0x7da18f58e3e0>, <ast.Constant object at 0x7da18f58c520>], [<ast.Call object at 0x7da18f58fa90>, <ast.Name object at 0x7da18f58e560>, <ast.Name object at 0x7da18f58fd30>, <ast.Name object at 0x7da18f58cca0>, <ast.Name object at 0x7da18f58c190>, <ast.Name object at 0x7da18f58fc70>]]
return[call[name[self].request_json, parameter[call[name[self].config][constant[flairconfig]]]]]
|
keyword[def] identifier[configure_flair] ( identifier[self] , identifier[subreddit] , identifier[flair_enabled] = keyword[False] ,
identifier[flair_position] = literal[string] ,
identifier[flair_self_assign] = keyword[False] ,
identifier[link_flair_enabled] = keyword[False] ,
identifier[link_flair_position] = literal[string] ,
identifier[link_flair_self_assign] = keyword[False] ):
literal[string]
identifier[flair_enabled] = literal[string] keyword[if] identifier[flair_enabled] keyword[else] literal[string]
identifier[flair_self_assign] = literal[string] keyword[if] identifier[flair_self_assign] keyword[else] literal[string]
keyword[if] keyword[not] identifier[link_flair_enabled] :
identifier[link_flair_position] = literal[string]
identifier[link_flair_self_assign] = literal[string] keyword[if] identifier[link_flair_self_assign] keyword[else] literal[string]
identifier[data] ={ literal[string] : identifier[six] . identifier[text_type] ( identifier[subreddit] ),
literal[string] : identifier[flair_enabled] ,
literal[string] : identifier[flair_position] ,
literal[string] : identifier[flair_self_assign] ,
literal[string] : identifier[link_flair_position] ,
literal[string] : identifier[link_flair_self_assign] }
keyword[return] identifier[self] . identifier[request_json] ( identifier[self] . identifier[config] [ literal[string] ], identifier[data] = identifier[data] )
|
def configure_flair(self, subreddit, flair_enabled=False, flair_position='right', flair_self_assign=False, link_flair_enabled=False, link_flair_position='left', link_flair_self_assign=False):
"""Configure the flair setting for the given subreddit.
:returns: The json response from the server.
"""
flair_enabled = 'on' if flair_enabled else 'off'
flair_self_assign = 'on' if flair_self_assign else 'off'
if not link_flair_enabled:
link_flair_position = '' # depends on [control=['if'], data=[]]
link_flair_self_assign = 'on' if link_flair_self_assign else 'off'
data = {'r': six.text_type(subreddit), 'flair_enabled': flair_enabled, 'flair_position': flair_position, 'flair_self_assign_enabled': flair_self_assign, 'link_flair_position': link_flair_position, 'link_flair_self_assign_enabled': link_flair_self_assign}
return self.request_json(self.config['flairconfig'], data=data)
|
def fill_parameters(self, path, blocks, exclude_free_params=False, check_parameters=False):
"""
Load parameters from file to fill all blocks sequentially.
:type blocks: list of deepy.layers.Block
"""
if not os.path.exists(path):
raise Exception("model {} does not exist".format(path))
# Decide which parameters to load
normal_params = sum([nn.parameters for nn in blocks], [])
all_params = sum([nn.all_parameters for nn in blocks], [])
# Load parameters
if path.endswith(".gz"):
opener = gzip.open if path.lower().endswith('.gz') else open
handle = opener(path, 'rb')
saved_params = pickle.load(handle)
handle.close()
# Write parameters
if len(all_params) != len(saved_params):
logging.warning(
"parameters in the network: {}, parameters in the dumped model: {}".format(len(all_params),
len(saved_params)))
for target, source in zip(all_params, saved_params):
if not exclude_free_params or target not in normal_params:
target.set_value(source)
elif path.endswith(".npz"):
arrs = np.load(path)
# Write parameters
if len(all_params) != len(arrs.keys()):
logging.warning(
"parameters in the network: {}, parameters in the dumped model: {}".format(len(all_params),
len(arrs.keys())))
for target, idx in zip(all_params, range(len(arrs.keys()))):
if not exclude_free_params or target not in normal_params:
source = arrs['arr_%d' % idx]
target.set_value(source)
else:
raise Exception("File format of %s is not supported, use '.gz' or '.npz' or '.uncompressed.gz'" % path)
|
def function[fill_parameters, parameter[self, path, blocks, exclude_free_params, check_parameters]]:
constant[
Load parameters from file to fill all blocks sequentially.
:type blocks: list of deepy.layers.Block
]
if <ast.UnaryOp object at 0x7da1b26ae3e0> begin[:]
<ast.Raise object at 0x7da1b26ad2a0>
variable[normal_params] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da1b26aed40>, list[[]]]]
variable[all_params] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da1b0394820>, list[[]]]]
if call[name[path].endswith, parameter[constant[.gz]]] begin[:]
variable[opener] assign[=] <ast.IfExp object at 0x7da1b0396110>
variable[handle] assign[=] call[name[opener], parameter[name[path], constant[rb]]]
variable[saved_params] assign[=] call[name[pickle].load, parameter[name[handle]]]
call[name[handle].close, parameter[]]
if compare[call[name[len], parameter[name[all_params]]] not_equal[!=] call[name[len], parameter[name[saved_params]]]] begin[:]
call[name[logging].warning, parameter[call[constant[parameters in the network: {}, parameters in the dumped model: {}].format, parameter[call[name[len], parameter[name[all_params]]], call[name[len], parameter[name[saved_params]]]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b26ae590>, <ast.Name object at 0x7da1b26ad2d0>]]] in starred[call[name[zip], parameter[name[all_params], name[saved_params]]]] begin[:]
if <ast.BoolOp object at 0x7da1b26afd00> begin[:]
call[name[target].set_value, parameter[name[source]]]
|
keyword[def] identifier[fill_parameters] ( identifier[self] , identifier[path] , identifier[blocks] , identifier[exclude_free_params] = keyword[False] , identifier[check_parameters] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ):
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[path] ))
identifier[normal_params] = identifier[sum] ([ identifier[nn] . identifier[parameters] keyword[for] identifier[nn] keyword[in] identifier[blocks] ],[])
identifier[all_params] = identifier[sum] ([ identifier[nn] . identifier[all_parameters] keyword[for] identifier[nn] keyword[in] identifier[blocks] ],[])
keyword[if] identifier[path] . identifier[endswith] ( literal[string] ):
identifier[opener] = identifier[gzip] . identifier[open] keyword[if] identifier[path] . identifier[lower] (). identifier[endswith] ( literal[string] ) keyword[else] identifier[open]
identifier[handle] = identifier[opener] ( identifier[path] , literal[string] )
identifier[saved_params] = identifier[pickle] . identifier[load] ( identifier[handle] )
identifier[handle] . identifier[close] ()
keyword[if] identifier[len] ( identifier[all_params] )!= identifier[len] ( identifier[saved_params] ):
identifier[logging] . identifier[warning] (
literal[string] . identifier[format] ( identifier[len] ( identifier[all_params] ),
identifier[len] ( identifier[saved_params] )))
keyword[for] identifier[target] , identifier[source] keyword[in] identifier[zip] ( identifier[all_params] , identifier[saved_params] ):
keyword[if] keyword[not] identifier[exclude_free_params] keyword[or] identifier[target] keyword[not] keyword[in] identifier[normal_params] :
identifier[target] . identifier[set_value] ( identifier[source] )
keyword[elif] identifier[path] . identifier[endswith] ( literal[string] ):
identifier[arrs] = identifier[np] . identifier[load] ( identifier[path] )
keyword[if] identifier[len] ( identifier[all_params] )!= identifier[len] ( identifier[arrs] . identifier[keys] ()):
identifier[logging] . identifier[warning] (
literal[string] . identifier[format] ( identifier[len] ( identifier[all_params] ),
identifier[len] ( identifier[arrs] . identifier[keys] ())))
keyword[for] identifier[target] , identifier[idx] keyword[in] identifier[zip] ( identifier[all_params] , identifier[range] ( identifier[len] ( identifier[arrs] . identifier[keys] ()))):
keyword[if] keyword[not] identifier[exclude_free_params] keyword[or] identifier[target] keyword[not] keyword[in] identifier[normal_params] :
identifier[source] = identifier[arrs] [ literal[string] % identifier[idx] ]
identifier[target] . identifier[set_value] ( identifier[source] )
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[path] )
|
def fill_parameters(self, path, blocks, exclude_free_params=False, check_parameters=False):
"""
Load parameters from file to fill all blocks sequentially.
:type blocks: list of deepy.layers.Block
"""
if not os.path.exists(path):
raise Exception('model {} does not exist'.format(path)) # depends on [control=['if'], data=[]]
# Decide which parameters to load
normal_params = sum([nn.parameters for nn in blocks], [])
all_params = sum([nn.all_parameters for nn in blocks], [])
# Load parameters
if path.endswith('.gz'):
opener = gzip.open if path.lower().endswith('.gz') else open
handle = opener(path, 'rb')
saved_params = pickle.load(handle)
handle.close()
# Write parameters
if len(all_params) != len(saved_params):
logging.warning('parameters in the network: {}, parameters in the dumped model: {}'.format(len(all_params), len(saved_params))) # depends on [control=['if'], data=[]]
for (target, source) in zip(all_params, saved_params):
if not exclude_free_params or target not in normal_params:
target.set_value(source) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
elif path.endswith('.npz'):
arrs = np.load(path)
# Write parameters
if len(all_params) != len(arrs.keys()):
logging.warning('parameters in the network: {}, parameters in the dumped model: {}'.format(len(all_params), len(arrs.keys()))) # depends on [control=['if'], data=[]]
for (target, idx) in zip(all_params, range(len(arrs.keys()))):
if not exclude_free_params or target not in normal_params:
source = arrs['arr_%d' % idx]
target.set_value(source) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
raise Exception("File format of %s is not supported, use '.gz' or '.npz' or '.uncompressed.gz'" % path)
|
def total_seconds(td):
"""convert a timedelta to seconds.
This is patterned after timedelta.total_seconds, which is only
available in python 27.
Args:
td: a timedelta object.
Returns:
total seconds within a timedelta. Rounded up to seconds.
"""
secs = td.seconds + td.days * 24 * 3600
if td.microseconds:
secs += 1
return secs
|
def function[total_seconds, parameter[td]]:
constant[convert a timedelta to seconds.
This is patterned after timedelta.total_seconds, which is only
available in python 27.
Args:
td: a timedelta object.
Returns:
total seconds within a timedelta. Rounded up to seconds.
]
variable[secs] assign[=] binary_operation[name[td].seconds + binary_operation[binary_operation[name[td].days * constant[24]] * constant[3600]]]
if name[td].microseconds begin[:]
<ast.AugAssign object at 0x7da18eb57bb0>
return[name[secs]]
|
keyword[def] identifier[total_seconds] ( identifier[td] ):
literal[string]
identifier[secs] = identifier[td] . identifier[seconds] + identifier[td] . identifier[days] * literal[int] * literal[int]
keyword[if] identifier[td] . identifier[microseconds] :
identifier[secs] += literal[int]
keyword[return] identifier[secs]
|
def total_seconds(td):
"""convert a timedelta to seconds.
This is patterned after timedelta.total_seconds, which is only
available in python 27.
Args:
td: a timedelta object.
Returns:
total seconds within a timedelta. Rounded up to seconds.
"""
secs = td.seconds + td.days * 24 * 3600
if td.microseconds:
secs += 1 # depends on [control=['if'], data=[]]
return secs
|
def isometric_transfer(script, sourceMesh=0, targetMesh=1):
"""Isometric parameterization: transfer between meshes
Provide the layer numbers of the source and target meshes.
"""
filter_xml = ''.join([
' <filter name="Iso Parametrization transfer between meshes">\n',
' <Param name="sourceMesh"',
'value="%s"' % sourceMesh,
'description="Source Mesh"',
'type="RichMesh"',
'tooltip="The mesh already having an Isoparameterization"',
'/>\n',
' <Param name="targetMesh"',
'value="%s"' % targetMesh,
'description="Target Mesh"',
'type="RichMesh"',
'tooltip="The mesh to be Isoparameterized"',
'/>\n',
' </filter>\n'])
util.write_filter(script, filter_xml)
return None
|
def function[isometric_transfer, parameter[script, sourceMesh, targetMesh]]:
constant[Isometric parameterization: transfer between meshes
Provide the layer numbers of the source and target meshes.
]
variable[filter_xml] assign[=] call[constant[].join, parameter[list[[<ast.Constant object at 0x7da1b0294490>, <ast.Constant object at 0x7da1b0294520>, <ast.BinOp object at 0x7da1b0294820>, <ast.Constant object at 0x7da1b02969e0>, <ast.Constant object at 0x7da1b0295000>, <ast.Constant object at 0x7da1b0297b50>, <ast.Constant object at 0x7da1b0297f10>, <ast.Constant object at 0x7da1b0296920>, <ast.BinOp object at 0x7da1b02973d0>, <ast.Constant object at 0x7da1b0294280>, <ast.Constant object at 0x7da1b0294d60>, <ast.Constant object at 0x7da1b02950c0>, <ast.Constant object at 0x7da1b02941c0>, <ast.Constant object at 0x7da1b0295630>]]]]
call[name[util].write_filter, parameter[name[script], name[filter_xml]]]
return[constant[None]]
|
keyword[def] identifier[isometric_transfer] ( identifier[script] , identifier[sourceMesh] = literal[int] , identifier[targetMesh] = literal[int] ):
literal[string]
identifier[filter_xml] = literal[string] . identifier[join] ([
literal[string] ,
literal[string] ,
literal[string] % identifier[sourceMesh] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] % identifier[targetMesh] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ])
identifier[util] . identifier[write_filter] ( identifier[script] , identifier[filter_xml] )
keyword[return] keyword[None]
|
def isometric_transfer(script, sourceMesh=0, targetMesh=1):
"""Isometric parameterization: transfer between meshes
Provide the layer numbers of the source and target meshes.
"""
filter_xml = ''.join([' <filter name="Iso Parametrization transfer between meshes">\n', ' <Param name="sourceMesh"', 'value="%s"' % sourceMesh, 'description="Source Mesh"', 'type="RichMesh"', 'tooltip="The mesh already having an Isoparameterization"', '/>\n', ' <Param name="targetMesh"', 'value="%s"' % targetMesh, 'description="Target Mesh"', 'type="RichMesh"', 'tooltip="The mesh to be Isoparameterized"', '/>\n', ' </filter>\n'])
util.write_filter(script, filter_xml)
return None
|
def stop_sync(self):
""" Stops all the synchonization loop (sensor/effector controllers). """
if not self._syncing:
return
if self._primitive_manager.running:
self._primitive_manager.stop()
[c.stop() for c in self._controllers]
[s.close() for s in self.sensors if hasattr(s, 'close')]
self._syncing = False
logger.info('Stopping robot synchronization.')
|
def function[stop_sync, parameter[self]]:
constant[ Stops all the synchonization loop (sensor/effector controllers). ]
if <ast.UnaryOp object at 0x7da1b15d2c50> begin[:]
return[None]
if name[self]._primitive_manager.running begin[:]
call[name[self]._primitive_manager.stop, parameter[]]
<ast.ListComp object at 0x7da1b15d30a0>
<ast.ListComp object at 0x7da1b15d3160>
name[self]._syncing assign[=] constant[False]
call[name[logger].info, parameter[constant[Stopping robot synchronization.]]]
|
keyword[def] identifier[stop_sync] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_syncing] :
keyword[return]
keyword[if] identifier[self] . identifier[_primitive_manager] . identifier[running] :
identifier[self] . identifier[_primitive_manager] . identifier[stop] ()
[ identifier[c] . identifier[stop] () keyword[for] identifier[c] keyword[in] identifier[self] . identifier[_controllers] ]
[ identifier[s] . identifier[close] () keyword[for] identifier[s] keyword[in] identifier[self] . identifier[sensors] keyword[if] identifier[hasattr] ( identifier[s] , literal[string] )]
identifier[self] . identifier[_syncing] = keyword[False]
identifier[logger] . identifier[info] ( literal[string] )
|
def stop_sync(self):
""" Stops all the synchonization loop (sensor/effector controllers). """
if not self._syncing:
return # depends on [control=['if'], data=[]]
if self._primitive_manager.running:
self._primitive_manager.stop() # depends on [control=['if'], data=[]]
[c.stop() for c in self._controllers]
[s.close() for s in self.sensors if hasattr(s, 'close')]
self._syncing = False
logger.info('Stopping robot synchronization.')
|
def sext(self, num):
"""Sign-extend this farray by *num* bits.
Returns a new farray.
"""
sign = self._items[-1]
return self.__class__(self._items + [sign] * num, ftype=self.ftype)
|
def function[sext, parameter[self, num]]:
constant[Sign-extend this farray by *num* bits.
Returns a new farray.
]
variable[sign] assign[=] call[name[self]._items][<ast.UnaryOp object at 0x7da1b0e16ef0>]
return[call[name[self].__class__, parameter[binary_operation[name[self]._items + binary_operation[list[[<ast.Name object at 0x7da1b0e15480>]] * name[num]]]]]]
|
keyword[def] identifier[sext] ( identifier[self] , identifier[num] ):
literal[string]
identifier[sign] = identifier[self] . identifier[_items] [- literal[int] ]
keyword[return] identifier[self] . identifier[__class__] ( identifier[self] . identifier[_items] +[ identifier[sign] ]* identifier[num] , identifier[ftype] = identifier[self] . identifier[ftype] )
|
def sext(self, num):
"""Sign-extend this farray by *num* bits.
Returns a new farray.
"""
sign = self._items[-1]
return self.__class__(self._items + [sign] * num, ftype=self.ftype)
|
def is_path(value):
"""
Checks whether the given value represents a path, i.e. a string which starts with an indicator for absolute or
relative paths.
:param value: Value to check.
:return: ``True``, if the value appears to be representing a path.
:rtype: bool
"""
return value and isinstance(value, six.string_types) and (value[0] == posixpath.sep or value[:2] == CURRENT_DIR)
|
def function[is_path, parameter[value]]:
constant[
Checks whether the given value represents a path, i.e. a string which starts with an indicator for absolute or
relative paths.
:param value: Value to check.
:return: ``True``, if the value appears to be representing a path.
:rtype: bool
]
return[<ast.BoolOp object at 0x7da20c7cb400>]
|
keyword[def] identifier[is_path] ( identifier[value] ):
literal[string]
keyword[return] identifier[value] keyword[and] identifier[isinstance] ( identifier[value] , identifier[six] . identifier[string_types] ) keyword[and] ( identifier[value] [ literal[int] ]== identifier[posixpath] . identifier[sep] keyword[or] identifier[value] [: literal[int] ]== identifier[CURRENT_DIR] )
|
def is_path(value):
"""
Checks whether the given value represents a path, i.e. a string which starts with an indicator for absolute or
relative paths.
:param value: Value to check.
:return: ``True``, if the value appears to be representing a path.
:rtype: bool
"""
return value and isinstance(value, six.string_types) and (value[0] == posixpath.sep or value[:2] == CURRENT_DIR)
|
def _load_aux_image(self, image, auxfile):
"""
Load a fits file (bkg/rms/curve) and make sure that
it is the same shape as the main image.
Parameters
----------
image : :class:`AegeanTools.fits_image.FitsImage`
The main image that has already been loaded.
auxfile : str or HDUList
The auxiliary file to be loaded.
Returns
-------
aux : :class:`AegeanTools.fits_image.FitsImage`
The loaded image.
"""
auximg = FitsImage(auxfile, beam=self.global_data.beam).get_pixels()
if auximg.shape != image.get_pixels().shape:
self.log.error("file {0} is not the same size as the image map".format(auxfile))
self.log.error("{0}= {1}, image = {2}".format(auxfile, auximg.shape, image.get_pixels().shape))
sys.exit(1)
return auximg
|
def function[_load_aux_image, parameter[self, image, auxfile]]:
constant[
Load a fits file (bkg/rms/curve) and make sure that
it is the same shape as the main image.
Parameters
----------
image : :class:`AegeanTools.fits_image.FitsImage`
The main image that has already been loaded.
auxfile : str or HDUList
The auxiliary file to be loaded.
Returns
-------
aux : :class:`AegeanTools.fits_image.FitsImage`
The loaded image.
]
variable[auximg] assign[=] call[call[name[FitsImage], parameter[name[auxfile]]].get_pixels, parameter[]]
if compare[name[auximg].shape not_equal[!=] call[name[image].get_pixels, parameter[]].shape] begin[:]
call[name[self].log.error, parameter[call[constant[file {0} is not the same size as the image map].format, parameter[name[auxfile]]]]]
call[name[self].log.error, parameter[call[constant[{0}= {1}, image = {2}].format, parameter[name[auxfile], name[auximg].shape, call[name[image].get_pixels, parameter[]].shape]]]]
call[name[sys].exit, parameter[constant[1]]]
return[name[auximg]]
|
keyword[def] identifier[_load_aux_image] ( identifier[self] , identifier[image] , identifier[auxfile] ):
literal[string]
identifier[auximg] = identifier[FitsImage] ( identifier[auxfile] , identifier[beam] = identifier[self] . identifier[global_data] . identifier[beam] ). identifier[get_pixels] ()
keyword[if] identifier[auximg] . identifier[shape] != identifier[image] . identifier[get_pixels] (). identifier[shape] :
identifier[self] . identifier[log] . identifier[error] ( literal[string] . identifier[format] ( identifier[auxfile] ))
identifier[self] . identifier[log] . identifier[error] ( literal[string] . identifier[format] ( identifier[auxfile] , identifier[auximg] . identifier[shape] , identifier[image] . identifier[get_pixels] (). identifier[shape] ))
identifier[sys] . identifier[exit] ( literal[int] )
keyword[return] identifier[auximg]
|
def _load_aux_image(self, image, auxfile):
"""
Load a fits file (bkg/rms/curve) and make sure that
it is the same shape as the main image.
Parameters
----------
image : :class:`AegeanTools.fits_image.FitsImage`
The main image that has already been loaded.
auxfile : str or HDUList
The auxiliary file to be loaded.
Returns
-------
aux : :class:`AegeanTools.fits_image.FitsImage`
The loaded image.
"""
auximg = FitsImage(auxfile, beam=self.global_data.beam).get_pixels()
if auximg.shape != image.get_pixels().shape:
self.log.error('file {0} is not the same size as the image map'.format(auxfile))
self.log.error('{0}= {1}, image = {2}'.format(auxfile, auximg.shape, image.get_pixels().shape))
sys.exit(1) # depends on [control=['if'], data=[]]
return auximg
|
def _iterContours(self, **kwargs):
"""
This must return an iterator that returns wrapped contours.
Subclasses may override this method.
"""
count = len(self)
index = 0
while count:
yield self[index]
count -= 1
index += 1
|
def function[_iterContours, parameter[self]]:
constant[
This must return an iterator that returns wrapped contours.
Subclasses may override this method.
]
variable[count] assign[=] call[name[len], parameter[name[self]]]
variable[index] assign[=] constant[0]
while name[count] begin[:]
<ast.Yield object at 0x7da2041dbaf0>
<ast.AugAssign object at 0x7da2041dac50>
<ast.AugAssign object at 0x7da2041d8ee0>
|
keyword[def] identifier[_iterContours] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[count] = identifier[len] ( identifier[self] )
identifier[index] = literal[int]
keyword[while] identifier[count] :
keyword[yield] identifier[self] [ identifier[index] ]
identifier[count] -= literal[int]
identifier[index] += literal[int]
|
def _iterContours(self, **kwargs):
"""
This must return an iterator that returns wrapped contours.
Subclasses may override this method.
"""
count = len(self)
index = 0
while count:
yield self[index]
count -= 1
index += 1 # depends on [control=['while'], data=[]]
|
def _set_instance(self, v, load=False):
"""
Setter method for instance, mapped from YANG variable /interface/port_channel/spanning_tree/instance (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_instance is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_instance() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("id",instance.instance, yang_name="instance", rest_name="instance", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='id', extensions={u'tailf-common': {u'info': u'Instance ID', u'cli-suppress-mode': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None, u'display-when': u'(/protocol/spanning-tree/mstp)', u'callpoint': u'po-inst-stp-config'}}), is_container='list', yang_name="instance", rest_name="instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Instance ID', u'cli-suppress-mode': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None, u'display-when': u'(/protocol/spanning-tree/mstp)', u'callpoint': u'po-inst-stp-config'}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """instance must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("id",instance.instance, yang_name="instance", rest_name="instance", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='id', extensions={u'tailf-common': {u'info': u'Instance ID', u'cli-suppress-mode': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None, u'display-when': u'(/protocol/spanning-tree/mstp)', u'callpoint': u'po-inst-stp-config'}}), is_container='list', yang_name="instance", rest_name="instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Instance ID', u'cli-suppress-mode': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None, u'display-when': u'(/protocol/spanning-tree/mstp)', u'callpoint': u'po-inst-stp-config'}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='list', is_config=True)""",
})
self.__instance = t
if hasattr(self, '_set'):
self._set()
|
def function[_set_instance, parameter[self, v, load]]:
constant[
Setter method for instance, mapped from YANG variable /interface/port_channel/spanning_tree/instance (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_instance is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_instance() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da1b26ad6c0>
name[self].__instance assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]]
|
keyword[def] identifier[_set_instance] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[instance] . identifier[instance] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__instance] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] ()
|
def _set_instance(self, v, load=False):
"""
Setter method for instance, mapped from YANG variable /interface/port_channel/spanning_tree/instance (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_instance is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_instance() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=YANGListType('id', instance.instance, yang_name='instance', rest_name='instance', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='id', extensions={u'tailf-common': {u'info': u'Instance ID', u'cli-suppress-mode': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None, u'display-when': u'(/protocol/spanning-tree/mstp)', u'callpoint': u'po-inst-stp-config'}}), is_container='list', yang_name='instance', rest_name='instance', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Instance ID', u'cli-suppress-mode': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None, u'display-when': u'(/protocol/spanning-tree/mstp)', u'callpoint': u'po-inst-stp-config'}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='list', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'instance must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("id",instance.instance, yang_name="instance", rest_name="instance", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'id\', extensions={u\'tailf-common\': {u\'info\': u\'Instance ID\', u\'cli-suppress-mode\': None, u\'cli-incomplete-no\': None, u\'cli-incomplete-command\': None, u\'display-when\': u\'(/protocol/spanning-tree/mstp)\', u\'callpoint\': u\'po-inst-stp-config\'}}), is_container=\'list\', yang_name="instance", rest_name="instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Instance ID\', u\'cli-suppress-mode\': None, u\'cli-incomplete-no\': None, u\'cli-incomplete-command\': None, u\'display-when\': u\'(/protocol/spanning-tree/mstp)\', u\'callpoint\': u\'po-inst-stp-config\'}}, namespace=\'urn:brocade.com:mgmt:brocade-xstp\', defining_module=\'brocade-xstp\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__instance = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]]
|
def getitem_row_array(self, key):
"""Get row data for target labels.
Args:
key: Target numeric indices by which to retrieve data.
Returns:
A new QueryCompiler.
"""
# Convert to list for type checking
key = list(key)
def getitem(df, internal_indices=[]):
return df.iloc[internal_indices]
result = self.data.apply_func_to_select_indices(
1, getitem, key, keep_remaining=False
)
# We can't just set the index to key here because there may be multiple
# instances of a key.
new_index = self.index[key]
return self.__constructor__(result, new_index, self.columns, self._dtype_cache)
|
def function[getitem_row_array, parameter[self, key]]:
constant[Get row data for target labels.
Args:
key: Target numeric indices by which to retrieve data.
Returns:
A new QueryCompiler.
]
variable[key] assign[=] call[name[list], parameter[name[key]]]
def function[getitem, parameter[df, internal_indices]]:
return[call[name[df].iloc][name[internal_indices]]]
variable[result] assign[=] call[name[self].data.apply_func_to_select_indices, parameter[constant[1], name[getitem], name[key]]]
variable[new_index] assign[=] call[name[self].index][name[key]]
return[call[name[self].__constructor__, parameter[name[result], name[new_index], name[self].columns, name[self]._dtype_cache]]]
|
keyword[def] identifier[getitem_row_array] ( identifier[self] , identifier[key] ):
literal[string]
identifier[key] = identifier[list] ( identifier[key] )
keyword[def] identifier[getitem] ( identifier[df] , identifier[internal_indices] =[]):
keyword[return] identifier[df] . identifier[iloc] [ identifier[internal_indices] ]
identifier[result] = identifier[self] . identifier[data] . identifier[apply_func_to_select_indices] (
literal[int] , identifier[getitem] , identifier[key] , identifier[keep_remaining] = keyword[False]
)
identifier[new_index] = identifier[self] . identifier[index] [ identifier[key] ]
keyword[return] identifier[self] . identifier[__constructor__] ( identifier[result] , identifier[new_index] , identifier[self] . identifier[columns] , identifier[self] . identifier[_dtype_cache] )
|
def getitem_row_array(self, key):
"""Get row data for target labels.
Args:
key: Target numeric indices by which to retrieve data.
Returns:
A new QueryCompiler.
"""
# Convert to list for type checking
key = list(key)
def getitem(df, internal_indices=[]):
return df.iloc[internal_indices]
result = self.data.apply_func_to_select_indices(1, getitem, key, keep_remaining=False)
# We can't just set the index to key here because there may be multiple
# instances of a key.
new_index = self.index[key]
return self.__constructor__(result, new_index, self.columns, self._dtype_cache)
|
def add(self, *args):
"""
Add one or more files or URLs to the manifest.
If files contains a glob, it is expanded.
All files are uploaded to SolveBio. The Upload
object is used to fill the manifest.
"""
def _is_url(path):
p = urlparse(path)
return bool(p.scheme)
for path in args:
path = os.path.expanduser(path)
if _is_url(path):
self.add_url(path)
elif os.path.isfile(path):
self.add_file(path)
elif os.path.isdir(path):
for f in os.listdir(path):
self.add_file(f)
elif glob.glob(path):
for f in glob.glob(path):
self.add_file(f)
else:
raise ValueError(
'Path: "{0}" is not a valid format or does not exist. '
'Manifest paths must be files, directories, or URLs.'
.format(path)
)
|
def function[add, parameter[self]]:
constant[
Add one or more files or URLs to the manifest.
If files contains a glob, it is expanded.
All files are uploaded to SolveBio. The Upload
object is used to fill the manifest.
]
def function[_is_url, parameter[path]]:
variable[p] assign[=] call[name[urlparse], parameter[name[path]]]
return[call[name[bool], parameter[name[p].scheme]]]
for taget[name[path]] in starred[name[args]] begin[:]
variable[path] assign[=] call[name[os].path.expanduser, parameter[name[path]]]
if call[name[_is_url], parameter[name[path]]] begin[:]
call[name[self].add_url, parameter[name[path]]]
|
keyword[def] identifier[add] ( identifier[self] ,* identifier[args] ):
literal[string]
keyword[def] identifier[_is_url] ( identifier[path] ):
identifier[p] = identifier[urlparse] ( identifier[path] )
keyword[return] identifier[bool] ( identifier[p] . identifier[scheme] )
keyword[for] identifier[path] keyword[in] identifier[args] :
identifier[path] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[path] )
keyword[if] identifier[_is_url] ( identifier[path] ):
identifier[self] . identifier[add_url] ( identifier[path] )
keyword[elif] identifier[os] . identifier[path] . identifier[isfile] ( identifier[path] ):
identifier[self] . identifier[add_file] ( identifier[path] )
keyword[elif] identifier[os] . identifier[path] . identifier[isdir] ( identifier[path] ):
keyword[for] identifier[f] keyword[in] identifier[os] . identifier[listdir] ( identifier[path] ):
identifier[self] . identifier[add_file] ( identifier[f] )
keyword[elif] identifier[glob] . identifier[glob] ( identifier[path] ):
keyword[for] identifier[f] keyword[in] identifier[glob] . identifier[glob] ( identifier[path] ):
identifier[self] . identifier[add_file] ( identifier[f] )
keyword[else] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string]
. identifier[format] ( identifier[path] )
)
|
def add(self, *args):
"""
Add one or more files or URLs to the manifest.
If files contains a glob, it is expanded.
All files are uploaded to SolveBio. The Upload
object is used to fill the manifest.
"""
def _is_url(path):
p = urlparse(path)
return bool(p.scheme)
for path in args:
path = os.path.expanduser(path)
if _is_url(path):
self.add_url(path) # depends on [control=['if'], data=[]]
elif os.path.isfile(path):
self.add_file(path) # depends on [control=['if'], data=[]]
elif os.path.isdir(path):
for f in os.listdir(path):
self.add_file(f) # depends on [control=['for'], data=['f']] # depends on [control=['if'], data=[]]
elif glob.glob(path):
for f in glob.glob(path):
self.add_file(f) # depends on [control=['for'], data=['f']] # depends on [control=['if'], data=[]]
else:
raise ValueError('Path: "{0}" is not a valid format or does not exist. Manifest paths must be files, directories, or URLs.'.format(path)) # depends on [control=['for'], data=['path']]
|
def open_output_file(self, test_record):
"""Open file based on pattern."""
# Ignore keys for the log filename to not convert larger data structures.
record_dict = data.convert_to_base_types(
test_record, ignore_keys=('code_info', 'phases', 'log_records'))
pattern = self.filename_pattern
if isinstance(pattern, six.string_types) or callable(pattern):
output_file = self.open_file(util.format_string(pattern, record_dict))
try:
yield output_file
finally:
output_file.close()
elif hasattr(self.filename_pattern, 'write'):
yield self.filename_pattern
else:
raise ValueError(
'filename_pattern must be string, callable, or File-like object')
|
def function[open_output_file, parameter[self, test_record]]:
constant[Open file based on pattern.]
variable[record_dict] assign[=] call[name[data].convert_to_base_types, parameter[name[test_record]]]
variable[pattern] assign[=] name[self].filename_pattern
if <ast.BoolOp object at 0x7da1b18dfb20> begin[:]
variable[output_file] assign[=] call[name[self].open_file, parameter[call[name[util].format_string, parameter[name[pattern], name[record_dict]]]]]
<ast.Try object at 0x7da1b18dc880>
|
keyword[def] identifier[open_output_file] ( identifier[self] , identifier[test_record] ):
literal[string]
identifier[record_dict] = identifier[data] . identifier[convert_to_base_types] (
identifier[test_record] , identifier[ignore_keys] =( literal[string] , literal[string] , literal[string] ))
identifier[pattern] = identifier[self] . identifier[filename_pattern]
keyword[if] identifier[isinstance] ( identifier[pattern] , identifier[six] . identifier[string_types] ) keyword[or] identifier[callable] ( identifier[pattern] ):
identifier[output_file] = identifier[self] . identifier[open_file] ( identifier[util] . identifier[format_string] ( identifier[pattern] , identifier[record_dict] ))
keyword[try] :
keyword[yield] identifier[output_file]
keyword[finally] :
identifier[output_file] . identifier[close] ()
keyword[elif] identifier[hasattr] ( identifier[self] . identifier[filename_pattern] , literal[string] ):
keyword[yield] identifier[self] . identifier[filename_pattern]
keyword[else] :
keyword[raise] identifier[ValueError] (
literal[string] )
|
def open_output_file(self, test_record):
"""Open file based on pattern."""
# Ignore keys for the log filename to not convert larger data structures.
record_dict = data.convert_to_base_types(test_record, ignore_keys=('code_info', 'phases', 'log_records'))
pattern = self.filename_pattern
if isinstance(pattern, six.string_types) or callable(pattern):
output_file = self.open_file(util.format_string(pattern, record_dict))
try:
yield output_file # depends on [control=['try'], data=[]]
finally:
output_file.close() # depends on [control=['if'], data=[]]
elif hasattr(self.filename_pattern, 'write'):
yield self.filename_pattern # depends on [control=['if'], data=[]]
else:
raise ValueError('filename_pattern must be string, callable, or File-like object')
|
def run(self):
"""
Process all outgoing packets, until `stop()` is called. Intended to run
in its own thread.
"""
while True:
to_send = self._queue.get()
if to_send is _SHUTDOWN:
break
# If we get a gateway object, connect to it. Otherwise, assume
# it's a bytestring and send it out on the socket.
if isinstance(to_send, Gateway):
self._gateway = to_send
self._connected.set()
else:
if not self._gateway:
raise SendException('no gateway')
dest = (self._gateway.addr, self._gateway.port)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.sendto(to_send, dest)
|
def function[run, parameter[self]]:
constant[
Process all outgoing packets, until `stop()` is called. Intended to run
in its own thread.
]
while constant[True] begin[:]
variable[to_send] assign[=] call[name[self]._queue.get, parameter[]]
if compare[name[to_send] is name[_SHUTDOWN]] begin[:]
break
if call[name[isinstance], parameter[name[to_send], name[Gateway]]] begin[:]
name[self]._gateway assign[=] name[to_send]
call[name[self]._connected.set, parameter[]]
|
keyword[def] identifier[run] ( identifier[self] ):
literal[string]
keyword[while] keyword[True] :
identifier[to_send] = identifier[self] . identifier[_queue] . identifier[get] ()
keyword[if] identifier[to_send] keyword[is] identifier[_SHUTDOWN] :
keyword[break]
keyword[if] identifier[isinstance] ( identifier[to_send] , identifier[Gateway] ):
identifier[self] . identifier[_gateway] = identifier[to_send]
identifier[self] . identifier[_connected] . identifier[set] ()
keyword[else] :
keyword[if] keyword[not] identifier[self] . identifier[_gateway] :
keyword[raise] identifier[SendException] ( literal[string] )
identifier[dest] =( identifier[self] . identifier[_gateway] . identifier[addr] , identifier[self] . identifier[_gateway] . identifier[port] )
identifier[sock] = identifier[socket] . identifier[socket] ( identifier[socket] . identifier[AF_INET] , identifier[socket] . identifier[SOCK_DGRAM] )
identifier[sock] . identifier[sendto] ( identifier[to_send] , identifier[dest] )
|
def run(self):
"""
Process all outgoing packets, until `stop()` is called. Intended to run
in its own thread.
"""
while True:
to_send = self._queue.get()
if to_send is _SHUTDOWN:
break # depends on [control=['if'], data=[]]
# If we get a gateway object, connect to it. Otherwise, assume
# it's a bytestring and send it out on the socket.
if isinstance(to_send, Gateway):
self._gateway = to_send
self._connected.set() # depends on [control=['if'], data=[]]
else:
if not self._gateway:
raise SendException('no gateway') # depends on [control=['if'], data=[]]
dest = (self._gateway.addr, self._gateway.port)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.sendto(to_send, dest) # depends on [control=['while'], data=[]]
|
def x12(self, data: ['SASdata', str] = None,
adjust: str = None,
arima: str = None,
automdl: str = None,
by: [str, list] = None,
check: str = None,
estimate: [str, bool] = True,
event: str = None,
forecast: str = None,
id: [str, list] = None,
identify: str = None,
input: [str, list, dict] = None,
outlier: str = None,
output: [str, bool, 'SASdata'] = None,
pickmdl: str = None,
regression: str = None,
seatsdecomp: str = None,
tables: str = None,
transform: str = None,
userdefined: str = None,
var: str = None,
x11: str = None,
procopts: str = None,
stmtpassthrough: str = None,
**kwargs: dict) -> 'SASresults':
"""
Python method to call the X12 procedure
Documentation link:
https://go.documentation.sas.com/?cdcId=pgmsascdc&cdcVersion=9.4_3.4&docsetId=etsug&docsetTarget=etsug_x12_toc.htm&locale=en
:param data: SASdata object or string. This parameter is required.
:parm adjust: The adjust variable can only be a string type.
:parm arima: The arima variable can only be a string type.
:parm automdl: The automdl variable can only be a string type.
:parm by: The by variable can be a string or list type.
:parm check: The check variable can only be a string type.
:parm estimate: The estimate variable can only be a string type.
:parm event: The event variable can only be a string type.
:parm forecast: The forecast variable can only be a string type.
:parm id: The id variable can be a string or list type.
:parm identify: The identify variable can only be a string type.
:parm input: The input variable can be a string, list or dict type. It refers to the dependent, y, or label variable.
:parm outlier: The outlier variable can only be a string type.
:parm output: The output variable can be a string, boolean or SASdata type. The member name for a boolean is "_output".
:parm pickmdl: The pickmdl variable can only be a string type.
:parm regression: The regression variable can only be a string type.
:parm seatsdecomp: The seatsdecomp variable can only be a string type.
:parm tables: The tables variable can only be a string type.
:parm transform: The transform variable can only be a string type.
:parm userdefined: The userdefined variable can only be a string type.
:parm var: The var variable can only be a string type.
:parm x11: The x11 variable can only be a string type.
:parm procopts: The procopts variable is a generic option available for advanced use. It can only be a string type.
:parm stmtpassthrough: The stmtpassthrough variable is a generic option available for advanced use. It can only be a string type.
:return: SAS Result Object
"""
|
def function[x12, parameter[self, data, adjust, arima, automdl, by, check, estimate, event, forecast, id, identify, input, outlier, output, pickmdl, regression, seatsdecomp, tables, transform, userdefined, var, x11, procopts, stmtpassthrough]]:
constant[
Python method to call the X12 procedure
Documentation link:
https://go.documentation.sas.com/?cdcId=pgmsascdc&cdcVersion=9.4_3.4&docsetId=etsug&docsetTarget=etsug_x12_toc.htm&locale=en
:param data: SASdata object or string. This parameter is required.
:parm adjust: The adjust variable can only be a string type.
:parm arima: The arima variable can only be a string type.
:parm automdl: The automdl variable can only be a string type.
:parm by: The by variable can be a string or list type.
:parm check: The check variable can only be a string type.
:parm estimate: The estimate variable can only be a string type.
:parm event: The event variable can only be a string type.
:parm forecast: The forecast variable can only be a string type.
:parm id: The id variable can be a string or list type.
:parm identify: The identify variable can only be a string type.
:parm input: The input variable can be a string, list or dict type. It refers to the dependent, y, or label variable.
:parm outlier: The outlier variable can only be a string type.
:parm output: The output variable can be a string, boolean or SASdata type. The member name for a boolean is "_output".
:parm pickmdl: The pickmdl variable can only be a string type.
:parm regression: The regression variable can only be a string type.
:parm seatsdecomp: The seatsdecomp variable can only be a string type.
:parm tables: The tables variable can only be a string type.
:parm transform: The transform variable can only be a string type.
:parm userdefined: The userdefined variable can only be a string type.
:parm var: The var variable can only be a string type.
:parm x11: The x11 variable can only be a string type.
:parm procopts: The procopts variable is a generic option available for advanced use. It can only be a string type.
:parm stmtpassthrough: The stmtpassthrough variable is a generic option available for advanced use. It can only be a string type.
:return: SAS Result Object
]
|
keyword[def] identifier[x12] ( identifier[self] , identifier[data] :[ literal[string] , identifier[str] ]= keyword[None] ,
identifier[adjust] : identifier[str] = keyword[None] ,
identifier[arima] : identifier[str] = keyword[None] ,
identifier[automdl] : identifier[str] = keyword[None] ,
identifier[by] :[ identifier[str] , identifier[list] ]= keyword[None] ,
identifier[check] : identifier[str] = keyword[None] ,
identifier[estimate] :[ identifier[str] , identifier[bool] ]= keyword[True] ,
identifier[event] : identifier[str] = keyword[None] ,
identifier[forecast] : identifier[str] = keyword[None] ,
identifier[id] :[ identifier[str] , identifier[list] ]= keyword[None] ,
identifier[identify] : identifier[str] = keyword[None] ,
identifier[input] :[ identifier[str] , identifier[list] , identifier[dict] ]= keyword[None] ,
identifier[outlier] : identifier[str] = keyword[None] ,
identifier[output] :[ identifier[str] , identifier[bool] , literal[string] ]= keyword[None] ,
identifier[pickmdl] : identifier[str] = keyword[None] ,
identifier[regression] : identifier[str] = keyword[None] ,
identifier[seatsdecomp] : identifier[str] = keyword[None] ,
identifier[tables] : identifier[str] = keyword[None] ,
identifier[transform] : identifier[str] = keyword[None] ,
identifier[userdefined] : identifier[str] = keyword[None] ,
identifier[var] : identifier[str] = keyword[None] ,
identifier[x11] : identifier[str] = keyword[None] ,
identifier[procopts] : identifier[str] = keyword[None] ,
identifier[stmtpassthrough] : identifier[str] = keyword[None] ,
** identifier[kwargs] : identifier[dict] )-> literal[string] :
literal[string]
|
def x12(self, data: ['SASdata', str]=None, adjust: str=None, arima: str=None, automdl: str=None, by: [str, list]=None, check: str=None, estimate: [str, bool]=True, event: str=None, forecast: str=None, id: [str, list]=None, identify: str=None, input: [str, list, dict]=None, outlier: str=None, output: [str, bool, 'SASdata']=None, pickmdl: str=None, regression: str=None, seatsdecomp: str=None, tables: str=None, transform: str=None, userdefined: str=None, var: str=None, x11: str=None, procopts: str=None, stmtpassthrough: str=None, **kwargs: dict) -> 'SASresults':
"""
Python method to call the X12 procedure
Documentation link:
https://go.documentation.sas.com/?cdcId=pgmsascdc&cdcVersion=9.4_3.4&docsetId=etsug&docsetTarget=etsug_x12_toc.htm&locale=en
:param data: SASdata object or string. This parameter is required.
:parm adjust: The adjust variable can only be a string type.
:parm arima: The arima variable can only be a string type.
:parm automdl: The automdl variable can only be a string type.
:parm by: The by variable can be a string or list type.
:parm check: The check variable can only be a string type.
:parm estimate: The estimate variable can only be a string type.
:parm event: The event variable can only be a string type.
:parm forecast: The forecast variable can only be a string type.
:parm id: The id variable can be a string or list type.
:parm identify: The identify variable can only be a string type.
:parm input: The input variable can be a string, list or dict type. It refers to the dependent, y, or label variable.
:parm outlier: The outlier variable can only be a string type.
:parm output: The output variable can be a string, boolean or SASdata type. The member name for a boolean is "_output".
:parm pickmdl: The pickmdl variable can only be a string type.
:parm regression: The regression variable can only be a string type.
:parm seatsdecomp: The seatsdecomp variable can only be a string type.
:parm tables: The tables variable can only be a string type.
:parm transform: The transform variable can only be a string type.
:parm userdefined: The userdefined variable can only be a string type.
:parm var: The var variable can only be a string type.
:parm x11: The x11 variable can only be a string type.
:parm procopts: The procopts variable is a generic option available for advanced use. It can only be a string type.
:parm stmtpassthrough: The stmtpassthrough variable is a generic option available for advanced use. It can only be a string type.
:return: SAS Result Object
"""
|
def _expand_wildcard_action(action):
"""
:param action: 'autoscaling:*'
:return: A list of all autoscaling permissions matching the wildcard
"""
if isinstance(action, list):
expanded_actions = []
for item in action:
expanded_actions.extend(_expand_wildcard_action(item))
return expanded_actions
else:
if '*' in action:
expanded = [
expanded_action.lower() for expanded_action in all_permissions if fnmatch.fnmatchcase(
expanded_action.lower(), action.lower()
)
]
# if we get a wildcard for a tech we've never heard of, just return the wildcard
if not expanded:
return [action.lower()]
return expanded
return [action.lower()]
|
def function[_expand_wildcard_action, parameter[action]]:
constant[
:param action: 'autoscaling:*'
:return: A list of all autoscaling permissions matching the wildcard
]
if call[name[isinstance], parameter[name[action], name[list]]] begin[:]
variable[expanded_actions] assign[=] list[[]]
for taget[name[item]] in starred[name[action]] begin[:]
call[name[expanded_actions].extend, parameter[call[name[_expand_wildcard_action], parameter[name[item]]]]]
return[name[expanded_actions]]
|
keyword[def] identifier[_expand_wildcard_action] ( identifier[action] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[action] , identifier[list] ):
identifier[expanded_actions] =[]
keyword[for] identifier[item] keyword[in] identifier[action] :
identifier[expanded_actions] . identifier[extend] ( identifier[_expand_wildcard_action] ( identifier[item] ))
keyword[return] identifier[expanded_actions]
keyword[else] :
keyword[if] literal[string] keyword[in] identifier[action] :
identifier[expanded] =[
identifier[expanded_action] . identifier[lower] () keyword[for] identifier[expanded_action] keyword[in] identifier[all_permissions] keyword[if] identifier[fnmatch] . identifier[fnmatchcase] (
identifier[expanded_action] . identifier[lower] (), identifier[action] . identifier[lower] ()
)
]
keyword[if] keyword[not] identifier[expanded] :
keyword[return] [ identifier[action] . identifier[lower] ()]
keyword[return] identifier[expanded]
keyword[return] [ identifier[action] . identifier[lower] ()]
|
def _expand_wildcard_action(action):
"""
:param action: 'autoscaling:*'
:return: A list of all autoscaling permissions matching the wildcard
"""
if isinstance(action, list):
expanded_actions = []
for item in action:
expanded_actions.extend(_expand_wildcard_action(item)) # depends on [control=['for'], data=['item']]
return expanded_actions # depends on [control=['if'], data=[]]
else:
if '*' in action:
expanded = [expanded_action.lower() for expanded_action in all_permissions if fnmatch.fnmatchcase(expanded_action.lower(), action.lower())]
# if we get a wildcard for a tech we've never heard of, just return the wildcard
if not expanded:
return [action.lower()] # depends on [control=['if'], data=[]]
return expanded # depends on [control=['if'], data=['action']]
return [action.lower()]
|
def get_courses_metadata(self):
"""Gets the metadata for the courses.
return: (osid.Metadata) - metadata for the courses
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.learning.ActivityForm.get_assets_metadata_template
metadata = dict(self._mdata['courses'])
metadata.update({'existing_courses_values': self._my_map['courseIds']})
return Metadata(**metadata)
|
def function[get_courses_metadata, parameter[self]]:
constant[Gets the metadata for the courses.
return: (osid.Metadata) - metadata for the courses
*compliance: mandatory -- This method must be implemented.*
]
variable[metadata] assign[=] call[name[dict], parameter[call[name[self]._mdata][constant[courses]]]]
call[name[metadata].update, parameter[dictionary[[<ast.Constant object at 0x7da18ede7520>], [<ast.Subscript object at 0x7da18ede4bb0>]]]]
return[call[name[Metadata], parameter[]]]
|
keyword[def] identifier[get_courses_metadata] ( identifier[self] ):
literal[string]
identifier[metadata] = identifier[dict] ( identifier[self] . identifier[_mdata] [ literal[string] ])
identifier[metadata] . identifier[update] ({ literal[string] : identifier[self] . identifier[_my_map] [ literal[string] ]})
keyword[return] identifier[Metadata] (** identifier[metadata] )
|
def get_courses_metadata(self):
"""Gets the metadata for the courses.
return: (osid.Metadata) - metadata for the courses
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.learning.ActivityForm.get_assets_metadata_template
metadata = dict(self._mdata['courses'])
metadata.update({'existing_courses_values': self._my_map['courseIds']})
return Metadata(**metadata)
|
def follow(self, delay=1.0):
"""\
Iterator generator that returns lines as data is added to the file.
Based on: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/157035
"""
trailing = True
while 1:
where = self.file.tell()
line = self.file.readline()
if line:
if trailing and line in self.line_terminators:
# This is just the line terminator added to the end of the file
# before a new line, ignore.
trailing = False
continue
if line[-1] in self.line_terminators:
line = line[:-1]
if line[-1:] == '\r\n' and '\r\n' in self.line_terminators:
# found crlf
line = line[:-1]
trailing = False
yield line
else:
trailing = True
self.seek(where)
time.sleep(delay)
|
def function[follow, parameter[self, delay]]:
constant[ Iterator generator that returns lines as data is added to the file.
Based on: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/157035
]
variable[trailing] assign[=] constant[True]
while constant[1] begin[:]
variable[where] assign[=] call[name[self].file.tell, parameter[]]
variable[line] assign[=] call[name[self].file.readline, parameter[]]
if name[line] begin[:]
if <ast.BoolOp object at 0x7da18bc70fa0> begin[:]
variable[trailing] assign[=] constant[False]
continue
if compare[call[name[line]][<ast.UnaryOp object at 0x7da18bc701f0>] in name[self].line_terminators] begin[:]
variable[line] assign[=] call[name[line]][<ast.Slice object at 0x7da20c991c30>]
if <ast.BoolOp object at 0x7da20c990490> begin[:]
variable[line] assign[=] call[name[line]][<ast.Slice object at 0x7da20c991390>]
variable[trailing] assign[=] constant[False]
<ast.Yield object at 0x7da20c993340>
|
keyword[def] identifier[follow] ( identifier[self] , identifier[delay] = literal[int] ):
literal[string]
identifier[trailing] = keyword[True]
keyword[while] literal[int] :
identifier[where] = identifier[self] . identifier[file] . identifier[tell] ()
identifier[line] = identifier[self] . identifier[file] . identifier[readline] ()
keyword[if] identifier[line] :
keyword[if] identifier[trailing] keyword[and] identifier[line] keyword[in] identifier[self] . identifier[line_terminators] :
identifier[trailing] = keyword[False]
keyword[continue]
keyword[if] identifier[line] [- literal[int] ] keyword[in] identifier[self] . identifier[line_terminators] :
identifier[line] = identifier[line] [:- literal[int] ]
keyword[if] identifier[line] [- literal[int] :]== literal[string] keyword[and] literal[string] keyword[in] identifier[self] . identifier[line_terminators] :
identifier[line] = identifier[line] [:- literal[int] ]
identifier[trailing] = keyword[False]
keyword[yield] identifier[line]
keyword[else] :
identifier[trailing] = keyword[True]
identifier[self] . identifier[seek] ( identifier[where] )
identifier[time] . identifier[sleep] ( identifier[delay] )
|
def follow(self, delay=1.0):
""" Iterator generator that returns lines as data is added to the file.
Based on: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/157035
"""
trailing = True
while 1:
where = self.file.tell()
line = self.file.readline()
if line:
if trailing and line in self.line_terminators:
# This is just the line terminator added to the end of the file
# before a new line, ignore.
trailing = False
continue # depends on [control=['if'], data=[]]
if line[-1] in self.line_terminators:
line = line[:-1]
if line[-1:] == '\r\n' and '\r\n' in self.line_terminators:
# found crlf
line = line[:-1] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
trailing = False
yield line # depends on [control=['if'], data=[]]
else:
trailing = True
self.seek(where)
time.sleep(delay) # depends on [control=['while'], data=[]]
|
def _coerce_fields_parameters(self, fields):
"""
Used by values and values_list to get the list of fields to use in the
redis sort command to retrieve fields.
The result is a dict with two lists:
- 'names', with wanted field names
- 'keys', with keys to use in the sort command
When sorting by score, we allow to retrieve the score in values/values_list.
For this, just pass SORTED_SCORE (importable from contrib.collection) as
a name to retrieve.
If finally the result is not sorted by score, the value for this part
will be None
"""
try:
sorted_score_pos = fields.index(SORTED_SCORE)
except:
sorted_score_pos = None
else:
fields = list(fields)
fields.pop(sorted_score_pos)
final_fields = {'names': [], 'keys': []}
for field_name in fields:
if self._field_is_pk(field_name):
final_fields['names'].append(field_name)
final_fields['keys'].append('#')
else:
if not self.cls.has_field(field_name):
raise ValueError("%s if not a valid field to get from collection"
" for %s" % (field_name, self.cls.__name__))
field = self.cls.get_field(field_name)
if isinstance(field, MultiValuesField):
raise ValueError("It's not possible to get a MultiValuesField"
" from a collection (asked: %s" % field_name)
final_fields['names'].append(field_name)
final_fields['keys'].append(field.sort_wildcard)
if sorted_score_pos is not None:
final_fields['names'].insert(sorted_score_pos, SORTED_SCORE)
final_fields['keys'].insert(sorted_score_pos, SORTED_SCORE)
return final_fields
|
def function[_coerce_fields_parameters, parameter[self, fields]]:
constant[
Used by values and values_list to get the list of fields to use in the
redis sort command to retrieve fields.
The result is a dict with two lists:
- 'names', with wanted field names
- 'keys', with keys to use in the sort command
When sorting by score, we allow to retrieve the score in values/values_list.
For this, just pass SORTED_SCORE (importable from contrib.collection) as
a name to retrieve.
If finally the result is not sorted by score, the value for this part
will be None
]
<ast.Try object at 0x7da20c6c7040>
variable[final_fields] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c5b10>, <ast.Constant object at 0x7da20c6c5c90>], [<ast.List object at 0x7da20c6c4970>, <ast.List object at 0x7da20c6c5930>]]
for taget[name[field_name]] in starred[name[fields]] begin[:]
if call[name[self]._field_is_pk, parameter[name[field_name]]] begin[:]
call[call[name[final_fields]][constant[names]].append, parameter[name[field_name]]]
call[call[name[final_fields]][constant[keys]].append, parameter[constant[#]]]
if compare[name[sorted_score_pos] is_not constant[None]] begin[:]
call[call[name[final_fields]][constant[names]].insert, parameter[name[sorted_score_pos], name[SORTED_SCORE]]]
call[call[name[final_fields]][constant[keys]].insert, parameter[name[sorted_score_pos], name[SORTED_SCORE]]]
return[name[final_fields]]
|
keyword[def] identifier[_coerce_fields_parameters] ( identifier[self] , identifier[fields] ):
literal[string]
keyword[try] :
identifier[sorted_score_pos] = identifier[fields] . identifier[index] ( identifier[SORTED_SCORE] )
keyword[except] :
identifier[sorted_score_pos] = keyword[None]
keyword[else] :
identifier[fields] = identifier[list] ( identifier[fields] )
identifier[fields] . identifier[pop] ( identifier[sorted_score_pos] )
identifier[final_fields] ={ literal[string] :[], literal[string] :[]}
keyword[for] identifier[field_name] keyword[in] identifier[fields] :
keyword[if] identifier[self] . identifier[_field_is_pk] ( identifier[field_name] ):
identifier[final_fields] [ literal[string] ]. identifier[append] ( identifier[field_name] )
identifier[final_fields] [ literal[string] ]. identifier[append] ( literal[string] )
keyword[else] :
keyword[if] keyword[not] identifier[self] . identifier[cls] . identifier[has_field] ( identifier[field_name] ):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] %( identifier[field_name] , identifier[self] . identifier[cls] . identifier[__name__] ))
identifier[field] = identifier[self] . identifier[cls] . identifier[get_field] ( identifier[field_name] )
keyword[if] identifier[isinstance] ( identifier[field] , identifier[MultiValuesField] ):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] % identifier[field_name] )
identifier[final_fields] [ literal[string] ]. identifier[append] ( identifier[field_name] )
identifier[final_fields] [ literal[string] ]. identifier[append] ( identifier[field] . identifier[sort_wildcard] )
keyword[if] identifier[sorted_score_pos] keyword[is] keyword[not] keyword[None] :
identifier[final_fields] [ literal[string] ]. identifier[insert] ( identifier[sorted_score_pos] , identifier[SORTED_SCORE] )
identifier[final_fields] [ literal[string] ]. identifier[insert] ( identifier[sorted_score_pos] , identifier[SORTED_SCORE] )
keyword[return] identifier[final_fields]
|
def _coerce_fields_parameters(self, fields):
"""
Used by values and values_list to get the list of fields to use in the
redis sort command to retrieve fields.
The result is a dict with two lists:
- 'names', with wanted field names
- 'keys', with keys to use in the sort command
When sorting by score, we allow to retrieve the score in values/values_list.
For this, just pass SORTED_SCORE (importable from contrib.collection) as
a name to retrieve.
If finally the result is not sorted by score, the value for this part
will be None
"""
try:
sorted_score_pos = fields.index(SORTED_SCORE) # depends on [control=['try'], data=[]]
except:
sorted_score_pos = None # depends on [control=['except'], data=[]]
else:
fields = list(fields)
fields.pop(sorted_score_pos)
final_fields = {'names': [], 'keys': []}
for field_name in fields:
if self._field_is_pk(field_name):
final_fields['names'].append(field_name)
final_fields['keys'].append('#') # depends on [control=['if'], data=[]]
else:
if not self.cls.has_field(field_name):
raise ValueError('%s if not a valid field to get from collection for %s' % (field_name, self.cls.__name__)) # depends on [control=['if'], data=[]]
field = self.cls.get_field(field_name)
if isinstance(field, MultiValuesField):
raise ValueError("It's not possible to get a MultiValuesField from a collection (asked: %s" % field_name) # depends on [control=['if'], data=[]]
final_fields['names'].append(field_name)
final_fields['keys'].append(field.sort_wildcard) # depends on [control=['for'], data=['field_name']]
if sorted_score_pos is not None:
final_fields['names'].insert(sorted_score_pos, SORTED_SCORE)
final_fields['keys'].insert(sorted_score_pos, SORTED_SCORE) # depends on [control=['if'], data=['sorted_score_pos']]
return final_fields
|
def calc_qar_v1(self):
"""Calculate the discharge responses of the different AR processes.
Required derived parameters:
|Nmb|
|AR_Order|
|AR_Coefs|
Required log sequence:
|LogOut|
Calculated flux sequence:
|QAR|
Examples:
Assume there are four response functions, involving zero, one, two,
and three AR coefficients respectively:
>>> from hydpy.models.arma import *
>>> parameterstep()
>>> derived.nmb(4)
>>> derived.ar_order.shape = 4
>>> derived.ar_order = 0, 1, 2, 3
>>> derived.ar_coefs.shape = (4, 3)
>>> logs.logout.shape = (4, 3)
>>> fluxes.qar.shape = 4
The coefficients of the different AR processes are stored in
separate rows of the 2-dimensional parameter `ma_coefs`.
Note the special case of the first AR process of zero order
(first row), which involves no autoregressive memory at all:
>>> derived.ar_coefs = ((nan, nan, nan),
... (1.0, nan, nan),
... (0.8, 0.2, nan),
... (0.5, 0.3, 0.2))
The "memory values" of the different AR processes are defined as
follows (one row for each process). The values of the last time
step are stored in first column, the values of the last time step
in the second column, and so on:
>>> logs.logout = ((nan, nan, nan),
... (1.0, nan, nan),
... (2.0, 3.0, nan),
... (4.0, 5.0, 6.0))
Applying method |calc_qar_v1| is equivalent to calculating the
inner product of the different rows of both matrices:
>>> model.calc_qar_v1()
>>> fluxes.qar
qar(0.0, 1.0, 2.2, 4.7)
"""
der = self.parameters.derived.fastaccess
flu = self.sequences.fluxes.fastaccess
log = self.sequences.logs.fastaccess
for idx in range(der.nmb):
flu.qar[idx] = 0.
for jdx in range(der.ar_order[idx]):
flu.qar[idx] += der.ar_coefs[idx, jdx] * log.logout[idx, jdx]
|
def function[calc_qar_v1, parameter[self]]:
constant[Calculate the discharge responses of the different AR processes.
Required derived parameters:
|Nmb|
|AR_Order|
|AR_Coefs|
Required log sequence:
|LogOut|
Calculated flux sequence:
|QAR|
Examples:
Assume there are four response functions, involving zero, one, two,
and three AR coefficients respectively:
>>> from hydpy.models.arma import *
>>> parameterstep()
>>> derived.nmb(4)
>>> derived.ar_order.shape = 4
>>> derived.ar_order = 0, 1, 2, 3
>>> derived.ar_coefs.shape = (4, 3)
>>> logs.logout.shape = (4, 3)
>>> fluxes.qar.shape = 4
The coefficients of the different AR processes are stored in
separate rows of the 2-dimensional parameter `ma_coefs`.
Note the special case of the first AR process of zero order
(first row), which involves no autoregressive memory at all:
>>> derived.ar_coefs = ((nan, nan, nan),
... (1.0, nan, nan),
... (0.8, 0.2, nan),
... (0.5, 0.3, 0.2))
The "memory values" of the different AR processes are defined as
follows (one row for each process). The values of the last time
step are stored in first column, the values of the last time step
in the second column, and so on:
>>> logs.logout = ((nan, nan, nan),
... (1.0, nan, nan),
... (2.0, 3.0, nan),
... (4.0, 5.0, 6.0))
Applying method |calc_qar_v1| is equivalent to calculating the
inner product of the different rows of both matrices:
>>> model.calc_qar_v1()
>>> fluxes.qar
qar(0.0, 1.0, 2.2, 4.7)
]
variable[der] assign[=] name[self].parameters.derived.fastaccess
variable[flu] assign[=] name[self].sequences.fluxes.fastaccess
variable[log] assign[=] name[self].sequences.logs.fastaccess
for taget[name[idx]] in starred[call[name[range], parameter[name[der].nmb]]] begin[:]
call[name[flu].qar][name[idx]] assign[=] constant[0.0]
for taget[name[jdx]] in starred[call[name[range], parameter[call[name[der].ar_order][name[idx]]]]] begin[:]
<ast.AugAssign object at 0x7da1b0fef160>
|
keyword[def] identifier[calc_qar_v1] ( identifier[self] ):
literal[string]
identifier[der] = identifier[self] . identifier[parameters] . identifier[derived] . identifier[fastaccess]
identifier[flu] = identifier[self] . identifier[sequences] . identifier[fluxes] . identifier[fastaccess]
identifier[log] = identifier[self] . identifier[sequences] . identifier[logs] . identifier[fastaccess]
keyword[for] identifier[idx] keyword[in] identifier[range] ( identifier[der] . identifier[nmb] ):
identifier[flu] . identifier[qar] [ identifier[idx] ]= literal[int]
keyword[for] identifier[jdx] keyword[in] identifier[range] ( identifier[der] . identifier[ar_order] [ identifier[idx] ]):
identifier[flu] . identifier[qar] [ identifier[idx] ]+= identifier[der] . identifier[ar_coefs] [ identifier[idx] , identifier[jdx] ]* identifier[log] . identifier[logout] [ identifier[idx] , identifier[jdx] ]
|
def calc_qar_v1(self):
"""Calculate the discharge responses of the different AR processes.
Required derived parameters:
|Nmb|
|AR_Order|
|AR_Coefs|
Required log sequence:
|LogOut|
Calculated flux sequence:
|QAR|
Examples:
Assume there are four response functions, involving zero, one, two,
and three AR coefficients respectively:
>>> from hydpy.models.arma import *
>>> parameterstep()
>>> derived.nmb(4)
>>> derived.ar_order.shape = 4
>>> derived.ar_order = 0, 1, 2, 3
>>> derived.ar_coefs.shape = (4, 3)
>>> logs.logout.shape = (4, 3)
>>> fluxes.qar.shape = 4
The coefficients of the different AR processes are stored in
separate rows of the 2-dimensional parameter `ma_coefs`.
Note the special case of the first AR process of zero order
(first row), which involves no autoregressive memory at all:
>>> derived.ar_coefs = ((nan, nan, nan),
... (1.0, nan, nan),
... (0.8, 0.2, nan),
... (0.5, 0.3, 0.2))
The "memory values" of the different AR processes are defined as
follows (one row for each process). The values of the last time
step are stored in first column, the values of the last time step
in the second column, and so on:
>>> logs.logout = ((nan, nan, nan),
... (1.0, nan, nan),
... (2.0, 3.0, nan),
... (4.0, 5.0, 6.0))
Applying method |calc_qar_v1| is equivalent to calculating the
inner product of the different rows of both matrices:
>>> model.calc_qar_v1()
>>> fluxes.qar
qar(0.0, 1.0, 2.2, 4.7)
"""
der = self.parameters.derived.fastaccess
flu = self.sequences.fluxes.fastaccess
log = self.sequences.logs.fastaccess
for idx in range(der.nmb):
flu.qar[idx] = 0.0
for jdx in range(der.ar_order[idx]):
flu.qar[idx] += der.ar_coefs[idx, jdx] * log.logout[idx, jdx] # depends on [control=['for'], data=['jdx']] # depends on [control=['for'], data=['idx']]
|
def schedule_forced_svc_check(self, service, check_time):
"""Schedule a forced check on a service
Format of the line that triggers function call::
SCHEDULE_FORCED_SVC_CHECK;<host_name>;<service_description>;<check_time>
:param service: service to check
:type service: alignak.object.service.Service
:param check_time: time to check
:type check_time: int
:return: None
"""
service.schedule(self.daemon.hosts, self.daemon.services,
self.daemon.timeperiods, self.daemon.macromodulations,
self.daemon.checkmodulations, self.daemon.checks,
force=True, force_time=check_time)
self.send_an_element(service.get_update_status_brok())
|
def function[schedule_forced_svc_check, parameter[self, service, check_time]]:
constant[Schedule a forced check on a service
Format of the line that triggers function call::
SCHEDULE_FORCED_SVC_CHECK;<host_name>;<service_description>;<check_time>
:param service: service to check
:type service: alignak.object.service.Service
:param check_time: time to check
:type check_time: int
:return: None
]
call[name[service].schedule, parameter[name[self].daemon.hosts, name[self].daemon.services, name[self].daemon.timeperiods, name[self].daemon.macromodulations, name[self].daemon.checkmodulations, name[self].daemon.checks]]
call[name[self].send_an_element, parameter[call[name[service].get_update_status_brok, parameter[]]]]
|
keyword[def] identifier[schedule_forced_svc_check] ( identifier[self] , identifier[service] , identifier[check_time] ):
literal[string]
identifier[service] . identifier[schedule] ( identifier[self] . identifier[daemon] . identifier[hosts] , identifier[self] . identifier[daemon] . identifier[services] ,
identifier[self] . identifier[daemon] . identifier[timeperiods] , identifier[self] . identifier[daemon] . identifier[macromodulations] ,
identifier[self] . identifier[daemon] . identifier[checkmodulations] , identifier[self] . identifier[daemon] . identifier[checks] ,
identifier[force] = keyword[True] , identifier[force_time] = identifier[check_time] )
identifier[self] . identifier[send_an_element] ( identifier[service] . identifier[get_update_status_brok] ())
|
def schedule_forced_svc_check(self, service, check_time):
"""Schedule a forced check on a service
Format of the line that triggers function call::
SCHEDULE_FORCED_SVC_CHECK;<host_name>;<service_description>;<check_time>
:param service: service to check
:type service: alignak.object.service.Service
:param check_time: time to check
:type check_time: int
:return: None
"""
service.schedule(self.daemon.hosts, self.daemon.services, self.daemon.timeperiods, self.daemon.macromodulations, self.daemon.checkmodulations, self.daemon.checks, force=True, force_time=check_time)
self.send_an_element(service.get_update_status_brok())
|
def get_hull_energy(self, comp):
"""
Args:
comp (Composition): Input composition
Returns:
Energy of lowest energy equilibrium at desired composition. Not
normalized by atoms, i.e. E(Li4O2) = 2 * E(Li2O)
"""
e = 0
for k, v in self.get_decomposition(comp).items():
e += k.energy_per_atom * v
return e * comp.num_atoms
|
def function[get_hull_energy, parameter[self, comp]]:
constant[
Args:
comp (Composition): Input composition
Returns:
Energy of lowest energy equilibrium at desired composition. Not
normalized by atoms, i.e. E(Li4O2) = 2 * E(Li2O)
]
variable[e] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b21ae350>, <ast.Name object at 0x7da1b21ad7e0>]]] in starred[call[call[name[self].get_decomposition, parameter[name[comp]]].items, parameter[]]] begin[:]
<ast.AugAssign object at 0x7da1b21adcc0>
return[binary_operation[name[e] * name[comp].num_atoms]]
|
keyword[def] identifier[get_hull_energy] ( identifier[self] , identifier[comp] ):
literal[string]
identifier[e] = literal[int]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[get_decomposition] ( identifier[comp] ). identifier[items] ():
identifier[e] += identifier[k] . identifier[energy_per_atom] * identifier[v]
keyword[return] identifier[e] * identifier[comp] . identifier[num_atoms]
|
def get_hull_energy(self, comp):
"""
Args:
comp (Composition): Input composition
Returns:
Energy of lowest energy equilibrium at desired composition. Not
normalized by atoms, i.e. E(Li4O2) = 2 * E(Li2O)
"""
e = 0
for (k, v) in self.get_decomposition(comp).items():
e += k.energy_per_atom * v # depends on [control=['for'], data=[]]
return e * comp.num_atoms
|
def create(self, name, serviceId, timezone, description, enabled):
"""
Create a connector for the organization in the Watson IoT Platform.
The connector must reference the target service that the Watson IoT Platform will store the IoT data in.
Parameters:
- name (string) - Name of the service
- serviceId (string) - must be either eventstreams or cloudant
- timezone (string) -
- description (string) - description of the service
- enabled (boolean) - enabled
Throws APIException on failure
"""
connector = {
"name": name,
"description": description,
"serviceId": serviceId,
"timezone": timezone,
"enabled": enabled,
}
url = "api/v0002/historianconnectors"
r = self._apiClient.post(url, data=connector)
if r.status_code == 201:
return Connector(apiClient=self._apiClient, **r.json())
else:
raise ApiException(r)
|
def function[create, parameter[self, name, serviceId, timezone, description, enabled]]:
constant[
Create a connector for the organization in the Watson IoT Platform.
The connector must reference the target service that the Watson IoT Platform will store the IoT data in.
Parameters:
- name (string) - Name of the service
- serviceId (string) - must be either eventstreams or cloudant
- timezone (string) -
- description (string) - description of the service
- enabled (boolean) - enabled
Throws APIException on failure
]
variable[connector] assign[=] dictionary[[<ast.Constant object at 0x7da1b025e710>, <ast.Constant object at 0x7da1b025feb0>, <ast.Constant object at 0x7da1b025d870>, <ast.Constant object at 0x7da1b025c820>, <ast.Constant object at 0x7da1b025d1b0>], [<ast.Name object at 0x7da1b025d840>, <ast.Name object at 0x7da1b025cb20>, <ast.Name object at 0x7da1b025ca30>, <ast.Name object at 0x7da1b025d690>, <ast.Name object at 0x7da1b025fe50>]]
variable[url] assign[=] constant[api/v0002/historianconnectors]
variable[r] assign[=] call[name[self]._apiClient.post, parameter[name[url]]]
if compare[name[r].status_code equal[==] constant[201]] begin[:]
return[call[name[Connector], parameter[]]]
|
keyword[def] identifier[create] ( identifier[self] , identifier[name] , identifier[serviceId] , identifier[timezone] , identifier[description] , identifier[enabled] ):
literal[string]
identifier[connector] ={
literal[string] : identifier[name] ,
literal[string] : identifier[description] ,
literal[string] : identifier[serviceId] ,
literal[string] : identifier[timezone] ,
literal[string] : identifier[enabled] ,
}
identifier[url] = literal[string]
identifier[r] = identifier[self] . identifier[_apiClient] . identifier[post] ( identifier[url] , identifier[data] = identifier[connector] )
keyword[if] identifier[r] . identifier[status_code] == literal[int] :
keyword[return] identifier[Connector] ( identifier[apiClient] = identifier[self] . identifier[_apiClient] ,** identifier[r] . identifier[json] ())
keyword[else] :
keyword[raise] identifier[ApiException] ( identifier[r] )
|
def create(self, name, serviceId, timezone, description, enabled):
"""
Create a connector for the organization in the Watson IoT Platform.
The connector must reference the target service that the Watson IoT Platform will store the IoT data in.
Parameters:
- name (string) - Name of the service
- serviceId (string) - must be either eventstreams or cloudant
- timezone (string) -
- description (string) - description of the service
- enabled (boolean) - enabled
Throws APIException on failure
"""
connector = {'name': name, 'description': description, 'serviceId': serviceId, 'timezone': timezone, 'enabled': enabled}
url = 'api/v0002/historianconnectors'
r = self._apiClient.post(url, data=connector)
if r.status_code == 201:
return Connector(apiClient=self._apiClient, **r.json()) # depends on [control=['if'], data=[]]
else:
raise ApiException(r)
|
def parse(self, extent, length, fp, log_block_size):
# type: (int, int, BinaryIO, int) -> None
'''
Parse an existing Inode. This just saves off the extent for later use.
Parameters:
extent - The original extent that the data lives at.
Returns:
Nothing.
'''
if self._initialized:
raise pycdlibexception.PyCdlibInternalError('Inode is already initialized')
self.orig_extent_loc = extent
self.data_length = length
self.data_fp = fp
self.manage_fp = False
self.fp_offset = extent * log_block_size
self.original_data_location = self.DATA_ON_ORIGINAL_ISO
self._initialized = True
|
def function[parse, parameter[self, extent, length, fp, log_block_size]]:
constant[
Parse an existing Inode. This just saves off the extent for later use.
Parameters:
extent - The original extent that the data lives at.
Returns:
Nothing.
]
if name[self]._initialized begin[:]
<ast.Raise object at 0x7da18bc736a0>
name[self].orig_extent_loc assign[=] name[extent]
name[self].data_length assign[=] name[length]
name[self].data_fp assign[=] name[fp]
name[self].manage_fp assign[=] constant[False]
name[self].fp_offset assign[=] binary_operation[name[extent] * name[log_block_size]]
name[self].original_data_location assign[=] name[self].DATA_ON_ORIGINAL_ISO
name[self]._initialized assign[=] constant[True]
|
keyword[def] identifier[parse] ( identifier[self] , identifier[extent] , identifier[length] , identifier[fp] , identifier[log_block_size] ):
literal[string]
keyword[if] identifier[self] . identifier[_initialized] :
keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInternalError] ( literal[string] )
identifier[self] . identifier[orig_extent_loc] = identifier[extent]
identifier[self] . identifier[data_length] = identifier[length]
identifier[self] . identifier[data_fp] = identifier[fp]
identifier[self] . identifier[manage_fp] = keyword[False]
identifier[self] . identifier[fp_offset] = identifier[extent] * identifier[log_block_size]
identifier[self] . identifier[original_data_location] = identifier[self] . identifier[DATA_ON_ORIGINAL_ISO]
identifier[self] . identifier[_initialized] = keyword[True]
|
def parse(self, extent, length, fp, log_block_size):
# type: (int, int, BinaryIO, int) -> None
'\n Parse an existing Inode. This just saves off the extent for later use.\n\n Parameters:\n extent - The original extent that the data lives at.\n Returns:\n Nothing.\n '
if self._initialized:
raise pycdlibexception.PyCdlibInternalError('Inode is already initialized') # depends on [control=['if'], data=[]]
self.orig_extent_loc = extent
self.data_length = length
self.data_fp = fp
self.manage_fp = False
self.fp_offset = extent * log_block_size
self.original_data_location = self.DATA_ON_ORIGINAL_ISO
self._initialized = True
|
def _process_replacements(self, html):
""" Do raw string replacements on :param:`html`. """
if self.config.find_string:
for find_pattern, replace_pattern in self.config.replace_patterns:
html = html.replace(find_pattern, replace_pattern)
LOGGER.info(u'Done replacements.',
extra={'siteconfig': self.config.host})
return html
|
def function[_process_replacements, parameter[self, html]]:
constant[ Do raw string replacements on :param:`html`. ]
if name[self].config.find_string begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b0aa6ec0>, <ast.Name object at 0x7da1b0aa6470>]]] in starred[name[self].config.replace_patterns] begin[:]
variable[html] assign[=] call[name[html].replace, parameter[name[find_pattern], name[replace_pattern]]]
call[name[LOGGER].info, parameter[constant[Done replacements.]]]
return[name[html]]
|
keyword[def] identifier[_process_replacements] ( identifier[self] , identifier[html] ):
literal[string]
keyword[if] identifier[self] . identifier[config] . identifier[find_string] :
keyword[for] identifier[find_pattern] , identifier[replace_pattern] keyword[in] identifier[self] . identifier[config] . identifier[replace_patterns] :
identifier[html] = identifier[html] . identifier[replace] ( identifier[find_pattern] , identifier[replace_pattern] )
identifier[LOGGER] . identifier[info] ( literal[string] ,
identifier[extra] ={ literal[string] : identifier[self] . identifier[config] . identifier[host] })
keyword[return] identifier[html]
|
def _process_replacements(self, html):
""" Do raw string replacements on :param:`html`. """
if self.config.find_string:
for (find_pattern, replace_pattern) in self.config.replace_patterns:
html = html.replace(find_pattern, replace_pattern) # depends on [control=['for'], data=[]]
LOGGER.info(u'Done replacements.', extra={'siteconfig': self.config.host}) # depends on [control=['if'], data=[]]
return html
|
def MWAPIWrapper(func):
"""
MWAPIWrapper 控制API请求异常的装饰器
根据requests库定义的异常来控制请求返回的意外情况
"""
@wraps(func)
def wrapper(*args, **kwargs):
self = args[0]
try:
result = func(*args, **kwargs)
return result
except ConnectionError:
err_title = '连接错误'
err_message = '[{name}] 连接错误,网络状况异常'.format(name=func.__name__, host=self.host)
except HTTPError as e:
err_title = 'HTTP响应错误'
err_message = '[{name}] 目标服务器"{host}" HTTP响应错误({detail})'.format(name=func.__name__,
host=self.host, detail=e.message)
except Timeout:
err_title = '请求超时'
err_message = '[{name}] 目标服务器"{host}" 请求超时'.format(name=func.__name__, host=self.host)
except TooManyRedirects:
err_title = '过多重定向'
err_message = '[{name}] 目标服务器"{host}" 过多重定向'.format(name=func.__name__, host=self.host)
except ValueError as e:
if e.message.find('JSON') >= 0:
err_title = 'API JSON返回值异常'
err_message = '[{name}] 目标服务器"{host}" API JSON返回值异常'.format(name=func.__name__, host=self.host)
else:
err_title = '值错误'
err_message = '[{name}] 存在ValueError:{msg}'.format(name=func.__name__, msg=e.message)
self.log.error(e, exc_info=True)
except KeyError as e:
err_title = '键错误'
err_message = '[{name}] 存在KeyError,错误键为{key}'.format(name=func.__name__, key=e.message)
self.log.error(e, exc_info=True)
except MWAPIException as e:
err_title = 'Mediawiki API 异常'
err_message = e.message
self.log.error('%s:%s', err_title, err_message)
return {'success': False, 'errtitle': err_title, 'errmsg': err_message}
return wrapper
|
def function[MWAPIWrapper, parameter[func]]:
constant[
MWAPIWrapper 控制API请求异常的装饰器
根据requests库定义的异常来控制请求返回的意外情况
]
def function[wrapper, parameter[]]:
variable[self] assign[=] call[name[args]][constant[0]]
<ast.Try object at 0x7da2047e9390>
call[name[self].log.error, parameter[constant[%s:%s], name[err_title], name[err_message]]]
return[dictionary[[<ast.Constant object at 0x7da2041da560>, <ast.Constant object at 0x7da2041db190>, <ast.Constant object at 0x7da2041d90f0>], [<ast.Constant object at 0x7da2041d8400>, <ast.Name object at 0x7da2041dbfd0>, <ast.Name object at 0x7da2041d9bd0>]]]
return[name[wrapper]]
|
keyword[def] identifier[MWAPIWrapper] ( identifier[func] ):
literal[string]
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
identifier[self] = identifier[args] [ literal[int] ]
keyword[try] :
identifier[result] = identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[result]
keyword[except] identifier[ConnectionError] :
identifier[err_title] = literal[string]
identifier[err_message] = literal[string] . identifier[format] ( identifier[name] = identifier[func] . identifier[__name__] , identifier[host] = identifier[self] . identifier[host] )
keyword[except] identifier[HTTPError] keyword[as] identifier[e] :
identifier[err_title] = literal[string]
identifier[err_message] = literal[string] . identifier[format] ( identifier[name] = identifier[func] . identifier[__name__] ,
identifier[host] = identifier[self] . identifier[host] , identifier[detail] = identifier[e] . identifier[message] )
keyword[except] identifier[Timeout] :
identifier[err_title] = literal[string]
identifier[err_message] = literal[string] . identifier[format] ( identifier[name] = identifier[func] . identifier[__name__] , identifier[host] = identifier[self] . identifier[host] )
keyword[except] identifier[TooManyRedirects] :
identifier[err_title] = literal[string]
identifier[err_message] = literal[string] . identifier[format] ( identifier[name] = identifier[func] . identifier[__name__] , identifier[host] = identifier[self] . identifier[host] )
keyword[except] identifier[ValueError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[message] . identifier[find] ( literal[string] )>= literal[int] :
identifier[err_title] = literal[string]
identifier[err_message] = literal[string] . identifier[format] ( identifier[name] = identifier[func] . identifier[__name__] , identifier[host] = identifier[self] . identifier[host] )
keyword[else] :
identifier[err_title] = literal[string]
identifier[err_message] = literal[string] . identifier[format] ( identifier[name] = identifier[func] . identifier[__name__] , identifier[msg] = identifier[e] . identifier[message] )
identifier[self] . identifier[log] . identifier[error] ( identifier[e] , identifier[exc_info] = keyword[True] )
keyword[except] identifier[KeyError] keyword[as] identifier[e] :
identifier[err_title] = literal[string]
identifier[err_message] = literal[string] . identifier[format] ( identifier[name] = identifier[func] . identifier[__name__] , identifier[key] = identifier[e] . identifier[message] )
identifier[self] . identifier[log] . identifier[error] ( identifier[e] , identifier[exc_info] = keyword[True] )
keyword[except] identifier[MWAPIException] keyword[as] identifier[e] :
identifier[err_title] = literal[string]
identifier[err_message] = identifier[e] . identifier[message]
identifier[self] . identifier[log] . identifier[error] ( literal[string] , identifier[err_title] , identifier[err_message] )
keyword[return] { literal[string] : keyword[False] , literal[string] : identifier[err_title] , literal[string] : identifier[err_message] }
keyword[return] identifier[wrapper]
|
def MWAPIWrapper(func):
"""
MWAPIWrapper 控制API请求异常的装饰器
根据requests库定义的异常来控制请求返回的意外情况
"""
@wraps(func)
def wrapper(*args, **kwargs):
self = args[0]
try:
result = func(*args, **kwargs)
return result # depends on [control=['try'], data=[]]
except ConnectionError:
err_title = '连接错误'
err_message = '[{name}] 连接错误,网络状况异常'.format(name=func.__name__, host=self.host) # depends on [control=['except'], data=[]]
except HTTPError as e:
err_title = 'HTTP响应错误'
err_message = '[{name}] 目标服务器"{host}" HTTP响应错误({detail})'.format(name=func.__name__, host=self.host, detail=e.message) # depends on [control=['except'], data=['e']]
except Timeout:
err_title = '请求超时'
err_message = '[{name}] 目标服务器"{host}" 请求超时'.format(name=func.__name__, host=self.host) # depends on [control=['except'], data=[]]
except TooManyRedirects:
err_title = '过多重定向'
err_message = '[{name}] 目标服务器"{host}" 过多重定向'.format(name=func.__name__, host=self.host) # depends on [control=['except'], data=[]]
except ValueError as e:
if e.message.find('JSON') >= 0:
err_title = 'API JSON返回值异常'
err_message = '[{name}] 目标服务器"{host}" API JSON返回值异常'.format(name=func.__name__, host=self.host) # depends on [control=['if'], data=[]]
else:
err_title = '值错误'
err_message = '[{name}] 存在ValueError:{msg}'.format(name=func.__name__, msg=e.message)
self.log.error(e, exc_info=True) # depends on [control=['except'], data=['e']]
except KeyError as e:
err_title = '键错误'
err_message = '[{name}] 存在KeyError,错误键为{key}'.format(name=func.__name__, key=e.message)
self.log.error(e, exc_info=True) # depends on [control=['except'], data=['e']]
except MWAPIException as e:
err_title = 'Mediawiki API 异常'
err_message = e.message # depends on [control=['except'], data=['e']]
self.log.error('%s:%s', err_title, err_message)
return {'success': False, 'errtitle': err_title, 'errmsg': err_message}
return wrapper
|
def pipeline(self, source=None, phase='build', ps=None):
"""
Construct the ETL pipeline for all phases. Segments that are not used for the current phase
are filtered out later.
:param source: A source object, or a source string name
:return: an etl Pipeline
"""
from ambry.etl.pipeline import Pipeline, PartitionWriter
from ambry.dbexceptions import ConfigurationError
if source:
source = self.source(source) if isinstance(source, string_types) else source
else:
source = None
sf, sp = self.source_pipe(source, ps) if source else (None, None)
pl = Pipeline(self, source=sp)
# Get the default pipeline, from the config at the head of this file.
try:
phase_config = self.default_pipelines[phase]
except KeyError:
phase_config = None # Ok for non-conventional pipe names
if phase_config:
pl.configure(phase_config)
# Find the pipe configuration, from the metadata
pipe_config = None
pipe_name = None
if source and source.pipeline:
pipe_name = source.pipeline
try:
pipe_config = self.metadata.pipelines[pipe_name]
except KeyError:
raise ConfigurationError("Pipeline '{}' declared in source '{}', but not found in metadata"
.format(source.pipeline, source.name))
else:
pipe_name, pipe_config = self._find_pipeline(source, phase)
if pipe_name:
pl.name = pipe_name
else:
pl.name = phase
pl.phase = phase
# The pipe_config can either be a list, in which case it is a list of pipe pipes for the
# augment segment or it could be a dict, in which case each is a list of pipes
# for the named segments.
def apply_config(pl, pipe_config):
if isinstance(pipe_config, (list, tuple)):
# Just convert it to dict form for the next section
# PartitionWriters are always moved to the 'store' section
store, body = [], []
for pipe in pipe_config:
store.append(pipe) if isinstance(pipe, PartitionWriter) else body.append(pipe)
pipe_config = dict(body=body, store=store)
if pipe_config:
pl.configure(pipe_config)
apply_config(pl, pipe_config)
# One more time, for the configuration for 'all' phases
if 'all' in self.metadata.pipelines:
apply_config(pl, self.metadata.pipelines['all'])
# Allows developer to over ride pipe configuration in code
self.edit_pipeline(pl)
try:
pl.dest_table = source.dest_table_name
pl.source_table = source.source_table.name
pl.source_name = source.name
except AttributeError:
pl.dest_table = None
return pl
|
def function[pipeline, parameter[self, source, phase, ps]]:
constant[
Construct the ETL pipeline for all phases. Segments that are not used for the current phase
are filtered out later.
:param source: A source object, or a source string name
:return: an etl Pipeline
]
from relative_module[ambry.etl.pipeline] import module[Pipeline], module[PartitionWriter]
from relative_module[ambry.dbexceptions] import module[ConfigurationError]
if name[source] begin[:]
variable[source] assign[=] <ast.IfExp object at 0x7da18f00ea10>
<ast.Tuple object at 0x7da18f00d2d0> assign[=] <ast.IfExp object at 0x7da18f00e230>
variable[pl] assign[=] call[name[Pipeline], parameter[name[self]]]
<ast.Try object at 0x7da18f00e830>
if name[phase_config] begin[:]
call[name[pl].configure, parameter[name[phase_config]]]
variable[pipe_config] assign[=] constant[None]
variable[pipe_name] assign[=] constant[None]
if <ast.BoolOp object at 0x7da20e9546a0> begin[:]
variable[pipe_name] assign[=] name[source].pipeline
<ast.Try object at 0x7da20e955ed0>
if name[pipe_name] begin[:]
name[pl].name assign[=] name[pipe_name]
name[pl].phase assign[=] name[phase]
def function[apply_config, parameter[pl, pipe_config]]:
if call[name[isinstance], parameter[name[pipe_config], tuple[[<ast.Name object at 0x7da20e955510>, <ast.Name object at 0x7da20e957310>]]]] begin[:]
<ast.Tuple object at 0x7da20e9578e0> assign[=] tuple[[<ast.List object at 0x7da20e954a90>, <ast.List object at 0x7da20e957dc0>]]
for taget[name[pipe]] in starred[name[pipe_config]] begin[:]
<ast.IfExp object at 0x7da20e955570>
variable[pipe_config] assign[=] call[name[dict], parameter[]]
if name[pipe_config] begin[:]
call[name[pl].configure, parameter[name[pipe_config]]]
call[name[apply_config], parameter[name[pl], name[pipe_config]]]
if compare[constant[all] in name[self].metadata.pipelines] begin[:]
call[name[apply_config], parameter[name[pl], call[name[self].metadata.pipelines][constant[all]]]]
call[name[self].edit_pipeline, parameter[name[pl]]]
<ast.Try object at 0x7da20e955780>
return[name[pl]]
|
keyword[def] identifier[pipeline] ( identifier[self] , identifier[source] = keyword[None] , identifier[phase] = literal[string] , identifier[ps] = keyword[None] ):
literal[string]
keyword[from] identifier[ambry] . identifier[etl] . identifier[pipeline] keyword[import] identifier[Pipeline] , identifier[PartitionWriter]
keyword[from] identifier[ambry] . identifier[dbexceptions] keyword[import] identifier[ConfigurationError]
keyword[if] identifier[source] :
identifier[source] = identifier[self] . identifier[source] ( identifier[source] ) keyword[if] identifier[isinstance] ( identifier[source] , identifier[string_types] ) keyword[else] identifier[source]
keyword[else] :
identifier[source] = keyword[None]
identifier[sf] , identifier[sp] = identifier[self] . identifier[source_pipe] ( identifier[source] , identifier[ps] ) keyword[if] identifier[source] keyword[else] ( keyword[None] , keyword[None] )
identifier[pl] = identifier[Pipeline] ( identifier[self] , identifier[source] = identifier[sp] )
keyword[try] :
identifier[phase_config] = identifier[self] . identifier[default_pipelines] [ identifier[phase] ]
keyword[except] identifier[KeyError] :
identifier[phase_config] = keyword[None]
keyword[if] identifier[phase_config] :
identifier[pl] . identifier[configure] ( identifier[phase_config] )
identifier[pipe_config] = keyword[None]
identifier[pipe_name] = keyword[None]
keyword[if] identifier[source] keyword[and] identifier[source] . identifier[pipeline] :
identifier[pipe_name] = identifier[source] . identifier[pipeline]
keyword[try] :
identifier[pipe_config] = identifier[self] . identifier[metadata] . identifier[pipelines] [ identifier[pipe_name] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[ConfigurationError] ( literal[string]
. identifier[format] ( identifier[source] . identifier[pipeline] , identifier[source] . identifier[name] ))
keyword[else] :
identifier[pipe_name] , identifier[pipe_config] = identifier[self] . identifier[_find_pipeline] ( identifier[source] , identifier[phase] )
keyword[if] identifier[pipe_name] :
identifier[pl] . identifier[name] = identifier[pipe_name]
keyword[else] :
identifier[pl] . identifier[name] = identifier[phase]
identifier[pl] . identifier[phase] = identifier[phase]
keyword[def] identifier[apply_config] ( identifier[pl] , identifier[pipe_config] ):
keyword[if] identifier[isinstance] ( identifier[pipe_config] ,( identifier[list] , identifier[tuple] )):
identifier[store] , identifier[body] =[],[]
keyword[for] identifier[pipe] keyword[in] identifier[pipe_config] :
identifier[store] . identifier[append] ( identifier[pipe] ) keyword[if] identifier[isinstance] ( identifier[pipe] , identifier[PartitionWriter] ) keyword[else] identifier[body] . identifier[append] ( identifier[pipe] )
identifier[pipe_config] = identifier[dict] ( identifier[body] = identifier[body] , identifier[store] = identifier[store] )
keyword[if] identifier[pipe_config] :
identifier[pl] . identifier[configure] ( identifier[pipe_config] )
identifier[apply_config] ( identifier[pl] , identifier[pipe_config] )
keyword[if] literal[string] keyword[in] identifier[self] . identifier[metadata] . identifier[pipelines] :
identifier[apply_config] ( identifier[pl] , identifier[self] . identifier[metadata] . identifier[pipelines] [ literal[string] ])
identifier[self] . identifier[edit_pipeline] ( identifier[pl] )
keyword[try] :
identifier[pl] . identifier[dest_table] = identifier[source] . identifier[dest_table_name]
identifier[pl] . identifier[source_table] = identifier[source] . identifier[source_table] . identifier[name]
identifier[pl] . identifier[source_name] = identifier[source] . identifier[name]
keyword[except] identifier[AttributeError] :
identifier[pl] . identifier[dest_table] = keyword[None]
keyword[return] identifier[pl]
|
def pipeline(self, source=None, phase='build', ps=None):
"""
Construct the ETL pipeline for all phases. Segments that are not used for the current phase
are filtered out later.
:param source: A source object, or a source string name
:return: an etl Pipeline
"""
from ambry.etl.pipeline import Pipeline, PartitionWriter
from ambry.dbexceptions import ConfigurationError
if source:
source = self.source(source) if isinstance(source, string_types) else source # depends on [control=['if'], data=[]]
else:
source = None
(sf, sp) = self.source_pipe(source, ps) if source else (None, None)
pl = Pipeline(self, source=sp)
# Get the default pipeline, from the config at the head of this file.
try:
phase_config = self.default_pipelines[phase] # depends on [control=['try'], data=[]]
except KeyError:
phase_config = None # Ok for non-conventional pipe names # depends on [control=['except'], data=[]]
if phase_config:
pl.configure(phase_config) # depends on [control=['if'], data=[]]
# Find the pipe configuration, from the metadata
pipe_config = None
pipe_name = None
if source and source.pipeline:
pipe_name = source.pipeline
try:
pipe_config = self.metadata.pipelines[pipe_name] # depends on [control=['try'], data=[]]
except KeyError:
raise ConfigurationError("Pipeline '{}' declared in source '{}', but not found in metadata".format(source.pipeline, source.name)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
(pipe_name, pipe_config) = self._find_pipeline(source, phase)
if pipe_name:
pl.name = pipe_name # depends on [control=['if'], data=[]]
else:
pl.name = phase
pl.phase = phase
# The pipe_config can either be a list, in which case it is a list of pipe pipes for the
# augment segment or it could be a dict, in which case each is a list of pipes
# for the named segments.
def apply_config(pl, pipe_config):
if isinstance(pipe_config, (list, tuple)):
# Just convert it to dict form for the next section
# PartitionWriters are always moved to the 'store' section
(store, body) = ([], [])
for pipe in pipe_config:
store.append(pipe) if isinstance(pipe, PartitionWriter) else body.append(pipe) # depends on [control=['for'], data=['pipe']]
pipe_config = dict(body=body, store=store) # depends on [control=['if'], data=[]]
if pipe_config:
pl.configure(pipe_config) # depends on [control=['if'], data=[]]
apply_config(pl, pipe_config)
# One more time, for the configuration for 'all' phases
if 'all' in self.metadata.pipelines:
apply_config(pl, self.metadata.pipelines['all']) # depends on [control=['if'], data=[]]
# Allows developer to over ride pipe configuration in code
self.edit_pipeline(pl)
try:
pl.dest_table = source.dest_table_name
pl.source_table = source.source_table.name
pl.source_name = source.name # depends on [control=['try'], data=[]]
except AttributeError:
pl.dest_table = None # depends on [control=['except'], data=[]]
return pl
|
def select_inputs(self, address, nfees, ntokens, min_confirmations=6):
"""
Selects the inputs for the spool transaction.
Args:
address (str): bitcoin address to select inputs for
nfees (int): number of fees
ntokens (int): number of tokens
min_confirmations (Optional[int]): minimum number of required
confirmations; defaults to 6
"""
unspents = self._t.get(address, min_confirmations=min_confirmations)['unspents']
unspents = [u for u in unspents if u not in self._spents.queue]
if len(unspents) == 0:
raise Exception("No spendable outputs found")
fees = [u for u in unspents if u['amount'] == self.fee][:nfees]
tokens = [u for u in unspents if u['amount'] == self.token][:ntokens]
if len(fees) != nfees or len(tokens) != ntokens:
raise SpoolFundsError("Not enough outputs to spend. Refill your wallet")
if self._spents.qsize() > self.SPENTS_QUEUE_MAXSIZE - (nfees + ntokens):
[self._spents.get() for i in range(self._spents.qsize() + nfees + ntokens - self.SPENTS_QUEUE_MAXSIZE)]
[self._spents.put(fee) for fee in fees]
[self._spents.put(token) for token in tokens]
return fees + tokens
|
def function[select_inputs, parameter[self, address, nfees, ntokens, min_confirmations]]:
constant[
Selects the inputs for the spool transaction.
Args:
address (str): bitcoin address to select inputs for
nfees (int): number of fees
ntokens (int): number of tokens
min_confirmations (Optional[int]): minimum number of required
confirmations; defaults to 6
]
variable[unspents] assign[=] call[call[name[self]._t.get, parameter[name[address]]]][constant[unspents]]
variable[unspents] assign[=] <ast.ListComp object at 0x7da20c7969e0>
if compare[call[name[len], parameter[name[unspents]]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da20c9918a0>
variable[fees] assign[=] call[<ast.ListComp object at 0x7da20c991e40>][<ast.Slice object at 0x7da20c993100>]
variable[tokens] assign[=] call[<ast.ListComp object at 0x7da20c992650>][<ast.Slice object at 0x7da20c9925f0>]
if <ast.BoolOp object at 0x7da20c993b80> begin[:]
<ast.Raise object at 0x7da1b092f4f0>
if compare[call[name[self]._spents.qsize, parameter[]] greater[>] binary_operation[name[self].SPENTS_QUEUE_MAXSIZE - binary_operation[name[nfees] + name[ntokens]]]] begin[:]
<ast.ListComp object at 0x7da1b092e950>
<ast.ListComp object at 0x7da1b092dba0>
<ast.ListComp object at 0x7da1b0a21ea0>
return[binary_operation[name[fees] + name[tokens]]]
|
keyword[def] identifier[select_inputs] ( identifier[self] , identifier[address] , identifier[nfees] , identifier[ntokens] , identifier[min_confirmations] = literal[int] ):
literal[string]
identifier[unspents] = identifier[self] . identifier[_t] . identifier[get] ( identifier[address] , identifier[min_confirmations] = identifier[min_confirmations] )[ literal[string] ]
identifier[unspents] =[ identifier[u] keyword[for] identifier[u] keyword[in] identifier[unspents] keyword[if] identifier[u] keyword[not] keyword[in] identifier[self] . identifier[_spents] . identifier[queue] ]
keyword[if] identifier[len] ( identifier[unspents] )== literal[int] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[fees] =[ identifier[u] keyword[for] identifier[u] keyword[in] identifier[unspents] keyword[if] identifier[u] [ literal[string] ]== identifier[self] . identifier[fee] ][: identifier[nfees] ]
identifier[tokens] =[ identifier[u] keyword[for] identifier[u] keyword[in] identifier[unspents] keyword[if] identifier[u] [ literal[string] ]== identifier[self] . identifier[token] ][: identifier[ntokens] ]
keyword[if] identifier[len] ( identifier[fees] )!= identifier[nfees] keyword[or] identifier[len] ( identifier[tokens] )!= identifier[ntokens] :
keyword[raise] identifier[SpoolFundsError] ( literal[string] )
keyword[if] identifier[self] . identifier[_spents] . identifier[qsize] ()> identifier[self] . identifier[SPENTS_QUEUE_MAXSIZE] -( identifier[nfees] + identifier[ntokens] ):
[ identifier[self] . identifier[_spents] . identifier[get] () keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[_spents] . identifier[qsize] ()+ identifier[nfees] + identifier[ntokens] - identifier[self] . identifier[SPENTS_QUEUE_MAXSIZE] )]
[ identifier[self] . identifier[_spents] . identifier[put] ( identifier[fee] ) keyword[for] identifier[fee] keyword[in] identifier[fees] ]
[ identifier[self] . identifier[_spents] . identifier[put] ( identifier[token] ) keyword[for] identifier[token] keyword[in] identifier[tokens] ]
keyword[return] identifier[fees] + identifier[tokens]
|
def select_inputs(self, address, nfees, ntokens, min_confirmations=6):
"""
Selects the inputs for the spool transaction.
Args:
address (str): bitcoin address to select inputs for
nfees (int): number of fees
ntokens (int): number of tokens
min_confirmations (Optional[int]): minimum number of required
confirmations; defaults to 6
"""
unspents = self._t.get(address, min_confirmations=min_confirmations)['unspents']
unspents = [u for u in unspents if u not in self._spents.queue]
if len(unspents) == 0:
raise Exception('No spendable outputs found') # depends on [control=['if'], data=[]]
fees = [u for u in unspents if u['amount'] == self.fee][:nfees]
tokens = [u for u in unspents if u['amount'] == self.token][:ntokens]
if len(fees) != nfees or len(tokens) != ntokens:
raise SpoolFundsError('Not enough outputs to spend. Refill your wallet') # depends on [control=['if'], data=[]]
if self._spents.qsize() > self.SPENTS_QUEUE_MAXSIZE - (nfees + ntokens):
[self._spents.get() for i in range(self._spents.qsize() + nfees + ntokens - self.SPENTS_QUEUE_MAXSIZE)] # depends on [control=['if'], data=[]]
[self._spents.put(fee) for fee in fees]
[self._spents.put(token) for token in tokens]
return fees + tokens
|
def append_faces(vertices_seq, faces_seq):
"""
Given a sequence of zero- indexed faces and vertices
combine them into a single array of faces and
a single array of vertices.
Parameters
-----------
vertices_seq : (n, ) sequence of (m, d) float
Multiple arrays of verticesvertex arrays
faces_seq : (n, ) sequence of (p, j) int
Zero indexed faces for matching vertices
Returns
----------
vertices : (i, d) float
Points in space
faces : (j, 3) int
Reference vertex indices
"""
# the length of each vertex array
vertices_len = np.array([len(i) for i in vertices_seq])
# how much each group of faces needs to be offset
face_offset = np.append(0, np.cumsum(vertices_len)[:-1])
new_faces = []
for offset, faces in zip(face_offset, faces_seq):
if len(faces) == 0:
continue
# apply the index offset
new_faces.append(faces + offset)
# stack to clean (n, 3) float
vertices = vstack_empty(vertices_seq)
# stack to clean (n, 3) int
faces = vstack_empty(new_faces)
return vertices, faces
|
def function[append_faces, parameter[vertices_seq, faces_seq]]:
constant[
Given a sequence of zero- indexed faces and vertices
combine them into a single array of faces and
a single array of vertices.
Parameters
-----------
vertices_seq : (n, ) sequence of (m, d) float
Multiple arrays of verticesvertex arrays
faces_seq : (n, ) sequence of (p, j) int
Zero indexed faces for matching vertices
Returns
----------
vertices : (i, d) float
Points in space
faces : (j, 3) int
Reference vertex indices
]
variable[vertices_len] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da2044c1ff0>]]
variable[face_offset] assign[=] call[name[np].append, parameter[constant[0], call[call[name[np].cumsum, parameter[name[vertices_len]]]][<ast.Slice object at 0x7da2044c17b0>]]]
variable[new_faces] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da2044c3100>, <ast.Name object at 0x7da2044c1090>]]] in starred[call[name[zip], parameter[name[face_offset], name[faces_seq]]]] begin[:]
if compare[call[name[len], parameter[name[faces]]] equal[==] constant[0]] begin[:]
continue
call[name[new_faces].append, parameter[binary_operation[name[faces] + name[offset]]]]
variable[vertices] assign[=] call[name[vstack_empty], parameter[name[vertices_seq]]]
variable[faces] assign[=] call[name[vstack_empty], parameter[name[new_faces]]]
return[tuple[[<ast.Name object at 0x7da2044c2890>, <ast.Name object at 0x7da2044c3dc0>]]]
|
keyword[def] identifier[append_faces] ( identifier[vertices_seq] , identifier[faces_seq] ):
literal[string]
identifier[vertices_len] = identifier[np] . identifier[array] ([ identifier[len] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[vertices_seq] ])
identifier[face_offset] = identifier[np] . identifier[append] ( literal[int] , identifier[np] . identifier[cumsum] ( identifier[vertices_len] )[:- literal[int] ])
identifier[new_faces] =[]
keyword[for] identifier[offset] , identifier[faces] keyword[in] identifier[zip] ( identifier[face_offset] , identifier[faces_seq] ):
keyword[if] identifier[len] ( identifier[faces] )== literal[int] :
keyword[continue]
identifier[new_faces] . identifier[append] ( identifier[faces] + identifier[offset] )
identifier[vertices] = identifier[vstack_empty] ( identifier[vertices_seq] )
identifier[faces] = identifier[vstack_empty] ( identifier[new_faces] )
keyword[return] identifier[vertices] , identifier[faces]
|
def append_faces(vertices_seq, faces_seq):
"""
Given a sequence of zero- indexed faces and vertices
combine them into a single array of faces and
a single array of vertices.
Parameters
-----------
vertices_seq : (n, ) sequence of (m, d) float
Multiple arrays of verticesvertex arrays
faces_seq : (n, ) sequence of (p, j) int
Zero indexed faces for matching vertices
Returns
----------
vertices : (i, d) float
Points in space
faces : (j, 3) int
Reference vertex indices
"""
# the length of each vertex array
vertices_len = np.array([len(i) for i in vertices_seq])
# how much each group of faces needs to be offset
face_offset = np.append(0, np.cumsum(vertices_len)[:-1])
new_faces = []
for (offset, faces) in zip(face_offset, faces_seq):
if len(faces) == 0:
continue # depends on [control=['if'], data=[]]
# apply the index offset
new_faces.append(faces + offset) # depends on [control=['for'], data=[]]
# stack to clean (n, 3) float
vertices = vstack_empty(vertices_seq)
# stack to clean (n, 3) int
faces = vstack_empty(new_faces)
return (vertices, faces)
|
def from_json(json_data):
"""
Returns a pyalveo.Client given a json string built from the client.to_json() method.
"""
# If we have a string, then decode it, otherwise assume it's already decoded
if isinstance(json_data, str):
data = json.loads(json_data)
else:
data = json_data
oauth_dict = {
'client_id':data.get('oauth',{}).get('client_id',None),
'client_secret':data.get('oauth',{}).get('client_secret',None),
'redirect_url':data.get('oauth',{}).get('redirect_url',None),
}
client = Client(api_key=data.get('api_key',None),
api_url=data.get('api_url',None),
oauth=oauth_dict,
use_cache=data.get('use_cache',None),
cache_dir=data.get('cache_dir',None),
update_cache=data.get('update_cache',None),
verifySSL=data.get('oauth',{}).get('verifySSL',None)
)
client.cache = Cache.from_json(data.get('cache',None))
client.oauth = OAuth2.from_json(data.get('oauth',None))
return client
|
def function[from_json, parameter[json_data]]:
constant[
Returns a pyalveo.Client given a json string built from the client.to_json() method.
]
if call[name[isinstance], parameter[name[json_data], name[str]]] begin[:]
variable[data] assign[=] call[name[json].loads, parameter[name[json_data]]]
variable[oauth_dict] assign[=] dictionary[[<ast.Constant object at 0x7da20c6e7c10>, <ast.Constant object at 0x7da20c6e56f0>, <ast.Constant object at 0x7da20c6e6770>], [<ast.Call object at 0x7da20c6e5570>, <ast.Call object at 0x7da20c6e7d30>, <ast.Call object at 0x7da20c6e4e20>]]
variable[client] assign[=] call[name[Client], parameter[]]
name[client].cache assign[=] call[name[Cache].from_json, parameter[call[name[data].get, parameter[constant[cache], constant[None]]]]]
name[client].oauth assign[=] call[name[OAuth2].from_json, parameter[call[name[data].get, parameter[constant[oauth], constant[None]]]]]
return[name[client]]
|
keyword[def] identifier[from_json] ( identifier[json_data] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[json_data] , identifier[str] ):
identifier[data] = identifier[json] . identifier[loads] ( identifier[json_data] )
keyword[else] :
identifier[data] = identifier[json_data]
identifier[oauth_dict] ={
literal[string] : identifier[data] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] , keyword[None] ),
literal[string] : identifier[data] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] , keyword[None] ),
literal[string] : identifier[data] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] , keyword[None] ),
}
identifier[client] = identifier[Client] ( identifier[api_key] = identifier[data] . identifier[get] ( literal[string] , keyword[None] ),
identifier[api_url] = identifier[data] . identifier[get] ( literal[string] , keyword[None] ),
identifier[oauth] = identifier[oauth_dict] ,
identifier[use_cache] = identifier[data] . identifier[get] ( literal[string] , keyword[None] ),
identifier[cache_dir] = identifier[data] . identifier[get] ( literal[string] , keyword[None] ),
identifier[update_cache] = identifier[data] . identifier[get] ( literal[string] , keyword[None] ),
identifier[verifySSL] = identifier[data] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] , keyword[None] )
)
identifier[client] . identifier[cache] = identifier[Cache] . identifier[from_json] ( identifier[data] . identifier[get] ( literal[string] , keyword[None] ))
identifier[client] . identifier[oauth] = identifier[OAuth2] . identifier[from_json] ( identifier[data] . identifier[get] ( literal[string] , keyword[None] ))
keyword[return] identifier[client]
|
def from_json(json_data):
"""
Returns a pyalveo.Client given a json string built from the client.to_json() method.
"""
# If we have a string, then decode it, otherwise assume it's already decoded
if isinstance(json_data, str):
data = json.loads(json_data) # depends on [control=['if'], data=[]]
else:
data = json_data
oauth_dict = {'client_id': data.get('oauth', {}).get('client_id', None), 'client_secret': data.get('oauth', {}).get('client_secret', None), 'redirect_url': data.get('oauth', {}).get('redirect_url', None)}
client = Client(api_key=data.get('api_key', None), api_url=data.get('api_url', None), oauth=oauth_dict, use_cache=data.get('use_cache', None), cache_dir=data.get('cache_dir', None), update_cache=data.get('update_cache', None), verifySSL=data.get('oauth', {}).get('verifySSL', None))
client.cache = Cache.from_json(data.get('cache', None))
client.oauth = OAuth2.from_json(data.get('oauth', None))
return client
|
def mousePressEvent( self, event ):
"""
Handles the mouse press event.
:param event | <QMouseEvent>
"""
scene_point = self.mapToScene(event.pos())
date = self.scene().dateAt(scene_point)
date_time = self.scene().dateTimeAt(scene_point)
item = self.scene().itemAt(scene_point)
if ( not isinstance(item, XCalendarItem) ):
item = None
# checks to see if the signals are blocked
if ( not self.signalsBlocked() ):
if ( item ):
self.calendarItemClicked.emit(item)
elif ( date_time.isValid() ):
self.dateTimeClicked.emit(date_time)
elif ( date.isValid() ):
self.dateClicked.emit(date)
return super(XCalendarWidget, self).mousePressEvent(event)
|
def function[mousePressEvent, parameter[self, event]]:
constant[
Handles the mouse press event.
:param event | <QMouseEvent>
]
variable[scene_point] assign[=] call[name[self].mapToScene, parameter[call[name[event].pos, parameter[]]]]
variable[date] assign[=] call[call[name[self].scene, parameter[]].dateAt, parameter[name[scene_point]]]
variable[date_time] assign[=] call[call[name[self].scene, parameter[]].dateTimeAt, parameter[name[scene_point]]]
variable[item] assign[=] call[call[name[self].scene, parameter[]].itemAt, parameter[name[scene_point]]]
if <ast.UnaryOp object at 0x7da1b234bdf0> begin[:]
variable[item] assign[=] constant[None]
if <ast.UnaryOp object at 0x7da1b2349510> begin[:]
if name[item] begin[:]
call[name[self].calendarItemClicked.emit, parameter[name[item]]]
return[call[call[name[super], parameter[name[XCalendarWidget], name[self]]].mousePressEvent, parameter[name[event]]]]
|
keyword[def] identifier[mousePressEvent] ( identifier[self] , identifier[event] ):
literal[string]
identifier[scene_point] = identifier[self] . identifier[mapToScene] ( identifier[event] . identifier[pos] ())
identifier[date] = identifier[self] . identifier[scene] (). identifier[dateAt] ( identifier[scene_point] )
identifier[date_time] = identifier[self] . identifier[scene] (). identifier[dateTimeAt] ( identifier[scene_point] )
identifier[item] = identifier[self] . identifier[scene] (). identifier[itemAt] ( identifier[scene_point] )
keyword[if] ( keyword[not] identifier[isinstance] ( identifier[item] , identifier[XCalendarItem] )):
identifier[item] = keyword[None]
keyword[if] ( keyword[not] identifier[self] . identifier[signalsBlocked] ()):
keyword[if] ( identifier[item] ):
identifier[self] . identifier[calendarItemClicked] . identifier[emit] ( identifier[item] )
keyword[elif] ( identifier[date_time] . identifier[isValid] ()):
identifier[self] . identifier[dateTimeClicked] . identifier[emit] ( identifier[date_time] )
keyword[elif] ( identifier[date] . identifier[isValid] ()):
identifier[self] . identifier[dateClicked] . identifier[emit] ( identifier[date] )
keyword[return] identifier[super] ( identifier[XCalendarWidget] , identifier[self] ). identifier[mousePressEvent] ( identifier[event] )
|
def mousePressEvent(self, event):
"""
Handles the mouse press event.
:param event | <QMouseEvent>
"""
scene_point = self.mapToScene(event.pos())
date = self.scene().dateAt(scene_point)
date_time = self.scene().dateTimeAt(scene_point)
item = self.scene().itemAt(scene_point)
if not isinstance(item, XCalendarItem):
item = None # depends on [control=['if'], data=[]] # checks to see if the signals are blocked
if not self.signalsBlocked():
if item:
self.calendarItemClicked.emit(item) # depends on [control=['if'], data=[]]
elif date_time.isValid():
self.dateTimeClicked.emit(date_time) # depends on [control=['if'], data=[]]
elif date.isValid():
self.dateClicked.emit(date) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return super(XCalendarWidget, self).mousePressEvent(event)
|
def get_loggable_url(url):
"""Strip out secrets from taskcluster urls.
Args:
url (str): the url to strip
Returns:
str: the loggable url
"""
loggable_url = url or ""
for secret_string in ("bewit=", "AWSAccessKeyId=", "access_token="):
parts = loggable_url.split(secret_string)
loggable_url = parts[0]
if loggable_url != url:
loggable_url = "{}<snip>".format(loggable_url)
return loggable_url
|
def function[get_loggable_url, parameter[url]]:
constant[Strip out secrets from taskcluster urls.
Args:
url (str): the url to strip
Returns:
str: the loggable url
]
variable[loggable_url] assign[=] <ast.BoolOp object at 0x7da18bcca140>
for taget[name[secret_string]] in starred[tuple[[<ast.Constant object at 0x7da18c4cf5e0>, <ast.Constant object at 0x7da18c4cc190>, <ast.Constant object at 0x7da18c4cfd00>]]] begin[:]
variable[parts] assign[=] call[name[loggable_url].split, parameter[name[secret_string]]]
variable[loggable_url] assign[=] call[name[parts]][constant[0]]
if compare[name[loggable_url] not_equal[!=] name[url]] begin[:]
variable[loggable_url] assign[=] call[constant[{}<snip>].format, parameter[name[loggable_url]]]
return[name[loggable_url]]
|
keyword[def] identifier[get_loggable_url] ( identifier[url] ):
literal[string]
identifier[loggable_url] = identifier[url] keyword[or] literal[string]
keyword[for] identifier[secret_string] keyword[in] ( literal[string] , literal[string] , literal[string] ):
identifier[parts] = identifier[loggable_url] . identifier[split] ( identifier[secret_string] )
identifier[loggable_url] = identifier[parts] [ literal[int] ]
keyword[if] identifier[loggable_url] != identifier[url] :
identifier[loggable_url] = literal[string] . identifier[format] ( identifier[loggable_url] )
keyword[return] identifier[loggable_url]
|
def get_loggable_url(url):
"""Strip out secrets from taskcluster urls.
Args:
url (str): the url to strip
Returns:
str: the loggable url
"""
loggable_url = url or ''
for secret_string in ('bewit=', 'AWSAccessKeyId=', 'access_token='):
parts = loggable_url.split(secret_string)
loggable_url = parts[0] # depends on [control=['for'], data=['secret_string']]
if loggable_url != url:
loggable_url = '{}<snip>'.format(loggable_url) # depends on [control=['if'], data=['loggable_url']]
return loggable_url
|
def null_beta(self):
"""
Optimal 𝜷 according to the marginal likelihood.
It is compute by solving the equation ::
(XᵀBX)𝜷 = XᵀB𝐲.
Returns
-------
beta : ndarray
Optimal 𝜷.
"""
ETBE = self._ETBE
yTBX = self._yTBX
A = sum(i.XTBX for i in ETBE)
b = sum(yTBX)
return rsolve(A, b)
|
def function[null_beta, parameter[self]]:
constant[
Optimal 𝜷 according to the marginal likelihood.
It is compute by solving the equation ::
(XᵀBX)𝜷 = XᵀB𝐲.
Returns
-------
beta : ndarray
Optimal 𝜷.
]
variable[ETBE] assign[=] name[self]._ETBE
variable[yTBX] assign[=] name[self]._yTBX
variable[A] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b00b6b90>]]
variable[b] assign[=] call[name[sum], parameter[name[yTBX]]]
return[call[name[rsolve], parameter[name[A], name[b]]]]
|
keyword[def] identifier[null_beta] ( identifier[self] ):
literal[string]
identifier[ETBE] = identifier[self] . identifier[_ETBE]
identifier[yTBX] = identifier[self] . identifier[_yTBX]
identifier[A] = identifier[sum] ( identifier[i] . identifier[XTBX] keyword[for] identifier[i] keyword[in] identifier[ETBE] )
identifier[b] = identifier[sum] ( identifier[yTBX] )
keyword[return] identifier[rsolve] ( identifier[A] , identifier[b] )
|
def null_beta(self):
"""
Optimal 𝜷 according to the marginal likelihood.
It is compute by solving the equation ::
(XᵀBX)𝜷 = XᵀB𝐲.
Returns
-------
beta : ndarray
Optimal 𝜷.
"""
ETBE = self._ETBE
yTBX = self._yTBX
A = sum((i.XTBX for i in ETBE))
b = sum(yTBX)
return rsolve(A, b)
|
def insert_before(self, value: Union[RawValue, Value],
raw: bool = False) -> "ArrayEntry":
"""Insert a new entry before the receiver.
Args:
value: The value of the new entry.
raw: Flag to be set if `value` is raw.
Returns:
An instance node of the new inserted entry.
"""
return ArrayEntry(self.index, self.before, self.after.cons(self.value),
self._cook_value(value, raw), self.parinst,
self.schema_node, datetime.now())
|
def function[insert_before, parameter[self, value, raw]]:
constant[Insert a new entry before the receiver.
Args:
value: The value of the new entry.
raw: Flag to be set if `value` is raw.
Returns:
An instance node of the new inserted entry.
]
return[call[name[ArrayEntry], parameter[name[self].index, name[self].before, call[name[self].after.cons, parameter[name[self].value]], call[name[self]._cook_value, parameter[name[value], name[raw]]], name[self].parinst, name[self].schema_node, call[name[datetime].now, parameter[]]]]]
|
keyword[def] identifier[insert_before] ( identifier[self] , identifier[value] : identifier[Union] [ identifier[RawValue] , identifier[Value] ],
identifier[raw] : identifier[bool] = keyword[False] )-> literal[string] :
literal[string]
keyword[return] identifier[ArrayEntry] ( identifier[self] . identifier[index] , identifier[self] . identifier[before] , identifier[self] . identifier[after] . identifier[cons] ( identifier[self] . identifier[value] ),
identifier[self] . identifier[_cook_value] ( identifier[value] , identifier[raw] ), identifier[self] . identifier[parinst] ,
identifier[self] . identifier[schema_node] , identifier[datetime] . identifier[now] ())
|
def insert_before(self, value: Union[RawValue, Value], raw: bool=False) -> 'ArrayEntry':
"""Insert a new entry before the receiver.
Args:
value: The value of the new entry.
raw: Flag to be set if `value` is raw.
Returns:
An instance node of the new inserted entry.
"""
return ArrayEntry(self.index, self.before, self.after.cons(self.value), self._cook_value(value, raw), self.parinst, self.schema_node, datetime.now())
|
def create_component(self,
name,
project,
description=None,
leadUserName=None,
assigneeType=None,
isAssigneeTypeValid=False,
):
"""Create a component inside a project and return a Resource for it.
:param name: name of the component
:type name: str
:param project: key of the project to create the component in
:type project: str
:param description: a description of the component
:type description: str
:param leadUserName: the username of the user responsible for this component
:type leadUserName: Optional[str]
:param assigneeType: see the ComponentBean.AssigneeType class for valid values
:type assigneeType: Optional[str]
:param isAssigneeTypeValid: boolean specifying whether the assignee type is acceptable (Default: False)
:type isAssigneeTypeValid: bool
:rtype: Component
"""
data = {
'name': name,
'project': project,
'isAssigneeTypeValid': isAssigneeTypeValid}
if description is not None:
data['description'] = description
if leadUserName is not None:
data['leadUserName'] = leadUserName
if assigneeType is not None:
data['assigneeType'] = assigneeType
url = self._get_url('component')
r = self._session.post(
url, data=json.dumps(data))
component = Component(self._options, self._session, raw=json_loads(r))
return component
|
def function[create_component, parameter[self, name, project, description, leadUserName, assigneeType, isAssigneeTypeValid]]:
constant[Create a component inside a project and return a Resource for it.
:param name: name of the component
:type name: str
:param project: key of the project to create the component in
:type project: str
:param description: a description of the component
:type description: str
:param leadUserName: the username of the user responsible for this component
:type leadUserName: Optional[str]
:param assigneeType: see the ComponentBean.AssigneeType class for valid values
:type assigneeType: Optional[str]
:param isAssigneeTypeValid: boolean specifying whether the assignee type is acceptable (Default: False)
:type isAssigneeTypeValid: bool
:rtype: Component
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b21d7b80>, <ast.Constant object at 0x7da1b21d6710>, <ast.Constant object at 0x7da1b21d57b0>], [<ast.Name object at 0x7da1b21d7910>, <ast.Name object at 0x7da1b21d73d0>, <ast.Name object at 0x7da1b21d4460>]]
if compare[name[description] is_not constant[None]] begin[:]
call[name[data]][constant[description]] assign[=] name[description]
if compare[name[leadUserName] is_not constant[None]] begin[:]
call[name[data]][constant[leadUserName]] assign[=] name[leadUserName]
if compare[name[assigneeType] is_not constant[None]] begin[:]
call[name[data]][constant[assigneeType]] assign[=] name[assigneeType]
variable[url] assign[=] call[name[self]._get_url, parameter[constant[component]]]
variable[r] assign[=] call[name[self]._session.post, parameter[name[url]]]
variable[component] assign[=] call[name[Component], parameter[name[self]._options, name[self]._session]]
return[name[component]]
|
keyword[def] identifier[create_component] ( identifier[self] ,
identifier[name] ,
identifier[project] ,
identifier[description] = keyword[None] ,
identifier[leadUserName] = keyword[None] ,
identifier[assigneeType] = keyword[None] ,
identifier[isAssigneeTypeValid] = keyword[False] ,
):
literal[string]
identifier[data] ={
literal[string] : identifier[name] ,
literal[string] : identifier[project] ,
literal[string] : identifier[isAssigneeTypeValid] }
keyword[if] identifier[description] keyword[is] keyword[not] keyword[None] :
identifier[data] [ literal[string] ]= identifier[description]
keyword[if] identifier[leadUserName] keyword[is] keyword[not] keyword[None] :
identifier[data] [ literal[string] ]= identifier[leadUserName]
keyword[if] identifier[assigneeType] keyword[is] keyword[not] keyword[None] :
identifier[data] [ literal[string] ]= identifier[assigneeType]
identifier[url] = identifier[self] . identifier[_get_url] ( literal[string] )
identifier[r] = identifier[self] . identifier[_session] . identifier[post] (
identifier[url] , identifier[data] = identifier[json] . identifier[dumps] ( identifier[data] ))
identifier[component] = identifier[Component] ( identifier[self] . identifier[_options] , identifier[self] . identifier[_session] , identifier[raw] = identifier[json_loads] ( identifier[r] ))
keyword[return] identifier[component]
|
def create_component(self, name, project, description=None, leadUserName=None, assigneeType=None, isAssigneeTypeValid=False):
"""Create a component inside a project and return a Resource for it.
:param name: name of the component
:type name: str
:param project: key of the project to create the component in
:type project: str
:param description: a description of the component
:type description: str
:param leadUserName: the username of the user responsible for this component
:type leadUserName: Optional[str]
:param assigneeType: see the ComponentBean.AssigneeType class for valid values
:type assigneeType: Optional[str]
:param isAssigneeTypeValid: boolean specifying whether the assignee type is acceptable (Default: False)
:type isAssigneeTypeValid: bool
:rtype: Component
"""
data = {'name': name, 'project': project, 'isAssigneeTypeValid': isAssigneeTypeValid}
if description is not None:
data['description'] = description # depends on [control=['if'], data=['description']]
if leadUserName is not None:
data['leadUserName'] = leadUserName # depends on [control=['if'], data=['leadUserName']]
if assigneeType is not None:
data['assigneeType'] = assigneeType # depends on [control=['if'], data=['assigneeType']]
url = self._get_url('component')
r = self._session.post(url, data=json.dumps(data))
component = Component(self._options, self._session, raw=json_loads(r))
return component
|
def shlex_process_stdin(process_command, helptext):
"""
Use shlex to process stdin line-by-line.
Also prints help text.
Requires that @process_command be a Click command object, used for
processing single lines of input. helptext is prepended to the standard
message printed to interactive sessions.
"""
# if input is interactive, print help to stderr
if sys.stdin.isatty():
safeprint(
(
"{}\n".format(helptext) + "Lines are split with shlex in POSIX mode: "
"https://docs.python.org/library/shlex.html#parsing-rules\n"
"Terminate input with Ctrl+D or <EOF>\n"
),
write_to_stderr=True,
)
# use readlines() rather than implicit file read line looping to force
# python to properly capture EOF (otherwise, EOF acts as a flush and
# things get weird)
for line in sys.stdin.readlines():
# get the argument vector:
# do a shlex split to handle quoted paths with spaces in them
# also lets us have comments with #
argv = shlex.split(line, comments=True)
if argv:
try:
process_command.main(args=argv)
except SystemExit as e:
if e.code != 0:
raise
|
def function[shlex_process_stdin, parameter[process_command, helptext]]:
constant[
Use shlex to process stdin line-by-line.
Also prints help text.
Requires that @process_command be a Click command object, used for
processing single lines of input. helptext is prepended to the standard
message printed to interactive sessions.
]
if call[name[sys].stdin.isatty, parameter[]] begin[:]
call[name[safeprint], parameter[binary_operation[call[constant[{}
].format, parameter[name[helptext]]] + constant[Lines are split with shlex in POSIX mode: https://docs.python.org/library/shlex.html#parsing-rules
Terminate input with Ctrl+D or <EOF>
]]]]
for taget[name[line]] in starred[call[name[sys].stdin.readlines, parameter[]]] begin[:]
variable[argv] assign[=] call[name[shlex].split, parameter[name[line]]]
if name[argv] begin[:]
<ast.Try object at 0x7da20c991ba0>
|
keyword[def] identifier[shlex_process_stdin] ( identifier[process_command] , identifier[helptext] ):
literal[string]
keyword[if] identifier[sys] . identifier[stdin] . identifier[isatty] ():
identifier[safeprint] (
(
literal[string] . identifier[format] ( identifier[helptext] )+ literal[string]
literal[string]
literal[string]
),
identifier[write_to_stderr] = keyword[True] ,
)
keyword[for] identifier[line] keyword[in] identifier[sys] . identifier[stdin] . identifier[readlines] ():
identifier[argv] = identifier[shlex] . identifier[split] ( identifier[line] , identifier[comments] = keyword[True] )
keyword[if] identifier[argv] :
keyword[try] :
identifier[process_command] . identifier[main] ( identifier[args] = identifier[argv] )
keyword[except] identifier[SystemExit] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[code] != literal[int] :
keyword[raise]
|
def shlex_process_stdin(process_command, helptext):
"""
Use shlex to process stdin line-by-line.
Also prints help text.
Requires that @process_command be a Click command object, used for
processing single lines of input. helptext is prepended to the standard
message printed to interactive sessions.
"""
# if input is interactive, print help to stderr
if sys.stdin.isatty():
safeprint('{}\n'.format(helptext) + 'Lines are split with shlex in POSIX mode: https://docs.python.org/library/shlex.html#parsing-rules\nTerminate input with Ctrl+D or <EOF>\n', write_to_stderr=True) # depends on [control=['if'], data=[]]
# use readlines() rather than implicit file read line looping to force
# python to properly capture EOF (otherwise, EOF acts as a flush and
# things get weird)
for line in sys.stdin.readlines():
# get the argument vector:
# do a shlex split to handle quoted paths with spaces in them
# also lets us have comments with #
argv = shlex.split(line, comments=True)
if argv:
try:
process_command.main(args=argv) # depends on [control=['try'], data=[]]
except SystemExit as e:
if e.code != 0:
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
|
def sortDictList(dictList,**kwargs):
'''
students = [
{'name':'john','class':'A', 'year':15},
{'name':'jane','class':'B', 'year':12},
{'name':'dave','class':'B', 'year':10}
]
rslt = sortDictList(students,cond_keys=['name','class','year'])
pobj(rslt)
rslt = sortDictList(students,cond_keys=['name','year','class'])
pobj(rslt)
rslt = sortDictList(students,cond_keys=['class','name','year'])
pobj(rslt)
rslt = sortDictList(students,cond_keys=['class','year','name'])
pobj(rslt)
rslt = sortDictList(students,cond_keys=['year','name','class'])
pobj(rslt)
rslt = sortDictList(students,cond_keys=['year','name','class'])
pobj(rslt)
'''
def default_eq_func(value1,value2):
cond = (value1 == value2)
return(cond)
def default_gt_func(value1,value2):
cond = (value1 > value2)
return(cond)
def default_lt_func(value1,value2):
cond = (value1 < value2)
return(cond)
if('eq_func' in kwargs):
eq_func = kwargs['eq_func']
else:
eq_func = default_eq_func
if('gt_func' in kwargs):
gt_func = kwargs['gt_func']
else:
gt_func = default_gt_func
if('lt_func' in kwargs):
lt_func = kwargs['lt_func']
else:
lt_func = default_lt_func
if('reverse' in kwargs):
reverse = kwargs['reverse']
else:
reverse = False
keys = kwargs['cond_keys']
def cmp_dict(d1,d2):
'''
'''
length = keys.__len__()
for i in range(0,length):
key = keys[i]
cond = eq_func(d1[key],d2[key])
if(cond):
pass
else:
cond = gt_func(d1[key],d2[key])
if(cond):
return(1)
else:
return(-1)
return(0)
ndl = dictList
ndl = sorted(ndl,key=functools.cmp_to_key(cmp_dict),reverse=reverse)
return(ndl)
|
def function[sortDictList, parameter[dictList]]:
constant[
students = [
{'name':'john','class':'A', 'year':15},
{'name':'jane','class':'B', 'year':12},
{'name':'dave','class':'B', 'year':10}
]
rslt = sortDictList(students,cond_keys=['name','class','year'])
pobj(rslt)
rslt = sortDictList(students,cond_keys=['name','year','class'])
pobj(rslt)
rslt = sortDictList(students,cond_keys=['class','name','year'])
pobj(rslt)
rslt = sortDictList(students,cond_keys=['class','year','name'])
pobj(rslt)
rslt = sortDictList(students,cond_keys=['year','name','class'])
pobj(rslt)
rslt = sortDictList(students,cond_keys=['year','name','class'])
pobj(rslt)
]
def function[default_eq_func, parameter[value1, value2]]:
variable[cond] assign[=] compare[name[value1] equal[==] name[value2]]
return[name[cond]]
def function[default_gt_func, parameter[value1, value2]]:
variable[cond] assign[=] compare[name[value1] greater[>] name[value2]]
return[name[cond]]
def function[default_lt_func, parameter[value1, value2]]:
variable[cond] assign[=] compare[name[value1] less[<] name[value2]]
return[name[cond]]
if compare[constant[eq_func] in name[kwargs]] begin[:]
variable[eq_func] assign[=] call[name[kwargs]][constant[eq_func]]
if compare[constant[gt_func] in name[kwargs]] begin[:]
variable[gt_func] assign[=] call[name[kwargs]][constant[gt_func]]
if compare[constant[lt_func] in name[kwargs]] begin[:]
variable[lt_func] assign[=] call[name[kwargs]][constant[lt_func]]
if compare[constant[reverse] in name[kwargs]] begin[:]
variable[reverse] assign[=] call[name[kwargs]][constant[reverse]]
variable[keys] assign[=] call[name[kwargs]][constant[cond_keys]]
def function[cmp_dict, parameter[d1, d2]]:
constant[
]
variable[length] assign[=] call[name[keys].__len__, parameter[]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[length]]]] begin[:]
variable[key] assign[=] call[name[keys]][name[i]]
variable[cond] assign[=] call[name[eq_func], parameter[call[name[d1]][name[key]], call[name[d2]][name[key]]]]
if name[cond] begin[:]
pass
return[constant[0]]
variable[ndl] assign[=] name[dictList]
variable[ndl] assign[=] call[name[sorted], parameter[name[ndl]]]
return[name[ndl]]
|
keyword[def] identifier[sortDictList] ( identifier[dictList] ,** identifier[kwargs] ):
literal[string]
keyword[def] identifier[default_eq_func] ( identifier[value1] , identifier[value2] ):
identifier[cond] =( identifier[value1] == identifier[value2] )
keyword[return] ( identifier[cond] )
keyword[def] identifier[default_gt_func] ( identifier[value1] , identifier[value2] ):
identifier[cond] =( identifier[value1] > identifier[value2] )
keyword[return] ( identifier[cond] )
keyword[def] identifier[default_lt_func] ( identifier[value1] , identifier[value2] ):
identifier[cond] =( identifier[value1] < identifier[value2] )
keyword[return] ( identifier[cond] )
keyword[if] ( literal[string] keyword[in] identifier[kwargs] ):
identifier[eq_func] = identifier[kwargs] [ literal[string] ]
keyword[else] :
identifier[eq_func] = identifier[default_eq_func]
keyword[if] ( literal[string] keyword[in] identifier[kwargs] ):
identifier[gt_func] = identifier[kwargs] [ literal[string] ]
keyword[else] :
identifier[gt_func] = identifier[default_gt_func]
keyword[if] ( literal[string] keyword[in] identifier[kwargs] ):
identifier[lt_func] = identifier[kwargs] [ literal[string] ]
keyword[else] :
identifier[lt_func] = identifier[default_lt_func]
keyword[if] ( literal[string] keyword[in] identifier[kwargs] ):
identifier[reverse] = identifier[kwargs] [ literal[string] ]
keyword[else] :
identifier[reverse] = keyword[False]
identifier[keys] = identifier[kwargs] [ literal[string] ]
keyword[def] identifier[cmp_dict] ( identifier[d1] , identifier[d2] ):
literal[string]
identifier[length] = identifier[keys] . identifier[__len__] ()
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[length] ):
identifier[key] = identifier[keys] [ identifier[i] ]
identifier[cond] = identifier[eq_func] ( identifier[d1] [ identifier[key] ], identifier[d2] [ identifier[key] ])
keyword[if] ( identifier[cond] ):
keyword[pass]
keyword[else] :
identifier[cond] = identifier[gt_func] ( identifier[d1] [ identifier[key] ], identifier[d2] [ identifier[key] ])
keyword[if] ( identifier[cond] ):
keyword[return] ( literal[int] )
keyword[else] :
keyword[return] (- literal[int] )
keyword[return] ( literal[int] )
identifier[ndl] = identifier[dictList]
identifier[ndl] = identifier[sorted] ( identifier[ndl] , identifier[key] = identifier[functools] . identifier[cmp_to_key] ( identifier[cmp_dict] ), identifier[reverse] = identifier[reverse] )
keyword[return] ( identifier[ndl] )
|
def sortDictList(dictList, **kwargs):
"""
students = [
{'name':'john','class':'A', 'year':15},
{'name':'jane','class':'B', 'year':12},
{'name':'dave','class':'B', 'year':10}
]
rslt = sortDictList(students,cond_keys=['name','class','year'])
pobj(rslt)
rslt = sortDictList(students,cond_keys=['name','year','class'])
pobj(rslt)
rslt = sortDictList(students,cond_keys=['class','name','year'])
pobj(rslt)
rslt = sortDictList(students,cond_keys=['class','year','name'])
pobj(rslt)
rslt = sortDictList(students,cond_keys=['year','name','class'])
pobj(rslt)
rslt = sortDictList(students,cond_keys=['year','name','class'])
pobj(rslt)
"""
def default_eq_func(value1, value2):
cond = value1 == value2
return cond
def default_gt_func(value1, value2):
cond = value1 > value2
return cond
def default_lt_func(value1, value2):
cond = value1 < value2
return cond
if 'eq_func' in kwargs:
eq_func = kwargs['eq_func'] # depends on [control=['if'], data=['kwargs']]
else:
eq_func = default_eq_func
if 'gt_func' in kwargs:
gt_func = kwargs['gt_func'] # depends on [control=['if'], data=['kwargs']]
else:
gt_func = default_gt_func
if 'lt_func' in kwargs:
lt_func = kwargs['lt_func'] # depends on [control=['if'], data=['kwargs']]
else:
lt_func = default_lt_func
if 'reverse' in kwargs:
reverse = kwargs['reverse'] # depends on [control=['if'], data=['kwargs']]
else:
reverse = False
keys = kwargs['cond_keys']
def cmp_dict(d1, d2):
"""
"""
length = keys.__len__()
for i in range(0, length):
key = keys[i]
cond = eq_func(d1[key], d2[key])
if cond:
pass # depends on [control=['if'], data=[]]
else:
cond = gt_func(d1[key], d2[key])
if cond:
return 1 # depends on [control=['if'], data=[]]
else:
return -1 # depends on [control=['for'], data=['i']]
return 0
ndl = dictList
ndl = sorted(ndl, key=functools.cmp_to_key(cmp_dict), reverse=reverse)
return ndl
|
def _purge_jobs(timestamp):
'''
Purge records from the returner tables.
:param job_age_in_seconds: Purge jobs older than this
:return:
'''
with _get_serv() as cursor:
try:
sql = 'delete from jids where jid in (select distinct jid from salt_returns where alter_time < %s)'
cursor.execute(sql, (timestamp,))
cursor.execute('COMMIT')
except psycopg2.DatabaseError as err:
error = err.args
sys.stderr.write(six.text_type(error))
cursor.execute("ROLLBACK")
raise err
try:
sql = 'delete from salt_returns where alter_time < %s'
cursor.execute(sql, (timestamp,))
cursor.execute('COMMIT')
except psycopg2.DatabaseError as err:
error = err.args
sys.stderr.write(six.text_type(error))
cursor.execute("ROLLBACK")
raise err
try:
sql = 'delete from salt_events where alter_time < %s'
cursor.execute(sql, (timestamp,))
cursor.execute('COMMIT')
except psycopg2.DatabaseError as err:
error = err.args
sys.stderr.write(six.text_type(error))
cursor.execute("ROLLBACK")
raise err
return True
|
def function[_purge_jobs, parameter[timestamp]]:
constant[
Purge records from the returner tables.
:param job_age_in_seconds: Purge jobs older than this
:return:
]
with call[name[_get_serv], parameter[]] begin[:]
<ast.Try object at 0x7da20c795540>
<ast.Try object at 0x7da18bc70130>
<ast.Try object at 0x7da18bc70460>
return[constant[True]]
|
keyword[def] identifier[_purge_jobs] ( identifier[timestamp] ):
literal[string]
keyword[with] identifier[_get_serv] () keyword[as] identifier[cursor] :
keyword[try] :
identifier[sql] = literal[string]
identifier[cursor] . identifier[execute] ( identifier[sql] ,( identifier[timestamp] ,))
identifier[cursor] . identifier[execute] ( literal[string] )
keyword[except] identifier[psycopg2] . identifier[DatabaseError] keyword[as] identifier[err] :
identifier[error] = identifier[err] . identifier[args]
identifier[sys] . identifier[stderr] . identifier[write] ( identifier[six] . identifier[text_type] ( identifier[error] ))
identifier[cursor] . identifier[execute] ( literal[string] )
keyword[raise] identifier[err]
keyword[try] :
identifier[sql] = literal[string]
identifier[cursor] . identifier[execute] ( identifier[sql] ,( identifier[timestamp] ,))
identifier[cursor] . identifier[execute] ( literal[string] )
keyword[except] identifier[psycopg2] . identifier[DatabaseError] keyword[as] identifier[err] :
identifier[error] = identifier[err] . identifier[args]
identifier[sys] . identifier[stderr] . identifier[write] ( identifier[six] . identifier[text_type] ( identifier[error] ))
identifier[cursor] . identifier[execute] ( literal[string] )
keyword[raise] identifier[err]
keyword[try] :
identifier[sql] = literal[string]
identifier[cursor] . identifier[execute] ( identifier[sql] ,( identifier[timestamp] ,))
identifier[cursor] . identifier[execute] ( literal[string] )
keyword[except] identifier[psycopg2] . identifier[DatabaseError] keyword[as] identifier[err] :
identifier[error] = identifier[err] . identifier[args]
identifier[sys] . identifier[stderr] . identifier[write] ( identifier[six] . identifier[text_type] ( identifier[error] ))
identifier[cursor] . identifier[execute] ( literal[string] )
keyword[raise] identifier[err]
keyword[return] keyword[True]
|
def _purge_jobs(timestamp):
"""
Purge records from the returner tables.
:param job_age_in_seconds: Purge jobs older than this
:return:
"""
with _get_serv() as cursor:
try:
sql = 'delete from jids where jid in (select distinct jid from salt_returns where alter_time < %s)'
cursor.execute(sql, (timestamp,))
cursor.execute('COMMIT') # depends on [control=['try'], data=[]]
except psycopg2.DatabaseError as err:
error = err.args
sys.stderr.write(six.text_type(error))
cursor.execute('ROLLBACK')
raise err # depends on [control=['except'], data=['err']]
try:
sql = 'delete from salt_returns where alter_time < %s'
cursor.execute(sql, (timestamp,))
cursor.execute('COMMIT') # depends on [control=['try'], data=[]]
except psycopg2.DatabaseError as err:
error = err.args
sys.stderr.write(six.text_type(error))
cursor.execute('ROLLBACK')
raise err # depends on [control=['except'], data=['err']]
try:
sql = 'delete from salt_events where alter_time < %s'
cursor.execute(sql, (timestamp,))
cursor.execute('COMMIT') # depends on [control=['try'], data=[]]
except psycopg2.DatabaseError as err:
error = err.args
sys.stderr.write(six.text_type(error))
cursor.execute('ROLLBACK')
raise err # depends on [control=['except'], data=['err']] # depends on [control=['with'], data=['cursor']]
return True
|
def get_component_attribute_name(component):
"""
Gets given Component attribute name.
Usage::
>>> Manager.get_component_attribute_name("factory.components_manager_ui")
u'factoryComponentsManagerUi'
:param component: Component to get the attribute name.
:type component: unicode
:return: Component attribute name.
:rtype: object
"""
search = re.search(r"(?P<category>\w+)\.(?P<name>\w+)", component)
if search:
name = "{0}{1}{2}".format(
search.group("category"), search.group("name")[0].upper(), search.group("name")[1:])
LOGGER.debug("> Component name: '{0}' to attribute name Active_QLabel: '{1}'.".format(component, name))
else:
name = component
return name
|
def function[get_component_attribute_name, parameter[component]]:
constant[
Gets given Component attribute name.
Usage::
>>> Manager.get_component_attribute_name("factory.components_manager_ui")
u'factoryComponentsManagerUi'
:param component: Component to get the attribute name.
:type component: unicode
:return: Component attribute name.
:rtype: object
]
variable[search] assign[=] call[name[re].search, parameter[constant[(?P<category>\w+)\.(?P<name>\w+)], name[component]]]
if name[search] begin[:]
variable[name] assign[=] call[constant[{0}{1}{2}].format, parameter[call[name[search].group, parameter[constant[category]]], call[call[call[name[search].group, parameter[constant[name]]]][constant[0]].upper, parameter[]], call[call[name[search].group, parameter[constant[name]]]][<ast.Slice object at 0x7da20e9b1330>]]]
call[name[LOGGER].debug, parameter[call[constant[> Component name: '{0}' to attribute name Active_QLabel: '{1}'.].format, parameter[name[component], name[name]]]]]
return[name[name]]
|
keyword[def] identifier[get_component_attribute_name] ( identifier[component] ):
literal[string]
identifier[search] = identifier[re] . identifier[search] ( literal[string] , identifier[component] )
keyword[if] identifier[search] :
identifier[name] = literal[string] . identifier[format] (
identifier[search] . identifier[group] ( literal[string] ), identifier[search] . identifier[group] ( literal[string] )[ literal[int] ]. identifier[upper] (), identifier[search] . identifier[group] ( literal[string] )[ literal[int] :])
identifier[LOGGER] . identifier[debug] ( literal[string] . identifier[format] ( identifier[component] , identifier[name] ))
keyword[else] :
identifier[name] = identifier[component]
keyword[return] identifier[name]
|
def get_component_attribute_name(component):
"""
Gets given Component attribute name.
Usage::
>>> Manager.get_component_attribute_name("factory.components_manager_ui")
u'factoryComponentsManagerUi'
:param component: Component to get the attribute name.
:type component: unicode
:return: Component attribute name.
:rtype: object
"""
search = re.search('(?P<category>\\w+)\\.(?P<name>\\w+)', component)
if search:
name = '{0}{1}{2}'.format(search.group('category'), search.group('name')[0].upper(), search.group('name')[1:])
LOGGER.debug("> Component name: '{0}' to attribute name Active_QLabel: '{1}'.".format(component, name)) # depends on [control=['if'], data=[]]
else:
name = component
return name
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.