code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def expMenu(groups, folder):
    """Read <folder>/experiment.txt and build the experiment menu entries.

    Each ``~ID [color] [comment]`` line becomes a group entry (a comment whose
    first character is ``*`` sets the star symbol); each ``###Title [- comment]``
    line becomes a section header.  Group IDs present in *groups* but never
    mentioned in the file are appended under a trailing "orphans" header.

    :param groups: dict whose keys are the known group IDs
    :param folder: directory expected to contain experiment.txt
    :return: list of entry dicts (note: a *list*, not a dict) —
        headers carry {'title', 'comment'}; groups carry
        {'ID', 'symbol', 'color', 'comment'}; trailing orphan entries
        additionally carry {'orphan'}; unrecognized lines carry {'unknown'}.
    """
    ### GENERATE THE MENU DATA BASED ON EXPERIMENT FILE
    orphans = sorted(groups.keys())
    menu = []
    exp_file = folder + '/experiment.txt'
    if os.path.exists(exp_file):
        with open(exp_file) as f:
            raw = f.read()
    else:
        raw = ""
    for line in raw.split("\n"):
        item = {}
        if len(line) == 0:
            continue
        if line.startswith("~"):
            # Group line: "~ID [color] [comment]"; at most 3 fields.
            parts = line[1:].split(" ", 2)
            item["ID"] = parts[0]
            item["symbol"] = ''
            item["color"] = parts[1] if len(parts) > 1 else "white"
            if len(parts) > 2 and len(parts[2]):
                item["comment"] = parts[2]
                # A leading '*' in the comment marks the entry as starred.
                if item["comment"][0] == "*":
                    item["symbol"] = '*'
            else:
                item["comment"] = ''
            # This ID is accounted for; it is no longer an orphan.
            if item["ID"] in orphans:
                orphans.remove(item["ID"])
        elif line.startswith("###"):
            # Header line: "###Title[ - comment]".
            parts = line[3:].strip().split(" ", 1)
            item["title"] = parts[0]
            item["comment"] = ''
            if len(parts) > 1:
                if parts[1].startswith("- "):
                    parts[1] = parts[1][2:]
                item["comment"] = parts[1]
        else:
            item["unknown"] = line
        menu.append(item)
    # Groups never mentioned in the file are listed under a final header.
    menu.append({"title": "orphans", "comment": ""})
    for orphan in orphans:
        menu.append({"orphan": orphan, "ID": orphan, "color": '', "symbol": '', "comment": ''})
    return menu
|
def function[expMenu, parameter[groups, folder]]:
constant[read experiment.txt and return a dict with [firstOfNewExp, color, star, comments].]
variable[orphans] assign[=] call[name[sorted], parameter[call[name[list], parameter[call[name[groups].keys, parameter[]]]]]]
variable[menu] assign[=] list[[]]
if call[name[os].path.exists, parameter[binary_operation[name[folder] + constant[/experiment.txt]]]] begin[:]
with call[name[open], parameter[binary_operation[name[folder] + constant[/experiment.txt]]]] begin[:]
variable[raw] assign[=] call[name[f].read, parameter[]]
for taget[name[line]] in starred[call[name[raw].split, parameter[constant[
]]]] begin[:]
variable[item] assign[=] dictionary[[], []]
if compare[call[name[len], parameter[name[line]]] equal[==] constant[0]] begin[:]
continue
if call[name[line].startswith, parameter[constant[~]]] begin[:]
variable[line] assign[=] call[call[name[line]][<ast.Slice object at 0x7da1afe7a5c0>].split, parameter[constant[ ], constant[2]]]
call[name[item]][constant[ID]] assign[=] call[name[line]][constant[0]]
call[name[item]][constant[symbol]] assign[=] constant[]
if compare[call[name[len], parameter[name[line]]] greater[>] constant[1]] begin[:]
call[name[item]][constant[color]] assign[=] call[name[line]][constant[1]]
if <ast.BoolOp object at 0x7da1afe7bd30> begin[:]
call[name[item]][constant[comment]] assign[=] call[name[line]][constant[2]]
if compare[call[call[name[item]][constant[comment]]][constant[0]] equal[==] constant[*]] begin[:]
call[name[item]][constant[symbol]] assign[=] constant[*]
if compare[call[name[item]][constant[ID]] in name[orphans]] begin[:]
call[name[orphans].remove, parameter[call[name[item]][constant[ID]]]]
call[name[menu].append, parameter[name[item]]]
call[name[menu].append, parameter[dictionary[[<ast.Constant object at 0x7da1afea4e50>, <ast.Constant object at 0x7da1afea4dc0>], [<ast.Constant object at 0x7da1afea64a0>, <ast.Constant object at 0x7da1afea5870>]]]]
for taget[name[ophan]] in starred[name[orphans]] begin[:]
call[name[menu].append, parameter[dictionary[[<ast.Constant object at 0x7da1afea6200>, <ast.Constant object at 0x7da1afea5900>, <ast.Constant object at 0x7da1afea6920>, <ast.Constant object at 0x7da1afea5b10>, <ast.Constant object at 0x7da1afea5150>], [<ast.Name object at 0x7da1afea4d90>, <ast.Name object at 0x7da1afea5510>, <ast.Constant object at 0x7da1afea4b20>, <ast.Constant object at 0x7da1afea6830>, <ast.Constant object at 0x7da1afea53c0>]]]]
return[name[menu]]
|
keyword[def] identifier[expMenu] ( identifier[groups] , identifier[folder] ):
literal[string]
identifier[orphans] = identifier[sorted] ( identifier[list] ( identifier[groups] . identifier[keys] ()))
identifier[menu] =[]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[folder] + literal[string] ):
keyword[with] identifier[open] ( identifier[folder] + literal[string] ) keyword[as] identifier[f] :
identifier[raw] = identifier[f] . identifier[read] ()
keyword[else] :
identifier[raw] = literal[string]
keyword[for] identifier[line] keyword[in] identifier[raw] . identifier[split] ( literal[string] ):
identifier[item] ={}
keyword[if] identifier[len] ( identifier[line] )== literal[int] :
keyword[continue]
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[line] = identifier[line] [ literal[int] :]. identifier[split] ( literal[string] , literal[int] )
identifier[item] [ literal[string] ]= identifier[line] [ literal[int] ]
identifier[item] [ literal[string] ]= literal[string]
keyword[if] identifier[len] ( identifier[line] )> literal[int] :
identifier[item] [ literal[string] ]= identifier[line] [ literal[int] ]
keyword[else] :
identifier[item] [ literal[string] ]= literal[string]
keyword[if] identifier[len] ( identifier[line] )> literal[int] keyword[and] identifier[len] ( identifier[line] [ literal[int] ]):
identifier[item] [ literal[string] ]= identifier[line] [ literal[int] ]
keyword[if] identifier[item] [ literal[string] ][ literal[int] ]== literal[string] :
identifier[item] [ literal[string] ]= literal[string]
keyword[else] :
identifier[item] [ literal[string] ]= literal[string]
keyword[if] identifier[item] [ literal[string] ] keyword[in] identifier[orphans] :
identifier[orphans] . identifier[remove] ( identifier[item] [ literal[string] ])
keyword[elif] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[line] = identifier[line] [ literal[int] :]. identifier[strip] (). identifier[split] ( literal[string] , literal[int] )
identifier[item] [ literal[string] ]= identifier[line] [ literal[int] ]
identifier[item] [ literal[string] ]= literal[string]
keyword[if] identifier[len] ( identifier[line] )> literal[int] :
keyword[if] identifier[line] [ literal[int] ]. identifier[startswith] ( literal[string] ):
identifier[line] [ literal[int] ]= identifier[line] [ literal[int] ][ literal[int] :]
identifier[item] [ literal[string] ]= identifier[line] [ literal[int] ]
keyword[else] :
identifier[item] [ literal[string] ]= identifier[line]
identifier[menu] . identifier[append] ( identifier[item] )
identifier[menu] . identifier[append] ({ literal[string] : literal[string] , literal[string] : literal[string] })
keyword[for] identifier[ophan] keyword[in] identifier[orphans] :
identifier[menu] . identifier[append] ({ literal[string] : identifier[ophan] , literal[string] : identifier[ophan] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] })
keyword[return] identifier[menu]
|
def expMenu(groups, folder):
"""read experiment.txt and return a dict with [firstOfNewExp, color, star, comments]."""
### GENERATE THE MENU DATA BASED ON EXPERIMENT FILE
orphans = sorted(list(groups.keys()))
menu = []
if os.path.exists(folder + '/experiment.txt'):
with open(folder + '/experiment.txt') as f:
raw = f.read() # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]]
else:
raw = ''
for line in raw.split('\n'):
item = {}
if len(line) == 0:
continue # depends on [control=['if'], data=[]]
if line.startswith('~'):
line = line[1:].split(' ', 2)
item['ID'] = line[0]
item['symbol'] = ''
if len(line) > 1:
item['color'] = line[1] # depends on [control=['if'], data=[]]
else:
item['color'] = 'white'
if len(line) > 2 and len(line[2]):
item['comment'] = line[2]
if item['comment'][0] == '*':
item['symbol'] = '*' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
item['comment'] = ''
if item['ID'] in orphans:
orphans.remove(item['ID']) # depends on [control=['if'], data=['orphans']] # depends on [control=['if'], data=[]]
elif line.startswith('###'):
line = line[3:].strip().split(' ', 1)
item['title'] = line[0]
item['comment'] = ''
if len(line) > 1:
if line[1].startswith('- '):
line[1] = line[1][2:] # depends on [control=['if'], data=[]]
item['comment'] = line[1] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
item['unknown'] = line
menu.append(item) # depends on [control=['for'], data=['line']]
menu.append({'title': 'orphans', 'comment': ''})
for ophan in orphans:
menu.append({'orphan': ophan, 'ID': ophan, 'color': '', 'symbol': '', 'comment': ''}) # depends on [control=['for'], data=['ophan']]
return menu
|
def __Logout(si):
    """
    Disconnect (logout) service instance.

    Best-effort: any failure (e.g. the session already expired or the
    connection dropped) is deliberately ignored, since there is nothing
    useful to do with a failed logout.

    @param si: Service instance (returned from Connect); may be None,
        in which case nothing happens.
    """
    try:
        if si:
            content = si.RetrieveContent()
            content.sessionManager.Logout()
    except Exception:
        # Swallow all errors: logout is a courtesy, not a requirement.
        pass
|
def function[__Logout, parameter[si]]:
constant[
Disconnect (logout) service instance
@param si: Service instance (returned from Connect)
]
<ast.Try object at 0x7da18ede53c0>
|
keyword[def] identifier[__Logout] ( identifier[si] ):
literal[string]
keyword[try] :
keyword[if] identifier[si] :
identifier[content] = identifier[si] . identifier[RetrieveContent] ()
identifier[content] . identifier[sessionManager] . identifier[Logout] ()
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[pass]
|
def __Logout(si):
"""
Disconnect (logout) service instance
@param si: Service instance (returned from Connect)
"""
try:
if si:
content = si.RetrieveContent()
content.sessionManager.Logout() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Exception as e:
pass # depends on [control=['except'], data=[]]
|
def clausius_clapeyron(T):
    """Compute saturation vapor pressure as function of temperature T.
    Input: T is temperature in Kelvin
    Output: saturation vapor pressure in mb or hPa
    Formula from Rogers and Yau "A Short Course in Cloud Physics" (Pergammon Press), p. 16
    claimed to be accurate to within 0.1% between -30degC and 35 degC
    Based on the paper by Bolton (1980, Monthly Weather Review).
    """
    # Convert to Celsius, then apply Bolton's empirical fit.
    t_cel = T - tempCtoK
    return 6.112 * exp(17.67 * t_cel / (t_cel + 243.5))
|
def function[clausius_clapeyron, parameter[T]]:
constant[Compute saturation vapor pressure as function of temperature T.
Input: T is temperature in Kelvin
Output: saturation vapor pressure in mb or hPa
Formula from Rogers and Yau "A Short Course in Cloud Physics" (Pergammon Press), p. 16
claimed to be accurate to within 0.1% between -30degC and 35 degC
Based on the paper by Bolton (1980, Monthly Weather Review).
]
variable[Tcel] assign[=] binary_operation[name[T] - name[tempCtoK]]
variable[es] assign[=] binary_operation[constant[6.112] * call[name[exp], parameter[binary_operation[binary_operation[constant[17.67] * name[Tcel]] / binary_operation[name[Tcel] + constant[243.5]]]]]]
return[name[es]]
|
keyword[def] identifier[clausius_clapeyron] ( identifier[T] ):
literal[string]
identifier[Tcel] = identifier[T] - identifier[tempCtoK]
identifier[es] = literal[int] * identifier[exp] ( literal[int] * identifier[Tcel] /( identifier[Tcel] + literal[int] ))
keyword[return] identifier[es]
|
def clausius_clapeyron(T):
"""Compute saturation vapor pressure as function of temperature T.
Input: T is temperature in Kelvin
Output: saturation vapor pressure in mb or hPa
Formula from Rogers and Yau "A Short Course in Cloud Physics" (Pergammon Press), p. 16
claimed to be accurate to within 0.1% between -30degC and 35 degC
Based on the paper by Bolton (1980, Monthly Weather Review).
"""
Tcel = T - tempCtoK
es = 6.112 * exp(17.67 * Tcel / (Tcel + 243.5))
return es
|
def get_top_drawdowns(returns, top=10):
    """
    Finds top drawdowns, sorted by drawdown amount.
    Parameters
    ----------
    returns : pd.Series
        Daily returns of the strategy, noncumulative.
        - See full explanation in tears.create_full_tear_sheet.
    top : int, optional
        The amount of top drawdowns to find (default 10).
    Returns
    -------
    drawdowns : list
        List of drawdown peaks, valleys, and recoveries. See get_max_drawdown.
    """
    returns = returns.copy()
    df_cum = ep.cum_returns(returns, 1.0)
    # Running peak of the cumulative curve; "underwater" is the fractional
    # distance below that peak (0 at a new high, negative while in drawdown).
    running_max = np.maximum.accumulate(df_cum)
    underwater = df_cum / running_max - 1
    drawdowns = []
    for t in range(top):
        peak, valley, recovery = get_max_drawdown_underwater(underwater)
        # Slice out draw-down period
        if not pd.isnull(recovery):
            # Remove the interior of the recovered drawdown (endpoints kept)
            # so the next iteration finds the next-largest drawdown.
            underwater.drop(underwater[peak: recovery].index[1:-1],
                            inplace=True)
        else:
            # drawdown has not ended yet
            # Keep only data up to the peak; everything after is the
            # still-open drawdown just recorded.
            underwater = underwater.loc[:peak]
        drawdowns.append((peak, valley, recovery))
        # Stop early if there is nothing left to search.
        if (len(returns) == 0) or (len(underwater) == 0):
            break
    return drawdowns
|
def function[get_top_drawdowns, parameter[returns, top]]:
constant[
Finds top drawdowns, sorted by drawdown amount.
Parameters
----------
returns : pd.Series
Daily returns of the strategy, noncumulative.
- See full explanation in tears.create_full_tear_sheet.
top : int, optional
The amount of top drawdowns to find (default 10).
Returns
-------
drawdowns : list
List of drawdown peaks, valleys, and recoveries. See get_max_drawdown.
]
variable[returns] assign[=] call[name[returns].copy, parameter[]]
variable[df_cum] assign[=] call[name[ep].cum_returns, parameter[name[returns], constant[1.0]]]
variable[running_max] assign[=] call[name[np].maximum.accumulate, parameter[name[df_cum]]]
variable[underwater] assign[=] binary_operation[binary_operation[name[df_cum] / name[running_max]] - constant[1]]
variable[drawdowns] assign[=] list[[]]
for taget[name[t]] in starred[call[name[range], parameter[name[top]]]] begin[:]
<ast.Tuple object at 0x7da1b003c490> assign[=] call[name[get_max_drawdown_underwater], parameter[name[underwater]]]
if <ast.UnaryOp object at 0x7da1b003f100> begin[:]
call[name[underwater].drop, parameter[call[call[name[underwater]][<ast.Slice object at 0x7da1b003e2f0>].index][<ast.Slice object at 0x7da1b003c820>]]]
call[name[drawdowns].append, parameter[tuple[[<ast.Name object at 0x7da1b003f820>, <ast.Name object at 0x7da1b003f760>, <ast.Name object at 0x7da1b003f7c0>]]]]
if <ast.BoolOp object at 0x7da1b003f7f0> begin[:]
break
return[name[drawdowns]]
|
keyword[def] identifier[get_top_drawdowns] ( identifier[returns] , identifier[top] = literal[int] ):
literal[string]
identifier[returns] = identifier[returns] . identifier[copy] ()
identifier[df_cum] = identifier[ep] . identifier[cum_returns] ( identifier[returns] , literal[int] )
identifier[running_max] = identifier[np] . identifier[maximum] . identifier[accumulate] ( identifier[df_cum] )
identifier[underwater] = identifier[df_cum] / identifier[running_max] - literal[int]
identifier[drawdowns] =[]
keyword[for] identifier[t] keyword[in] identifier[range] ( identifier[top] ):
identifier[peak] , identifier[valley] , identifier[recovery] = identifier[get_max_drawdown_underwater] ( identifier[underwater] )
keyword[if] keyword[not] identifier[pd] . identifier[isnull] ( identifier[recovery] ):
identifier[underwater] . identifier[drop] ( identifier[underwater] [ identifier[peak] : identifier[recovery] ]. identifier[index] [ literal[int] :- literal[int] ],
identifier[inplace] = keyword[True] )
keyword[else] :
identifier[underwater] = identifier[underwater] . identifier[loc] [: identifier[peak] ]
identifier[drawdowns] . identifier[append] (( identifier[peak] , identifier[valley] , identifier[recovery] ))
keyword[if] ( identifier[len] ( identifier[returns] )== literal[int] ) keyword[or] ( identifier[len] ( identifier[underwater] )== literal[int] ):
keyword[break]
keyword[return] identifier[drawdowns]
|
def get_top_drawdowns(returns, top=10):
"""
Finds top drawdowns, sorted by drawdown amount.
Parameters
----------
returns : pd.Series
Daily returns of the strategy, noncumulative.
- See full explanation in tears.create_full_tear_sheet.
top : int, optional
The amount of top drawdowns to find (default 10).
Returns
-------
drawdowns : list
List of drawdown peaks, valleys, and recoveries. See get_max_drawdown.
"""
returns = returns.copy()
df_cum = ep.cum_returns(returns, 1.0)
running_max = np.maximum.accumulate(df_cum)
underwater = df_cum / running_max - 1
drawdowns = []
for t in range(top):
(peak, valley, recovery) = get_max_drawdown_underwater(underwater)
# Slice out draw-down period
if not pd.isnull(recovery):
underwater.drop(underwater[peak:recovery].index[1:-1], inplace=True) # depends on [control=['if'], data=[]]
else:
# drawdown has not ended yet
underwater = underwater.loc[:peak]
drawdowns.append((peak, valley, recovery))
if len(returns) == 0 or len(underwater) == 0:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return drawdowns
|
def get_domain_config(self, domain):
    """Makes a discovery of domain name and resolves configuration of DNS provider
    :param domain: str
        domain name
    :return: DomainConnectConfig
        domain connect config
    :raises: NoDomainConnectRecordException
        when no _domainconnect record found
    :raises: NoDomainConnectSettingsException
        when settings are not found
    """
    domain_root = self.identify_domain_root(domain)
    host = ''
    if len(domain_root) != len(domain):
        # Strip the root as a *suffix*.  The previous
        # domain.replace('.' + domain_root, '') removed the first occurrence
        # anywhere in the name, which yields a wrong host when the root's
        # labels also appear earlier (e.g. 'example.com.eu.example.com').
        suffix = '.' + domain_root
        if domain.endswith(suffix):
            host = domain[:-len(suffix)]
        else:
            # Fallback to the old behaviour if the root is unexpectedly
            # not a suffix of the domain.
            host = domain.replace(suffix, '')
    domain_connect_api = self._identify_domain_connect_api(domain_root)
    ret = self._get_domain_config_for_root(domain_root, domain_connect_api)
    return DomainConnectConfig(domain, domain_root, host, ret)
|
def function[get_domain_config, parameter[self, domain]]:
constant[Makes a discovery of domain name and resolves configuration of DNS provider
:param domain: str
domain name
:return: DomainConnectConfig
domain connect config
:raises: NoDomainConnectRecordException
when no _domainconnect record found
:raises: NoDomainConnectSettingsException
when settings are not found
]
variable[domain_root] assign[=] call[name[self].identify_domain_root, parameter[name[domain]]]
variable[host] assign[=] constant[]
if compare[call[name[len], parameter[name[domain_root]]] not_equal[!=] call[name[len], parameter[name[domain]]]] begin[:]
variable[host] assign[=] call[name[domain].replace, parameter[binary_operation[constant[.] + name[domain_root]], constant[]]]
variable[domain_connect_api] assign[=] call[name[self]._identify_domain_connect_api, parameter[name[domain_root]]]
variable[ret] assign[=] call[name[self]._get_domain_config_for_root, parameter[name[domain_root], name[domain_connect_api]]]
return[call[name[DomainConnectConfig], parameter[name[domain], name[domain_root], name[host], name[ret]]]]
|
keyword[def] identifier[get_domain_config] ( identifier[self] , identifier[domain] ):
literal[string]
identifier[domain_root] = identifier[self] . identifier[identify_domain_root] ( identifier[domain] )
identifier[host] = literal[string]
keyword[if] identifier[len] ( identifier[domain_root] )!= identifier[len] ( identifier[domain] ):
identifier[host] = identifier[domain] . identifier[replace] ( literal[string] + identifier[domain_root] , literal[string] )
identifier[domain_connect_api] = identifier[self] . identifier[_identify_domain_connect_api] ( identifier[domain_root] )
identifier[ret] = identifier[self] . identifier[_get_domain_config_for_root] ( identifier[domain_root] , identifier[domain_connect_api] )
keyword[return] identifier[DomainConnectConfig] ( identifier[domain] , identifier[domain_root] , identifier[host] , identifier[ret] )
|
def get_domain_config(self, domain):
"""Makes a discovery of domain name and resolves configuration of DNS provider
:param domain: str
domain name
:return: DomainConnectConfig
domain connect config
:raises: NoDomainConnectRecordException
when no _domainconnect record found
:raises: NoDomainConnectSettingsException
when settings are not found
"""
domain_root = self.identify_domain_root(domain)
host = ''
if len(domain_root) != len(domain):
host = domain.replace('.' + domain_root, '') # depends on [control=['if'], data=[]]
domain_connect_api = self._identify_domain_connect_api(domain_root)
ret = self._get_domain_config_for_root(domain_root, domain_connect_api)
return DomainConnectConfig(domain, domain_root, host, ret)
|
def value(self):
    '''Return the attribute's value(s).

    A single-element value list collapses to its lone element unless the
    app config flag FORCE_ATTRIBUTE_VALUE_AS_LIST is True, in which case
    a list is always returned.
    '''
    vals = self.__dict__['values']
    # Check length first so the Flask app config is only consulted when a
    # collapse is actually possible (preserves original short-circuit).
    if len(vals) == 1 and current_app.config['FORCE_ATTRIBUTE_VALUE_AS_LIST'] is False:
        return vals[0]
    return vals
|
def function[value, parameter[self]]:
constant[Return single value or list of values from the attribute.
If FORCE_ATTRIBUTE_VALUE_AS_LIST is True, always return a
list with values.
]
if <ast.BoolOp object at 0x7da2045646d0> begin[:]
return[call[call[name[self].__dict__][constant[values]]][constant[0]]]
|
keyword[def] identifier[value] ( identifier[self] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[__dict__] [ literal[string] ])== literal[int] keyword[and] identifier[current_app] . identifier[config] [ literal[string] ] keyword[is] keyword[False] :
keyword[return] identifier[self] . identifier[__dict__] [ literal[string] ][ literal[int] ]
keyword[else] :
keyword[return] identifier[self] . identifier[__dict__] [ literal[string] ]
|
def value(self):
"""Return single value or list of values from the attribute.
If FORCE_ATTRIBUTE_VALUE_AS_LIST is True, always return a
list with values.
"""
if len(self.__dict__['values']) == 1 and current_app.config['FORCE_ATTRIBUTE_VALUE_AS_LIST'] is False:
return self.__dict__['values'][0] # depends on [control=['if'], data=[]]
else:
return self.__dict__['values']
|
def AddGroupTags(r, group, tags, dry_run=False):
    """
    Adds tags to a node group.
    @type group: str
    @param group: group to add tags to
    @type tags: list of string
    @param tags: tags to add to the group
    @type dry_run: bool
    @param dry_run: whether to perform a dry run
    @rtype: string
    @return: job id
    """
    # Submit the tag update as a PUT against the group's tags resource.
    return r.request("put", "/2/groups/%s/tags" % group,
                     query={"dry-run": dry_run, "tag": tags})
|
def function[AddGroupTags, parameter[r, group, tags, dry_run]]:
constant[
Adds tags to a node group.
@type group: str
@param group: group to add tags to
@type tags: list of string
@param tags: tags to add to the group
@type dry_run: bool
@param dry_run: whether to perform a dry run
@rtype: string
@return: job id
]
variable[query] assign[=] dictionary[[<ast.Constant object at 0x7da2054a5e70>, <ast.Constant object at 0x7da2054a6c20>], [<ast.Name object at 0x7da2054a7220>, <ast.Name object at 0x7da2054a5c00>]]
return[call[name[r].request, parameter[constant[put], binary_operation[constant[/2/groups/%s/tags] <ast.Mod object at 0x7da2590d6920> name[group]]]]]
|
keyword[def] identifier[AddGroupTags] ( identifier[r] , identifier[group] , identifier[tags] , identifier[dry_run] = keyword[False] ):
literal[string]
identifier[query] ={
literal[string] : identifier[dry_run] ,
literal[string] : identifier[tags] ,
}
keyword[return] identifier[r] . identifier[request] ( literal[string] , literal[string] % identifier[group] , identifier[query] = identifier[query] )
|
def AddGroupTags(r, group, tags, dry_run=False):
"""
Adds tags to a node group.
@type group: str
@param group: group to add tags to
@type tags: list of string
@param tags: tags to add to the group
@type dry_run: bool
@param dry_run: whether to perform a dry run
@rtype: string
@return: job id
"""
query = {'dry-run': dry_run, 'tag': tags}
return r.request('put', '/2/groups/%s/tags' % group, query=query)
|
def _nuke_set_zero_margins(widget_object):
    """Remove Nuke margins when docked UI
    .. _More info:
        https://gist.github.com/maty974/4739917
    """
    # Walk every live Qt widget looking for children whose class matches
    # widget_object's class; collect up to three ancestor levels of each
    # matching widget's parent so their margins can be zeroed.
    parentApp = QtWidgets.QApplication.allWidgets()
    parentWidgetList = []
    for parent in parentApp:
        for child in parent.children():
            # Compare by class *name* rather than isinstance, so matches
            # survive module reloads that produce distinct class objects.
            if widget_object.__class__.__name__ == child.__class__.__name__:
                # NOTE(review): assumes parent.parentWidget() and the two
                # levels above it are non-None here — otherwise this raises
                # AttributeError. Confirm inside a docked Nuke session.
                parentWidgetList.append(
                    parent.parentWidget())
                parentWidgetList.append(
                    parent.parentWidget().parentWidget())
                parentWidgetList.append(
                    parent.parentWidget().parentWidget().parentWidget())
    for sub in parentWidgetList:
        for tinychild in sub.children():
            try:
                tinychild.setContentsMargins(0, 0, 0, 0)
            except Exception:
                # Not every child is a widget with margins; skip those.
                pass
|
def function[_nuke_set_zero_margins, parameter[widget_object]]:
constant[Remove Nuke margins when docked UI
.. _More info:
https://gist.github.com/maty974/4739917
]
variable[parentApp] assign[=] call[name[QtWidgets].QApplication.allWidgets, parameter[]]
variable[parentWidgetList] assign[=] list[[]]
for taget[name[parent]] in starred[name[parentApp]] begin[:]
for taget[name[child]] in starred[call[name[parent].children, parameter[]]] begin[:]
if compare[name[widget_object].__class__.__name__ equal[==] name[child].__class__.__name__] begin[:]
call[name[parentWidgetList].append, parameter[call[name[parent].parentWidget, parameter[]]]]
call[name[parentWidgetList].append, parameter[call[call[name[parent].parentWidget, parameter[]].parentWidget, parameter[]]]]
call[name[parentWidgetList].append, parameter[call[call[call[name[parent].parentWidget, parameter[]].parentWidget, parameter[]].parentWidget, parameter[]]]]
for taget[name[sub]] in starred[name[parentWidgetList]] begin[:]
for taget[name[tinychild]] in starred[call[name[sub].children, parameter[]]] begin[:]
<ast.Try object at 0x7da1b0f3aaa0>
|
keyword[def] identifier[_nuke_set_zero_margins] ( identifier[widget_object] ):
literal[string]
identifier[parentApp] = identifier[QtWidgets] . identifier[QApplication] . identifier[allWidgets] ()
identifier[parentWidgetList] =[]
keyword[for] identifier[parent] keyword[in] identifier[parentApp] :
keyword[for] identifier[child] keyword[in] identifier[parent] . identifier[children] ():
keyword[if] identifier[widget_object] . identifier[__class__] . identifier[__name__] == identifier[child] . identifier[__class__] . identifier[__name__] :
identifier[parentWidgetList] . identifier[append] (
identifier[parent] . identifier[parentWidget] ())
identifier[parentWidgetList] . identifier[append] (
identifier[parent] . identifier[parentWidget] (). identifier[parentWidget] ())
identifier[parentWidgetList] . identifier[append] (
identifier[parent] . identifier[parentWidget] (). identifier[parentWidget] (). identifier[parentWidget] ())
keyword[for] identifier[sub] keyword[in] identifier[parentWidgetList] :
keyword[for] identifier[tinychild] keyword[in] identifier[sub] . identifier[children] ():
keyword[try] :
identifier[tinychild] . identifier[setContentsMargins] ( literal[int] , literal[int] , literal[int] , literal[int] )
keyword[except] identifier[Exception] :
keyword[pass]
|
def _nuke_set_zero_margins(widget_object):
"""Remove Nuke margins when docked UI
.. _More info:
https://gist.github.com/maty974/4739917
"""
parentApp = QtWidgets.QApplication.allWidgets()
parentWidgetList = []
for parent in parentApp:
for child in parent.children():
if widget_object.__class__.__name__ == child.__class__.__name__:
parentWidgetList.append(parent.parentWidget())
parentWidgetList.append(parent.parentWidget().parentWidget())
parentWidgetList.append(parent.parentWidget().parentWidget().parentWidget())
for sub in parentWidgetList:
for tinychild in sub.children():
try:
tinychild.setContentsMargins(0, 0, 0, 0) # depends on [control=['try'], data=[]]
except Exception:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['tinychild']] # depends on [control=['for'], data=['sub']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['child']] # depends on [control=['for'], data=['parent']]
|
def _set_logging_rate_memory(self, v, load=False):
    """
    Setter method for logging_rate_memory, mapped from YANG variable /rbridge_id/resource_monitor/memory/logging_rate_memory (uint32)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_logging_rate_memory is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_logging_rate_memory() directly.

    NOTE: auto-generated (pyangbind-style) setter — do not hand-edit the
    YANGDynClass construction. The `load` parameter is accepted for
    interface parity with sibling setters; it is not consulted here.
    """
    # Unwrap values that carry their own YANG type converter.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Range-restricted uint32 (YANG range "10 .. 120").
        # NOTE(review): the tailf 'info' text says "CPU usage monitoring"
        # although this node is under /memory — looks like a copy/paste in
        # the generated model; cannot be changed here without altering
        # runtime strings.
        t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'10 .. 120']}), is_leaf=True, yang_name="logging-rate-memory", rest_name="logging-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Logging rate for CPU usage monitoring', u'hidden': u'debug', u'alt-name': u'logging-rate', u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-resource-monitor', defining_module='brocade-resource-monitor', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the structured error payload expected by callers.
        raise ValueError({
          'error-string': """logging_rate_memory must be of a type compatible with uint32""",
          'defined-type': "uint32",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'10 .. 120']}), is_leaf=True, yang_name="logging-rate-memory", rest_name="logging-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Logging rate for CPU usage monitoring', u'hidden': u'debug', u'alt-name': u'logging-rate', u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-resource-monitor', defining_module='brocade-resource-monitor', yang_type='uint32', is_config=True)""",
        })
    self.__logging_rate_memory = t
    # Propagate the change if the instance participates in config tracking.
    if hasattr(self, '_set'):
        self._set()
def function[_set_logging_rate_memory, parameter[self, v, load]]:
constant[
Setter method for logging_rate_memory, mapped from YANG variable /rbridge_id/resource_monitor/memory/logging_rate_memory (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_logging_rate_memory is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_logging_rate_memory() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da207f9a800>
name[self].__logging_rate_memory assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]]
|
keyword[def] identifier[_set_logging_rate_memory] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[long] , identifier[restriction_dict] ={ literal[string] :[ literal[string] ]}, identifier[int_size] = literal[int] ), identifier[restriction_dict] ={ literal[string] :[ literal[string] ]}), identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__logging_rate_memory] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] ()
|
def _set_logging_rate_memory(self, v, load=False):
"""
Setter method for logging_rate_memory, mapped from YANG variable /rbridge_id/resource_monitor/memory/logging_rate_memory (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_logging_rate_memory is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_logging_rate_memory() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'10 .. 120']}), is_leaf=True, yang_name='logging-rate-memory', rest_name='logging-rate', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Logging rate for CPU usage monitoring', u'hidden': u'debug', u'alt-name': u'logging-rate', u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-resource-monitor', defining_module='brocade-resource-monitor', yang_type='uint32', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'logging_rate_memory must be of a type compatible with uint32', 'defined-type': 'uint32', 'generated-type': 'YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={\'range\': [\'0..4294967295\']}, int_size=32), restriction_dict={\'range\': [u\'10 .. 120\']}), is_leaf=True, yang_name="logging-rate-memory", rest_name="logging-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Logging rate for CPU usage monitoring\', u\'hidden\': u\'debug\', u\'alt-name\': u\'logging-rate\', u\'cli-suppress-no\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-resource-monitor\', defining_module=\'brocade-resource-monitor\', yang_type=\'uint32\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__logging_rate_memory = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]]
|
def fisher_mean(dec=None, inc=None, di_block=None):
    """
    Calculates the Fisher mean and associated parameters from either a list of
    declination values and a separate list of inclination values or from a
    di_block (a nested list of [dec,inc,1.0]). Returns a
    dictionary with the Fisher mean and statistical parameters.
    Parameters
    ----------
    dec : list of declinations or longitudes
    inc : list of inclinations or latitudes
    di_block : a nested list of [dec,inc,1.0]
        A di_block can be provided instead of dec, inc lists in which case it
        will be used. Either dec, inc lists or a di_block need to be provided.
    Returns
    -------
    fisher_mean : dictionary containing the Fisher mean parameters
    Examples
    --------
    Use lists of declination and inclination to calculate a Fisher mean:
    >>> ipmag.fisher_mean(dec=[140,127,142,136],inc=[21,23,19,22])
    {'alpha95': 7.292891411309177,
    'csd': 6.4097743211340896,
    'dec': 136.30838974272072,
    'inc': 21.347784026899987,
    'k': 159.69251473636305,
    'n': 4,
    'r': 3.9812138971889026}
    Use a di_block to calculate a Fisher mean (will give the same output as the
    example with the lists):
    >>> ipmag.fisher_mean(di_block=[[140,21],[127,23],[142,19],[136,22]])
    """
    # Both code paths previously ended in the identical pmag.fisher_mean()
    # call; build the di_block from the separate lists only when one was
    # not supplied, then compute the statistics once.
    if di_block is None:
        di_block = make_di_block(dec, inc)
    return pmag.fisher_mean(di_block)
|
def function[fisher_mean, parameter[dec, inc, di_block]]:
constant[
Calculates the Fisher mean and associated parameters from either a list of
declination values and a separate list of inclination values or from a
di_block (a nested list a nested list of [dec,inc,1.0]). Returns a
dictionary with the Fisher mean and statistical parameters.
Parameters
----------
dec : list of declinations or longitudes
inc : list of inclinations or latitudes
di_block : a nested list of [dec,inc,1.0]
A di_block can be provided instead of dec, inc lists in which case it
will be used. Either dec, inc lists or a di_block need to be provided.
Returns
-------
fisher_mean : dictionary containing the Fisher mean parameters
Examples
--------
Use lists of declination and inclination to calculate a Fisher mean:
>>> ipmag.fisher_mean(dec=[140,127,142,136],inc=[21,23,19,22])
{'alpha95': 7.292891411309177,
'csd': 6.4097743211340896,
'dec': 136.30838974272072,
'inc': 21.347784026899987,
'k': 159.69251473636305,
'n': 4,
'r': 3.9812138971889026}
Use a di_block to calculate a Fisher mean (will give the same output as the
example with the lists):
>>> ipmag.fisher_mean(di_block=[[140,21],[127,23],[142,19],[136,22]])
]
if compare[name[di_block] is constant[None]] begin[:]
variable[di_block] assign[=] call[name[make_di_block], parameter[name[dec], name[inc]]]
return[call[name[pmag].fisher_mean, parameter[name[di_block]]]]
|
keyword[def] identifier[fisher_mean] ( identifier[dec] = keyword[None] , identifier[inc] = keyword[None] , identifier[di_block] = keyword[None] ):
literal[string]
keyword[if] identifier[di_block] keyword[is] keyword[None] :
identifier[di_block] = identifier[make_di_block] ( identifier[dec] , identifier[inc] )
keyword[return] identifier[pmag] . identifier[fisher_mean] ( identifier[di_block] )
keyword[else] :
keyword[return] identifier[pmag] . identifier[fisher_mean] ( identifier[di_block] )
|
def fisher_mean(dec=None, inc=None, di_block=None):
"""
Calculates the Fisher mean and associated parameters from either a list of
declination values and a separate list of inclination values or from a
di_block (a nested list a nested list of [dec,inc,1.0]). Returns a
dictionary with the Fisher mean and statistical parameters.
Parameters
----------
dec : list of declinations or longitudes
inc : list of inclinations or latitudes
di_block : a nested list of [dec,inc,1.0]
A di_block can be provided instead of dec, inc lists in which case it
will be used. Either dec, inc lists or a di_block need to be provided.
Returns
-------
fisher_mean : dictionary containing the Fisher mean parameters
Examples
--------
Use lists of declination and inclination to calculate a Fisher mean:
>>> ipmag.fisher_mean(dec=[140,127,142,136],inc=[21,23,19,22])
{'alpha95': 7.292891411309177,
'csd': 6.4097743211340896,
'dec': 136.30838974272072,
'inc': 21.347784026899987,
'k': 159.69251473636305,
'n': 4,
'r': 3.9812138971889026}
Use a di_block to calculate a Fisher mean (will give the same output as the
example with the lists):
>>> ipmag.fisher_mean(di_block=[[140,21],[127,23],[142,19],[136,22]])
"""
if di_block is None:
di_block = make_di_block(dec, inc)
return pmag.fisher_mean(di_block) # depends on [control=['if'], data=['di_block']]
else:
return pmag.fisher_mean(di_block)
|
def _get_enterprise_customer_users_batch(self, start, end):
    """
    Return one batch (rows ``start``..``end``) of the User queryset for
    enterprise customer users.
    """
    LOGGER.info('Fetching new batch of enterprise customer users from indexes: %s to %s', start, end)
    # Restrict to the ids reported by the helper, then slice out the batch.
    user_ids = self._get_enterprise_customer_user_ids()
    queryset = User.objects.filter(pk__in=user_ids)
    return queryset[start:end]
|
def function[_get_enterprise_customer_users_batch, parameter[self, start, end]]:
constant[
Returns a batched queryset of EnterpriseCustomerUser objects.
]
call[name[LOGGER].info, parameter[constant[Fetching new batch of enterprise customer users from indexes: %s to %s], name[start], name[end]]]
return[call[call[name[User].objects.filter, parameter[]]][<ast.Slice object at 0x7da1b0053ca0>]]
|
keyword[def] identifier[_get_enterprise_customer_users_batch] ( identifier[self] , identifier[start] , identifier[end] ):
literal[string]
identifier[LOGGER] . identifier[info] ( literal[string] , identifier[start] , identifier[end] )
keyword[return] identifier[User] . identifier[objects] . identifier[filter] ( identifier[pk__in] = identifier[self] . identifier[_get_enterprise_customer_user_ids] ())[ identifier[start] : identifier[end] ]
|
def _get_enterprise_customer_users_batch(self, start, end):
"""
Returns a batched queryset of EnterpriseCustomerUser objects.
"""
LOGGER.info('Fetching new batch of enterprise customer users from indexes: %s to %s', start, end)
return User.objects.filter(pk__in=self._get_enterprise_customer_user_ids())[start:end]
|
def format_survival_rate():
    """cr-rate
    Usage: cr-rate <session-file>
    Calculate the survival rate of a session.
    """
    # NOTE: the docstring above doubles as the docopt usage specification,
    # so its exact text is part of the program's behavior.
    opts = docopt.docopt(format_survival_rate.__doc__, version='cr-rate 1.0')
    session_file = opts['<session-file>']
    with use_db(session_file, WorkDB.Mode.open) as db:
        print('{:.2f}'.format(survival_rate(db)))
|
def function[format_survival_rate, parameter[]]:
constant[cr-rate
Usage: cr-rate <session-file>
Calculate the survival rate of a session.
]
variable[arguments] assign[=] call[name[docopt].docopt, parameter[name[format_survival_rate].__doc__]]
with call[name[use_db], parameter[call[name[arguments]][constant[<session-file>]], name[WorkDB].Mode.open]] begin[:]
variable[rate] assign[=] call[name[survival_rate], parameter[name[db]]]
call[name[print], parameter[call[constant[{:.2f}].format, parameter[name[rate]]]]]
|
keyword[def] identifier[format_survival_rate] ():
literal[string]
identifier[arguments] = identifier[docopt] . identifier[docopt] (
identifier[format_survival_rate] . identifier[__doc__] , identifier[version] = literal[string] )
keyword[with] identifier[use_db] ( identifier[arguments] [ literal[string] ], identifier[WorkDB] . identifier[Mode] . identifier[open] ) keyword[as] identifier[db] :
identifier[rate] = identifier[survival_rate] ( identifier[db] )
identifier[print] ( literal[string] . identifier[format] ( identifier[rate] ))
|
def format_survival_rate():
"""cr-rate
Usage: cr-rate <session-file>
Calculate the survival rate of a session.
"""
arguments = docopt.docopt(format_survival_rate.__doc__, version='cr-rate 1.0')
with use_db(arguments['<session-file>'], WorkDB.Mode.open) as db:
rate = survival_rate(db) # depends on [control=['with'], data=['db']]
print('{:.2f}'.format(rate))
|
def hdate(self, date):
    """Set the dates of the HDate object based on a given Hebrew date.

    ``date`` may be None when a Gregorian date has already been set, in
    which case the Hebrew date is derived from it.

    Raises:
        TypeError: if ``date`` is not a HebrewDate.
        ValueError: if the month is outside 1-14 or the day outside 1-30.
    """
    # Sanity checks
    if date is None and isinstance(self.gdate, datetime.date):
        # Calculate the value since gdate has been set
        date = self.hdate
    if not isinstance(date, HebrewDate):
        raise TypeError('date: {} is not of type HebrewDate'.format(date))
    if not 0 < date.month < 15:
        raise ValueError(
            'month ({}) legal values are 1-14'.format(date.month))
    # The range check admits 1-30 (Hebrew months have at most 30 days);
    # the message previously claimed "1-31", contradicting the check.
    if not 0 < date.day < 31:
        raise ValueError('day ({}) legal values are 1-30'.format(date.day))
    self._last_updated = "hdate"
    self._hdate = date
|
def function[hdate, parameter[self, date]]:
constant[Set the dates of the HDate object based on a given Hebrew date.]
if <ast.BoolOp object at 0x7da1b0aa4eb0> begin[:]
variable[date] assign[=] name[self].hdate
if <ast.UnaryOp object at 0x7da1b092f9d0> begin[:]
<ast.Raise object at 0x7da1b0aa7b80>
if <ast.UnaryOp object at 0x7da1b0aa4160> begin[:]
<ast.Raise object at 0x7da1b0aa7310>
if <ast.UnaryOp object at 0x7da1b0aa50c0> begin[:]
<ast.Raise object at 0x7da1b0aa76a0>
name[self]._last_updated assign[=] constant[hdate]
name[self]._hdate assign[=] name[date]
|
keyword[def] identifier[hdate] ( identifier[self] , identifier[date] ):
literal[string]
keyword[if] identifier[date] keyword[is] keyword[None] keyword[and] identifier[isinstance] ( identifier[self] . identifier[gdate] , identifier[datetime] . identifier[date] ):
identifier[date] = identifier[self] . identifier[hdate]
keyword[if] keyword[not] identifier[isinstance] ( identifier[date] , identifier[HebrewDate] ):
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[date] ))
keyword[if] keyword[not] literal[int] < identifier[date] . identifier[month] < literal[int] :
keyword[raise] identifier[ValueError] (
literal[string] . identifier[format] ( identifier[date] . identifier[month] ))
keyword[if] keyword[not] literal[int] < identifier[date] . identifier[day] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[date] . identifier[day] ))
identifier[self] . identifier[_last_updated] = literal[string]
identifier[self] . identifier[_hdate] = identifier[date]
|
def hdate(self, date):
"""Set the dates of the HDate object based on a given Hebrew date."""
# Sanity checks
if date is None and isinstance(self.gdate, datetime.date):
# Calculate the value since gdate has been set
date = self.hdate # depends on [control=['if'], data=[]]
if not isinstance(date, HebrewDate):
raise TypeError('date: {} is not of type HebrewDate'.format(date)) # depends on [control=['if'], data=[]]
if not 0 < date.month < 15:
raise ValueError('month ({}) legal values are 1-14'.format(date.month)) # depends on [control=['if'], data=[]]
if not 0 < date.day < 31:
raise ValueError('day ({}) legal values are 1-31'.format(date.day)) # depends on [control=['if'], data=[]]
self._last_updated = 'hdate'
self._hdate = date
|
def draw_normal_initial(self):
    """
    Draw simulated parameter vectors from the mean-field normal
    approximating family.

    Returns
    -------
    np.ndarray
        Array of shape (num_parameters, self.sims) of normal draws.
    """
    loc, scale = self.get_means_and_scales_from_q()
    n_params = loc.shape[0]
    # One row per simulation, then transpose so parameters index the rows.
    draws = np.random.normal(loc, scale, size=[self.sims, n_params])
    return draws.T
|
def function[draw_normal_initial, parameter[self]]:
constant[
Draw parameters from a mean-field normal family
]
<ast.Tuple object at 0x7da18f00f520> assign[=] call[name[self].get_means_and_scales_from_q, parameter[]]
return[call[name[np].random.normal, parameter[name[means], name[scale]]].T]
|
keyword[def] identifier[draw_normal_initial] ( identifier[self] ):
literal[string]
identifier[means] , identifier[scale] = identifier[self] . identifier[get_means_and_scales_from_q] ()
keyword[return] identifier[np] . identifier[random] . identifier[normal] ( identifier[means] , identifier[scale] , identifier[size] =[ identifier[self] . identifier[sims] , identifier[means] . identifier[shape] [ literal[int] ]]). identifier[T]
|
def draw_normal_initial(self):
"""
Draw parameters from a mean-field normal family
"""
(means, scale) = self.get_means_and_scales_from_q()
return np.random.normal(means, scale, size=[self.sims, means.shape[0]]).T
|
def vlan_classifier_group_groupid(self, **kwargs):
    """Auto Generated Code

    Builds the XML payload for the vlan/classifier/group config node
    (key leaves ``oper``, ``rule-name``, ``ruleid`` plus ``groupid``)
    and passes it to the callback (default: ``self._callback``).
    """
    config = ET.Element("config")
    vlan = ET.SubElement(config, "vlan", xmlns="urn:brocade.com:mgmt:brocade-vlan")
    group = ET.SubElement(ET.SubElement(vlan, "classifier"), "group")
    # Emit the leaves in the same order as the original generated code.
    for tag, kwarg in (("oper", "oper"),
                      ("rule-name", "rule_name"),
                      ("ruleid", "ruleid"),
                      ("groupid", "groupid")):
        ET.SubElement(group, tag).text = kwargs.pop(kwarg)
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
def function[vlan_classifier_group_groupid, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[vlan] assign[=] call[name[ET].SubElement, parameter[name[config], constant[vlan]]]
variable[classifier] assign[=] call[name[ET].SubElement, parameter[name[vlan], constant[classifier]]]
variable[group] assign[=] call[name[ET].SubElement, parameter[name[classifier], constant[group]]]
variable[oper_key] assign[=] call[name[ET].SubElement, parameter[name[group], constant[oper]]]
name[oper_key].text assign[=] call[name[kwargs].pop, parameter[constant[oper]]]
variable[rule_name_key] assign[=] call[name[ET].SubElement, parameter[name[group], constant[rule-name]]]
name[rule_name_key].text assign[=] call[name[kwargs].pop, parameter[constant[rule_name]]]
variable[ruleid_key] assign[=] call[name[ET].SubElement, parameter[name[group], constant[ruleid]]]
name[ruleid_key].text assign[=] call[name[kwargs].pop, parameter[constant[ruleid]]]
variable[groupid] assign[=] call[name[ET].SubElement, parameter[name[group], constant[groupid]]]
name[groupid].text assign[=] call[name[kwargs].pop, parameter[constant[groupid]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]]
|
keyword[def] identifier[vlan_classifier_group_groupid] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[vlan] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[classifier] = identifier[ET] . identifier[SubElement] ( identifier[vlan] , literal[string] )
identifier[group] = identifier[ET] . identifier[SubElement] ( identifier[classifier] , literal[string] )
identifier[oper_key] = identifier[ET] . identifier[SubElement] ( identifier[group] , literal[string] )
identifier[oper_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[rule_name_key] = identifier[ET] . identifier[SubElement] ( identifier[group] , literal[string] )
identifier[rule_name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[ruleid_key] = identifier[ET] . identifier[SubElement] ( identifier[group] , literal[string] )
identifier[ruleid_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[groupid] = identifier[ET] . identifier[SubElement] ( identifier[group] , literal[string] )
identifier[groupid] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] )
|
def vlan_classifier_group_groupid(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
vlan = ET.SubElement(config, 'vlan', xmlns='urn:brocade.com:mgmt:brocade-vlan')
classifier = ET.SubElement(vlan, 'classifier')
group = ET.SubElement(classifier, 'group')
oper_key = ET.SubElement(group, 'oper')
oper_key.text = kwargs.pop('oper')
rule_name_key = ET.SubElement(group, 'rule-name')
rule_name_key.text = kwargs.pop('rule_name')
ruleid_key = ET.SubElement(group, 'ruleid')
ruleid_key.text = kwargs.pop('ruleid')
groupid = ET.SubElement(group, 'groupid')
groupid.text = kwargs.pop('groupid')
callback = kwargs.pop('callback', self._callback)
return callback(config)
|
def timeout_callback(self):
    """Keep-alive expiry callback.

    Compares the time elapsed since the most recent response against the
    configured timeout. While still within the window, reschedules itself
    for the remaining interval; once the window is exhausted, sends a 504
    reply and closes the connection. (Docstring translated from Chinese.)
    """
    elapsed = time() - self._last_response_time
    if elapsed >= self.timeout:
        # Keep-alive window exhausted: reply with code 504 and tear down.
        logger.info('KeepAlive Timeout. Closing connection.')
        payload = self.encoder({
            "MPRPC": self.VERSION,
            "CODE": 504
        })
        self._stream_writer.write(payload)
        self.close()
    else:
        # Still inside the window: check again when the remainder elapses.
        remaining = self.timeout - elapsed
        self._timeout_handler = self._loop.call_later(
            remaining, self.timeout_callback)
|
def function[timeout_callback, parameter[self]]:
constant[过期回调函数.
如果设置了timeout则会启动一个协程按当前时间和最近的响应时间只差递归的执行这个回调
]
variable[now] assign[=] call[name[time], parameter[]]
variable[time_elapsed] assign[=] binary_operation[name[now] - name[self]._last_response_time]
if compare[name[time_elapsed] less[<] name[self].timeout] begin[:]
variable[time_left] assign[=] binary_operation[name[self].timeout - name[time_elapsed]]
name[self]._timeout_handler assign[=] call[name[self]._loop.call_later, parameter[name[time_left], name[self].timeout_callback]]
|
keyword[def] identifier[timeout_callback] ( identifier[self] ):
literal[string]
identifier[now] = identifier[time] ()
identifier[time_elapsed] = identifier[now] - identifier[self] . identifier[_last_response_time]
keyword[if] identifier[time_elapsed] < identifier[self] . identifier[timeout] :
identifier[time_left] = identifier[self] . identifier[timeout] - identifier[time_elapsed]
identifier[self] . identifier[_timeout_handler] =(
identifier[self] . identifier[_loop] . identifier[call_later] (
identifier[time_left] ,
identifier[self] . identifier[timeout_callback]
)
)
keyword[else] :
identifier[logger] . identifier[info] ( literal[string] )
identifier[responseb] = identifier[self] . identifier[encoder] ({
literal[string] : identifier[self] . identifier[VERSION] ,
literal[string] : literal[int]
})
identifier[self] . identifier[_stream_writer] . identifier[write] ( identifier[responseb] )
identifier[self] . identifier[close] ()
|
def timeout_callback(self):
"""过期回调函数.
如果设置了timeout则会启动一个协程按当前时间和最近的响应时间只差递归的执行这个回调
"""
# Check if elapsed time since last response exceeds our configured
# maximum keep alive timeout value
now = time()
time_elapsed = now - self._last_response_time
if time_elapsed < self.timeout:
time_left = self.timeout - time_elapsed
self._timeout_handler = self._loop.call_later(time_left, self.timeout_callback) # depends on [control=['if'], data=['time_elapsed']]
else:
logger.info('KeepAlive Timeout. Closing connection.')
responseb = self.encoder({'MPRPC': self.VERSION, 'CODE': 504})
self._stream_writer.write(responseb)
self.close()
|
def populations_slices(particles, num_pop_list):
    """Return a list of slice objects, one per population size in
    ``num_pop_list``, partitioning items into consecutive runs.

    ``particles`` is accepted for interface compatibility but is not used
    in the computation.
    """
    # Cumulative boundaries: [0, n0, n0+n1, ...]; each adjacent pair is
    # one population's slice.
    bounds = [0]
    for size in num_pop_list:
        bounds.append(bounds[-1] + size)
    return [slice(lo, hi) for lo, hi in zip(bounds, bounds[1:])]
|
def function[populations_slices, parameter[particles, num_pop_list]]:
constant[2-tuple of slices for selection of two populations.
]
variable[slices] assign[=] list[[]]
variable[i_prev] assign[=] constant[0]
for taget[name[num_pop]] in starred[name[num_pop_list]] begin[:]
call[name[slices].append, parameter[call[name[slice], parameter[name[i_prev], binary_operation[name[i_prev] + name[num_pop]]]]]]
<ast.AugAssign object at 0x7da2041d8e20>
return[name[slices]]
|
keyword[def] identifier[populations_slices] ( identifier[particles] , identifier[num_pop_list] ):
literal[string]
identifier[slices] =[]
identifier[i_prev] = literal[int]
keyword[for] identifier[num_pop] keyword[in] identifier[num_pop_list] :
identifier[slices] . identifier[append] ( identifier[slice] ( identifier[i_prev] , identifier[i_prev] + identifier[num_pop] ))
identifier[i_prev] += identifier[num_pop]
keyword[return] identifier[slices]
|
def populations_slices(particles, num_pop_list):
"""2-tuple of slices for selection of two populations.
"""
slices = []
i_prev = 0
for num_pop in num_pop_list:
slices.append(slice(i_prev, i_prev + num_pop))
i_prev += num_pop # depends on [control=['for'], data=['num_pop']]
return slices
|
def validate(self, value):
    """Validate field value.

    A non-None value must be a list; every element is validated against
    the inner field, and element failures are re-raised with the index.
    Finally the parent class validation runs on the value itself.
    """
    if value is not None:
        if not isinstance(value, list):
            raise ValidationError("field must be a list")
        for idx, item in enumerate(value):
            try:
                self.inner.validate(item)
            except ValidationError as err:
                message = "invalid element {}: {}".format(idx, err.args[0])
                raise ValidationError(message)
    super().validate(value)
|
def function[validate, parameter[self, value]]:
constant[Validate field value.]
if compare[name[value] is_not constant[None]] begin[:]
if <ast.UnaryOp object at 0x7da1b1990640> begin[:]
<ast.Raise object at 0x7da1b1990dc0>
for taget[tuple[[<ast.Name object at 0x7da1b1993c70>, <ast.Name object at 0x7da1b1993eb0>]]] in starred[call[name[enumerate], parameter[name[value]]]] begin[:]
<ast.Try object at 0x7da1b1991ff0>
call[call[name[super], parameter[]].validate, parameter[name[value]]]
|
keyword[def] identifier[validate] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[list] ):
keyword[raise] identifier[ValidationError] ( literal[string] )
keyword[for] identifier[index] , identifier[element] keyword[in] identifier[enumerate] ( identifier[value] ):
keyword[try] :
identifier[self] . identifier[inner] . identifier[validate] ( identifier[element] )
keyword[except] identifier[ValidationError] keyword[as] identifier[error] :
keyword[raise] identifier[ValidationError] ( literal[string] . identifier[format] (
identifier[index] ,
identifier[error] . identifier[args] [ literal[int] ],
))
identifier[super] (). identifier[validate] ( identifier[value] )
|
def validate(self, value):
"""Validate field value."""
if value is not None:
if not isinstance(value, list):
raise ValidationError('field must be a list') # depends on [control=['if'], data=[]]
for (index, element) in enumerate(value):
try:
self.inner.validate(element) # depends on [control=['try'], data=[]]
except ValidationError as error:
raise ValidationError('invalid element {}: {}'.format(index, error.args[0])) # depends on [control=['except'], data=['error']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['value']]
super().validate(value)
|
def machine(self):
    """Return the flavour attribute of the BFD file being processed.

    NOTE(review): despite being named ``machine``, this queries
    ``BfdAttributes.FLAVOUR`` (which matches the docstring) — confirm
    whether an architecture attribute was intended for this property.

    Raises:
        BfdException: if the underlying BFD pointer is not initialized.
    """
    if not self._ptr:
        raise BfdException("BFD not initialized")
    return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.FLAVOUR)
|
def function[machine, parameter[self]]:
constant[Return the flavour attribute of the BFD file being processed.]
if <ast.UnaryOp object at 0x7da20c6e6fb0> begin[:]
<ast.Raise object at 0x7da20c6e5b70>
return[call[name[_bfd].get_bfd_attribute, parameter[name[self]._ptr, name[BfdAttributes].FLAVOUR]]]
|
keyword[def] identifier[machine] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_ptr] :
keyword[raise] identifier[BfdException] ( literal[string] )
keyword[return] identifier[_bfd] . identifier[get_bfd_attribute] ( identifier[self] . identifier[_ptr] , identifier[BfdAttributes] . identifier[FLAVOUR] )
|
def machine(self):
"""Return the flavour attribute of the BFD file being processed."""
if not self._ptr:
raise BfdException('BFD not initialized') # depends on [control=['if'], data=[]]
return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.FLAVOUR)
|
def json(self, answer):
    """
    Set the response content type to application/json and serialize
    the content.
    :param answer: the response as a Python object
    """
    self.content_type = "application/json"
    # Objects exposing __json__ serialize themselves; for lists, convert
    # each element that exposes __json__ (others pass through unchanged).
    if hasattr(answer, '__json__'):
        answer = answer.__json__()
    elif isinstance(answer, list):
        answer = [item.__json__() if hasattr(item, '__json__') else item
                  for item in answer]
    if self._output_schema is not None:
        try:
            jsonschema.validate(answer, self._output_schema)
        except jsonschema.ValidationError as e:
            log.error("Invalid output query. JSON schema error: {}".format(e.message))
            raise aiohttp.web.HTTPBadRequest(text="{}".format(e))
    self.body = json.dumps(answer, indent=4, sort_keys=True).encode('utf-8')
|
def function[json, parameter[self, answer]]:
constant[
Set the response content type to application/json and serialize
the content.
:param anwser The response as a Python object
]
name[self].content_type assign[=] constant[application/json]
if call[name[hasattr], parameter[name[answer], constant[__json__]]] begin[:]
variable[answer] assign[=] call[name[answer].__json__, parameter[]]
if compare[name[self]._output_schema is_not constant[None]] begin[:]
<ast.Try object at 0x7da20c6c7220>
name[self].body assign[=] call[call[name[json].dumps, parameter[name[answer]]].encode, parameter[constant[utf-8]]]
|
keyword[def] identifier[json] ( identifier[self] , identifier[answer] ):
literal[string]
identifier[self] . identifier[content_type] = literal[string]
keyword[if] identifier[hasattr] ( identifier[answer] , literal[string] ):
identifier[answer] = identifier[answer] . identifier[__json__] ()
keyword[elif] identifier[isinstance] ( identifier[answer] , identifier[list] ):
identifier[newanswer] =[]
keyword[for] identifier[elem] keyword[in] identifier[answer] :
keyword[if] identifier[hasattr] ( identifier[elem] , literal[string] ):
identifier[elem] = identifier[elem] . identifier[__json__] ()
identifier[newanswer] . identifier[append] ( identifier[elem] )
identifier[answer] = identifier[newanswer]
keyword[if] identifier[self] . identifier[_output_schema] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[jsonschema] . identifier[validate] ( identifier[answer] , identifier[self] . identifier[_output_schema] )
keyword[except] identifier[jsonschema] . identifier[ValidationError] keyword[as] identifier[e] :
identifier[log] . identifier[error] ( literal[string] . identifier[format] ( identifier[e] . identifier[message] ))
keyword[raise] identifier[aiohttp] . identifier[web] . identifier[HTTPBadRequest] ( identifier[text] = literal[string] . identifier[format] ( identifier[e] ))
identifier[self] . identifier[body] = identifier[json] . identifier[dumps] ( identifier[answer] , identifier[indent] = literal[int] , identifier[sort_keys] = keyword[True] ). identifier[encode] ( literal[string] )
|
def json(self, answer):
"""
Set the response content type to application/json and serialize
the content.
:param anwser The response as a Python object
"""
self.content_type = 'application/json'
if hasattr(answer, '__json__'):
answer = answer.__json__() # depends on [control=['if'], data=[]]
elif isinstance(answer, list):
newanswer = []
for elem in answer:
if hasattr(elem, '__json__'):
elem = elem.__json__() # depends on [control=['if'], data=[]]
newanswer.append(elem) # depends on [control=['for'], data=['elem']]
answer = newanswer # depends on [control=['if'], data=[]]
if self._output_schema is not None:
try:
jsonschema.validate(answer, self._output_schema) # depends on [control=['try'], data=[]]
except jsonschema.ValidationError as e:
log.error('Invalid output query. JSON schema error: {}'.format(e.message))
raise aiohttp.web.HTTPBadRequest(text='{}'.format(e)) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
self.body = json.dumps(answer, indent=4, sort_keys=True).encode('utf-8')
|
def _choose_base_case(is_accepted,
                      accepted,
                      rejected,
                      name=None):
  """Helper to `choose` which expand_dims `is_accepted` and applies tf.where."""
  def _expand_is_accepted_like(x):
    """Helper to expand `is_accepted` like the shape of some input arg."""
    with tf.compat.v1.name_scope('expand_is_accepted_like'):
      # Append trailing 1-dims to is_accepted so its rank matches x's rank.
      expand_shape = tf.concat([
          tf.shape(input=is_accepted),
          tf.ones([tf.rank(x) - tf.rank(is_accepted)], dtype=tf.int32),
      ],
                               axis=0)
      # Tile only over the trailing dims of x (leading dims tiled once).
      multiples = tf.concat([
          tf.ones([tf.rank(is_accepted)], dtype=tf.int32),
          tf.shape(input=x)[tf.rank(is_accepted):],
      ],
                              axis=0)
      m = tf.tile(tf.reshape(is_accepted, expand_shape),
                  multiples)
      # Propagate static shape information from x onto the mask.
      m.set_shape(m.shape.merge_with(x.shape))
      return m
  def _where(accepted, rejected):
    # Identical object on both sides: nothing to select between.
    if accepted is rejected:
      return accepted
    accepted = tf.convert_to_tensor(value=accepted, name='accepted')
    rejected = tf.convert_to_tensor(value=rejected, name='rejected')
    r = tf.where(_expand_is_accepted_like(accepted), accepted, rejected)
    # Keep the strongest static shape known from either input.
    r.set_shape(r.shape.merge_with(accepted.shape.merge_with(rejected.shape)))
    return r
  with tf.compat.v1.name_scope(
      name, 'choose', values=[is_accepted, accepted, rejected]):
    # Scalar-like (non-list) inputs select directly; list inputs select
    # pairwise, recursing via `choose` for namedtuple-like elements.
    if not is_list_like(accepted):
      return _where(accepted, rejected)
    return [(choose(is_accepted, a, r, name=name) if is_namedtuple_like(a)
             else _where(a, r))
            for a, r in zip(accepted, rejected)]
|
def function[_choose_base_case, parameter[is_accepted, accepted, rejected, name]]:
constant[Helper to `choose` which expand_dims `is_accepted` and applies tf.where.]
def function[_expand_is_accepted_like, parameter[x]]:
constant[Helper to expand `is_accepted` like the shape of some input arg.]
with call[name[tf].compat.v1.name_scope, parameter[constant[expand_is_accepted_like]]] begin[:]
variable[expand_shape] assign[=] call[name[tf].concat, parameter[list[[<ast.Call object at 0x7da1b02c83d0>, <ast.Call object at 0x7da1b02c9000>]]]]
variable[multiples] assign[=] call[name[tf].concat, parameter[list[[<ast.Call object at 0x7da1b02c91b0>, <ast.Subscript object at 0x7da1b02c9090>]]]]
variable[m] assign[=] call[name[tf].tile, parameter[call[name[tf].reshape, parameter[name[is_accepted], name[expand_shape]]], name[multiples]]]
call[name[m].set_shape, parameter[call[name[m].shape.merge_with, parameter[name[x].shape]]]]
return[name[m]]
def function[_where, parameter[accepted, rejected]]:
if compare[name[accepted] is name[rejected]] begin[:]
return[name[accepted]]
variable[accepted] assign[=] call[name[tf].convert_to_tensor, parameter[]]
variable[rejected] assign[=] call[name[tf].convert_to_tensor, parameter[]]
variable[r] assign[=] call[name[tf].where, parameter[call[name[_expand_is_accepted_like], parameter[name[accepted]]], name[accepted], name[rejected]]]
call[name[r].set_shape, parameter[call[name[r].shape.merge_with, parameter[call[name[accepted].shape.merge_with, parameter[name[rejected].shape]]]]]]
return[name[r]]
with call[name[tf].compat.v1.name_scope, parameter[name[name], constant[choose]]] begin[:]
if <ast.UnaryOp object at 0x7da1b0235900> begin[:]
return[call[name[_where], parameter[name[accepted], name[rejected]]]]
return[<ast.ListComp object at 0x7da1b02355d0>]
|
keyword[def] identifier[_choose_base_case] ( identifier[is_accepted] ,
identifier[accepted] ,
identifier[rejected] ,
identifier[name] = keyword[None] ):
literal[string]
keyword[def] identifier[_expand_is_accepted_like] ( identifier[x] ):
literal[string]
keyword[with] identifier[tf] . identifier[compat] . identifier[v1] . identifier[name_scope] ( literal[string] ):
identifier[expand_shape] = identifier[tf] . identifier[concat] ([
identifier[tf] . identifier[shape] ( identifier[input] = identifier[is_accepted] ),
identifier[tf] . identifier[ones] ([ identifier[tf] . identifier[rank] ( identifier[x] )- identifier[tf] . identifier[rank] ( identifier[is_accepted] )], identifier[dtype] = identifier[tf] . identifier[int32] ),
],
identifier[axis] = literal[int] )
identifier[multiples] = identifier[tf] . identifier[concat] ([
identifier[tf] . identifier[ones] ([ identifier[tf] . identifier[rank] ( identifier[is_accepted] )], identifier[dtype] = identifier[tf] . identifier[int32] ),
identifier[tf] . identifier[shape] ( identifier[input] = identifier[x] )[ identifier[tf] . identifier[rank] ( identifier[is_accepted] ):],
],
identifier[axis] = literal[int] )
identifier[m] = identifier[tf] . identifier[tile] ( identifier[tf] . identifier[reshape] ( identifier[is_accepted] , identifier[expand_shape] ),
identifier[multiples] )
identifier[m] . identifier[set_shape] ( identifier[m] . identifier[shape] . identifier[merge_with] ( identifier[x] . identifier[shape] ))
keyword[return] identifier[m]
keyword[def] identifier[_where] ( identifier[accepted] , identifier[rejected] ):
keyword[if] identifier[accepted] keyword[is] identifier[rejected] :
keyword[return] identifier[accepted]
identifier[accepted] = identifier[tf] . identifier[convert_to_tensor] ( identifier[value] = identifier[accepted] , identifier[name] = literal[string] )
identifier[rejected] = identifier[tf] . identifier[convert_to_tensor] ( identifier[value] = identifier[rejected] , identifier[name] = literal[string] )
identifier[r] = identifier[tf] . identifier[where] ( identifier[_expand_is_accepted_like] ( identifier[accepted] ), identifier[accepted] , identifier[rejected] )
identifier[r] . identifier[set_shape] ( identifier[r] . identifier[shape] . identifier[merge_with] ( identifier[accepted] . identifier[shape] . identifier[merge_with] ( identifier[rejected] . identifier[shape] )))
keyword[return] identifier[r]
keyword[with] identifier[tf] . identifier[compat] . identifier[v1] . identifier[name_scope] (
identifier[name] , literal[string] , identifier[values] =[ identifier[is_accepted] , identifier[accepted] , identifier[rejected] ]):
keyword[if] keyword[not] identifier[is_list_like] ( identifier[accepted] ):
keyword[return] identifier[_where] ( identifier[accepted] , identifier[rejected] )
keyword[return] [( identifier[choose] ( identifier[is_accepted] , identifier[a] , identifier[r] , identifier[name] = identifier[name] ) keyword[if] identifier[is_namedtuple_like] ( identifier[a] )
keyword[else] identifier[_where] ( identifier[a] , identifier[r] ))
keyword[for] identifier[a] , identifier[r] keyword[in] identifier[zip] ( identifier[accepted] , identifier[rejected] )]
|
def _choose_base_case(is_accepted, accepted, rejected, name=None):
"""Helper to `choose` which expand_dims `is_accepted` and applies tf.where."""
def _expand_is_accepted_like(x):
"""Helper to expand `is_accepted` like the shape of some input arg."""
with tf.compat.v1.name_scope('expand_is_accepted_like'):
expand_shape = tf.concat([tf.shape(input=is_accepted), tf.ones([tf.rank(x) - tf.rank(is_accepted)], dtype=tf.int32)], axis=0)
multiples = tf.concat([tf.ones([tf.rank(is_accepted)], dtype=tf.int32), tf.shape(input=x)[tf.rank(is_accepted):]], axis=0)
m = tf.tile(tf.reshape(is_accepted, expand_shape), multiples)
m.set_shape(m.shape.merge_with(x.shape))
return m # depends on [control=['with'], data=[]]
def _where(accepted, rejected):
if accepted is rejected:
return accepted # depends on [control=['if'], data=['accepted']]
accepted = tf.convert_to_tensor(value=accepted, name='accepted')
rejected = tf.convert_to_tensor(value=rejected, name='rejected')
r = tf.where(_expand_is_accepted_like(accepted), accepted, rejected)
r.set_shape(r.shape.merge_with(accepted.shape.merge_with(rejected.shape)))
return r
with tf.compat.v1.name_scope(name, 'choose', values=[is_accepted, accepted, rejected]):
if not is_list_like(accepted):
return _where(accepted, rejected) # depends on [control=['if'], data=[]]
return [choose(is_accepted, a, r, name=name) if is_namedtuple_like(a) else _where(a, r) for (a, r) in zip(accepted, rejected)] # depends on [control=['with'], data=[]]
|
def benchmark_hash_data():
    """
    Benchmark ``ub.hash_data`` across several hashers and input sizes.

    Times each hasher on lists of 2**5 .. 2**12 items, prints per-scale
    rankings and speedup ratios, then summarizes seconds/iteration and
    hasher ratios in pandas tables (and optionally plots them).

    CommandLine:
        python ~/code/ubelt/dev/bench_hash.py --convert=True --show
        python ~/code/ubelt/dev/bench_hash.py --convert=False --show
    """
    import ubelt as ub
    #ITEM = 'JUST A STRING' * 100
    ITEM = [0, 1, 'a', 'b', ['JUST A STRING'] * 4]
    HASHERS = ['sha1', 'sha512', 'xxh32', 'xxh64']
    scales = list(range(5, 13))
    results = ub.AutoDict()
    # Use json is faster or at least as fast it most cases
    # xxhash is also significantly faster than sha512
    # BUG FIX: ``.lower()`` lowercases the flag, so it must be compared to
    # 'true'. The original compared to 'True', which was always False, so
    # --convert=True was silently ignored.
    convert = ub.argval('--convert', default='True').lower() == 'true'
    print('convert = {!r}'.format(convert))
    ti = ub.Timerit(9, bestof=3, verbose=1, unit='ms')
    for s in ub.ProgIter(scales, desc='benchmark', verbose=3):
        N = 2 ** s
        print(' --- s={s}, N={N} --- '.format(s=s, N=N))
        data = [ITEM] * N
        for hasher in HASHERS:
            for timer in ti.reset(hasher):
                ub.hash_data(data, hasher=hasher, convert=convert)
            results[hasher].update({N: ti.mean()})
        # Rank hashers by mean walltime at this scale (fastest first)
        col = {h: results[h][N] for h in HASHERS}
        sortx = ub.argsort(col)
        ranking = ub.dict_subset(col, sortx)
        print('walltime: ' + ub.repr2(ranking, precision=9, nl=0))
        best = next(iter(ranking))
        #pairs = list(ub.iter_window( 2))
        # Speedup of the fastest hasher relative to each of the others
        pairs = [(k, best) for k in ranking]
        ratios = [ranking[k1] / ranking[k2] for k1, k2 in pairs]
        nicekeys = ['{}/{}'.format(k1, k2) for k1, k2 in pairs]
        relratios = ub.odict(zip(nicekeys, ratios))
        print('speedup: ' + ub.repr2(relratios, precision=4, nl=0))
    # xdoc +REQUIRES(--show)
    # import pytest
    # pytest.skip()
    import pandas as pd
    df = pd.DataFrame.from_dict(results)
    df.columns.name = 'hasher'
    df.index.name = 'N'
    # Pairwise walltime ratios of the slower hashers over xxh32
    ratios = df.copy().drop(columns=df.columns)
    for k1, k2 in [('sha512', 'xxh32'), ('sha1', 'xxh32'), ('xxh64', 'xxh32')]:
        ratios['{}/{}'.format(k1, k2)] = df[k1] / df[k2]
    print()
    print('Seconds per iteration')
    print(df.to_string(float_format='%.9f'))
    print()
    print('Ratios of seconds')
    print(ratios.to_string(float_format='%.2f'))
    print()
    print('Average Ratio (over all N)')
    print('convert = {!r}'.format(convert))
    print(ratios.mean().sort_values())
    if ub.argflag('--show'):
        import netharn.util as kwel
        kwel.autompl()
        xdata = sorted(ub.peek(results.values()).keys())
        ydata = ub.map_vals(lambda d: [d[x] for x in xdata], results)
        kwel.multi_plot(xdata, ydata, xlabel='N', ylabel='seconds', title='convert = {}'.format(convert))
        kwel.show_if_requested()
|
def function[benchmark_hash_data, parameter[]]:
constant[
CommandLine:
python ~/code/ubelt/dev/bench_hash.py --convert=True --show
python ~/code/ubelt/dev/bench_hash.py --convert=False --show
]
import module[ubelt] as alias[ub]
variable[ITEM] assign[=] list[[<ast.Constant object at 0x7da1b020fc70>, <ast.Constant object at 0x7da1b020c2e0>, <ast.Constant object at 0x7da1b020d6f0>, <ast.Constant object at 0x7da1b020fb20>, <ast.BinOp object at 0x7da1b020e1d0>]]
variable[HASHERS] assign[=] list[[<ast.Constant object at 0x7da1b020e4d0>, <ast.Constant object at 0x7da1b020d1b0>, <ast.Constant object at 0x7da1b020e8f0>, <ast.Constant object at 0x7da1b020e080>]]
variable[scales] assign[=] call[name[list], parameter[call[name[range], parameter[constant[5], constant[13]]]]]
variable[results] assign[=] call[name[ub].AutoDict, parameter[]]
variable[convert] assign[=] compare[call[call[name[ub].argval, parameter[constant[--convert]]].lower, parameter[]] equal[==] constant[True]]
call[name[print], parameter[call[constant[convert = {!r}].format, parameter[name[convert]]]]]
variable[ti] assign[=] call[name[ub].Timerit, parameter[constant[9]]]
for taget[name[s]] in starred[call[name[ub].ProgIter, parameter[name[scales]]]] begin[:]
variable[N] assign[=] binary_operation[constant[2] ** name[s]]
call[name[print], parameter[call[constant[ --- s={s}, N={N} --- ].format, parameter[]]]]
variable[data] assign[=] binary_operation[list[[<ast.Name object at 0x7da1b020d780>]] * name[N]]
for taget[name[hasher]] in starred[name[HASHERS]] begin[:]
for taget[name[timer]] in starred[call[name[ti].reset, parameter[name[hasher]]]] begin[:]
call[name[ub].hash_data, parameter[name[data]]]
call[call[name[results]][name[hasher]].update, parameter[dictionary[[<ast.Name object at 0x7da1b020f610>], [<ast.Call object at 0x7da1b020e290>]]]]
variable[col] assign[=] <ast.DictComp object at 0x7da1b020d120>
variable[sortx] assign[=] call[name[ub].argsort, parameter[name[col]]]
variable[ranking] assign[=] call[name[ub].dict_subset, parameter[name[col], name[sortx]]]
call[name[print], parameter[binary_operation[constant[walltime: ] + call[name[ub].repr2, parameter[name[ranking]]]]]]
variable[best] assign[=] call[name[next], parameter[call[name[iter], parameter[name[ranking]]]]]
variable[pairs] assign[=] <ast.ListComp object at 0x7da18ede6290>
variable[ratios] assign[=] <ast.ListComp object at 0x7da18ede5540>
variable[nicekeys] assign[=] <ast.ListComp object at 0x7da18ede5fc0>
variable[relratios] assign[=] call[name[ub].odict, parameter[call[name[zip], parameter[name[nicekeys], name[ratios]]]]]
call[name[print], parameter[binary_operation[constant[speedup: ] + call[name[ub].repr2, parameter[name[relratios]]]]]]
import module[pandas] as alias[pd]
variable[df] assign[=] call[name[pd].DataFrame.from_dict, parameter[name[results]]]
name[df].columns.name assign[=] constant[hasher]
name[df].index.name assign[=] constant[N]
variable[ratios] assign[=] call[call[name[df].copy, parameter[]].drop, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b000f640>, <ast.Name object at 0x7da1b000fe80>]]] in starred[list[[<ast.Tuple object at 0x7da1b000ed70>, <ast.Tuple object at 0x7da1b000ded0>, <ast.Tuple object at 0x7da1b000f160>]]] begin[:]
call[name[ratios]][call[constant[{}/{}].format, parameter[name[k1], name[k2]]]] assign[=] binary_operation[call[name[df]][name[k1]] / call[name[df]][name[k2]]]
call[name[print], parameter[]]
call[name[print], parameter[constant[Seconds per iteration]]]
call[name[print], parameter[call[name[df].to_string, parameter[]]]]
call[name[print], parameter[]]
call[name[print], parameter[constant[Ratios of seconds]]]
call[name[print], parameter[call[name[ratios].to_string, parameter[]]]]
call[name[print], parameter[]]
call[name[print], parameter[constant[Average Ratio (over all N)]]]
call[name[print], parameter[call[constant[convert = {!r}].format, parameter[name[convert]]]]]
call[name[print], parameter[call[call[name[ratios].mean, parameter[]].sort_values, parameter[]]]]
if call[name[ub].argflag, parameter[constant[--show]]] begin[:]
import module[netharn.util] as alias[kwel]
call[name[kwel].autompl, parameter[]]
variable[xdata] assign[=] call[name[sorted], parameter[call[call[name[ub].peek, parameter[call[name[results].values, parameter[]]]].keys, parameter[]]]]
variable[ydata] assign[=] call[name[ub].map_vals, parameter[<ast.Lambda object at 0x7da20e956ce0>, name[results]]]
call[name[kwel].multi_plot, parameter[name[xdata], name[ydata]]]
call[name[kwel].show_if_requested, parameter[]]
|
keyword[def] identifier[benchmark_hash_data] ():
literal[string]
keyword[import] identifier[ubelt] keyword[as] identifier[ub]
identifier[ITEM] =[ literal[int] , literal[int] , literal[string] , literal[string] ,[ literal[string] ]* literal[int] ]
identifier[HASHERS] =[ literal[string] , literal[string] , literal[string] , literal[string] ]
identifier[scales] = identifier[list] ( identifier[range] ( literal[int] , literal[int] ))
identifier[results] = identifier[ub] . identifier[AutoDict] ()
identifier[convert] = identifier[ub] . identifier[argval] ( literal[string] , identifier[default] = literal[string] ). identifier[lower] ()== literal[string]
identifier[print] ( literal[string] . identifier[format] ( identifier[convert] ))
identifier[ti] = identifier[ub] . identifier[Timerit] ( literal[int] , identifier[bestof] = literal[int] , identifier[verbose] = literal[int] , identifier[unit] = literal[string] )
keyword[for] identifier[s] keyword[in] identifier[ub] . identifier[ProgIter] ( identifier[scales] , identifier[desc] = literal[string] , identifier[verbose] = literal[int] ):
identifier[N] = literal[int] ** identifier[s]
identifier[print] ( literal[string] . identifier[format] ( identifier[s] = identifier[s] , identifier[N] = identifier[N] ))
identifier[data] =[ identifier[ITEM] ]* identifier[N]
keyword[for] identifier[hasher] keyword[in] identifier[HASHERS] :
keyword[for] identifier[timer] keyword[in] identifier[ti] . identifier[reset] ( identifier[hasher] ):
identifier[ub] . identifier[hash_data] ( identifier[data] , identifier[hasher] = identifier[hasher] , identifier[convert] = identifier[convert] )
identifier[results] [ identifier[hasher] ]. identifier[update] ({ identifier[N] : identifier[ti] . identifier[mean] ()})
identifier[col] ={ identifier[h] : identifier[results] [ identifier[h] ][ identifier[N] ] keyword[for] identifier[h] keyword[in] identifier[HASHERS] }
identifier[sortx] = identifier[ub] . identifier[argsort] ( identifier[col] )
identifier[ranking] = identifier[ub] . identifier[dict_subset] ( identifier[col] , identifier[sortx] )
identifier[print] ( literal[string] + identifier[ub] . identifier[repr2] ( identifier[ranking] , identifier[precision] = literal[int] , identifier[nl] = literal[int] ))
identifier[best] = identifier[next] ( identifier[iter] ( identifier[ranking] ))
identifier[pairs] =[( identifier[k] , identifier[best] ) keyword[for] identifier[k] keyword[in] identifier[ranking] ]
identifier[ratios] =[ identifier[ranking] [ identifier[k1] ]/ identifier[ranking] [ identifier[k2] ] keyword[for] identifier[k1] , identifier[k2] keyword[in] identifier[pairs] ]
identifier[nicekeys] =[ literal[string] . identifier[format] ( identifier[k1] , identifier[k2] ) keyword[for] identifier[k1] , identifier[k2] keyword[in] identifier[pairs] ]
identifier[relratios] = identifier[ub] . identifier[odict] ( identifier[zip] ( identifier[nicekeys] , identifier[ratios] ))
identifier[print] ( literal[string] + identifier[ub] . identifier[repr2] ( identifier[relratios] , identifier[precision] = literal[int] , identifier[nl] = literal[int] ))
keyword[import] identifier[pandas] keyword[as] identifier[pd]
identifier[df] = identifier[pd] . identifier[DataFrame] . identifier[from_dict] ( identifier[results] )
identifier[df] . identifier[columns] . identifier[name] = literal[string]
identifier[df] . identifier[index] . identifier[name] = literal[string]
identifier[ratios] = identifier[df] . identifier[copy] (). identifier[drop] ( identifier[columns] = identifier[df] . identifier[columns] )
keyword[for] identifier[k1] , identifier[k2] keyword[in] [( literal[string] , literal[string] ),( literal[string] , literal[string] ),( literal[string] , literal[string] )]:
identifier[ratios] [ literal[string] . identifier[format] ( identifier[k1] , identifier[k2] )]= identifier[df] [ identifier[k1] ]/ identifier[df] [ identifier[k2] ]
identifier[print] ()
identifier[print] ( literal[string] )
identifier[print] ( identifier[df] . identifier[to_string] ( identifier[float_format] = literal[string] ))
identifier[print] ()
identifier[print] ( literal[string] )
identifier[print] ( identifier[ratios] . identifier[to_string] ( identifier[float_format] = literal[string] ))
identifier[print] ()
identifier[print] ( literal[string] )
identifier[print] ( literal[string] . identifier[format] ( identifier[convert] ))
identifier[print] ( identifier[ratios] . identifier[mean] (). identifier[sort_values] ())
keyword[if] identifier[ub] . identifier[argflag] ( literal[string] ):
keyword[import] identifier[netharn] . identifier[util] keyword[as] identifier[kwel]
identifier[kwel] . identifier[autompl] ()
identifier[xdata] = identifier[sorted] ( identifier[ub] . identifier[peek] ( identifier[results] . identifier[values] ()). identifier[keys] ())
identifier[ydata] = identifier[ub] . identifier[map_vals] ( keyword[lambda] identifier[d] :[ identifier[d] [ identifier[x] ] keyword[for] identifier[x] keyword[in] identifier[xdata] ], identifier[results] )
identifier[kwel] . identifier[multi_plot] ( identifier[xdata] , identifier[ydata] , identifier[xlabel] = literal[string] , identifier[ylabel] = literal[string] , identifier[title] = literal[string] . identifier[format] ( identifier[convert] ))
identifier[kwel] . identifier[show_if_requested] ()
|
def benchmark_hash_data():
"""
CommandLine:
python ~/code/ubelt/dev/bench_hash.py --convert=True --show
python ~/code/ubelt/dev/bench_hash.py --convert=False --show
"""
import ubelt as ub
#ITEM = 'JUST A STRING' * 100
ITEM = [0, 1, 'a', 'b', ['JUST A STRING'] * 4]
HASHERS = ['sha1', 'sha512', 'xxh32', 'xxh64']
scales = list(range(5, 13))
results = ub.AutoDict()
# Use json is faster or at least as fast it most cases
# xxhash is also significantly faster than sha512
convert = ub.argval('--convert', default='True').lower() == 'True'
print('convert = {!r}'.format(convert))
ti = ub.Timerit(9, bestof=3, verbose=1, unit='ms')
for s in ub.ProgIter(scales, desc='benchmark', verbose=3):
N = 2 ** s
print(' --- s={s}, N={N} --- '.format(s=s, N=N))
data = [ITEM] * N
for hasher in HASHERS:
for timer in ti.reset(hasher):
ub.hash_data(data, hasher=hasher, convert=convert) # depends on [control=['for'], data=[]]
results[hasher].update({N: ti.mean()}) # depends on [control=['for'], data=['hasher']]
col = {h: results[h][N] for h in HASHERS}
sortx = ub.argsort(col)
ranking = ub.dict_subset(col, sortx)
print('walltime: ' + ub.repr2(ranking, precision=9, nl=0))
best = next(iter(ranking))
#pairs = list(ub.iter_window( 2))
pairs = [(k, best) for k in ranking]
ratios = [ranking[k1] / ranking[k2] for (k1, k2) in pairs]
nicekeys = ['{}/{}'.format(k1, k2) for (k1, k2) in pairs]
relratios = ub.odict(zip(nicekeys, ratios))
print('speedup: ' + ub.repr2(relratios, precision=4, nl=0)) # depends on [control=['for'], data=['s']]
# xdoc +REQUIRES(--show)
# import pytest
# pytest.skip()
import pandas as pd
df = pd.DataFrame.from_dict(results)
df.columns.name = 'hasher'
df.index.name = 'N'
ratios = df.copy().drop(columns=df.columns)
for (k1, k2) in [('sha512', 'xxh32'), ('sha1', 'xxh32'), ('xxh64', 'xxh32')]:
ratios['{}/{}'.format(k1, k2)] = df[k1] / df[k2] # depends on [control=['for'], data=[]]
print()
print('Seconds per iteration')
print(df.to_string(float_format='%.9f'))
print()
print('Ratios of seconds')
print(ratios.to_string(float_format='%.2f'))
print()
print('Average Ratio (over all N)')
print('convert = {!r}'.format(convert))
print(ratios.mean().sort_values())
if ub.argflag('--show'):
import netharn.util as kwel
kwel.autompl()
xdata = sorted(ub.peek(results.values()).keys())
ydata = ub.map_vals(lambda d: [d[x] for x in xdata], results)
kwel.multi_plot(xdata, ydata, xlabel='N', ylabel='seconds', title='convert = {}'.format(convert))
kwel.show_if_requested() # depends on [control=['if'], data=[]]
|
def _show_or_dump(self, dump=False, indent=3,
                  lvl="", label_lvl="", first_call=True):
    """ Reproduced from packet.py

    Build (and optionally print) an indented, colorized dump of this
    layer's fields followed by its payload.

    :param dump: use a plain AnsiColorTheme instead of conf.color_theme
    :param indent: spaces added per payload nesting level
    :param lvl: current indentation prefix for field lines
    :param label_lvl: indentation prefix for the layer banner line
    :param first_call: on the outermost call (and not dumping), print the
        result instead of returning it
    :returns: the formatted string, or None when it was printed
    """
    ct = AnsiColorTheme() if dump else conf.color_theme
    s = "%s%s %s %s \n" % (label_lvl, ct.punct("###["),
                           ct.layer_name(self.name), ct.punct("]###"))
    # The original duplicated the field-formatting code once more for the
    # last field; a single loop over all fields is equivalent and also
    # survives an empty fields_desc (the old code raised IndexError
    # on ``self.fields_desc[-1]``).
    for f in self.fields_desc:
        fvalue = self.getfieldval(f.name)
        begn = "%s %-10s%s " % (label_lvl + lvl, ct.field_name(f.name),
                                ct.punct("="),)
        reprval = f.i2repr(self, fvalue)
        if isinstance(reprval, str):
            # Align continuation lines of multi-line values under the
            # value column.
            reprval = reprval.replace("\n", "\n" + " " * (len(label_lvl) +
                                                          len(lvl) +
                                                          len(f.name) +
                                                          4))
        s += "%s%s\n" % (begn, ct.field_value(reprval))
    if self.payload:
        s += self.payload._show_or_dump(dump=dump, indent=indent,
                                        lvl=lvl + (" " * indent * self.show_indent),  # noqa: E501
                                        label_lvl=label_lvl, first_call=False)  # noqa: E501
    if first_call and not dump:
        print(s)
    else:
        return s
|
def function[_show_or_dump, parameter[self, dump, indent, lvl, label_lvl, first_call]]:
constant[ Reproduced from packet.py ]
variable[ct] assign[=] <ast.IfExp object at 0x7da1b21a19c0>
variable[s] assign[=] binary_operation[constant[%s%s %s %s
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b21a2590>, <ast.Call object at 0x7da1b21a1480>, <ast.Call object at 0x7da1b21a0790>, <ast.Call object at 0x7da1b21a27a0>]]]
for taget[name[f]] in starred[call[name[self].fields_desc][<ast.Slice object at 0x7da1b21a2b00>]] begin[:]
variable[ncol] assign[=] name[ct].field_name
variable[vcol] assign[=] name[ct].field_value
variable[fvalue] assign[=] call[name[self].getfieldval, parameter[name[f].name]]
variable[begn] assign[=] binary_operation[constant[%s %-10s%s ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da1b21a03a0>, <ast.Call object at 0x7da1b21a1e10>, <ast.Call object at 0x7da1b21a2710>]]]
variable[reprval] assign[=] call[name[f].i2repr, parameter[name[self], name[fvalue]]]
if call[name[isinstance], parameter[name[reprval], name[str]]] begin[:]
variable[reprval] assign[=] call[name[reprval].replace, parameter[constant[
], binary_operation[constant[
] + binary_operation[constant[ ] * binary_operation[binary_operation[binary_operation[call[name[len], parameter[name[label_lvl]]] + call[name[len], parameter[name[lvl]]]] + call[name[len], parameter[name[f].name]]] + constant[4]]]]]]
<ast.AugAssign object at 0x7da1b21a3130>
variable[f] assign[=] call[name[self].fields_desc][<ast.UnaryOp object at 0x7da1b21a3760>]
variable[ncol] assign[=] name[ct].field_name
variable[vcol] assign[=] name[ct].field_value
variable[fvalue] assign[=] call[name[self].getfieldval, parameter[name[f].name]]
variable[begn] assign[=] binary_operation[constant[%s %-10s%s ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da1b1f976d0>, <ast.Call object at 0x7da1b1f94760>, <ast.Call object at 0x7da1b1f95030>]]]
variable[reprval] assign[=] call[name[f].i2repr, parameter[name[self], name[fvalue]]]
if call[name[isinstance], parameter[name[reprval], name[str]]] begin[:]
variable[reprval] assign[=] call[name[reprval].replace, parameter[constant[
], binary_operation[constant[
] + binary_operation[constant[ ] * binary_operation[binary_operation[binary_operation[call[name[len], parameter[name[label_lvl]]] + call[name[len], parameter[name[lvl]]]] + call[name[len], parameter[name[f].name]]] + constant[4]]]]]]
<ast.AugAssign object at 0x7da1b1f95de0>
if name[self].payload begin[:]
<ast.AugAssign object at 0x7da1b1f972b0>
if <ast.BoolOp object at 0x7da1b1f94280> begin[:]
call[name[print], parameter[name[s]]]
|
keyword[def] identifier[_show_or_dump] ( identifier[self] , identifier[dump] = keyword[False] , identifier[indent] = literal[int] ,
identifier[lvl] = literal[string] , identifier[label_lvl] = literal[string] , identifier[first_call] = keyword[True] ):
literal[string]
identifier[ct] = identifier[AnsiColorTheme] () keyword[if] identifier[dump] keyword[else] identifier[conf] . identifier[color_theme]
identifier[s] = literal[string] %( identifier[label_lvl] , identifier[ct] . identifier[punct] ( literal[string] ),
identifier[ct] . identifier[layer_name] ( identifier[self] . identifier[name] ), identifier[ct] . identifier[punct] ( literal[string] ))
keyword[for] identifier[f] keyword[in] identifier[self] . identifier[fields_desc] [:- literal[int] ]:
identifier[ncol] = identifier[ct] . identifier[field_name]
identifier[vcol] = identifier[ct] . identifier[field_value]
identifier[fvalue] = identifier[self] . identifier[getfieldval] ( identifier[f] . identifier[name] )
identifier[begn] = literal[string] %( identifier[label_lvl] + identifier[lvl] , identifier[ncol] ( identifier[f] . identifier[name] ),
identifier[ct] . identifier[punct] ( literal[string] ),)
identifier[reprval] = identifier[f] . identifier[i2repr] ( identifier[self] , identifier[fvalue] )
keyword[if] identifier[isinstance] ( identifier[reprval] , identifier[str] ):
identifier[reprval] = identifier[reprval] . identifier[replace] ( literal[string] , literal[string] + literal[string] *( identifier[len] ( identifier[label_lvl] )+
identifier[len] ( identifier[lvl] )+
identifier[len] ( identifier[f] . identifier[name] )+
literal[int] ))
identifier[s] += literal[string] %( identifier[begn] , identifier[vcol] ( identifier[reprval] ))
identifier[f] = identifier[self] . identifier[fields_desc] [- literal[int] ]
identifier[ncol] = identifier[ct] . identifier[field_name]
identifier[vcol] = identifier[ct] . identifier[field_value]
identifier[fvalue] = identifier[self] . identifier[getfieldval] ( identifier[f] . identifier[name] )
identifier[begn] = literal[string] %( identifier[label_lvl] + identifier[lvl] , identifier[ncol] ( identifier[f] . identifier[name] ), identifier[ct] . identifier[punct] ( literal[string] ),)
identifier[reprval] = identifier[f] . identifier[i2repr] ( identifier[self] , identifier[fvalue] )
keyword[if] identifier[isinstance] ( identifier[reprval] , identifier[str] ):
identifier[reprval] = identifier[reprval] . identifier[replace] ( literal[string] , literal[string] + literal[string] *( identifier[len] ( identifier[label_lvl] )+
identifier[len] ( identifier[lvl] )+
identifier[len] ( identifier[f] . identifier[name] )+
literal[int] ))
identifier[s] += literal[string] %( identifier[begn] , identifier[vcol] ( identifier[reprval] ))
keyword[if] identifier[self] . identifier[payload] :
identifier[s] += identifier[self] . identifier[payload] . identifier[_show_or_dump] ( identifier[dump] = identifier[dump] , identifier[indent] = identifier[indent] ,
identifier[lvl] = identifier[lvl] +( literal[string] * identifier[indent] * identifier[self] . identifier[show_indent] ),
identifier[label_lvl] = identifier[label_lvl] , identifier[first_call] = keyword[False] )
keyword[if] identifier[first_call] keyword[and] keyword[not] identifier[dump] :
identifier[print] ( identifier[s] )
keyword[else] :
keyword[return] identifier[s]
|
def _show_or_dump(self, dump=False, indent=3, lvl='', label_lvl='', first_call=True):
""" Reproduced from packet.py """
ct = AnsiColorTheme() if dump else conf.color_theme
s = '%s%s %s %s \n' % (label_lvl, ct.punct('###['), ct.layer_name(self.name), ct.punct(']###'))
for f in self.fields_desc[:-1]:
ncol = ct.field_name
vcol = ct.field_value
fvalue = self.getfieldval(f.name)
begn = '%s %-10s%s ' % (label_lvl + lvl, ncol(f.name), ct.punct('='))
reprval = f.i2repr(self, fvalue)
if isinstance(reprval, str):
reprval = reprval.replace('\n', '\n' + ' ' * (len(label_lvl) + len(lvl) + len(f.name) + 4)) # depends on [control=['if'], data=[]]
s += '%s%s\n' % (begn, vcol(reprval)) # depends on [control=['for'], data=['f']]
f = self.fields_desc[-1]
ncol = ct.field_name
vcol = ct.field_value
fvalue = self.getfieldval(f.name)
begn = '%s %-10s%s ' % (label_lvl + lvl, ncol(f.name), ct.punct('='))
reprval = f.i2repr(self, fvalue)
if isinstance(reprval, str):
reprval = reprval.replace('\n', '\n' + ' ' * (len(label_lvl) + len(lvl) + len(f.name) + 4)) # depends on [control=['if'], data=[]]
s += '%s%s\n' % (begn, vcol(reprval))
if self.payload: # noqa: E501
s += self.payload._show_or_dump(dump=dump, indent=indent, lvl=lvl + ' ' * indent * self.show_indent, label_lvl=label_lvl, first_call=False) # noqa: E501 # depends on [control=['if'], data=[]]
if first_call and (not dump):
print(s) # depends on [control=['if'], data=[]]
else:
return s
|
def _get_batch_requests(self, timeout=None):
"""try to get request as fast as possible, once empty and stop falg or time-out, just return Empty"""
reqs = []
s = time()
while len(reqs) < self.batch_size and (time() - s) < timeout:
try:
req = self.queue.get(block=False)
self.queue.task_done()
reqs.append(req)
except Empty as ex:
if self.stop_flag:
break
else:
sleep(0.1)
if not reqs:
raise Empty
elif len(reqs) <= 1:
return reqs[0]
else:
logitems = []
req = reqs[0]
for req in reqs:
logitems.extend(req.get_log_items())
ret = PutLogsRequest(self.project, self.log_store, req.topic, logitems=logitems)
ret.__record__ = req.__record__
return ret
|
def function[_get_batch_requests, parameter[self, timeout]]:
constant[try to get request as fast as possible, once empty and stop falg or time-out, just return Empty]
variable[reqs] assign[=] list[[]]
variable[s] assign[=] call[name[time], parameter[]]
while <ast.BoolOp object at 0x7da204344580> begin[:]
<ast.Try object at 0x7da204345f60>
if <ast.UnaryOp object at 0x7da1b085eb60> begin[:]
<ast.Raise object at 0x7da1b085f5e0>
|
keyword[def] identifier[_get_batch_requests] ( identifier[self] , identifier[timeout] = keyword[None] ):
literal[string]
identifier[reqs] =[]
identifier[s] = identifier[time] ()
keyword[while] identifier[len] ( identifier[reqs] )< identifier[self] . identifier[batch_size] keyword[and] ( identifier[time] ()- identifier[s] )< identifier[timeout] :
keyword[try] :
identifier[req] = identifier[self] . identifier[queue] . identifier[get] ( identifier[block] = keyword[False] )
identifier[self] . identifier[queue] . identifier[task_done] ()
identifier[reqs] . identifier[append] ( identifier[req] )
keyword[except] identifier[Empty] keyword[as] identifier[ex] :
keyword[if] identifier[self] . identifier[stop_flag] :
keyword[break]
keyword[else] :
identifier[sleep] ( literal[int] )
keyword[if] keyword[not] identifier[reqs] :
keyword[raise] identifier[Empty]
keyword[elif] identifier[len] ( identifier[reqs] )<= literal[int] :
keyword[return] identifier[reqs] [ literal[int] ]
keyword[else] :
identifier[logitems] =[]
identifier[req] = identifier[reqs] [ literal[int] ]
keyword[for] identifier[req] keyword[in] identifier[reqs] :
identifier[logitems] . identifier[extend] ( identifier[req] . identifier[get_log_items] ())
identifier[ret] = identifier[PutLogsRequest] ( identifier[self] . identifier[project] , identifier[self] . identifier[log_store] , identifier[req] . identifier[topic] , identifier[logitems] = identifier[logitems] )
identifier[ret] . identifier[__record__] = identifier[req] . identifier[__record__]
keyword[return] identifier[ret]
|
def _get_batch_requests(self, timeout=None):
"""try to get request as fast as possible, once empty and stop falg or time-out, just return Empty"""
reqs = []
s = time()
while len(reqs) < self.batch_size and time() - s < timeout:
try:
req = self.queue.get(block=False)
self.queue.task_done()
reqs.append(req) # depends on [control=['try'], data=[]]
except Empty as ex:
if self.stop_flag:
break # depends on [control=['if'], data=[]]
else:
sleep(0.1) # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
if not reqs:
raise Empty # depends on [control=['if'], data=[]]
elif len(reqs) <= 1:
return reqs[0] # depends on [control=['if'], data=[]]
else:
logitems = []
req = reqs[0]
for req in reqs:
logitems.extend(req.get_log_items()) # depends on [control=['for'], data=['req']]
ret = PutLogsRequest(self.project, self.log_store, req.topic, logitems=logitems)
ret.__record__ = req.__record__
return ret
|
def convert_from_latlon_to_utm(points=None,
latitudes=None,
longitudes=None,
false_easting=None,
false_northing=None):
"""Convert latitude and longitude data to UTM as a list of coordinates.
Input
points: list of points given in decimal degrees (latitude, longitude) or
latitudes: list of latitudes and
longitudes: list of longitudes
false_easting (optional)
false_northing (optional)
Output
points: List of converted points
zone: Common UTM zone for converted points
Notes
Assume the false_easting and false_northing are the same for each list.
If points end up in different UTM zones, an ANUGAerror is thrown.
"""
old_geo = Geo_reference()
utm_points = []
if points == None:
assert len(latitudes) == len(longitudes)
points = map(None, latitudes, longitudes)
for point in points:
zone, easting, northing = redfearn(float(point[0]),
float(point[1]),
false_easting=false_easting,
false_northing=false_northing)
new_geo = Geo_reference(zone)
old_geo.reconcile_zones(new_geo)
utm_points.append([easting, northing])
return utm_points, old_geo.get_zone()
|
def function[convert_from_latlon_to_utm, parameter[points, latitudes, longitudes, false_easting, false_northing]]:
constant[Convert latitude and longitude data to UTM as a list of coordinates.
Input
points: list of points given in decimal degrees (latitude, longitude) or
latitudes: list of latitudes and
longitudes: list of longitudes
false_easting (optional)
false_northing (optional)
Output
points: List of converted points
zone: Common UTM zone for converted points
Notes
Assume the false_easting and false_northing are the same for each list.
If points end up in different UTM zones, an ANUGAerror is thrown.
]
variable[old_geo] assign[=] call[name[Geo_reference], parameter[]]
variable[utm_points] assign[=] list[[]]
if compare[name[points] equal[==] constant[None]] begin[:]
assert[compare[call[name[len], parameter[name[latitudes]]] equal[==] call[name[len], parameter[name[longitudes]]]]]
variable[points] assign[=] call[name[map], parameter[constant[None], name[latitudes], name[longitudes]]]
for taget[name[point]] in starred[name[points]] begin[:]
<ast.Tuple object at 0x7da1b26afd90> assign[=] call[name[redfearn], parameter[call[name[float], parameter[call[name[point]][constant[0]]]], call[name[float], parameter[call[name[point]][constant[1]]]]]]
variable[new_geo] assign[=] call[name[Geo_reference], parameter[name[zone]]]
call[name[old_geo].reconcile_zones, parameter[name[new_geo]]]
call[name[utm_points].append, parameter[list[[<ast.Name object at 0x7da2041d9150>, <ast.Name object at 0x7da2041dae30>]]]]
return[tuple[[<ast.Name object at 0x7da204347730>, <ast.Call object at 0x7da204344fa0>]]]
|
keyword[def] identifier[convert_from_latlon_to_utm] ( identifier[points] = keyword[None] ,
identifier[latitudes] = keyword[None] ,
identifier[longitudes] = keyword[None] ,
identifier[false_easting] = keyword[None] ,
identifier[false_northing] = keyword[None] ):
literal[string]
identifier[old_geo] = identifier[Geo_reference] ()
identifier[utm_points] =[]
keyword[if] identifier[points] == keyword[None] :
keyword[assert] identifier[len] ( identifier[latitudes] )== identifier[len] ( identifier[longitudes] )
identifier[points] = identifier[map] ( keyword[None] , identifier[latitudes] , identifier[longitudes] )
keyword[for] identifier[point] keyword[in] identifier[points] :
identifier[zone] , identifier[easting] , identifier[northing] = identifier[redfearn] ( identifier[float] ( identifier[point] [ literal[int] ]),
identifier[float] ( identifier[point] [ literal[int] ]),
identifier[false_easting] = identifier[false_easting] ,
identifier[false_northing] = identifier[false_northing] )
identifier[new_geo] = identifier[Geo_reference] ( identifier[zone] )
identifier[old_geo] . identifier[reconcile_zones] ( identifier[new_geo] )
identifier[utm_points] . identifier[append] ([ identifier[easting] , identifier[northing] ])
keyword[return] identifier[utm_points] , identifier[old_geo] . identifier[get_zone] ()
|
def convert_from_latlon_to_utm(points=None, latitudes=None, longitudes=None, false_easting=None, false_northing=None):
"""Convert latitude and longitude data to UTM as a list of coordinates.
Input
points: list of points given in decimal degrees (latitude, longitude) or
latitudes: list of latitudes and
longitudes: list of longitudes
false_easting (optional)
false_northing (optional)
Output
points: List of converted points
zone: Common UTM zone for converted points
Notes
Assume the false_easting and false_northing are the same for each list.
If points end up in different UTM zones, an ANUGAerror is thrown.
"""
old_geo = Geo_reference()
utm_points = []
if points == None:
assert len(latitudes) == len(longitudes)
points = map(None, latitudes, longitudes) # depends on [control=['if'], data=['points']]
for point in points:
(zone, easting, northing) = redfearn(float(point[0]), float(point[1]), false_easting=false_easting, false_northing=false_northing)
new_geo = Geo_reference(zone)
old_geo.reconcile_zones(new_geo)
utm_points.append([easting, northing]) # depends on [control=['for'], data=['point']]
return (utm_points, old_geo.get_zone())
|
def _parse_term(_rawterms):
"""Parse a term line.
The term is organized as a succesion of ``key:value`` pairs
that are extracted into the same dictionnary until a new
header is encountered
Arguments:
line (str): the line containing a term statement
"""
line = yield
_rawterms.append(collections.defaultdict(list))
while True:
line = yield
if "[Term]" in line:
_rawterms.append(collections.defaultdict(list))
else:
key, value = line.split(':', 1)
_rawterms[-1][key.strip()].append(value.strip())
|
def function[_parse_term, parameter[_rawterms]]:
constant[Parse a term line.
The term is organized as a succesion of ``key:value`` pairs
that are extracted into the same dictionnary until a new
header is encountered
Arguments:
line (str): the line containing a term statement
]
variable[line] assign[=] <ast.Yield object at 0x7da20c796860>
call[name[_rawterms].append, parameter[call[name[collections].defaultdict, parameter[name[list]]]]]
while constant[True] begin[:]
variable[line] assign[=] <ast.Yield object at 0x7da20c795030>
if compare[constant[[Term]] in name[line]] begin[:]
call[name[_rawterms].append, parameter[call[name[collections].defaultdict, parameter[name[list]]]]]
|
keyword[def] identifier[_parse_term] ( identifier[_rawterms] ):
literal[string]
identifier[line] = keyword[yield]
identifier[_rawterms] . identifier[append] ( identifier[collections] . identifier[defaultdict] ( identifier[list] ))
keyword[while] keyword[True] :
identifier[line] = keyword[yield]
keyword[if] literal[string] keyword[in] identifier[line] :
identifier[_rawterms] . identifier[append] ( identifier[collections] . identifier[defaultdict] ( identifier[list] ))
keyword[else] :
identifier[key] , identifier[value] = identifier[line] . identifier[split] ( literal[string] , literal[int] )
identifier[_rawterms] [- literal[int] ][ identifier[key] . identifier[strip] ()]. identifier[append] ( identifier[value] . identifier[strip] ())
|
def _parse_term(_rawterms):
"""Parse a term line.
The term is organized as a succesion of ``key:value`` pairs
that are extracted into the same dictionnary until a new
header is encountered
Arguments:
line (str): the line containing a term statement
"""
line = (yield)
_rawterms.append(collections.defaultdict(list))
while True:
line = (yield)
if '[Term]' in line:
_rawterms.append(collections.defaultdict(list)) # depends on [control=['if'], data=[]]
else:
(key, value) = line.split(':', 1)
_rawterms[-1][key.strip()].append(value.strip()) # depends on [control=['while'], data=[]]
|
def get_model_custom_fields(self):
""" Return a list of custom fields for this model, directly callable
without an instance. Use like Foo.get_model_custom_fields(Foo)
"""
return CustomField.objects.filter(
content_type=ContentType.objects.get_for_model(self))
|
def function[get_model_custom_fields, parameter[self]]:
constant[ Return a list of custom fields for this model, directly callable
without an instance. Use like Foo.get_model_custom_fields(Foo)
]
return[call[name[CustomField].objects.filter, parameter[]]]
|
keyword[def] identifier[get_model_custom_fields] ( identifier[self] ):
literal[string]
keyword[return] identifier[CustomField] . identifier[objects] . identifier[filter] (
identifier[content_type] = identifier[ContentType] . identifier[objects] . identifier[get_for_model] ( identifier[self] ))
|
def get_model_custom_fields(self):
""" Return a list of custom fields for this model, directly callable
without an instance. Use like Foo.get_model_custom_fields(Foo)
"""
return CustomField.objects.filter(content_type=ContentType.objects.get_for_model(self))
|
def parse_headers(self, req, name, field):
"""Pull a header value from the request."""
# Use req.get_headers rather than req.headers for performance
return req.get_header(name, required=False) or core.missing
|
def function[parse_headers, parameter[self, req, name, field]]:
constant[Pull a header value from the request.]
return[<ast.BoolOp object at 0x7da1b22ea020>]
|
keyword[def] identifier[parse_headers] ( identifier[self] , identifier[req] , identifier[name] , identifier[field] ):
literal[string]
keyword[return] identifier[req] . identifier[get_header] ( identifier[name] , identifier[required] = keyword[False] ) keyword[or] identifier[core] . identifier[missing]
|
def parse_headers(self, req, name, field):
"""Pull a header value from the request."""
# Use req.get_headers rather than req.headers for performance
return req.get_header(name, required=False) or core.missing
|
async def run_task(self, container, task, newthread = False):
"Run task() in task pool. Raise an exception or return the return value"
e = TaskEvent(self, task=task, newthread = newthread)
await container.wait_for_send(e)
ev = await TaskDoneEvent.createMatcher(e)
if hasattr(ev, 'exception'):
raise ev.exception
else:
return ev.result
|
<ast.AsyncFunctionDef object at 0x7da18dc99f90>
|
keyword[async] keyword[def] identifier[run_task] ( identifier[self] , identifier[container] , identifier[task] , identifier[newthread] = keyword[False] ):
literal[string]
identifier[e] = identifier[TaskEvent] ( identifier[self] , identifier[task] = identifier[task] , identifier[newthread] = identifier[newthread] )
keyword[await] identifier[container] . identifier[wait_for_send] ( identifier[e] )
identifier[ev] = keyword[await] identifier[TaskDoneEvent] . identifier[createMatcher] ( identifier[e] )
keyword[if] identifier[hasattr] ( identifier[ev] , literal[string] ):
keyword[raise] identifier[ev] . identifier[exception]
keyword[else] :
keyword[return] identifier[ev] . identifier[result]
|
async def run_task(self, container, task, newthread=False):
"""Run task() in task pool. Raise an exception or return the return value"""
e = TaskEvent(self, task=task, newthread=newthread)
await container.wait_for_send(e)
ev = await TaskDoneEvent.createMatcher(e)
if hasattr(ev, 'exception'):
raise ev.exception # depends on [control=['if'], data=[]]
else:
return ev.result
|
def all(cls):
"""
Get all consts
:return: list
"""
result = []
for name in dir(cls):
if not name.isupper():
continue
value = getattr(cls, name)
if isinstance(value, ItemsList):
result.append(value[0])
else:
result.append(value)
return result
|
def function[all, parameter[cls]]:
constant[
Get all consts
:return: list
]
variable[result] assign[=] list[[]]
for taget[name[name]] in starred[call[name[dir], parameter[name[cls]]]] begin[:]
if <ast.UnaryOp object at 0x7da1b18322c0> begin[:]
continue
variable[value] assign[=] call[name[getattr], parameter[name[cls], name[name]]]
if call[name[isinstance], parameter[name[value], name[ItemsList]]] begin[:]
call[name[result].append, parameter[call[name[value]][constant[0]]]]
return[name[result]]
|
keyword[def] identifier[all] ( identifier[cls] ):
literal[string]
identifier[result] =[]
keyword[for] identifier[name] keyword[in] identifier[dir] ( identifier[cls] ):
keyword[if] keyword[not] identifier[name] . identifier[isupper] ():
keyword[continue]
identifier[value] = identifier[getattr] ( identifier[cls] , identifier[name] )
keyword[if] identifier[isinstance] ( identifier[value] , identifier[ItemsList] ):
identifier[result] . identifier[append] ( identifier[value] [ literal[int] ])
keyword[else] :
identifier[result] . identifier[append] ( identifier[value] )
keyword[return] identifier[result]
|
def all(cls):
"""
Get all consts
:return: list
"""
result = []
for name in dir(cls):
if not name.isupper():
continue # depends on [control=['if'], data=[]]
value = getattr(cls, name)
if isinstance(value, ItemsList):
result.append(value[0]) # depends on [control=['if'], data=[]]
else:
result.append(value) # depends on [control=['for'], data=['name']]
return result
|
def get_set(self, project, articleset, **filters):
"""List the articlesets in a project"""
url = URL.articleset.format(**locals())
return self.request(url, **filters)
|
def function[get_set, parameter[self, project, articleset]]:
constant[List the articlesets in a project]
variable[url] assign[=] call[name[URL].articleset.format, parameter[]]
return[call[name[self].request, parameter[name[url]]]]
|
keyword[def] identifier[get_set] ( identifier[self] , identifier[project] , identifier[articleset] ,** identifier[filters] ):
literal[string]
identifier[url] = identifier[URL] . identifier[articleset] . identifier[format] (** identifier[locals] ())
keyword[return] identifier[self] . identifier[request] ( identifier[url] ,** identifier[filters] )
|
def get_set(self, project, articleset, **filters):
"""List the articlesets in a project"""
url = URL.articleset.format(**locals())
return self.request(url, **filters)
|
def parse_bcftools_stats(self):
"""
Find bcftools stats logs and parse their data
Bcftools stats reports contain 'sets' of data, which can
have multiple vcf files each (but usually don't). Here,
we treat each 'set' as a MultiQC sample, taking the first
input filename for each set as the name.
"""
collapse_complementary = getattr(config, 'bcftools', {}).get('collapse_complementary_changes', False)
if collapse_complementary:
types = ['A>C', 'A>G', 'A>T', 'C>A', 'C>G', 'C>T']
else:
types = ['A>C', 'A>G', 'A>T', 'C>A', 'C>G', 'C>T',
'G>A', 'G>C', 'G>T', 'T>A', 'T>C', 'T>G']
self.bcftools_stats = dict()
self.bcftools_stats_indels = dict()
self.bcftools_stats_vqc_snp = dict()
self.bcftools_stats_vqc_transi = dict()
self.bcftools_stats_vqc_transv = dict()
self.bcftools_stats_vqc_indels = dict()
depth_data = dict()
for f in self.find_log_files('bcftools/stats'):
s_names = list()
for line in f['f'].splitlines():
s = line.split("\t")
# Get the sample names - one per 'set'
if s[0] == "ID":
s_name = self.clean_s_name(s[2], f['root'])
s_names.append(s_name)
if s_name in self.bcftools_stats:
log.debug("Duplicate sample name found! Overwriting: {}".format(s_name))
self.add_data_source(f, s_name, section='stats')
self.bcftools_stats[s_name] = dict()
self.bcftools_stats_indels[s_name] = dict()
self.bcftools_stats_vqc_snp[s_name] = dict()
self.bcftools_stats_vqc_transi[s_name] = dict()
self.bcftools_stats_vqc_transv[s_name] = dict()
self.bcftools_stats_vqc_indels[s_name] = dict()
depth_data[s_name] = OrderedDict()
self.bcftools_stats_indels[s_name][0] = None # Avoid joining line across missing 0
# Parse key stats
if s[0] == "SN" and len(s_names) > 0:
s_name = s_names[int(s[1])]
field = s[2].strip()[:-1]
field = field.replace(' ', '_')
value = float(s[3].strip())
self.bcftools_stats[s_name][field] = value
# Parse transitions/transversions stats
if s[0] == "TSTV" and len(s_names) > 0:
s_name = s_names[int(s[1])]
fields = ['ts', 'tv', 'tstv', 'ts_1st_ALT', 'tv_1st_ALT', 'tstv_1st_ALT']
for i, f in enumerate(fields):
value = float(s[i+2].strip())
self.bcftools_stats[s_name][f] = value
# Parse substitution types
if s[0] == "ST" and len(s_names) > 0:
s_name = s_names[int(s[1])]
rc = {'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A'}
change = s[2].strip()
if change not in types:
change = '>'.join(rc[n] for n in change.split('>'))
field = 'substitution_type_{}'.format(change)
value = float(s[3].strip())
if field not in self.bcftools_stats[s_name]:
self.bcftools_stats[s_name][field] = 0
self.bcftools_stats[s_name][field] += value
# Indel length distributions
if s[0] == "IDD" and len(s_names) > 0:
s_name = s_names[int(s[1])]
length = float(s[2].strip())
count = float(s[3].strip())
self.bcftools_stats_indels[s_name][length] = count
# Per-sample counts
if s[0] == "PSC" and len(s_names) > 0:
s_name = s_names[int(s[1])]
fields = ['variations_hom', 'variations_het']
for i, f in enumerate(fields):
self.bcftools_stats[s_name][f] = int(s[i + 4].strip())
# Depth plots
if s[0] == "DP" and len(s_names) > 0:
s_name = s_names[int(s[1])]
bin_name = s[2].strip()
percent_sites = float(s[-1].strip())
depth_data[s_name][bin_name] = percent_sites
# Variant Qualities
if s[0] == "QUAL" and len(s_names) > 0:
s_name = s_names[int(s[1])]
quality = float(s[2].strip())
self.bcftools_stats_vqc_snp[s_name][quality] = float(s[3].strip())
self.bcftools_stats_vqc_transi[s_name][quality] = float(s[4].strip())
self.bcftools_stats_vqc_transv[s_name][quality] = float(s[5].strip())
self.bcftools_stats_vqc_indels[s_name][quality] = float(s[6].strip())
# Filter to strip out ignored sample names
self.bcftools_stats = self.ignore_samples(self.bcftools_stats)
if len(self.bcftools_stats) > 0:
# Write parsed report data to a file
self.write_data_file(self.bcftools_stats, 'multiqc_bcftools_stats')
# Stats Table
stats_headers = self.bcftools_stats_genstats_headers()
if getattr(config, 'bcftools', {}).get('write_general_stats', True):
self.general_stats_addcols(self.bcftools_stats, stats_headers, 'Bcftools Stats')
if getattr(config, 'bcftools', {}).get('write_separate_table', False):
self.add_section(
name='Bcftools Stats',
anchor='bcftools-stats',
plot=table.plot(self.bcftools_stats, stats_headers))
# Make bargraph plot of substitution types
keys = OrderedDict()
for t in types:
keys['substitution_type_{}'.format(t)] = {'name': t}
pconfig = {
'id': 'bcftools-stats-subtypes',
'title': 'Bcftools Stats: Substitutions',
'ylab': '# Substitutions',
'cpswitch_counts_label': 'Number of Substitutions'
}
self.add_section (
name = 'Variant Substitution Types',
anchor = 'bcftools-stats',
plot = bargraph.plot(self.bcftools_stats, keys, pconfig)
)
# Make histograms of variant quality
if len(self.bcftools_stats_vqc_snp) > 0:
pconfig = {
'id': 'bcftools_stats_vqc',
'title': 'Bcftools Stats: Variant Quality Count',
'ylab': 'Count',
'xlab': 'Quality',
'xDecimals': False,
'ymin': 0,
'smooth_points': 600,
# 'tt_label': '<b>{point.x} bp trimmed</b>: {point.y:.0f}',
'data_labels': [
{'name': 'Count SNP', 'ylab': 'Quality'},
{'name': 'Count Transitions', 'ylab': 'Quality'},
{'name': 'Count Transversions', 'ylab': 'Quality'},
{'name': 'Count Indels', 'ylab': 'Quality'}
]
}
self.add_section (
name = 'Variant Quality',
anchor = 'bcftools-stats_variant_quality_plot',
plot = linegraph.plot (
[self.bcftools_stats_vqc_snp,
self.bcftools_stats_vqc_transi,
self.bcftools_stats_vqc_transv,
self.bcftools_stats_vqc_indels], pconfig)
)
# Make line graph of indel lengths
if len(self.bcftools_stats_indels) > 0:
pconfig = {
'id': 'bcftools_stats_indel-lengths',
'title': 'Bcftools Stats: Indel Distribution',
'ylab': 'Count',
'xlab': 'InDel Length (bp)',
'xDecimals': False,
'ymin': 0,
}
self.add_section (
name = 'Indel Distribution',
anchor = 'bcftools-stats_indel_plot',
plot = linegraph.plot(self.bcftools_stats_indels, pconfig)
)
# Make line graph of variants per depth
if len(depth_data) > 0:
pconfig = {
'id': 'bcftools_stats_depth',
'title': 'Bcftools Stats: Variant depths',
'ylab': 'Fraction of sites (%)',
'xlab': 'Variant depth',
'ymin': 0,
'ymax': 100,
'categories': True
}
self.add_section (
name = 'Variant depths',
anchor = 'bcftools-stats_depth_plot',
description = 'Read depth support distribution for called variants',
plot = linegraph.plot(depth_data, pconfig)
)
# Return the number of logs that were found
return len(self.bcftools_stats)
|
def function[parse_bcftools_stats, parameter[self]]:
constant[
Find bcftools stats logs and parse their data
Bcftools stats reports contain 'sets' of data, which can
have multiple vcf files each (but usually don't). Here,
we treat each 'set' as a MultiQC sample, taking the first
input filename for each set as the name.
]
variable[collapse_complementary] assign[=] call[call[name[getattr], parameter[name[config], constant[bcftools], dictionary[[], []]]].get, parameter[constant[collapse_complementary_changes], constant[False]]]
if name[collapse_complementary] begin[:]
variable[types] assign[=] list[[<ast.Constant object at 0x7da207f98af0>, <ast.Constant object at 0x7da207f9be50>, <ast.Constant object at 0x7da207f9b160>, <ast.Constant object at 0x7da207f9a8f0>, <ast.Constant object at 0x7da207f98ac0>, <ast.Constant object at 0x7da207f98c40>]]
name[self].bcftools_stats assign[=] call[name[dict], parameter[]]
name[self].bcftools_stats_indels assign[=] call[name[dict], parameter[]]
name[self].bcftools_stats_vqc_snp assign[=] call[name[dict], parameter[]]
name[self].bcftools_stats_vqc_transi assign[=] call[name[dict], parameter[]]
name[self].bcftools_stats_vqc_transv assign[=] call[name[dict], parameter[]]
name[self].bcftools_stats_vqc_indels assign[=] call[name[dict], parameter[]]
variable[depth_data] assign[=] call[name[dict], parameter[]]
for taget[name[f]] in starred[call[name[self].find_log_files, parameter[constant[bcftools/stats]]]] begin[:]
variable[s_names] assign[=] call[name[list], parameter[]]
for taget[name[line]] in starred[call[call[name[f]][constant[f]].splitlines, parameter[]]] begin[:]
variable[s] assign[=] call[name[line].split, parameter[constant[ ]]]
if compare[call[name[s]][constant[0]] equal[==] constant[ID]] begin[:]
variable[s_name] assign[=] call[name[self].clean_s_name, parameter[call[name[s]][constant[2]], call[name[f]][constant[root]]]]
call[name[s_names].append, parameter[name[s_name]]]
if compare[name[s_name] in name[self].bcftools_stats] begin[:]
call[name[log].debug, parameter[call[constant[Duplicate sample name found! Overwriting: {}].format, parameter[name[s_name]]]]]
call[name[self].add_data_source, parameter[name[f], name[s_name]]]
call[name[self].bcftools_stats][name[s_name]] assign[=] call[name[dict], parameter[]]
call[name[self].bcftools_stats_indels][name[s_name]] assign[=] call[name[dict], parameter[]]
call[name[self].bcftools_stats_vqc_snp][name[s_name]] assign[=] call[name[dict], parameter[]]
call[name[self].bcftools_stats_vqc_transi][name[s_name]] assign[=] call[name[dict], parameter[]]
call[name[self].bcftools_stats_vqc_transv][name[s_name]] assign[=] call[name[dict], parameter[]]
call[name[self].bcftools_stats_vqc_indels][name[s_name]] assign[=] call[name[dict], parameter[]]
call[name[depth_data]][name[s_name]] assign[=] call[name[OrderedDict], parameter[]]
call[call[name[self].bcftools_stats_indels][name[s_name]]][constant[0]] assign[=] constant[None]
if <ast.BoolOp object at 0x7da18eb55090> begin[:]
variable[s_name] assign[=] call[name[s_names]][call[name[int], parameter[call[name[s]][constant[1]]]]]
variable[field] assign[=] call[call[call[name[s]][constant[2]].strip, parameter[]]][<ast.Slice object at 0x7da18eb55960>]
variable[field] assign[=] call[name[field].replace, parameter[constant[ ], constant[_]]]
variable[value] assign[=] call[name[float], parameter[call[call[name[s]][constant[3]].strip, parameter[]]]]
call[call[name[self].bcftools_stats][name[s_name]]][name[field]] assign[=] name[value]
if <ast.BoolOp object at 0x7da18eb54490> begin[:]
variable[s_name] assign[=] call[name[s_names]][call[name[int], parameter[call[name[s]][constant[1]]]]]
variable[fields] assign[=] list[[<ast.Constant object at 0x7da18eb57c10>, <ast.Constant object at 0x7da18eb576a0>, <ast.Constant object at 0x7da18eb56050>, <ast.Constant object at 0x7da18eb56e60>, <ast.Constant object at 0x7da18eb55420>, <ast.Constant object at 0x7da18eb55fc0>]]
for taget[tuple[[<ast.Name object at 0x7da18eb553f0>, <ast.Name object at 0x7da18eb54190>]]] in starred[call[name[enumerate], parameter[name[fields]]]] begin[:]
variable[value] assign[=] call[name[float], parameter[call[call[name[s]][binary_operation[name[i] + constant[2]]].strip, parameter[]]]]
call[call[name[self].bcftools_stats][name[s_name]]][name[f]] assign[=] name[value]
if <ast.BoolOp object at 0x7da18eb555a0> begin[:]
variable[s_name] assign[=] call[name[s_names]][call[name[int], parameter[call[name[s]][constant[1]]]]]
variable[rc] assign[=] dictionary[[<ast.Constant object at 0x7da18eb564d0>, <ast.Constant object at 0x7da18eb55210>, <ast.Constant object at 0x7da18eb556c0>, <ast.Constant object at 0x7da18eb57d90>], [<ast.Constant object at 0x7da18eb54820>, <ast.Constant object at 0x7da18eb55bd0>, <ast.Constant object at 0x7da18eb551e0>, <ast.Constant object at 0x7da18eb57250>]]
variable[change] assign[=] call[call[name[s]][constant[2]].strip, parameter[]]
if compare[name[change] <ast.NotIn object at 0x7da2590d7190> name[types]] begin[:]
variable[change] assign[=] call[constant[>].join, parameter[<ast.GeneratorExp object at 0x7da18eb56380>]]
variable[field] assign[=] call[constant[substitution_type_{}].format, parameter[name[change]]]
variable[value] assign[=] call[name[float], parameter[call[call[name[s]][constant[3]].strip, parameter[]]]]
if compare[name[field] <ast.NotIn object at 0x7da2590d7190> call[name[self].bcftools_stats][name[s_name]]] begin[:]
call[call[name[self].bcftools_stats][name[s_name]]][name[field]] assign[=] constant[0]
<ast.AugAssign object at 0x7da18eb57400>
if <ast.BoolOp object at 0x7da18eb54280> begin[:]
variable[s_name] assign[=] call[name[s_names]][call[name[int], parameter[call[name[s]][constant[1]]]]]
variable[length] assign[=] call[name[float], parameter[call[call[name[s]][constant[2]].strip, parameter[]]]]
variable[count] assign[=] call[name[float], parameter[call[call[name[s]][constant[3]].strip, parameter[]]]]
call[call[name[self].bcftools_stats_indels][name[s_name]]][name[length]] assign[=] name[count]
if <ast.BoolOp object at 0x7da18eb56110> begin[:]
variable[s_name] assign[=] call[name[s_names]][call[name[int], parameter[call[name[s]][constant[1]]]]]
variable[fields] assign[=] list[[<ast.Constant object at 0x7da1b1e5e680>, <ast.Constant object at 0x7da1b1e5f3d0>]]
for taget[tuple[[<ast.Name object at 0x7da1b1e5d570>, <ast.Name object at 0x7da1b1e5eaa0>]]] in starred[call[name[enumerate], parameter[name[fields]]]] begin[:]
call[call[name[self].bcftools_stats][name[s_name]]][name[f]] assign[=] call[name[int], parameter[call[call[name[s]][binary_operation[name[i] + constant[4]]].strip, parameter[]]]]
if <ast.BoolOp object at 0x7da1b1e5cf70> begin[:]
variable[s_name] assign[=] call[name[s_names]][call[name[int], parameter[call[name[s]][constant[1]]]]]
variable[bin_name] assign[=] call[call[name[s]][constant[2]].strip, parameter[]]
variable[percent_sites] assign[=] call[name[float], parameter[call[call[name[s]][<ast.UnaryOp object at 0x7da1b1e5d090>].strip, parameter[]]]]
call[call[name[depth_data]][name[s_name]]][name[bin_name]] assign[=] name[percent_sites]
if <ast.BoolOp object at 0x7da1b1e5dd20> begin[:]
variable[s_name] assign[=] call[name[s_names]][call[name[int], parameter[call[name[s]][constant[1]]]]]
variable[quality] assign[=] call[name[float], parameter[call[call[name[s]][constant[2]].strip, parameter[]]]]
call[call[name[self].bcftools_stats_vqc_snp][name[s_name]]][name[quality]] assign[=] call[name[float], parameter[call[call[name[s]][constant[3]].strip, parameter[]]]]
call[call[name[self].bcftools_stats_vqc_transi][name[s_name]]][name[quality]] assign[=] call[name[float], parameter[call[call[name[s]][constant[4]].strip, parameter[]]]]
call[call[name[self].bcftools_stats_vqc_transv][name[s_name]]][name[quality]] assign[=] call[name[float], parameter[call[call[name[s]][constant[5]].strip, parameter[]]]]
call[call[name[self].bcftools_stats_vqc_indels][name[s_name]]][name[quality]] assign[=] call[name[float], parameter[call[call[name[s]][constant[6]].strip, parameter[]]]]
name[self].bcftools_stats assign[=] call[name[self].ignore_samples, parameter[name[self].bcftools_stats]]
if compare[call[name[len], parameter[name[self].bcftools_stats]] greater[>] constant[0]] begin[:]
call[name[self].write_data_file, parameter[name[self].bcftools_stats, constant[multiqc_bcftools_stats]]]
variable[stats_headers] assign[=] call[name[self].bcftools_stats_genstats_headers, parameter[]]
if call[call[name[getattr], parameter[name[config], constant[bcftools], dictionary[[], []]]].get, parameter[constant[write_general_stats], constant[True]]] begin[:]
call[name[self].general_stats_addcols, parameter[name[self].bcftools_stats, name[stats_headers], constant[Bcftools Stats]]]
if call[call[name[getattr], parameter[name[config], constant[bcftools], dictionary[[], []]]].get, parameter[constant[write_separate_table], constant[False]]] begin[:]
call[name[self].add_section, parameter[]]
variable[keys] assign[=] call[name[OrderedDict], parameter[]]
for taget[name[t]] in starred[name[types]] begin[:]
call[name[keys]][call[constant[substitution_type_{}].format, parameter[name[t]]]] assign[=] dictionary[[<ast.Constant object at 0x7da20cabfd90>], [<ast.Name object at 0x7da20cabd0c0>]]
variable[pconfig] assign[=] dictionary[[<ast.Constant object at 0x7da20cabc700>, <ast.Constant object at 0x7da20cabd480>, <ast.Constant object at 0x7da20cabfa00>, <ast.Constant object at 0x7da20cabe860>], [<ast.Constant object at 0x7da20cabe410>, <ast.Constant object at 0x7da20cabdb40>, <ast.Constant object at 0x7da20cabf9a0>, <ast.Constant object at 0x7da20cabf400>]]
call[name[self].add_section, parameter[]]
if compare[call[name[len], parameter[name[self].bcftools_stats_vqc_snp]] greater[>] constant[0]] begin[:]
variable[pconfig] assign[=] dictionary[[<ast.Constant object at 0x7da20cabcb50>, <ast.Constant object at 0x7da20cabf460>, <ast.Constant object at 0x7da20cabe080>, <ast.Constant object at 0x7da20cabf730>, <ast.Constant object at 0x7da20cabeb30>, <ast.Constant object at 0x7da20cabd8d0>, <ast.Constant object at 0x7da20cabe890>, <ast.Constant object at 0x7da20cabfeb0>], [<ast.Constant object at 0x7da20cabf700>, <ast.Constant object at 0x7da20cabe0b0>, <ast.Constant object at 0x7da20cabf490>, <ast.Constant object at 0x7da20cabfa90>, <ast.Constant object at 0x7da20cabedd0>, <ast.Constant object at 0x7da20cabc5e0>, <ast.Constant object at 0x7da20cabfa30>, <ast.List object at 0x7da20cabdd20>]]
call[name[self].add_section, parameter[]]
if compare[call[name[len], parameter[name[self].bcftools_stats_indels]] greater[>] constant[0]] begin[:]
variable[pconfig] assign[=] dictionary[[<ast.Constant object at 0x7da20cabce80>, <ast.Constant object at 0x7da20cabc640>, <ast.Constant object at 0x7da20cabc550>, <ast.Constant object at 0x7da20cabdff0>, <ast.Constant object at 0x7da20cabeec0>, <ast.Constant object at 0x7da20cabd120>], [<ast.Constant object at 0x7da18bcc9660>, <ast.Constant object at 0x7da18bcc9bd0>, <ast.Constant object at 0x7da18bcc9f60>, <ast.Constant object at 0x7da18bccaf50>, <ast.Constant object at 0x7da18bcca590>, <ast.Constant object at 0x7da18bcca9e0>]]
call[name[self].add_section, parameter[]]
if compare[call[name[len], parameter[name[depth_data]]] greater[>] constant[0]] begin[:]
variable[pconfig] assign[=] dictionary[[<ast.Constant object at 0x7da18bcc8070>, <ast.Constant object at 0x7da18bcc8310>, <ast.Constant object at 0x7da18bcc95a0>, <ast.Constant object at 0x7da18bccb310>, <ast.Constant object at 0x7da18bcca650>, <ast.Constant object at 0x7da18bcc8be0>, <ast.Constant object at 0x7da18bccb7f0>], [<ast.Constant object at 0x7da18bcc90c0>, <ast.Constant object at 0x7da18bccace0>, <ast.Constant object at 0x7da18bcca6e0>, <ast.Constant object at 0x7da18bcc9b70>, <ast.Constant object at 0x7da18bcc91e0>, <ast.Constant object at 0x7da18bcc8d60>, <ast.Constant object at 0x7da18bcca140>]]
call[name[self].add_section, parameter[]]
return[call[name[len], parameter[name[self].bcftools_stats]]]
|
keyword[def] identifier[parse_bcftools_stats] ( identifier[self] ):
literal[string]
identifier[collapse_complementary] = identifier[getattr] ( identifier[config] , literal[string] ,{}). identifier[get] ( literal[string] , keyword[False] )
keyword[if] identifier[collapse_complementary] :
identifier[types] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[else] :
identifier[types] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
identifier[self] . identifier[bcftools_stats] = identifier[dict] ()
identifier[self] . identifier[bcftools_stats_indels] = identifier[dict] ()
identifier[self] . identifier[bcftools_stats_vqc_snp] = identifier[dict] ()
identifier[self] . identifier[bcftools_stats_vqc_transi] = identifier[dict] ()
identifier[self] . identifier[bcftools_stats_vqc_transv] = identifier[dict] ()
identifier[self] . identifier[bcftools_stats_vqc_indels] = identifier[dict] ()
identifier[depth_data] = identifier[dict] ()
keyword[for] identifier[f] keyword[in] identifier[self] . identifier[find_log_files] ( literal[string] ):
identifier[s_names] = identifier[list] ()
keyword[for] identifier[line] keyword[in] identifier[f] [ literal[string] ]. identifier[splitlines] ():
identifier[s] = identifier[line] . identifier[split] ( literal[string] )
keyword[if] identifier[s] [ literal[int] ]== literal[string] :
identifier[s_name] = identifier[self] . identifier[clean_s_name] ( identifier[s] [ literal[int] ], identifier[f] [ literal[string] ])
identifier[s_names] . identifier[append] ( identifier[s_name] )
keyword[if] identifier[s_name] keyword[in] identifier[self] . identifier[bcftools_stats] :
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[s_name] ))
identifier[self] . identifier[add_data_source] ( identifier[f] , identifier[s_name] , identifier[section] = literal[string] )
identifier[self] . identifier[bcftools_stats] [ identifier[s_name] ]= identifier[dict] ()
identifier[self] . identifier[bcftools_stats_indels] [ identifier[s_name] ]= identifier[dict] ()
identifier[self] . identifier[bcftools_stats_vqc_snp] [ identifier[s_name] ]= identifier[dict] ()
identifier[self] . identifier[bcftools_stats_vqc_transi] [ identifier[s_name] ]= identifier[dict] ()
identifier[self] . identifier[bcftools_stats_vqc_transv] [ identifier[s_name] ]= identifier[dict] ()
identifier[self] . identifier[bcftools_stats_vqc_indels] [ identifier[s_name] ]= identifier[dict] ()
identifier[depth_data] [ identifier[s_name] ]= identifier[OrderedDict] ()
identifier[self] . identifier[bcftools_stats_indels] [ identifier[s_name] ][ literal[int] ]= keyword[None]
keyword[if] identifier[s] [ literal[int] ]== literal[string] keyword[and] identifier[len] ( identifier[s_names] )> literal[int] :
identifier[s_name] = identifier[s_names] [ identifier[int] ( identifier[s] [ literal[int] ])]
identifier[field] = identifier[s] [ literal[int] ]. identifier[strip] ()[:- literal[int] ]
identifier[field] = identifier[field] . identifier[replace] ( literal[string] , literal[string] )
identifier[value] = identifier[float] ( identifier[s] [ literal[int] ]. identifier[strip] ())
identifier[self] . identifier[bcftools_stats] [ identifier[s_name] ][ identifier[field] ]= identifier[value]
keyword[if] identifier[s] [ literal[int] ]== literal[string] keyword[and] identifier[len] ( identifier[s_names] )> literal[int] :
identifier[s_name] = identifier[s_names] [ identifier[int] ( identifier[s] [ literal[int] ])]
identifier[fields] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[for] identifier[i] , identifier[f] keyword[in] identifier[enumerate] ( identifier[fields] ):
identifier[value] = identifier[float] ( identifier[s] [ identifier[i] + literal[int] ]. identifier[strip] ())
identifier[self] . identifier[bcftools_stats] [ identifier[s_name] ][ identifier[f] ]= identifier[value]
keyword[if] identifier[s] [ literal[int] ]== literal[string] keyword[and] identifier[len] ( identifier[s_names] )> literal[int] :
identifier[s_name] = identifier[s_names] [ identifier[int] ( identifier[s] [ literal[int] ])]
identifier[rc] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] }
identifier[change] = identifier[s] [ literal[int] ]. identifier[strip] ()
keyword[if] identifier[change] keyword[not] keyword[in] identifier[types] :
identifier[change] = literal[string] . identifier[join] ( identifier[rc] [ identifier[n] ] keyword[for] identifier[n] keyword[in] identifier[change] . identifier[split] ( literal[string] ))
identifier[field] = literal[string] . identifier[format] ( identifier[change] )
identifier[value] = identifier[float] ( identifier[s] [ literal[int] ]. identifier[strip] ())
keyword[if] identifier[field] keyword[not] keyword[in] identifier[self] . identifier[bcftools_stats] [ identifier[s_name] ]:
identifier[self] . identifier[bcftools_stats] [ identifier[s_name] ][ identifier[field] ]= literal[int]
identifier[self] . identifier[bcftools_stats] [ identifier[s_name] ][ identifier[field] ]+= identifier[value]
keyword[if] identifier[s] [ literal[int] ]== literal[string] keyword[and] identifier[len] ( identifier[s_names] )> literal[int] :
identifier[s_name] = identifier[s_names] [ identifier[int] ( identifier[s] [ literal[int] ])]
identifier[length] = identifier[float] ( identifier[s] [ literal[int] ]. identifier[strip] ())
identifier[count] = identifier[float] ( identifier[s] [ literal[int] ]. identifier[strip] ())
identifier[self] . identifier[bcftools_stats_indels] [ identifier[s_name] ][ identifier[length] ]= identifier[count]
keyword[if] identifier[s] [ literal[int] ]== literal[string] keyword[and] identifier[len] ( identifier[s_names] )> literal[int] :
identifier[s_name] = identifier[s_names] [ identifier[int] ( identifier[s] [ literal[int] ])]
identifier[fields] =[ literal[string] , literal[string] ]
keyword[for] identifier[i] , identifier[f] keyword[in] identifier[enumerate] ( identifier[fields] ):
identifier[self] . identifier[bcftools_stats] [ identifier[s_name] ][ identifier[f] ]= identifier[int] ( identifier[s] [ identifier[i] + literal[int] ]. identifier[strip] ())
keyword[if] identifier[s] [ literal[int] ]== literal[string] keyword[and] identifier[len] ( identifier[s_names] )> literal[int] :
identifier[s_name] = identifier[s_names] [ identifier[int] ( identifier[s] [ literal[int] ])]
identifier[bin_name] = identifier[s] [ literal[int] ]. identifier[strip] ()
identifier[percent_sites] = identifier[float] ( identifier[s] [- literal[int] ]. identifier[strip] ())
identifier[depth_data] [ identifier[s_name] ][ identifier[bin_name] ]= identifier[percent_sites]
keyword[if] identifier[s] [ literal[int] ]== literal[string] keyword[and] identifier[len] ( identifier[s_names] )> literal[int] :
identifier[s_name] = identifier[s_names] [ identifier[int] ( identifier[s] [ literal[int] ])]
identifier[quality] = identifier[float] ( identifier[s] [ literal[int] ]. identifier[strip] ())
identifier[self] . identifier[bcftools_stats_vqc_snp] [ identifier[s_name] ][ identifier[quality] ]= identifier[float] ( identifier[s] [ literal[int] ]. identifier[strip] ())
identifier[self] . identifier[bcftools_stats_vqc_transi] [ identifier[s_name] ][ identifier[quality] ]= identifier[float] ( identifier[s] [ literal[int] ]. identifier[strip] ())
identifier[self] . identifier[bcftools_stats_vqc_transv] [ identifier[s_name] ][ identifier[quality] ]= identifier[float] ( identifier[s] [ literal[int] ]. identifier[strip] ())
identifier[self] . identifier[bcftools_stats_vqc_indels] [ identifier[s_name] ][ identifier[quality] ]= identifier[float] ( identifier[s] [ literal[int] ]. identifier[strip] ())
identifier[self] . identifier[bcftools_stats] = identifier[self] . identifier[ignore_samples] ( identifier[self] . identifier[bcftools_stats] )
keyword[if] identifier[len] ( identifier[self] . identifier[bcftools_stats] )> literal[int] :
identifier[self] . identifier[write_data_file] ( identifier[self] . identifier[bcftools_stats] , literal[string] )
identifier[stats_headers] = identifier[self] . identifier[bcftools_stats_genstats_headers] ()
keyword[if] identifier[getattr] ( identifier[config] , literal[string] ,{}). identifier[get] ( literal[string] , keyword[True] ):
identifier[self] . identifier[general_stats_addcols] ( identifier[self] . identifier[bcftools_stats] , identifier[stats_headers] , literal[string] )
keyword[if] identifier[getattr] ( identifier[config] , literal[string] ,{}). identifier[get] ( literal[string] , keyword[False] ):
identifier[self] . identifier[add_section] (
identifier[name] = literal[string] ,
identifier[anchor] = literal[string] ,
identifier[plot] = identifier[table] . identifier[plot] ( identifier[self] . identifier[bcftools_stats] , identifier[stats_headers] ))
identifier[keys] = identifier[OrderedDict] ()
keyword[for] identifier[t] keyword[in] identifier[types] :
identifier[keys] [ literal[string] . identifier[format] ( identifier[t] )]={ literal[string] : identifier[t] }
identifier[pconfig] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string]
}
identifier[self] . identifier[add_section] (
identifier[name] = literal[string] ,
identifier[anchor] = literal[string] ,
identifier[plot] = identifier[bargraph] . identifier[plot] ( identifier[self] . identifier[bcftools_stats] , identifier[keys] , identifier[pconfig] )
)
keyword[if] identifier[len] ( identifier[self] . identifier[bcftools_stats_vqc_snp] )> literal[int] :
identifier[pconfig] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : keyword[False] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] :[
{ literal[string] : literal[string] , literal[string] : literal[string] },
{ literal[string] : literal[string] , literal[string] : literal[string] },
{ literal[string] : literal[string] , literal[string] : literal[string] },
{ literal[string] : literal[string] , literal[string] : literal[string] }
]
}
identifier[self] . identifier[add_section] (
identifier[name] = literal[string] ,
identifier[anchor] = literal[string] ,
identifier[plot] = identifier[linegraph] . identifier[plot] (
[ identifier[self] . identifier[bcftools_stats_vqc_snp] ,
identifier[self] . identifier[bcftools_stats_vqc_transi] ,
identifier[self] . identifier[bcftools_stats_vqc_transv] ,
identifier[self] . identifier[bcftools_stats_vqc_indels] ], identifier[pconfig] )
)
keyword[if] identifier[len] ( identifier[self] . identifier[bcftools_stats_indels] )> literal[int] :
identifier[pconfig] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : keyword[False] ,
literal[string] : literal[int] ,
}
identifier[self] . identifier[add_section] (
identifier[name] = literal[string] ,
identifier[anchor] = literal[string] ,
identifier[plot] = identifier[linegraph] . identifier[plot] ( identifier[self] . identifier[bcftools_stats_indels] , identifier[pconfig] )
)
keyword[if] identifier[len] ( identifier[depth_data] )> literal[int] :
identifier[pconfig] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : keyword[True]
}
identifier[self] . identifier[add_section] (
identifier[name] = literal[string] ,
identifier[anchor] = literal[string] ,
identifier[description] = literal[string] ,
identifier[plot] = identifier[linegraph] . identifier[plot] ( identifier[depth_data] , identifier[pconfig] )
)
keyword[return] identifier[len] ( identifier[self] . identifier[bcftools_stats] )
|
def parse_bcftools_stats(self):
    """
    Find bcftools stats logs and parse their data.

    Bcftools stats reports contain 'sets' of data, which can
    have multiple vcf files each (but usually don't). Here,
    we treat each 'set' as a MultiQC sample, taking the first
    input filename for each set as the name.

    Populates ``self.bcftools_stats`` and related per-plot dicts,
    adds report sections, and returns the number of samples parsed.
    """
    collapse_complementary = getattr(config, 'bcftools', {}).get('collapse_complementary_changes', False)
    if collapse_complementary:
        # Fold reverse-complement substitutions (e.g. T>G) onto their
        # canonical counterparts, so only six categories remain.
        types = ['A>C', 'A>G', 'A>T', 'C>A', 'C>G', 'C>T']
    else:
        types = ['A>C', 'A>G', 'A>T', 'C>A', 'C>G', 'C>T',
                 'G>A', 'G>C', 'G>T', 'T>A', 'T>C', 'T>G']
    self.bcftools_stats = dict()
    self.bcftools_stats_indels = dict()
    self.bcftools_stats_vqc_snp = dict()
    self.bcftools_stats_vqc_transi = dict()
    self.bcftools_stats_vqc_transv = dict()
    self.bcftools_stats_vqc_indels = dict()
    depth_data = dict()
    for f in self.find_log_files('bcftools/stats'):
        s_names = list()
        for line in f['f'].splitlines():
            s = line.split('\t')
            # Get the sample names - one per 'set'
            if s[0] == 'ID':
                s_name = self.clean_s_name(s[2], f['root'])
                s_names.append(s_name)
                if s_name in self.bcftools_stats:
                    log.debug('Duplicate sample name found! Overwriting: {}'.format(s_name))
                self.add_data_source(f, s_name, section='stats')
                self.bcftools_stats[s_name] = dict()
                self.bcftools_stats_indels[s_name] = dict()
                self.bcftools_stats_vqc_snp[s_name] = dict()
                self.bcftools_stats_vqc_transi[s_name] = dict()
                self.bcftools_stats_vqc_transv[s_name] = dict()
                self.bcftools_stats_vqc_indels[s_name] = dict()
                depth_data[s_name] = OrderedDict()
                # Avoid the line graph joining across a missing length-0 bin
                self.bcftools_stats_indels[s_name][0] = None
            # Parse key summary numbers ("SN" lines)
            if s[0] == 'SN' and len(s_names) > 0:
                s_name = s_names[int(s[1])]
                field = s[2].strip()[:-1]
                field = field.replace(' ', '_')
                value = float(s[3].strip())
                self.bcftools_stats[s_name][field] = value
            # Parse transitions/transversions stats
            if s[0] == 'TSTV' and len(s_names) > 0:
                s_name = s_names[int(s[1])]
                fields = ['ts', 'tv', 'tstv', 'ts_1st_ALT', 'tv_1st_ALT', 'tstv_1st_ALT']
                # NB: loop variable must NOT be called 'f' - that would
                # shadow the log-file dict and break later 'ID' lines.
                for i, field in enumerate(fields):
                    value = float(s[i + 2].strip())
                    self.bcftools_stats[s_name][field] = value
            # Parse substitution types ("ST" lines)
            if s[0] == 'ST' and len(s_names) > 0:
                s_name = s_names[int(s[1])]
                rc = {'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A'}
                change = s[2].strip()
                if change not in types:
                    # Reverse-complement the change so complementary
                    # substitutions are accumulated together.
                    change = '>'.join(rc[n] for n in change.split('>'))
                field = 'substitution_type_{}'.format(change)
                value = float(s[3].strip())
                if field not in self.bcftools_stats[s_name]:
                    self.bcftools_stats[s_name][field] = 0
                self.bcftools_stats[s_name][field] += value
            # Indel length distributions ("IDD" lines)
            if s[0] == 'IDD' and len(s_names) > 0:
                s_name = s_names[int(s[1])]
                length = float(s[2].strip())
                count = float(s[3].strip())
                self.bcftools_stats_indels[s_name][length] = count
            # Per-sample counts ("PSC" lines)
            if s[0] == 'PSC' and len(s_names) > 0:
                s_name = s_names[int(s[1])]
                fields = ['variations_hom', 'variations_het']
                # Same shadowing caveat as the TSTV loop above.
                for i, field in enumerate(fields):
                    self.bcftools_stats[s_name][field] = int(s[i + 4].strip())
            # Depth plots ("DP" lines)
            if s[0] == 'DP' and len(s_names) > 0:
                s_name = s_names[int(s[1])]
                bin_name = s[2].strip()
                percent_sites = float(s[-1].strip())
                depth_data[s_name][bin_name] = percent_sites
            # Variant quality histograms ("QUAL" lines)
            if s[0] == 'QUAL' and len(s_names) > 0:
                s_name = s_names[int(s[1])]
                quality = float(s[2].strip())
                self.bcftools_stats_vqc_snp[s_name][quality] = float(s[3].strip())
                self.bcftools_stats_vqc_transi[s_name][quality] = float(s[4].strip())
                self.bcftools_stats_vqc_transv[s_name][quality] = float(s[5].strip())
                self.bcftools_stats_vqc_indels[s_name][quality] = float(s[6].strip())
    # Filter to strip out ignored sample names
    self.bcftools_stats = self.ignore_samples(self.bcftools_stats)
    if len(self.bcftools_stats) > 0:
        # Write parsed report data to a file
        self.write_data_file(self.bcftools_stats, 'multiqc_bcftools_stats')
        # General stats table columns
        stats_headers = self.bcftools_stats_genstats_headers()
        if getattr(config, 'bcftools', {}).get('write_general_stats', True):
            self.general_stats_addcols(self.bcftools_stats, stats_headers, 'Bcftools Stats')
        if getattr(config, 'bcftools', {}).get('write_separate_table', False):
            self.add_section(
                name='Bcftools Stats',
                anchor='bcftools-stats',
                plot=table.plot(self.bcftools_stats, stats_headers))
        # Make bargraph plot of substitution types
        keys = OrderedDict()
        for t in types:
            keys['substitution_type_{}'.format(t)] = {'name': t}
        pconfig = {
            'id': 'bcftools-stats-subtypes',
            'title': 'Bcftools Stats: Substitutions',
            'ylab': '# Substitutions',
            'cpswitch_counts_label': 'Number of Substitutions'
        }
        self.add_section(
            name='Variant Substitution Types',
            anchor='bcftools-stats',
            plot=bargraph.plot(self.bcftools_stats, keys, pconfig)
        )
        # Make histograms of variant quality
        if len(self.bcftools_stats_vqc_snp) > 0:
            pconfig = {
                'id': 'bcftools_stats_vqc',
                'title': 'Bcftools Stats: Variant Quality Count',
                'ylab': 'Count',
                'xlab': 'Quality',
                'xDecimals': False,
                'ymin': 0,
                'smooth_points': 600,
                'data_labels': [
                    {'name': 'Count SNP', 'ylab': 'Quality'},
                    {'name': 'Count Transitions', 'ylab': 'Quality'},
                    {'name': 'Count Transversions', 'ylab': 'Quality'},
                    {'name': 'Count Indels', 'ylab': 'Quality'}
                ]
            }
            self.add_section(
                name='Variant Quality',
                anchor='bcftools-stats_variant_quality_plot',
                plot=linegraph.plot(
                    [self.bcftools_stats_vqc_snp,
                     self.bcftools_stats_vqc_transi,
                     self.bcftools_stats_vqc_transv,
                     self.bcftools_stats_vqc_indels], pconfig)
            )
        # Make line graph of indel lengths
        if len(self.bcftools_stats_indels) > 0:
            pconfig = {
                'id': 'bcftools_stats_indel-lengths',
                'title': 'Bcftools Stats: Indel Distribution',
                'ylab': 'Count',
                'xlab': 'InDel Length (bp)',
                'xDecimals': False,
                'ymin': 0,
            }
            self.add_section(
                name='Indel Distribution',
                anchor='bcftools-stats_indel_plot',
                plot=linegraph.plot(self.bcftools_stats_indels, pconfig)
            )
        # Make line graph of variants per depth
        if len(depth_data) > 0:
            pconfig = {
                'id': 'bcftools_stats_depth',
                'title': 'Bcftools Stats: Variant depths',
                'ylab': 'Fraction of sites (%)',
                'xlab': 'Variant depth',
                'ymin': 0,
                'ymax': 100,
                'categories': True
            }
            self.add_section(
                name='Variant depths',
                anchor='bcftools-stats_depth_plot',
                description='Read depth support distribution for called variants',
                plot=linegraph.plot(depth_data, pconfig)
            )
    # Return the number of logs that were found
    return len(self.bcftools_stats)
|
def to_mono(y):
'''Force an audio signal down to mono.
Parameters
----------
y : np.ndarray [shape=(2,n) or shape=(n,)]
audio time series, either stereo or mono
Returns
-------
y_mono : np.ndarray [shape=(n,)]
`y` as a monophonic time-series
Notes
-----
This function caches at level 20.
Examples
--------
>>> y, sr = librosa.load(librosa.util.example_audio_file(), mono=False)
>>> y.shape
(2, 1355168)
>>> y_mono = librosa.to_mono(y)
>>> y_mono.shape
(1355168,)
'''
# Validate the buffer. Stereo is ok here.
util.valid_audio(y, mono=False)
if y.ndim > 1:
y = np.mean(y, axis=0)
return y
|
def function[to_mono, parameter[y]]:
constant[Force an audio signal down to mono.
Parameters
----------
y : np.ndarray [shape=(2,n) or shape=(n,)]
audio time series, either stereo or mono
Returns
-------
y_mono : np.ndarray [shape=(n,)]
`y` as a monophonic time-series
Notes
-----
This function caches at level 20.
Examples
--------
>>> y, sr = librosa.load(librosa.util.example_audio_file(), mono=False)
>>> y.shape
(2, 1355168)
>>> y_mono = librosa.to_mono(y)
>>> y_mono.shape
(1355168,)
]
call[name[util].valid_audio, parameter[name[y]]]
if compare[name[y].ndim greater[>] constant[1]] begin[:]
variable[y] assign[=] call[name[np].mean, parameter[name[y]]]
return[name[y]]
|
keyword[def] identifier[to_mono] ( identifier[y] ):
literal[string]
identifier[util] . identifier[valid_audio] ( identifier[y] , identifier[mono] = keyword[False] )
keyword[if] identifier[y] . identifier[ndim] > literal[int] :
identifier[y] = identifier[np] . identifier[mean] ( identifier[y] , identifier[axis] = literal[int] )
keyword[return] identifier[y]
|
def to_mono(y):
"""Force an audio signal down to mono.
Parameters
----------
y : np.ndarray [shape=(2,n) or shape=(n,)]
audio time series, either stereo or mono
Returns
-------
y_mono : np.ndarray [shape=(n,)]
`y` as a monophonic time-series
Notes
-----
This function caches at level 20.
Examples
--------
>>> y, sr = librosa.load(librosa.util.example_audio_file(), mono=False)
>>> y.shape
(2, 1355168)
>>> y_mono = librosa.to_mono(y)
>>> y_mono.shape
(1355168,)
"""
# Validate the buffer. Stereo is ok here.
util.valid_audio(y, mono=False)
if y.ndim > 1:
y = np.mean(y, axis=0) # depends on [control=['if'], data=[]]
return y
|
def _create_socket(self):
"""Creates ssl socket, connects to stream api and
sets timeout.
"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s = ssl.wrap_socket(s)
s.connect((self.host, self.__port))
s.settimeout(self.timeout)
return s
|
def function[_create_socket, parameter[self]]:
constant[Creates ssl socket, connects to stream api and
sets timeout.
]
variable[s] assign[=] call[name[socket].socket, parameter[name[socket].AF_INET, name[socket].SOCK_STREAM]]
variable[s] assign[=] call[name[ssl].wrap_socket, parameter[name[s]]]
call[name[s].connect, parameter[tuple[[<ast.Attribute object at 0x7da204347a60>, <ast.Attribute object at 0x7da204346170>]]]]
call[name[s].settimeout, parameter[name[self].timeout]]
return[name[s]]
|
keyword[def] identifier[_create_socket] ( identifier[self] ):
literal[string]
identifier[s] = identifier[socket] . identifier[socket] ( identifier[socket] . identifier[AF_INET] , identifier[socket] . identifier[SOCK_STREAM] )
identifier[s] = identifier[ssl] . identifier[wrap_socket] ( identifier[s] )
identifier[s] . identifier[connect] (( identifier[self] . identifier[host] , identifier[self] . identifier[__port] ))
identifier[s] . identifier[settimeout] ( identifier[self] . identifier[timeout] )
keyword[return] identifier[s]
|
def _create_socket(self):
"""Creates ssl socket, connects to stream api and
sets timeout.
"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s = ssl.wrap_socket(s)
s.connect((self.host, self.__port))
s.settimeout(self.timeout)
return s
|
def user_info(self, username):
"""
Get info of a specific user.
:param username: the username of the user to get info about
:return:
"""
request_url = "{}/api/0/user/{}".format(self.instance, username)
return_value = self._call_api(request_url)
return return_value
|
def function[user_info, parameter[self, username]]:
constant[
Get info of a specific user.
:param username: the username of the user to get info about
:return:
]
variable[request_url] assign[=] call[constant[{}/api/0/user/{}].format, parameter[name[self].instance, name[username]]]
variable[return_value] assign[=] call[name[self]._call_api, parameter[name[request_url]]]
return[name[return_value]]
|
keyword[def] identifier[user_info] ( identifier[self] , identifier[username] ):
literal[string]
identifier[request_url] = literal[string] . identifier[format] ( identifier[self] . identifier[instance] , identifier[username] )
identifier[return_value] = identifier[self] . identifier[_call_api] ( identifier[request_url] )
keyword[return] identifier[return_value]
|
def user_info(self, username):
"""
Get info of a specific user.
:param username: the username of the user to get info about
:return:
"""
request_url = '{}/api/0/user/{}'.format(self.instance, username)
return_value = self._call_api(request_url)
return return_value
|
def article(request, slug):
"""
The main view of the Django-CMS Articles! Takes a request and a slug,
renders the article.
"""
# Get current CMS Page as article tree
tree = request.current_page.get_public_object()
# Check whether it really is a tree.
# It could also be one of its sub-pages.
if tree.application_urls != 'CMSArticlesApp':
# In such case show regular CMS Page
return page(request, slug)
# Get an Article object from the request
draft = use_draft(request) and request.user.has_perm('cms_articles.change_article')
preview = 'preview' in request.GET and request.user.has_perm('cms_articles.change_article')
site = tree.node.site
article = get_article_from_slug(tree, slug, preview, draft)
if not article:
# raise 404
_handle_no_page(request)
request.current_article = article
if hasattr(request, 'user') and request.user.is_staff:
user_languages = get_language_list(site_id=site.pk)
else:
user_languages = get_public_languages(site_id=site.pk)
request_language = get_language_from_request(request, check_path=True)
# get_published_languages will return all languages in draft mode
# and published only in live mode.
# These languages are then filtered out by the user allowed languages
available_languages = [
language for language in user_languages
if language in list(article.get_published_languages())
]
own_urls = [
request.build_absolute_uri(request.path),
'/%s' % request.path,
request.path,
]
try:
redirect_on_fallback = get_redirect_on_fallback(request_language, site_id=site.pk)
except LanguageError:
redirect_on_fallback = False
if request_language not in user_languages:
# Language is not allowed
# Use the default site language
default_language = get_default_language_for_site(site.pk)
fallbacks = get_fallback_languages(default_language, site_id=site.pk)
fallbacks = [default_language] + fallbacks
else:
fallbacks = get_fallback_languages(request_language, site_id=site.pk)
# Only fallback to languages the user is allowed to see
fallback_languages = [
language for language in fallbacks
if language != request_language and language in available_languages
]
language_is_unavailable = request_language not in available_languages
if language_is_unavailable and not fallback_languages:
# There is no page with the requested language
# and there's no configured fallbacks
return _handle_no_page(request)
elif language_is_unavailable and redirect_on_fallback:
# There is no page with the requested language and
# the user has explicitly requested to redirect on fallbacks,
# so redirect to the first configured / available fallback language
fallback = fallback_languages[0]
redirect_url = article.get_absolute_url(fallback, fallback=False)
else:
redirect_url = False
if redirect_url:
if request.user.is_staff and hasattr(request, 'toolbar') and request.toolbar.edit_mode_active:
request.toolbar.redirect_url = redirect_url
elif redirect_url not in own_urls:
# prevent redirect to self
return HttpResponseRedirect(redirect_url)
# permission checks
if article.login_required and not request.user.is_authenticated():
return redirect_to_login(urlquote(request.get_full_path()), settings.LOGIN_URL)
if hasattr(request, 'toolbar'):
request.toolbar.obj = article
structure_requested = get_cms_setting('CMS_TOOLBAR_URL__BUILD') in request.GET
if article.has_change_permission(request) and structure_requested:
return render_object_structure(request, article)
return render_article(request, article, current_language=request_language, slug=slug)
|
def function[article, parameter[request, slug]]:
constant[
The main view of the Django-CMS Articles! Takes a request and a slug,
renders the article.
]
variable[tree] assign[=] call[name[request].current_page.get_public_object, parameter[]]
if compare[name[tree].application_urls not_equal[!=] constant[CMSArticlesApp]] begin[:]
return[call[name[page], parameter[name[request], name[slug]]]]
variable[draft] assign[=] <ast.BoolOp object at 0x7da2041d8250>
variable[preview] assign[=] <ast.BoolOp object at 0x7da2041db910>
variable[site] assign[=] name[tree].node.site
variable[article] assign[=] call[name[get_article_from_slug], parameter[name[tree], name[slug], name[preview], name[draft]]]
if <ast.UnaryOp object at 0x7da2041d8a60> begin[:]
call[name[_handle_no_page], parameter[name[request]]]
name[request].current_article assign[=] name[article]
if <ast.BoolOp object at 0x7da2041da050> begin[:]
variable[user_languages] assign[=] call[name[get_language_list], parameter[]]
variable[request_language] assign[=] call[name[get_language_from_request], parameter[name[request]]]
variable[available_languages] assign[=] <ast.ListComp object at 0x7da2041db490>
variable[own_urls] assign[=] list[[<ast.Call object at 0x7da2041dbcd0>, <ast.BinOp object at 0x7da2041db550>, <ast.Attribute object at 0x7da2041db850>]]
<ast.Try object at 0x7da2041d9420>
if compare[name[request_language] <ast.NotIn object at 0x7da2590d7190> name[user_languages]] begin[:]
variable[default_language] assign[=] call[name[get_default_language_for_site], parameter[name[site].pk]]
variable[fallbacks] assign[=] call[name[get_fallback_languages], parameter[name[default_language]]]
variable[fallbacks] assign[=] binary_operation[list[[<ast.Name object at 0x7da2041d8190>]] + name[fallbacks]]
variable[fallback_languages] assign[=] <ast.ListComp object at 0x7da2041da3e0>
variable[language_is_unavailable] assign[=] compare[name[request_language] <ast.NotIn object at 0x7da2590d7190> name[available_languages]]
if <ast.BoolOp object at 0x7da2041da230> begin[:]
return[call[name[_handle_no_page], parameter[name[request]]]]
if name[redirect_url] begin[:]
if <ast.BoolOp object at 0x7da20c991ab0> begin[:]
name[request].toolbar.redirect_url assign[=] name[redirect_url]
if <ast.BoolOp object at 0x7da20c990cd0> begin[:]
return[call[name[redirect_to_login], parameter[call[name[urlquote], parameter[call[name[request].get_full_path, parameter[]]]], name[settings].LOGIN_URL]]]
if call[name[hasattr], parameter[name[request], constant[toolbar]]] begin[:]
name[request].toolbar.obj assign[=] name[article]
variable[structure_requested] assign[=] compare[call[name[get_cms_setting], parameter[constant[CMS_TOOLBAR_URL__BUILD]]] in name[request].GET]
if <ast.BoolOp object at 0x7da20c992b30> begin[:]
return[call[name[render_object_structure], parameter[name[request], name[article]]]]
return[call[name[render_article], parameter[name[request], name[article]]]]
|
keyword[def] identifier[article] ( identifier[request] , identifier[slug] ):
literal[string]
identifier[tree] = identifier[request] . identifier[current_page] . identifier[get_public_object] ()
keyword[if] identifier[tree] . identifier[application_urls] != literal[string] :
keyword[return] identifier[page] ( identifier[request] , identifier[slug] )
identifier[draft] = identifier[use_draft] ( identifier[request] ) keyword[and] identifier[request] . identifier[user] . identifier[has_perm] ( literal[string] )
identifier[preview] = literal[string] keyword[in] identifier[request] . identifier[GET] keyword[and] identifier[request] . identifier[user] . identifier[has_perm] ( literal[string] )
identifier[site] = identifier[tree] . identifier[node] . identifier[site]
identifier[article] = identifier[get_article_from_slug] ( identifier[tree] , identifier[slug] , identifier[preview] , identifier[draft] )
keyword[if] keyword[not] identifier[article] :
identifier[_handle_no_page] ( identifier[request] )
identifier[request] . identifier[current_article] = identifier[article]
keyword[if] identifier[hasattr] ( identifier[request] , literal[string] ) keyword[and] identifier[request] . identifier[user] . identifier[is_staff] :
identifier[user_languages] = identifier[get_language_list] ( identifier[site_id] = identifier[site] . identifier[pk] )
keyword[else] :
identifier[user_languages] = identifier[get_public_languages] ( identifier[site_id] = identifier[site] . identifier[pk] )
identifier[request_language] = identifier[get_language_from_request] ( identifier[request] , identifier[check_path] = keyword[True] )
identifier[available_languages] =[
identifier[language] keyword[for] identifier[language] keyword[in] identifier[user_languages]
keyword[if] identifier[language] keyword[in] identifier[list] ( identifier[article] . identifier[get_published_languages] ())
]
identifier[own_urls] =[
identifier[request] . identifier[build_absolute_uri] ( identifier[request] . identifier[path] ),
literal[string] % identifier[request] . identifier[path] ,
identifier[request] . identifier[path] ,
]
keyword[try] :
identifier[redirect_on_fallback] = identifier[get_redirect_on_fallback] ( identifier[request_language] , identifier[site_id] = identifier[site] . identifier[pk] )
keyword[except] identifier[LanguageError] :
identifier[redirect_on_fallback] = keyword[False]
keyword[if] identifier[request_language] keyword[not] keyword[in] identifier[user_languages] :
identifier[default_language] = identifier[get_default_language_for_site] ( identifier[site] . identifier[pk] )
identifier[fallbacks] = identifier[get_fallback_languages] ( identifier[default_language] , identifier[site_id] = identifier[site] . identifier[pk] )
identifier[fallbacks] =[ identifier[default_language] ]+ identifier[fallbacks]
keyword[else] :
identifier[fallbacks] = identifier[get_fallback_languages] ( identifier[request_language] , identifier[site_id] = identifier[site] . identifier[pk] )
identifier[fallback_languages] =[
identifier[language] keyword[for] identifier[language] keyword[in] identifier[fallbacks]
keyword[if] identifier[language] != identifier[request_language] keyword[and] identifier[language] keyword[in] identifier[available_languages]
]
identifier[language_is_unavailable] = identifier[request_language] keyword[not] keyword[in] identifier[available_languages]
keyword[if] identifier[language_is_unavailable] keyword[and] keyword[not] identifier[fallback_languages] :
keyword[return] identifier[_handle_no_page] ( identifier[request] )
keyword[elif] identifier[language_is_unavailable] keyword[and] identifier[redirect_on_fallback] :
identifier[fallback] = identifier[fallback_languages] [ literal[int] ]
identifier[redirect_url] = identifier[article] . identifier[get_absolute_url] ( identifier[fallback] , identifier[fallback] = keyword[False] )
keyword[else] :
identifier[redirect_url] = keyword[False]
keyword[if] identifier[redirect_url] :
keyword[if] identifier[request] . identifier[user] . identifier[is_staff] keyword[and] identifier[hasattr] ( identifier[request] , literal[string] ) keyword[and] identifier[request] . identifier[toolbar] . identifier[edit_mode_active] :
identifier[request] . identifier[toolbar] . identifier[redirect_url] = identifier[redirect_url]
keyword[elif] identifier[redirect_url] keyword[not] keyword[in] identifier[own_urls] :
keyword[return] identifier[HttpResponseRedirect] ( identifier[redirect_url] )
keyword[if] identifier[article] . identifier[login_required] keyword[and] keyword[not] identifier[request] . identifier[user] . identifier[is_authenticated] ():
keyword[return] identifier[redirect_to_login] ( identifier[urlquote] ( identifier[request] . identifier[get_full_path] ()), identifier[settings] . identifier[LOGIN_URL] )
keyword[if] identifier[hasattr] ( identifier[request] , literal[string] ):
identifier[request] . identifier[toolbar] . identifier[obj] = identifier[article]
identifier[structure_requested] = identifier[get_cms_setting] ( literal[string] ) keyword[in] identifier[request] . identifier[GET]
keyword[if] identifier[article] . identifier[has_change_permission] ( identifier[request] ) keyword[and] identifier[structure_requested] :
keyword[return] identifier[render_object_structure] ( identifier[request] , identifier[article] )
keyword[return] identifier[render_article] ( identifier[request] , identifier[article] , identifier[current_language] = identifier[request_language] , identifier[slug] = identifier[slug] )
|
def article(request, slug):
"""
The main view of the Django-CMS Articles! Takes a request and a slug,
renders the article.
"""
# Get current CMS Page as article tree
tree = request.current_page.get_public_object()
# Check whether it really is a tree.
# It could also be one of its sub-pages.
if tree.application_urls != 'CMSArticlesApp':
# In such case show regular CMS Page
return page(request, slug) # depends on [control=['if'], data=[]]
# Get an Article object from the request
draft = use_draft(request) and request.user.has_perm('cms_articles.change_article')
preview = 'preview' in request.GET and request.user.has_perm('cms_articles.change_article')
site = tree.node.site
article = get_article_from_slug(tree, slug, preview, draft)
if not article:
# raise 404
_handle_no_page(request) # depends on [control=['if'], data=[]]
request.current_article = article
if hasattr(request, 'user') and request.user.is_staff:
user_languages = get_language_list(site_id=site.pk) # depends on [control=['if'], data=[]]
else:
user_languages = get_public_languages(site_id=site.pk)
request_language = get_language_from_request(request, check_path=True)
# get_published_languages will return all languages in draft mode
# and published only in live mode.
# These languages are then filtered out by the user allowed languages
available_languages = [language for language in user_languages if language in list(article.get_published_languages())]
own_urls = [request.build_absolute_uri(request.path), '/%s' % request.path, request.path]
try:
redirect_on_fallback = get_redirect_on_fallback(request_language, site_id=site.pk) # depends on [control=['try'], data=[]]
except LanguageError:
redirect_on_fallback = False # depends on [control=['except'], data=[]]
if request_language not in user_languages:
# Language is not allowed
# Use the default site language
default_language = get_default_language_for_site(site.pk)
fallbacks = get_fallback_languages(default_language, site_id=site.pk)
fallbacks = [default_language] + fallbacks # depends on [control=['if'], data=[]]
else:
fallbacks = get_fallback_languages(request_language, site_id=site.pk)
# Only fallback to languages the user is allowed to see
fallback_languages = [language for language in fallbacks if language != request_language and language in available_languages]
language_is_unavailable = request_language not in available_languages
if language_is_unavailable and (not fallback_languages):
# There is no page with the requested language
# and there's no configured fallbacks
return _handle_no_page(request) # depends on [control=['if'], data=[]]
elif language_is_unavailable and redirect_on_fallback:
# There is no page with the requested language and
# the user has explicitly requested to redirect on fallbacks,
# so redirect to the first configured / available fallback language
fallback = fallback_languages[0]
redirect_url = article.get_absolute_url(fallback, fallback=False) # depends on [control=['if'], data=[]]
else:
redirect_url = False
if redirect_url:
if request.user.is_staff and hasattr(request, 'toolbar') and request.toolbar.edit_mode_active:
request.toolbar.redirect_url = redirect_url # depends on [control=['if'], data=[]]
elif redirect_url not in own_urls:
# prevent redirect to self
return HttpResponseRedirect(redirect_url) # depends on [control=['if'], data=['redirect_url']] # depends on [control=['if'], data=[]]
# permission checks
if article.login_required and (not request.user.is_authenticated()):
return redirect_to_login(urlquote(request.get_full_path()), settings.LOGIN_URL) # depends on [control=['if'], data=[]]
if hasattr(request, 'toolbar'):
request.toolbar.obj = article # depends on [control=['if'], data=[]]
structure_requested = get_cms_setting('CMS_TOOLBAR_URL__BUILD') in request.GET
if article.has_change_permission(request) and structure_requested:
return render_object_structure(request, article) # depends on [control=['if'], data=[]]
return render_article(request, article, current_language=request_language, slug=slug)
|
def make_step_lcont (transition):
"""Return a ufunc-like step function that is left-continuous. Returns 1 if
x > transition, 0 otherwise.
"""
if not np.isfinite (transition):
raise ValueError ('"transition" argument must be finite number; got %r' % transition)
def step_lcont (x):
x = np.asarray (x)
x1 = np.atleast_1d (x)
r = (x1 > transition).astype (x.dtype)
if x.ndim == 0:
return np.asscalar (r)
return r
step_lcont.__doc__ = ('Left-continuous step function. Returns 1 if x > %g, '
'0 otherwise.') % (transition,)
return step_lcont
|
def function[make_step_lcont, parameter[transition]]:
constant[Return a ufunc-like step function that is left-continuous. Returns 1 if
x > transition, 0 otherwise.
]
if <ast.UnaryOp object at 0x7da2047e9c90> begin[:]
<ast.Raise object at 0x7da1b26b7be0>
def function[step_lcont, parameter[x]]:
variable[x] assign[=] call[name[np].asarray, parameter[name[x]]]
variable[x1] assign[=] call[name[np].atleast_1d, parameter[name[x]]]
variable[r] assign[=] call[compare[name[x1] greater[>] name[transition]].astype, parameter[name[x].dtype]]
if compare[name[x].ndim equal[==] constant[0]] begin[:]
return[call[name[np].asscalar, parameter[name[r]]]]
return[name[r]]
name[step_lcont].__doc__ assign[=] binary_operation[constant[Left-continuous step function. Returns 1 if x > %g, 0 otherwise.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b26b70d0>]]]
return[name[step_lcont]]
|
keyword[def] identifier[make_step_lcont] ( identifier[transition] ):
literal[string]
keyword[if] keyword[not] identifier[np] . identifier[isfinite] ( identifier[transition] ):
keyword[raise] identifier[ValueError] ( literal[string] % identifier[transition] )
keyword[def] identifier[step_lcont] ( identifier[x] ):
identifier[x] = identifier[np] . identifier[asarray] ( identifier[x] )
identifier[x1] = identifier[np] . identifier[atleast_1d] ( identifier[x] )
identifier[r] =( identifier[x1] > identifier[transition] ). identifier[astype] ( identifier[x] . identifier[dtype] )
keyword[if] identifier[x] . identifier[ndim] == literal[int] :
keyword[return] identifier[np] . identifier[asscalar] ( identifier[r] )
keyword[return] identifier[r]
identifier[step_lcont] . identifier[__doc__] =( literal[string]
literal[string] )%( identifier[transition] ,)
keyword[return] identifier[step_lcont]
|
def make_step_lcont(transition):
"""Return a ufunc-like step function that is left-continuous. Returns 1 if
x > transition, 0 otherwise.
"""
if not np.isfinite(transition):
raise ValueError('"transition" argument must be finite number; got %r' % transition) # depends on [control=['if'], data=[]]
def step_lcont(x):
x = np.asarray(x)
x1 = np.atleast_1d(x)
r = (x1 > transition).astype(x.dtype)
if x.ndim == 0:
return np.asscalar(r) # depends on [control=['if'], data=[]]
return r
step_lcont.__doc__ = 'Left-continuous step function. Returns 1 if x > %g, 0 otherwise.' % (transition,)
return step_lcont
|
def set_edge_label(self, edge, label):
"""
Set the label of an edge.
@type edge: edge
@param edge: One edge.
@type label: string
@param label: Edge label.
"""
self.set_edge_properties(edge, label=label )
if not self.DIRECTED:
self.set_edge_properties((edge[1], edge[0]) , label=label )
|
def function[set_edge_label, parameter[self, edge, label]]:
constant[
Set the label of an edge.
@type edge: edge
@param edge: One edge.
@type label: string
@param label: Edge label.
]
call[name[self].set_edge_properties, parameter[name[edge]]]
if <ast.UnaryOp object at 0x7da1b170e4a0> begin[:]
call[name[self].set_edge_properties, parameter[tuple[[<ast.Subscript object at 0x7da1b170f0a0>, <ast.Subscript object at 0x7da1b170c100>]]]]
|
keyword[def] identifier[set_edge_label] ( identifier[self] , identifier[edge] , identifier[label] ):
literal[string]
identifier[self] . identifier[set_edge_properties] ( identifier[edge] , identifier[label] = identifier[label] )
keyword[if] keyword[not] identifier[self] . identifier[DIRECTED] :
identifier[self] . identifier[set_edge_properties] (( identifier[edge] [ literal[int] ], identifier[edge] [ literal[int] ]), identifier[label] = identifier[label] )
|
def set_edge_label(self, edge, label):
"""
Set the label of an edge.
@type edge: edge
@param edge: One edge.
@type label: string
@param label: Edge label.
"""
self.set_edge_properties(edge, label=label)
if not self.DIRECTED:
self.set_edge_properties((edge[1], edge[0]), label=label) # depends on [control=['if'], data=[]]
|
async def change_presence(self, *, activity=None, status=None, afk=False, shard_id=None):
"""|coro|
Changes the client's presence.
The activity parameter is a :class:`Activity` object (not a string) that represents
the activity being done currently. This could also be the slimmed down versions,
:class:`Game` and :class:`Streaming`.
Example: ::
game = discord.Game("with the API")
await client.change_presence(status=discord.Status.idle, activity=game)
Parameters
----------
activity: Optional[Union[:class:`Game`, :class:`Streaming`, :class:`Activity`]]
The activity being done. ``None`` if no currently active activity is done.
status: Optional[:class:`Status`]
Indicates what status to change to. If None, then
:attr:`Status.online` is used.
afk: :class:`bool`
Indicates if you are going AFK. This allows the discord
client to know how to handle push notifications better
for you in case you are actually idle and not lying.
shard_id: Optional[:class:`int`]
The shard_id to change the presence to. If not specified
or ``None``, then it will change the presence of every
shard the bot can see.
Raises
------
InvalidArgument
If the ``activity`` parameter is not of proper type.
"""
if status is None:
status = 'online'
status_enum = Status.online
elif status is Status.offline:
status = 'invisible'
status_enum = Status.offline
else:
status_enum = status
status = str(status)
if shard_id is None:
for shard in self.shards.values():
await shard.ws.change_presence(activity=activity, status=status, afk=afk)
guilds = self._connection.guilds
else:
shard = self.shards[shard_id]
await shard.ws.change_presence(activity=activity, status=status, afk=afk)
guilds = [g for g in self._connection.guilds if g.shard_id == shard_id]
for guild in guilds:
me = guild.me
if me is None:
continue
me.activities = (activity,)
me.status = status_enum
|
<ast.AsyncFunctionDef object at 0x7da1b1f25f60>
|
keyword[async] keyword[def] identifier[change_presence] ( identifier[self] ,*, identifier[activity] = keyword[None] , identifier[status] = keyword[None] , identifier[afk] = keyword[False] , identifier[shard_id] = keyword[None] ):
literal[string]
keyword[if] identifier[status] keyword[is] keyword[None] :
identifier[status] = literal[string]
identifier[status_enum] = identifier[Status] . identifier[online]
keyword[elif] identifier[status] keyword[is] identifier[Status] . identifier[offline] :
identifier[status] = literal[string]
identifier[status_enum] = identifier[Status] . identifier[offline]
keyword[else] :
identifier[status_enum] = identifier[status]
identifier[status] = identifier[str] ( identifier[status] )
keyword[if] identifier[shard_id] keyword[is] keyword[None] :
keyword[for] identifier[shard] keyword[in] identifier[self] . identifier[shards] . identifier[values] ():
keyword[await] identifier[shard] . identifier[ws] . identifier[change_presence] ( identifier[activity] = identifier[activity] , identifier[status] = identifier[status] , identifier[afk] = identifier[afk] )
identifier[guilds] = identifier[self] . identifier[_connection] . identifier[guilds]
keyword[else] :
identifier[shard] = identifier[self] . identifier[shards] [ identifier[shard_id] ]
keyword[await] identifier[shard] . identifier[ws] . identifier[change_presence] ( identifier[activity] = identifier[activity] , identifier[status] = identifier[status] , identifier[afk] = identifier[afk] )
identifier[guilds] =[ identifier[g] keyword[for] identifier[g] keyword[in] identifier[self] . identifier[_connection] . identifier[guilds] keyword[if] identifier[g] . identifier[shard_id] == identifier[shard_id] ]
keyword[for] identifier[guild] keyword[in] identifier[guilds] :
identifier[me] = identifier[guild] . identifier[me]
keyword[if] identifier[me] keyword[is] keyword[None] :
keyword[continue]
identifier[me] . identifier[activities] =( identifier[activity] ,)
identifier[me] . identifier[status] = identifier[status_enum]
|
async def change_presence(self, *, activity=None, status=None, afk=False, shard_id=None):
"""|coro|
Changes the client's presence.
The activity parameter is a :class:`Activity` object (not a string) that represents
the activity being done currently. This could also be the slimmed down versions,
:class:`Game` and :class:`Streaming`.
Example: ::
game = discord.Game("with the API")
await client.change_presence(status=discord.Status.idle, activity=game)
Parameters
----------
activity: Optional[Union[:class:`Game`, :class:`Streaming`, :class:`Activity`]]
The activity being done. ``None`` if no currently active activity is done.
status: Optional[:class:`Status`]
Indicates what status to change to. If None, then
:attr:`Status.online` is used.
afk: :class:`bool`
Indicates if you are going AFK. This allows the discord
client to know how to handle push notifications better
for you in case you are actually idle and not lying.
shard_id: Optional[:class:`int`]
The shard_id to change the presence to. If not specified
or ``None``, then it will change the presence of every
shard the bot can see.
Raises
------
InvalidArgument
If the ``activity`` parameter is not of proper type.
"""
if status is None:
status = 'online'
status_enum = Status.online # depends on [control=['if'], data=['status']]
elif status is Status.offline:
status = 'invisible'
status_enum = Status.offline # depends on [control=['if'], data=['status']]
else:
status_enum = status
status = str(status)
if shard_id is None:
for shard in self.shards.values():
await shard.ws.change_presence(activity=activity, status=status, afk=afk) # depends on [control=['for'], data=['shard']]
guilds = self._connection.guilds # depends on [control=['if'], data=[]]
else:
shard = self.shards[shard_id]
await shard.ws.change_presence(activity=activity, status=status, afk=afk)
guilds = [g for g in self._connection.guilds if g.shard_id == shard_id]
for guild in guilds:
me = guild.me
if me is None:
continue # depends on [control=['if'], data=[]]
me.activities = (activity,)
me.status = status_enum # depends on [control=['for'], data=['guild']]
|
def create(self):
""" Override method for creating FormBaseNew form """
self.add_handlers({'^T': self.quit, '^Q': self.quit,
'^V': self.toggle_view})
self.add(npyscreen.TitleFixedText, name=self.action['title'], value='')
response = self.action['api_action'].inventory(choices=['repos',
'tools',
'images',
'built',
'running'])
if response[0]:
inventory = response[1]
if len(inventory['repos']) == 0:
value = 'No tools were found.\n'
else:
value = 'Tools for all groups found:\n'
tools = None
if inventory['tools']:
tools = inventory['tools']
for repo in inventory['repos']:
s_value = ''
repo_name = repo.rsplit('/', 2)[1:]
if len(repo_name) == 1:
repo_name = repo.split('/')
if tools:
p_value = '\n Plugin: ' + repo + '\n'
for tool in tools:
t_name = tool.split(':')
if (t_name[0] == repo_name[0] and
t_name[1] == repo_name[1]):
s_value += ' ' + tools[tool] + '\n Built: '
s_value += inventory['built'][tool] + '\n'
s_value += ' Image name: '
s_value += inventory['images'][tool] + '\n'
s_value += ' Status: '
s_value += inventory['running'][tool] + '\n'
if s_value:
value += p_value + s_value
else:
value = 'There was an issue with ' + self.action['name']
value += ' retrieval:\n' + str(response[1])
value += '\nPlease see vent.log for more details.'
self.all_tools = value.split('\n')
self.display_val = self.add(npyscreen.Pager, values=value.split('\n'))
|
def function[create, parameter[self]]:
constant[ Override method for creating FormBaseNew form ]
call[name[self].add_handlers, parameter[dictionary[[<ast.Constant object at 0x7da2047eb400>, <ast.Constant object at 0x7da2047e85e0>, <ast.Constant object at 0x7da2047ea1a0>], [<ast.Attribute object at 0x7da2047eb880>, <ast.Attribute object at 0x7da2047e92d0>, <ast.Attribute object at 0x7da2047e9ba0>]]]]
call[name[self].add, parameter[name[npyscreen].TitleFixedText]]
variable[response] assign[=] call[call[name[self].action][constant[api_action]].inventory, parameter[]]
if call[name[response]][constant[0]] begin[:]
variable[inventory] assign[=] call[name[response]][constant[1]]
if compare[call[name[len], parameter[call[name[inventory]][constant[repos]]]] equal[==] constant[0]] begin[:]
variable[value] assign[=] constant[No tools were found.
]
variable[tools] assign[=] constant[None]
if call[name[inventory]][constant[tools]] begin[:]
variable[tools] assign[=] call[name[inventory]][constant[tools]]
for taget[name[repo]] in starred[call[name[inventory]][constant[repos]]] begin[:]
variable[s_value] assign[=] constant[]
variable[repo_name] assign[=] call[call[name[repo].rsplit, parameter[constant[/], constant[2]]]][<ast.Slice object at 0x7da20e963580>]
if compare[call[name[len], parameter[name[repo_name]]] equal[==] constant[1]] begin[:]
variable[repo_name] assign[=] call[name[repo].split, parameter[constant[/]]]
if name[tools] begin[:]
variable[p_value] assign[=] binary_operation[binary_operation[constant[
Plugin: ] + name[repo]] + constant[
]]
for taget[name[tool]] in starred[name[tools]] begin[:]
variable[t_name] assign[=] call[name[tool].split, parameter[constant[:]]]
if <ast.BoolOp object at 0x7da20c7c96c0> begin[:]
<ast.AugAssign object at 0x7da2047e96c0>
<ast.AugAssign object at 0x7da2047e8fd0>
<ast.AugAssign object at 0x7da2047e9840>
<ast.AugAssign object at 0x7da2047ea650>
<ast.AugAssign object at 0x7da2047eafb0>
<ast.AugAssign object at 0x7da2047eb460>
if name[s_value] begin[:]
<ast.AugAssign object at 0x7da2047ea8c0>
name[self].all_tools assign[=] call[name[value].split, parameter[constant[
]]]
name[self].display_val assign[=] call[name[self].add, parameter[name[npyscreen].Pager]]
|
keyword[def] identifier[create] ( identifier[self] ):
literal[string]
identifier[self] . identifier[add_handlers] ({ literal[string] : identifier[self] . identifier[quit] , literal[string] : identifier[self] . identifier[quit] ,
literal[string] : identifier[self] . identifier[toggle_view] })
identifier[self] . identifier[add] ( identifier[npyscreen] . identifier[TitleFixedText] , identifier[name] = identifier[self] . identifier[action] [ literal[string] ], identifier[value] = literal[string] )
identifier[response] = identifier[self] . identifier[action] [ literal[string] ]. identifier[inventory] ( identifier[choices] =[ literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ])
keyword[if] identifier[response] [ literal[int] ]:
identifier[inventory] = identifier[response] [ literal[int] ]
keyword[if] identifier[len] ( identifier[inventory] [ literal[string] ])== literal[int] :
identifier[value] = literal[string]
keyword[else] :
identifier[value] = literal[string]
identifier[tools] = keyword[None]
keyword[if] identifier[inventory] [ literal[string] ]:
identifier[tools] = identifier[inventory] [ literal[string] ]
keyword[for] identifier[repo] keyword[in] identifier[inventory] [ literal[string] ]:
identifier[s_value] = literal[string]
identifier[repo_name] = identifier[repo] . identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] :]
keyword[if] identifier[len] ( identifier[repo_name] )== literal[int] :
identifier[repo_name] = identifier[repo] . identifier[split] ( literal[string] )
keyword[if] identifier[tools] :
identifier[p_value] = literal[string] + identifier[repo] + literal[string]
keyword[for] identifier[tool] keyword[in] identifier[tools] :
identifier[t_name] = identifier[tool] . identifier[split] ( literal[string] )
keyword[if] ( identifier[t_name] [ literal[int] ]== identifier[repo_name] [ literal[int] ] keyword[and]
identifier[t_name] [ literal[int] ]== identifier[repo_name] [ literal[int] ]):
identifier[s_value] += literal[string] + identifier[tools] [ identifier[tool] ]+ literal[string]
identifier[s_value] += identifier[inventory] [ literal[string] ][ identifier[tool] ]+ literal[string]
identifier[s_value] += literal[string]
identifier[s_value] += identifier[inventory] [ literal[string] ][ identifier[tool] ]+ literal[string]
identifier[s_value] += literal[string]
identifier[s_value] += identifier[inventory] [ literal[string] ][ identifier[tool] ]+ literal[string]
keyword[if] identifier[s_value] :
identifier[value] += identifier[p_value] + identifier[s_value]
keyword[else] :
identifier[value] = literal[string] + identifier[self] . identifier[action] [ literal[string] ]
identifier[value] += literal[string] + identifier[str] ( identifier[response] [ literal[int] ])
identifier[value] += literal[string]
identifier[self] . identifier[all_tools] = identifier[value] . identifier[split] ( literal[string] )
identifier[self] . identifier[display_val] = identifier[self] . identifier[add] ( identifier[npyscreen] . identifier[Pager] , identifier[values] = identifier[value] . identifier[split] ( literal[string] ))
|
def create(self):
""" Override method for creating FormBaseNew form """
self.add_handlers({'^T': self.quit, '^Q': self.quit, '^V': self.toggle_view})
self.add(npyscreen.TitleFixedText, name=self.action['title'], value='')
response = self.action['api_action'].inventory(choices=['repos', 'tools', 'images', 'built', 'running'])
if response[0]:
inventory = response[1]
if len(inventory['repos']) == 0:
value = 'No tools were found.\n' # depends on [control=['if'], data=[]]
else:
value = 'Tools for all groups found:\n'
tools = None
if inventory['tools']:
tools = inventory['tools'] # depends on [control=['if'], data=[]]
for repo in inventory['repos']:
s_value = ''
repo_name = repo.rsplit('/', 2)[1:]
if len(repo_name) == 1:
repo_name = repo.split('/') # depends on [control=['if'], data=[]]
if tools:
p_value = '\n Plugin: ' + repo + '\n'
for tool in tools:
t_name = tool.split(':')
if t_name[0] == repo_name[0] and t_name[1] == repo_name[1]:
s_value += ' ' + tools[tool] + '\n Built: '
s_value += inventory['built'][tool] + '\n'
s_value += ' Image name: '
s_value += inventory['images'][tool] + '\n'
s_value += ' Status: '
s_value += inventory['running'][tool] + '\n' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['tool']] # depends on [control=['if'], data=[]]
if s_value:
value += p_value + s_value # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['repo']] # depends on [control=['if'], data=[]]
else:
value = 'There was an issue with ' + self.action['name']
value += ' retrieval:\n' + str(response[1])
value += '\nPlease see vent.log for more details.'
self.all_tools = value.split('\n')
self.display_val = self.add(npyscreen.Pager, values=value.split('\n'))
|
def _parse_match(self, match):
"""
If there is a group dict, return the dict;
even if there's only one value in the dict, return a dictionary;
If there is a group in match, return the group;
if there is only one value in the group, return the value;
if there has no group, return the whole matched string;
if there are many groups, return a tuple;
:param match:
:return:
"""
if not match:
if self.default:
return self.default
else:
raise NothingMatchedError(
f"Extract `{self._re_select}` error, "
f"please check selector or set parameter named `default`")
else:
string = match.group()
groups = match.groups()
group_dict = match.groupdict()
if group_dict:
return group_dict
if groups:
return groups[0] if len(groups) == 1 else groups
return string
|
def function[_parse_match, parameter[self, match]]:
constant[
If there is a group dict, return the dict;
even if there's only one value in the dict, return a dictionary;
If there is a group in match, return the group;
if there is only one value in the group, return the value;
if there has no group, return the whole matched string;
if there are many groups, return a tuple;
:param match:
:return:
]
if <ast.UnaryOp object at 0x7da1b23476d0> begin[:]
if name[self].default begin[:]
return[name[self].default]
|
keyword[def] identifier[_parse_match] ( identifier[self] , identifier[match] ):
literal[string]
keyword[if] keyword[not] identifier[match] :
keyword[if] identifier[self] . identifier[default] :
keyword[return] identifier[self] . identifier[default]
keyword[else] :
keyword[raise] identifier[NothingMatchedError] (
literal[string]
literal[string] )
keyword[else] :
identifier[string] = identifier[match] . identifier[group] ()
identifier[groups] = identifier[match] . identifier[groups] ()
identifier[group_dict] = identifier[match] . identifier[groupdict] ()
keyword[if] identifier[group_dict] :
keyword[return] identifier[group_dict]
keyword[if] identifier[groups] :
keyword[return] identifier[groups] [ literal[int] ] keyword[if] identifier[len] ( identifier[groups] )== literal[int] keyword[else] identifier[groups]
keyword[return] identifier[string]
|
def _parse_match(self, match):
"""
If there is a group dict, return the dict;
even if there's only one value in the dict, return a dictionary;
If there is a group in match, return the group;
if there is only one value in the group, return the value;
if there has no group, return the whole matched string;
if there are many groups, return a tuple;
:param match:
:return:
"""
if not match:
if self.default:
return self.default # depends on [control=['if'], data=[]]
else:
raise NothingMatchedError(f'Extract `{self._re_select}` error, please check selector or set parameter named `default`') # depends on [control=['if'], data=[]]
else:
string = match.group()
groups = match.groups()
group_dict = match.groupdict()
if group_dict:
return group_dict # depends on [control=['if'], data=[]]
if groups:
return groups[0] if len(groups) == 1 else groups # depends on [control=['if'], data=[]]
return string
|
def has_hints(self):
    """True if self provides hints on the cutoff energy.

    A hint must exist (not None) for every accuracy level; a missing
    accuracy key counts as "no hints".
    """
    try:
        return all(self.hint_for_accuracy(acc) is not None
                   for acc in ("low", "normal", "high"))
    except KeyError:
        return False
|
def function[has_hints, parameter[self]]:
constant[
True if self provides hints on the cutoff energy.
]
for taget[name[acc]] in starred[list[[<ast.Constant object at 0x7da2047eac50>, <ast.Constant object at 0x7da2047e92a0>, <ast.Constant object at 0x7da2047e9600>]]] begin[:]
<ast.Try object at 0x7da2047e9450>
return[constant[True]]
|
keyword[def] identifier[has_hints] ( identifier[self] ):
literal[string]
keyword[for] identifier[acc] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
keyword[try] :
keyword[if] identifier[self] . identifier[hint_for_accuracy] ( identifier[acc] ) keyword[is] keyword[None] :
keyword[return] keyword[False]
keyword[except] identifier[KeyError] :
keyword[return] keyword[False]
keyword[return] keyword[True]
|
def has_hints(self):
"""
True if self provides hints on the cutoff energy.
"""
for acc in ['low', 'normal', 'high']:
try:
if self.hint_for_accuracy(acc) is None:
return False # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except KeyError:
return False # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['acc']]
return True
|
def create_parser(self, subparsers):
    """Register this command's subparser on *subparsers* and return it.

    The parser is stored on ``self.parser``, the command's own arguments
    are attached via ``add_arguments``, and ``func`` defaults to
    ``self.handle`` so the dispatcher can invoke it after parsing.
    """
    self.parser = subparsers.add_parser(
        self.name, help=self.help, parents=self.parents)
    self.add_arguments()
    self.parser.set_defaults(func=self.handle)
    return self.parser
|
def function[create_parser, parameter[self, subparsers]]:
constant[
Creates the subparser for this particular command
]
name[self].parser assign[=] call[name[subparsers].add_parser, parameter[name[self].name]]
call[name[self].add_arguments, parameter[]]
call[name[self].parser.set_defaults, parameter[]]
return[name[self].parser]
|
keyword[def] identifier[create_parser] ( identifier[self] , identifier[subparsers] ):
literal[string]
identifier[self] . identifier[parser] = identifier[subparsers] . identifier[add_parser] ( identifier[self] . identifier[name] , identifier[help] = identifier[self] . identifier[help] , identifier[parents] = identifier[self] . identifier[parents] )
identifier[self] . identifier[add_arguments] ()
identifier[self] . identifier[parser] . identifier[set_defaults] ( identifier[func] = identifier[self] . identifier[handle] )
keyword[return] identifier[self] . identifier[parser]
|
def create_parser(self, subparsers):
"""
Creates the subparser for this particular command
"""
self.parser = subparsers.add_parser(self.name, help=self.help, parents=self.parents)
self.add_arguments()
self.parser.set_defaults(func=self.handle)
return self.parser
|
def connect(cls, pipeline_method, name=None):
    """
    Low level logic to bind a callable method to a name.
    Don't call this directly unless you know what you are doing.
    :param pipeline_method: callable
    :param name: str optional
    :return: None
    """
    # Refuse to rebind a name to a pipeline backed by a different pool.
    new_pool = pipeline_method().connection_pool
    try:
        current_pool = cls.get(name).connection_pool
        if current_pool != new_pool:
            raise AlreadyConnected("can't change connection for %s" % name)
    except InvalidPipeline:
        # Nothing registered under this name yet; binding is safe.
        pass
    cls.connections[name] = pipeline_method
|
def function[connect, parameter[cls, pipeline_method, name]]:
constant[
Low level logic to bind a callable method to a name.
Don't call this directly unless you know what you are doing.
:param pipeline_method: callable
:param name: str optional
:return: None
]
variable[new_pool] assign[=] call[name[pipeline_method], parameter[]].connection_pool
<ast.Try object at 0x7da1b0a4d870>
call[name[cls].connections][name[name]] assign[=] name[pipeline_method]
|
keyword[def] identifier[connect] ( identifier[cls] , identifier[pipeline_method] , identifier[name] = keyword[None] ):
literal[string]
identifier[new_pool] = identifier[pipeline_method] (). identifier[connection_pool]
keyword[try] :
keyword[if] identifier[cls] . identifier[get] ( identifier[name] ). identifier[connection_pool] != identifier[new_pool] :
keyword[raise] identifier[AlreadyConnected] ( literal[string] % identifier[name] )
keyword[except] identifier[InvalidPipeline] :
keyword[pass]
identifier[cls] . identifier[connections] [ identifier[name] ]= identifier[pipeline_method]
|
def connect(cls, pipeline_method, name=None):
"""
Low level logic to bind a callable method to a name.
Don't call this directly unless you know what you are doing.
:param pipeline_method: callable
:param name: str optional
:return: None
"""
new_pool = pipeline_method().connection_pool
try:
if cls.get(name).connection_pool != new_pool:
raise AlreadyConnected("can't change connection for %s" % name) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except InvalidPipeline:
pass # depends on [control=['except'], data=[]]
cls.connections[name] = pipeline_method
|
def decorate_obj(parent, n, o, otype, recurse=True, redecorate=False):
    """Adds the decoration for automated logging to the specified object, if it
    hasn't already been done.
    Args:
        parent: object that `o` belongs to.
        n (str): name in the parent's dictionary.
        o (type): instance of the object's type.
        otype (str): one of ['classes', 'functions', 'methods', 'modules'];
          specifies which group the object belongs to.
        recurse (bool): when True, the objects methods and functions are also
          decorated recursively.
        redecorate (bool): when True, decorate again even if this object's
          id() already appears in the module-level `_decorated_o` registry.
    Examples:
        Decorate the function `mymod.myfunc` to log automatically to the
        database.
        >>> from acorn.logging.decoration import decorate_obj
        >>> import mymod
        >>> decorate_obj(mymod, "myfunc", mymod.myfunc, "functions")
    """
    # Mutates the module-level registries: _decor_count (per-package
    # counters) and _decorated_o (id(o) -> decorator, or None when the
    # attribute could not be set).
    global _decor_count, _decorated_o
    from inspect import isclass, isfunction, ismodule
    # The fully-qualified-name lookup only needs a parent module/class.
    pmodule = parent if ismodule(parent) or isclass(parent) else None
    fqdn = _fqdn(o, recheck=True, pmodule=pmodule)
    if fqdn is None:
        #This object didn't have a name, which means we can't extend it or
        #track it anyway.
        return
    package = fqdn.split('.')[0]
    d = _get_stack_depth(package, fqdn)
    # Only act for tracked packages, and skip objects already decorated
    # unless the caller explicitly asked to redecorate.
    if (package in _decorated_o and
        (id(o) not in _decorated_o[package] or redecorate)):
        decor = None
        if hasattr(o, "__call__") and otype != "classes":
            #calling on class types is handled by the construction decorator
            #below.
            cdecor = CallingDecorator(o)
            if isclass(parent):
                clog = cdecor(fqdn, package, parent, d)
            else:
                clog = cdecor(fqdn, package, None, d)
            #We can't update the attributes of the static methods (it just
            #produces errors), so we do what we can before that.
            msg.std("Setting decorator on {}.".format(fqdn), 4)
            _update_attrs(clog, o)
            # `im_self` is the Python 2 bound-method attribute; when the
            # callable is bound to `parent` itself, re-wrap as staticmethod
            # so the assignment below does not rebind it.
            if ((hasattr(o, "im_self") and o.im_self is parent)):
                clog = staticmethod(clog)
            setok = _safe_setattr(parent, n, clog)
            if setok:
                decor = cdecor
                msg.okay("Set calling logger on {}: {}.".format(n, fqdn), 3)
            # NOTE(review): this counter increments whether or not setattr
            # succeeded — presumably counting attempts; confirm intent.
            _decor_count[package][0] += 1
        else:
            # Non-callable (or a class): just tag it so we know it was seen.
            setok = _safe_setattr(o, "__acorn__", None)
            _decor_count[package][2] += 1
        if otype == "classes" and setok:
            if hasattr(o, "__new__"):
                # Keep the original constructor reachable as __old__ before
                # replacing __new__ with the creation logger.
                setattr(o, "__old__", staticmethod(o.__new__))
                crelog = creationlog(o, package, d)
                setok = _safe_setattr(o, "__new__", creationlog(o, package, d))
                if setok:
                    decor = crelog
                    msg.gen("Set creation logger on {}: {}.".format(n, fqdn),3)
                _decor_count[package][0] += 1
            #else: must have only static methods and no instances.
        if setok:
            _decorated_o[package][id(o)] = decor
        else:
            # Record the failure so we don't retry on every call.
            _decorated_o[package][id(o)] = None
        #We don't need to bother recursing for those modules/classes that
        #can't have their attributes set, since their members will have the
        #same restrictions.
        if setok and otype in ["classes", "modules"]:
            #These types can be further decorated; let's traverse their members
            #and try to decorate those as well.
            splits = _split_object(o, package)
            for ot, ol in splits.items():
                for nobj, obj in ol:
                    decorate_obj(o, nobj, obj, ot)
    elif otype != "classes" and package in _decorated_o:
        #Even though the object with that id() has been decorated, it doesn't
        #mean that the parent has had its attribute overwritten to point to the
        #decorated object. This happens with instance methods on different
        #classes that are implemented by another generic method.
        target = _decorated_o[package][id(o)]
        child = getattr(parent, n)
        if target is not None:
            # Re-apply the previously created decorator to this parent.
            clog = target(fqdn, package, parent)
            _safe_setattr(clog, "__acorn__", o)
            _update_attrs(clog, o)
            setok = _safe_setattr(parent, n, clog)
            msg.okay("Set existing calling logger on {}: {}.".format(n,fqdn), 4)
|
def function[decorate_obj, parameter[parent, n, o, otype, recurse, redecorate]]:
constant[Adds the decoration for automated logging to the specified object, if it
hasn't already been done.
Args:
parent: object that `o` belongs to.
n (str): name in the parent's dictionary.
o (type): instance of the object's type.
otype (str): one of ['classes', 'functions', 'methods', 'modules'];
specifies which group the object belongs to.
recurse (bool): when True, the objects methods and functions are also
decorated recursively.
Examples:
Decorate the function `mymod.myfunc` to log automatically to the
database.
>>> from acorn.logging.decoration import decorate_obj
>>> import mymod
>>> decorate_obj(mymod, "myfunc", mymod.myfunc, "functions")
]
<ast.Global object at 0x7da20c7cb0a0>
from relative_module[inspect] import module[isclass], module[isfunction], module[ismodule]
variable[pmodule] assign[=] <ast.IfExp object at 0x7da20c7c9b70>
variable[fqdn] assign[=] call[name[_fqdn], parameter[name[o]]]
if compare[name[fqdn] is constant[None]] begin[:]
return[None]
variable[package] assign[=] call[call[name[fqdn].split, parameter[constant[.]]]][constant[0]]
variable[d] assign[=] call[name[_get_stack_depth], parameter[name[package], name[fqdn]]]
if <ast.BoolOp object at 0x7da20c7caf80> begin[:]
variable[decor] assign[=] constant[None]
if <ast.BoolOp object at 0x7da20c7cae60> begin[:]
variable[cdecor] assign[=] call[name[CallingDecorator], parameter[name[o]]]
if call[name[isclass], parameter[name[parent]]] begin[:]
variable[clog] assign[=] call[name[cdecor], parameter[name[fqdn], name[package], name[parent], name[d]]]
call[name[msg].std, parameter[call[constant[Setting decorator on {}.].format, parameter[name[fqdn]]], constant[4]]]
call[name[_update_attrs], parameter[name[clog], name[o]]]
if <ast.BoolOp object at 0x7da20c7ca470> begin[:]
variable[clog] assign[=] call[name[staticmethod], parameter[name[clog]]]
variable[setok] assign[=] call[name[_safe_setattr], parameter[name[parent], name[n], name[clog]]]
if name[setok] begin[:]
variable[decor] assign[=] name[cdecor]
call[name[msg].okay, parameter[call[constant[Set calling logger on {}: {}.].format, parameter[name[n], name[fqdn]]], constant[3]]]
<ast.AugAssign object at 0x7da20c7ca5c0>
if <ast.BoolOp object at 0x7da20c7c8700> begin[:]
if call[name[hasattr], parameter[name[o], constant[__new__]]] begin[:]
call[name[setattr], parameter[name[o], constant[__old__], call[name[staticmethod], parameter[name[o].__new__]]]]
variable[crelog] assign[=] call[name[creationlog], parameter[name[o], name[package], name[d]]]
variable[setok] assign[=] call[name[_safe_setattr], parameter[name[o], constant[__new__], call[name[creationlog], parameter[name[o], name[package], name[d]]]]]
if name[setok] begin[:]
variable[decor] assign[=] name[crelog]
call[name[msg].gen, parameter[call[constant[Set creation logger on {}: {}.].format, parameter[name[n], name[fqdn]]], constant[3]]]
<ast.AugAssign object at 0x7da18f722470>
if name[setok] begin[:]
call[call[name[_decorated_o]][name[package]]][call[name[id], parameter[name[o]]]] assign[=] name[decor]
if <ast.BoolOp object at 0x7da18eb54ac0> begin[:]
variable[splits] assign[=] call[name[_split_object], parameter[name[o], name[package]]]
for taget[tuple[[<ast.Name object at 0x7da18eb56530>, <ast.Name object at 0x7da18eb55d20>]]] in starred[call[name[splits].items, parameter[]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18eb56e00>, <ast.Name object at 0x7da18eb54b50>]]] in starred[name[ol]] begin[:]
call[name[decorate_obj], parameter[name[o], name[nobj], name[obj], name[ot]]]
|
keyword[def] identifier[decorate_obj] ( identifier[parent] , identifier[n] , identifier[o] , identifier[otype] , identifier[recurse] = keyword[True] , identifier[redecorate] = keyword[False] ):
literal[string]
keyword[global] identifier[_decor_count] , identifier[_decorated_o]
keyword[from] identifier[inspect] keyword[import] identifier[isclass] , identifier[isfunction] , identifier[ismodule]
identifier[pmodule] = identifier[parent] keyword[if] identifier[ismodule] ( identifier[parent] ) keyword[or] identifier[isclass] ( identifier[parent] ) keyword[else] keyword[None]
identifier[fqdn] = identifier[_fqdn] ( identifier[o] , identifier[recheck] = keyword[True] , identifier[pmodule] = identifier[pmodule] )
keyword[if] identifier[fqdn] keyword[is] keyword[None] :
keyword[return]
identifier[package] = identifier[fqdn] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[d] = identifier[_get_stack_depth] ( identifier[package] , identifier[fqdn] )
keyword[if] ( identifier[package] keyword[in] identifier[_decorated_o] keyword[and]
( identifier[id] ( identifier[o] ) keyword[not] keyword[in] identifier[_decorated_o] [ identifier[package] ] keyword[or] identifier[redecorate] )):
identifier[decor] = keyword[None]
keyword[if] identifier[hasattr] ( identifier[o] , literal[string] ) keyword[and] identifier[otype] != literal[string] :
identifier[cdecor] = identifier[CallingDecorator] ( identifier[o] )
keyword[if] identifier[isclass] ( identifier[parent] ):
identifier[clog] = identifier[cdecor] ( identifier[fqdn] , identifier[package] , identifier[parent] , identifier[d] )
keyword[else] :
identifier[clog] = identifier[cdecor] ( identifier[fqdn] , identifier[package] , keyword[None] , identifier[d] )
identifier[msg] . identifier[std] ( literal[string] . identifier[format] ( identifier[fqdn] ), literal[int] )
identifier[_update_attrs] ( identifier[clog] , identifier[o] )
keyword[if] (( identifier[hasattr] ( identifier[o] , literal[string] ) keyword[and] identifier[o] . identifier[im_self] keyword[is] identifier[parent] )):
identifier[clog] = identifier[staticmethod] ( identifier[clog] )
identifier[setok] = identifier[_safe_setattr] ( identifier[parent] , identifier[n] , identifier[clog] )
keyword[if] identifier[setok] :
identifier[decor] = identifier[cdecor]
identifier[msg] . identifier[okay] ( literal[string] . identifier[format] ( identifier[n] , identifier[fqdn] ), literal[int] )
identifier[_decor_count] [ identifier[package] ][ literal[int] ]+= literal[int]
keyword[else] :
identifier[setok] = identifier[_safe_setattr] ( identifier[o] , literal[string] , keyword[None] )
identifier[_decor_count] [ identifier[package] ][ literal[int] ]+= literal[int]
keyword[if] identifier[otype] == literal[string] keyword[and] identifier[setok] :
keyword[if] identifier[hasattr] ( identifier[o] , literal[string] ):
identifier[setattr] ( identifier[o] , literal[string] , identifier[staticmethod] ( identifier[o] . identifier[__new__] ))
identifier[crelog] = identifier[creationlog] ( identifier[o] , identifier[package] , identifier[d] )
identifier[setok] = identifier[_safe_setattr] ( identifier[o] , literal[string] , identifier[creationlog] ( identifier[o] , identifier[package] , identifier[d] ))
keyword[if] identifier[setok] :
identifier[decor] = identifier[crelog]
identifier[msg] . identifier[gen] ( literal[string] . identifier[format] ( identifier[n] , identifier[fqdn] ), literal[int] )
identifier[_decor_count] [ identifier[package] ][ literal[int] ]+= literal[int]
keyword[if] identifier[setok] :
identifier[_decorated_o] [ identifier[package] ][ identifier[id] ( identifier[o] )]= identifier[decor]
keyword[else] :
identifier[_decorated_o] [ identifier[package] ][ identifier[id] ( identifier[o] )]= keyword[None]
keyword[if] identifier[setok] keyword[and] identifier[otype] keyword[in] [ literal[string] , literal[string] ]:
identifier[splits] = identifier[_split_object] ( identifier[o] , identifier[package] )
keyword[for] identifier[ot] , identifier[ol] keyword[in] identifier[splits] . identifier[items] ():
keyword[for] identifier[nobj] , identifier[obj] keyword[in] identifier[ol] :
identifier[decorate_obj] ( identifier[o] , identifier[nobj] , identifier[obj] , identifier[ot] )
keyword[elif] identifier[otype] != literal[string] keyword[and] identifier[package] keyword[in] identifier[_decorated_o] :
identifier[target] = identifier[_decorated_o] [ identifier[package] ][ identifier[id] ( identifier[o] )]
identifier[child] = identifier[getattr] ( identifier[parent] , identifier[n] )
keyword[if] identifier[target] keyword[is] keyword[not] keyword[None] :
identifier[clog] = identifier[target] ( identifier[fqdn] , identifier[package] , identifier[parent] )
identifier[_safe_setattr] ( identifier[clog] , literal[string] , identifier[o] )
identifier[_update_attrs] ( identifier[clog] , identifier[o] )
identifier[setok] = identifier[_safe_setattr] ( identifier[parent] , identifier[n] , identifier[clog] )
identifier[msg] . identifier[okay] ( literal[string] . identifier[format] ( identifier[n] , identifier[fqdn] ), literal[int] )
|
def decorate_obj(parent, n, o, otype, recurse=True, redecorate=False):
"""Adds the decoration for automated logging to the specified object, if it
hasn't already been done.
Args:
parent: object that `o` belongs to.
n (str): name in the parent's dictionary.
o (type): instance of the object's type.
otype (str): one of ['classes', 'functions', 'methods', 'modules'];
specifies which group the object belongs to.
recurse (bool): when True, the objects methods and functions are also
decorated recursively.
Examples:
Decorate the function `mymod.myfunc` to log automatically to the
database.
>>> from acorn.logging.decoration import decorate_obj
>>> import mymod
>>> decorate_obj(mymod, "myfunc", mymod.myfunc, "functions")
"""
global _decor_count, _decorated_o
from inspect import isclass, isfunction, ismodule
pmodule = parent if ismodule(parent) or isclass(parent) else None
fqdn = _fqdn(o, recheck=True, pmodule=pmodule)
if fqdn is None:
#This object didn't have a name, which means we can't extend it or
#track it anyway.
return # depends on [control=['if'], data=[]]
package = fqdn.split('.')[0]
d = _get_stack_depth(package, fqdn)
if package in _decorated_o and (id(o) not in _decorated_o[package] or redecorate):
decor = None
if hasattr(o, '__call__') and otype != 'classes':
#calling on class types is handled by the construction decorator
#below.
cdecor = CallingDecorator(o)
if isclass(parent):
clog = cdecor(fqdn, package, parent, d) # depends on [control=['if'], data=[]]
else:
clog = cdecor(fqdn, package, None, d)
#We can't update the attributes of the static methods (it just
#produces errors), so we do what we can before that.
msg.std('Setting decorator on {}.'.format(fqdn), 4)
_update_attrs(clog, o)
if hasattr(o, 'im_self') and o.im_self is parent:
clog = staticmethod(clog) # depends on [control=['if'], data=[]]
setok = _safe_setattr(parent, n, clog)
if setok:
decor = cdecor
msg.okay('Set calling logger on {}: {}.'.format(n, fqdn), 3) # depends on [control=['if'], data=[]]
_decor_count[package][0] += 1 # depends on [control=['if'], data=[]]
else:
setok = _safe_setattr(o, '__acorn__', None)
_decor_count[package][2] += 1
if otype == 'classes' and setok:
if hasattr(o, '__new__'):
setattr(o, '__old__', staticmethod(o.__new__))
crelog = creationlog(o, package, d)
setok = _safe_setattr(o, '__new__', creationlog(o, package, d))
if setok:
decor = crelog
msg.gen('Set creation logger on {}: {}.'.format(n, fqdn), 3) # depends on [control=['if'], data=[]]
_decor_count[package][0] += 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
#else: must have only static methods and no instances.
if setok:
_decorated_o[package][id(o)] = decor # depends on [control=['if'], data=[]]
else:
_decorated_o[package][id(o)] = None
#We don't need to bother recursing for those modules/classes that
#can't have their attributes set, since their members will have the
#same restrictions.
if setok and otype in ['classes', 'modules']:
#These types can be further decorated; let's traverse their members
#and try to decorate those as well.
splits = _split_object(o, package)
for (ot, ol) in splits.items():
for (nobj, obj) in ol:
decorate_obj(o, nobj, obj, ot) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif otype != 'classes' and package in _decorated_o:
#Even though the object with that id() has been decorated, it doesn't
#mean that the parent has had its attribute overwritten to point to the
#decorated object. This happens with instance methods on different
#classes that are implemented by another generic method.
target = _decorated_o[package][id(o)]
child = getattr(parent, n)
if target is not None:
clog = target(fqdn, package, parent)
_safe_setattr(clog, '__acorn__', o)
_update_attrs(clog, o)
setok = _safe_setattr(parent, n, clog)
msg.okay('Set existing calling logger on {}: {}.'.format(n, fqdn), 4) # depends on [control=['if'], data=['target']] # depends on [control=['if'], data=[]]
|
def event_estimation(self, event, logliks, logsumexps, renamed=''):
    """Show the values underlying bayesian modality estimations of an event.

    Builds a ``_ModelLoglikPlotter``, renders the per-modality
    log-likelihoods and log-sum-exps for ``event`` (optionally labelled
    with ``renamed``), and returns the plotter for further inspection.
    """
    result = _ModelLoglikPlotter()
    result.plot(event, logliks, logsumexps,
                self.modality_to_color, renamed=renamed)
    return result
|
def function[event_estimation, parameter[self, event, logliks, logsumexps, renamed]]:
constant[Show the values underlying bayesian modality estimations of an event
Parameters
----------
Returns
-------
Raises
------
]
variable[plotter] assign[=] call[name[_ModelLoglikPlotter], parameter[]]
call[name[plotter].plot, parameter[name[event], name[logliks], name[logsumexps], name[self].modality_to_color]]
return[name[plotter]]
|
keyword[def] identifier[event_estimation] ( identifier[self] , identifier[event] , identifier[logliks] , identifier[logsumexps] , identifier[renamed] = literal[string] ):
literal[string]
identifier[plotter] = identifier[_ModelLoglikPlotter] ()
identifier[plotter] . identifier[plot] ( identifier[event] , identifier[logliks] , identifier[logsumexps] , identifier[self] . identifier[modality_to_color] ,
identifier[renamed] = identifier[renamed] )
keyword[return] identifier[plotter]
|
def event_estimation(self, event, logliks, logsumexps, renamed=''):
"""Show the values underlying bayesian modality estimations of an event
Parameters
----------
Returns
-------
Raises
------
"""
plotter = _ModelLoglikPlotter()
plotter.plot(event, logliks, logsumexps, self.modality_to_color, renamed=renamed)
return plotter
|
def local_1d_halo_exchange(k, v, num_w_blocks, w_dim, mask_right):
"""Halo exchange for keys and values for Local 1D attention."""
if num_w_blocks is not None:
if mask_right:
k = mtf.left_halo_exchange(k, num_w_blocks, w_dim, w_dim.size)
v = mtf.left_halo_exchange(v, num_w_blocks, w_dim, w_dim.size)
else:
k = mtf.halo_exchange(k, num_w_blocks, w_dim, w_dim.size)
v = mtf.halo_exchange(v, num_w_blocks, w_dim, w_dim.size)
else:
if mask_right:
k = mtf.pad(k, [w_dim, None], w_dim.name)
v = mtf.pad(v, [w_dim, None], w_dim.name)
else:
k = mtf.pad(k, [w_dim, w_dim], w_dim.name)
v = mtf.pad(v, [w_dim, w_dim], w_dim.name)
return k, v
|
def function[local_1d_halo_exchange, parameter[k, v, num_w_blocks, w_dim, mask_right]]:
constant[Halo exchange for keys and values for Local 1D attention.]
if compare[name[num_w_blocks] is_not constant[None]] begin[:]
if name[mask_right] begin[:]
variable[k] assign[=] call[name[mtf].left_halo_exchange, parameter[name[k], name[num_w_blocks], name[w_dim], name[w_dim].size]]
variable[v] assign[=] call[name[mtf].left_halo_exchange, parameter[name[v], name[num_w_blocks], name[w_dim], name[w_dim].size]]
return[tuple[[<ast.Name object at 0x7da18dc98cd0>, <ast.Name object at 0x7da18dc98280>]]]
|
keyword[def] identifier[local_1d_halo_exchange] ( identifier[k] , identifier[v] , identifier[num_w_blocks] , identifier[w_dim] , identifier[mask_right] ):
literal[string]
keyword[if] identifier[num_w_blocks] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[mask_right] :
identifier[k] = identifier[mtf] . identifier[left_halo_exchange] ( identifier[k] , identifier[num_w_blocks] , identifier[w_dim] , identifier[w_dim] . identifier[size] )
identifier[v] = identifier[mtf] . identifier[left_halo_exchange] ( identifier[v] , identifier[num_w_blocks] , identifier[w_dim] , identifier[w_dim] . identifier[size] )
keyword[else] :
identifier[k] = identifier[mtf] . identifier[halo_exchange] ( identifier[k] , identifier[num_w_blocks] , identifier[w_dim] , identifier[w_dim] . identifier[size] )
identifier[v] = identifier[mtf] . identifier[halo_exchange] ( identifier[v] , identifier[num_w_blocks] , identifier[w_dim] , identifier[w_dim] . identifier[size] )
keyword[else] :
keyword[if] identifier[mask_right] :
identifier[k] = identifier[mtf] . identifier[pad] ( identifier[k] ,[ identifier[w_dim] , keyword[None] ], identifier[w_dim] . identifier[name] )
identifier[v] = identifier[mtf] . identifier[pad] ( identifier[v] ,[ identifier[w_dim] , keyword[None] ], identifier[w_dim] . identifier[name] )
keyword[else] :
identifier[k] = identifier[mtf] . identifier[pad] ( identifier[k] ,[ identifier[w_dim] , identifier[w_dim] ], identifier[w_dim] . identifier[name] )
identifier[v] = identifier[mtf] . identifier[pad] ( identifier[v] ,[ identifier[w_dim] , identifier[w_dim] ], identifier[w_dim] . identifier[name] )
keyword[return] identifier[k] , identifier[v]
|
def local_1d_halo_exchange(k, v, num_w_blocks, w_dim, mask_right):
"""Halo exchange for keys and values for Local 1D attention."""
if num_w_blocks is not None:
if mask_right:
k = mtf.left_halo_exchange(k, num_w_blocks, w_dim, w_dim.size)
v = mtf.left_halo_exchange(v, num_w_blocks, w_dim, w_dim.size) # depends on [control=['if'], data=[]]
else:
k = mtf.halo_exchange(k, num_w_blocks, w_dim, w_dim.size)
v = mtf.halo_exchange(v, num_w_blocks, w_dim, w_dim.size) # depends on [control=['if'], data=['num_w_blocks']]
elif mask_right:
k = mtf.pad(k, [w_dim, None], w_dim.name)
v = mtf.pad(v, [w_dim, None], w_dim.name) # depends on [control=['if'], data=[]]
else:
k = mtf.pad(k, [w_dim, w_dim], w_dim.name)
v = mtf.pad(v, [w_dim, w_dim], w_dim.name)
return (k, v)
|
def ulocalized_time(self, time, context, request):
"""Returns the localized time in string format
"""
value = ut(time, long_format=self.show_time, time_only=False,
context=context, request=request)
return value or ""
|
def function[ulocalized_time, parameter[self, time, context, request]]:
constant[Returns the localized time in string format
]
variable[value] assign[=] call[name[ut], parameter[name[time]]]
return[<ast.BoolOp object at 0x7da18f09d300>]
|
keyword[def] identifier[ulocalized_time] ( identifier[self] , identifier[time] , identifier[context] , identifier[request] ):
literal[string]
identifier[value] = identifier[ut] ( identifier[time] , identifier[long_format] = identifier[self] . identifier[show_time] , identifier[time_only] = keyword[False] ,
identifier[context] = identifier[context] , identifier[request] = identifier[request] )
keyword[return] identifier[value] keyword[or] literal[string]
|
def ulocalized_time(self, time, context, request):
"""Returns the localized time in string format
"""
value = ut(time, long_format=self.show_time, time_only=False, context=context, request=request)
return value or ''
|
def get_content_type(self, msg, content_type="HTML"):
"""
Given an Email.Message object, gets the content-type payload
as specified by @content_type. This is the actual body of the
email.
@Params
msg - Email.Message object to get message content for
content_type - Type of content to get from the email
@Return
String content of the email in the given type
"""
if "HTML" in content_type.upper():
content_type = self.HTML
elif "PLAIN" in content_type.upper():
content_type = self.PLAIN
for part in msg.walk():
if str(part.get_content_type()) == content_type:
return str(part.get_payload(decode=True))
|
def function[get_content_type, parameter[self, msg, content_type]]:
constant[
Given an Email.Message object, gets the content-type payload
as specified by @content_type. This is the actual body of the
email.
@Params
msg - Email.Message object to get message content for
content_type - Type of content to get from the email
@Return
String content of the email in the given type
]
if compare[constant[HTML] in call[name[content_type].upper, parameter[]]] begin[:]
variable[content_type] assign[=] name[self].HTML
for taget[name[part]] in starred[call[name[msg].walk, parameter[]]] begin[:]
if compare[call[name[str], parameter[call[name[part].get_content_type, parameter[]]]] equal[==] name[content_type]] begin[:]
return[call[name[str], parameter[call[name[part].get_payload, parameter[]]]]]
|
keyword[def] identifier[get_content_type] ( identifier[self] , identifier[msg] , identifier[content_type] = literal[string] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[content_type] . identifier[upper] ():
identifier[content_type] = identifier[self] . identifier[HTML]
keyword[elif] literal[string] keyword[in] identifier[content_type] . identifier[upper] ():
identifier[content_type] = identifier[self] . identifier[PLAIN]
keyword[for] identifier[part] keyword[in] identifier[msg] . identifier[walk] ():
keyword[if] identifier[str] ( identifier[part] . identifier[get_content_type] ())== identifier[content_type] :
keyword[return] identifier[str] ( identifier[part] . identifier[get_payload] ( identifier[decode] = keyword[True] ))
|
def get_content_type(self, msg, content_type='HTML'):
"""
Given an Email.Message object, gets the content-type payload
as specified by @content_type. This is the actual body of the
email.
@Params
msg - Email.Message object to get message content for
content_type - Type of content to get from the email
@Return
String content of the email in the given type
"""
if 'HTML' in content_type.upper():
content_type = self.HTML # depends on [control=['if'], data=[]]
elif 'PLAIN' in content_type.upper():
content_type = self.PLAIN # depends on [control=['if'], data=[]]
for part in msg.walk():
if str(part.get_content_type()) == content_type:
return str(part.get_payload(decode=True)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['part']]
|
def get_name_for_model(cls, model):
""" Get a name for given class or model:
-- it's a service type for a service
-- it's a <service_type>.<resource_model_name> for a resource
"""
key = cls.get_model_key(model)
model_str = cls._get_model_str(model)
service = cls._registry[key]
if model_str in service['resources']:
return '{}.{}'.format(service['name'], service['resources'][model_str]['name'])
else:
return service['name']
|
def function[get_name_for_model, parameter[cls, model]]:
constant[ Get a name for given class or model:
-- it's a service type for a service
-- it's a <service_type>.<resource_model_name> for a resource
]
variable[key] assign[=] call[name[cls].get_model_key, parameter[name[model]]]
variable[model_str] assign[=] call[name[cls]._get_model_str, parameter[name[model]]]
variable[service] assign[=] call[name[cls]._registry][name[key]]
if compare[name[model_str] in call[name[service]][constant[resources]]] begin[:]
return[call[constant[{}.{}].format, parameter[call[name[service]][constant[name]], call[call[call[name[service]][constant[resources]]][name[model_str]]][constant[name]]]]]
|
keyword[def] identifier[get_name_for_model] ( identifier[cls] , identifier[model] ):
literal[string]
identifier[key] = identifier[cls] . identifier[get_model_key] ( identifier[model] )
identifier[model_str] = identifier[cls] . identifier[_get_model_str] ( identifier[model] )
identifier[service] = identifier[cls] . identifier[_registry] [ identifier[key] ]
keyword[if] identifier[model_str] keyword[in] identifier[service] [ literal[string] ]:
keyword[return] literal[string] . identifier[format] ( identifier[service] [ literal[string] ], identifier[service] [ literal[string] ][ identifier[model_str] ][ literal[string] ])
keyword[else] :
keyword[return] identifier[service] [ literal[string] ]
|
def get_name_for_model(cls, model):
""" Get a name for given class or model:
-- it's a service type for a service
-- it's a <service_type>.<resource_model_name> for a resource
"""
key = cls.get_model_key(model)
model_str = cls._get_model_str(model)
service = cls._registry[key]
if model_str in service['resources']:
return '{}.{}'.format(service['name'], service['resources'][model_str]['name']) # depends on [control=['if'], data=['model_str']]
else:
return service['name']
|
def plot_isoratios(self,xiso,yiso,fign=1,spec=None,deltax=True,deltay=True,logx=False,logy=False,
title=None,legend=None,legloc='lower right',errbar=True,dcycle=500,addiso=None,
co_toggle='c',cust_toggle=None,shift=0,weighting=None,zoneselect=None,iniabufile='iniab2.0E-02GN93.ppn',
plt_sparse=1,plt_symb='o',plt_col='k',plt_lt='-',plt_lw=1.,alpha_dum=1.,plt_massrange=False,plt_show=True,
figsave=False):
'''
This is the new routine to plot isotopic ratios for ALL input. rt, June 2014
Parameters:
-----------
xiso : np.array
x data to plot. This can be an array or a list of arrays, depending on who calls the routine
yiso : np.array
y data to plot. This can be an array or a list of arrays, depending on who calls the routine
fign : integer, optional
Figure number
spec : string, optional
What specifications do you want to do when coming from nugridse models. Choose 'surf' for
surface models or 'exp' for explosions (out files)
deltax : boolean, optional
X axis in delta values?
deltay : boolean, optional
Y axis in delta values?
logx : boolean, optional
Logarithmic x axis?
logy : boolean, optional
Logarithmic y axis?
title : string, optional
Title for your plot
legend : string, optional
Legend for your model / grains. For grains the legend is automatically taken from the
grain class
legloc : string / integer, optional
Location of the legend, use matplotlib standard. Use None to not plot legend if plotted
by default, e.g., from grain class routine.
errbar : boolean, optional
Error bars on grain data?
dcycle : integer, optional
Difference between cycles to take for thermal pulse searching, if searching is
deactivated, dcycle describes how often cycles are sampled. The default is 500.
addiso : list, optional
For explosive models. Add an isotope. Format ['C-12', 0.5 ,'N-12'] to add N12
to C12 and multiply it with a factor of 0.5. Multiple isotopes can be added, the
factor is optional and does not have to be given. Isotopes can be added to other
isotopes as well, i.e., [['C-12', 'N-12'], ['C-13', 'N-13']]. The default None.
Notice that while addiso = [['N-14','O-14'],['N-14',fractionation,'C-14']]
works, other options like addiso = [['N-14','O-14',fractionation,'C-14']] or
addiso = [['N-14',1,'O-14',fractionation,'C-14']] are not working and give Typerror.
CAREFUL, that for the option addiso = [['N-14',fractionation,'C-14','O-14']] there is
no error message, but the fractionation is applied to both O14 and C14!
co_toggle : string, optional
For explosive models, choose what shells you want to look for! Select 'c' for
selecting zones with C/O >= 1. Select 'o' for C/O <= 1. If 'a' takes the
whole star. The defalut is 'c'. See cust_toggle (below) for an alternative!
cust_toggle : list, optional
This option is like co_toggle (and overwrites it when chosen) but lets you choose
your own comparison. For example you want to find zones that have a 10 fold
overabundance of Ti-46 and Ti-47 over O-16 and Zr-96, you can choose here
[['Ti-46','Ti-47'],['O-16','Zr-96'],100.] Assuming the first list is is x, the
second list y, and the comparator number is f, the statement only plots shells
in which the condition x/y>f is fulfilled. x and y are number sums of the chosen
isotopes, f has to be given as a float. This is only for explosive shells. Please
note, if this toggle is NOT None, then co_toggle is overwritten!
shift : integer, optional
For explosive models, how much do you want to shift the models back from the
last cycle? By default (0) the last cycle is taken.
weighting : string, optional
For explosive models. If None then, plot every profile separately. If 'zone'
then, average each zone. If 'all' then average all selected zones. The
default is None.
zoneselect: string, optional
For explosive models. Select if you want to plot 'all' zones or outer most zone.
Arguments are 'all' and 'top', respectively. Default is None, then the user is
asked to provide this information during the routine as input.
iniabufile : string, optional
Initial abundance file. Use absolute path for your file or filename to choose a
given file in USEEPP. Attention: You need a standard tree checked out from SVN
plt_sparse : integer, optional
Every how many datapoints is the plot done? Not used for some routines!
plt_symb : string, optional
Symbol for the plot. In case of grains, this is handled automatically.
plt_col : string / float, optional
Color for plotted curve. In case of grains, this is handled automatically.
plt_lt : string, optional
line type for plot.
plt_lw : float, optional
Line width for plot.
alpha_dum : trasparency to apply to grains data, in case of many data are plotted.
This may be allpied also for theoretical curves.
plt_massrange : boolean, optional
For explosive models. Plot mass of shell with first and last datapoint of
each zone. If list given, label those zones. The default is False.
plt_show : boolean, optional
Do you want to show the plot or not?
figsave : string, optional
Give path and filename here, if you want to save the figure.
'''
from . import utils as u
### WORK ON PATH ###
# define svn path form path where script runs, depending on standard input or not
if len(iniabufile.split('/')) == 1 : # means not an absolute path!
scriptpathtmp = __file__
if len(scriptpathtmp.split('/')) == 1: # in folder where nugridse is
scriptpathtmp = os.path.abspath('.') + '/nugridse.py' # to get the current dir
svnpathtmp = '/'
for i in range(len(scriptpathtmp.split('/'))-3): # -3 to go to folders up!
if scriptpathtmp.split('/')[i] != '':
svnpathtmp += scriptpathtmp.split('/')[i] + '/'
iniabufile = svnpathtmp + 'frames/mppnp/USEEPP/' + iniabufile # make absolute path for iniabufile
### get solar system ratios for the isotopes that are specified in the input file ###
inut = u.iniabu(iniabufile)
try:
xrat_solsys = inut.isoratio_init(xiso)
except KeyError: # if isotope not available, e.g., if plotting Ti-44 / Ti-48 ratio
xrat_solsys = 0.
try:
yrat_solsys = inut.isoratio_init(yiso)
except KeyError:
yrat_solsys = 0.
# number ratio for solar system ratio
xrat_solsys *= (old_div(float(xiso[1].split('-')[1]), float(xiso[0].split('-')[1])))
yrat_solsys *= (old_div(float(yiso[1].split('-')[1]), float(yiso[0].split('-')[1])))
# initialize xdataerr and ydataerr as None
xdataerr = None
ydataerr = None
### DO PLOTS FROM NUGRIDSE CLASS ###
if self._classTest() == 'se':
if spec==None:
spec = str(eval(input('Please specify \'surf\' for surface models (AGB stars) or \'exp\' for explosive'
'models and zone finding, etc., and press enter: ')))
### SURFACE MODELS - PLOT AGB STAR STUFF ###
if spec == 'surf':
print('Plotting AGB star stuff')
# read in thermal pulse position and co ratio
tp_pos, co_return = self._tp_finder(dcycle)
tp_pos_tmp = []
co_return_tmp = []
tp_pos_tmp.append(1)
co_return_tmp.append(co_return[0])
for i in range(len(tp_pos)):
tp_pos_tmp.append(tp_pos[i])
co_return_tmp.append(co_return[i])
tp_pos = tp_pos_tmp
co_return = co_return_tmp
# read in data
iso_alldata = self.get(tp_pos,[xiso[0],xiso[1],yiso[0],yiso[1]])
xrat = np.zeros(len(iso_alldata))
yrat = np.zeros(len(iso_alldata))
for i in range(len(iso_alldata)):
xrat[i] = old_div(iso_alldata[i][0], iso_alldata[i][1])
yrat[i] = old_div(iso_alldata[i][2], iso_alldata[i][3])
# make number ratios
for i in range(len(xrat)):
xrat[i] *= old_div(float(xiso[1].split('-')[1]), float(xiso[0].split('-')[1]))
yrat[i] *= old_div(float(yiso[1].split('-')[1]), float(yiso[0].split('-')[1]))
# if delta values are requested, need to calculate those now
if deltax:
xrat = (old_div(xrat,xrat_solsys) -1.) * 1000.
if deltay:
yrat = (old_div(yrat,yrat_solsys) -1.) * 1000.
# now we might have o and c rich zones. prepare stuff for plotting
xdata_o = []
ydata_o = []
xdata_c = []
ydata_c = []
for i in range(len(co_return)):
if co_return[i] <= 1.:
xdata_o.append(xrat[i])
ydata_o.append(yrat[i])
else:
xdata_c.append(xrat[i])
ydata_c.append(yrat[i])
if xdata_o != [] and xdata_c != []:
xdata_o.append(xdata_c[0])
ydata_o.append(ydata_c[0])
# now make the styles
style_o = [plt_symb + '--', plt_col, '1.', '4', '2',None]
style_c = [plt_symb + plt_lt, plt_col, plt_col, '7.', '1', legend]
# now make data for plotting
xdata = []
ydata = []
style = []
if xdata_o != []:
xdata.append(xdata_o)
ydata.append(ydata_o)
style.append(style_o)
if xdata_c != []:
xdata.append(xdata_c)
ydata.append(ydata_c)
style.append(style_c)
### EXPLOSIVE MODELS ###
elif spec == 'exp':
print('explosive models')
# compatibility
co_toggle = co_toggle.lower()
isotope_list = [xiso[0],xiso[1],yiso[0],yiso[1]]
# cycle
cyc_no = self.se.cycles[len(self.se.cycles)-1-shift]
mco_data = self.get(cyc_no,['mass','C-12','C-13','O-16','O-17','O-18',xiso[0],xiso[1],yiso[0],yiso[1]])
mass = mco_data[0]
# if no custom toggle for enrichment
if cust_toggle == None:
c_elem = mco_data[1]+mco_data[2]
o_elem = mco_data[3]+mco_data[4]+mco_data[5]
co_ratio = c_elem / o_elem * (old_div(16., 12.))
co_comp_val = 1.
else:
co_data1 = self.get(cyc_no,cust_toggle[0])
co_data2 = self.get(cyc_no,cust_toggle[1])
for i in range(len(co_data1)):
if i == 0:
c_elem = co_data1[i]
else:
c_elem += co_data1[i]
for i in range(len(co_data2)):
if i == 0:
o_elem = co_data2[i]
else:
o_elem += co_data2[i]
# now we need to make the mass number of everything in here to make number ratios
massn1 = 0.
for i in range(len(co_data1)):
massn1 += sum(co_data1[i]) * float(cust_toggle[0][i].split('-')[1])
massn1 /= sum(c_elem)
massn2 = 0.
for i in range(len(co_data2)):
massn2 += sum(co_data2[i]) * float(cust_toggle[1][i].split('-')[1])
massn2 /= sum(o_elem)
co_ratio = c_elem / o_elem * (old_div(massn2, massn1)) # this has nothing to do with a C/O ratio anymore! but keep name
# comparator value
co_comp_val = float(cust_toggle[2])
# get the data now
isotope_profile = []
for i in range(6,10): # in mco_data
isotope_profile.append(mco_data[i])
# add radioactive isotopes (if given)
if addiso != None:
if type(addiso[0] == list): # then list of lists
for i in range(len(addiso)):
for j in range(4):
if isotope_list[j] == addiso[i][0]:
multiplicator_addiso = 1.
try:
multiplicator_addiso = float(addiso[i][1])
starter = 2
except ValueError:
starter = 1
for k in range(starter,len(addiso[i])):
isotope_profile[j] += array(self.get(cyc_no,addiso[i][k])) * multiplicator_addiso
else:
for j in range(4):
if isotope_list[j] == addiso[0]:
multiplicator_addiso = 1.
try:
multiplicator_addiso = float(addiso[1])
starter = 2
except ValueError:
starter = 1
for k in range(starter,len(addiso)):
isotope_profile[j] += array(self.get(cyc_no,addiso[k])) * multiplicator_addiso
# search for carbon / oxygen rich layers
crich = [] # alternating start stop values. if odd number, then surface is c-rich, but add stop number
dumb = True
if cust_toggle != None:
for i in range(len(co_ratio)):
if dumb:
if co_ratio[i] >= co_comp_val:
crich.append(i)
dumb = False
continue
else:
if co_ratio[i] < co_comp_val:
crich.append(i)
dumb = True
elif co_toggle != 'a':
for i in range(len(co_ratio)):
if co_toggle == 'c': # carbon rich
if dumb:
if co_ratio[i] >= 1:
crich.append(i)
dumb = False
continue
else:
if co_ratio[i] < 1:
crich.append(i)
dumb = True
elif co_toggle == 'o': # oxygen rich
if dumb:
if co_ratio[i] <= 1:
crich.append(i)
dumb = False
continue
else:
if co_ratio[i] > 1:
crich.append(i)
dumb = True
else:
print('Select your enrichment!')
return None
else: # take whole star
print('Using all profiles to mix')
crich.append(0)
crich.append(len(co_toggle))
if len(crich)%2 == 1:
crich.append(len(co_ratio)-1)
if len(crich) == 0:
print('Star did not get rich in C or O, depending on what you specified')
return None
# make isotope_profile into array and transpose
isotope_profile = array(isotope_profile).transpose()
# Ask user which zones to use
if co_toggle != 'a':
if cust_toggle != None:
print('\n\nI have found the following zones:\n')
elif co_toggle == 'c':
print('\n\nI have found the following carbon rich zones:\n')
elif co_toggle == 'o':
print('\n\nI have found the following oxygen rich zones:\n')
mass_tmp = zeros((len(crich)))
for i in range(len(crich)):
mass_tmp[i] = mass[crich[i]]
j = 1
for i in range(old_div(len(crich),2)):
print('Mass range (' + str(j) + '):\t' + str(mass_tmp[2*i]) + ' - ' + str(mass_tmp[2*i+1]))
j += 1
print('\n')
if zoneselect == 'all':
usr_zones = 0
elif zoneselect == 'top':
usr_zones = [j-1]
else:
usr_zones = eval(input('Please select which mass range you want to use. Select 0 for all zones. Otherwise give one zone or a list of zones separated by comma (e.g.: 1, 2, 4): '))
crich_dumb = crich
if usr_zones == 0:
print('I continue w/ all zones then')
elif type(usr_zones) == int: # only one zone selected
tmp = int(usr_zones)-1
crich = crich_dumb[2*tmp:2*tmp+2]
else:
crich = []
for i in range(len(usr_zones)):
tmp = int(usr_zones[i])-1
crich.append(crich_dumb[2*tmp])
crich.append(crich_dumb[2*tmp + 1])
# weight profiles according to weighting factor using the selected crich
# define isos_to_use variable for later
if weighting == None:
isos_to_use = []
for i in range(old_div(len(crich),2)):
isos_dumb = []
n = crich[2*i]
while n <= crich[2*i+1]:
isos_dumb.append(isotope_profile[n])
n += 1
isos_to_use.append(array(isos_dumb))
elif weighting.lower() == 'zone' or weighting.lower() == 'zones':
# make array w/ mass weigted isotope ratio (4) for all mass zones
isotope_profile_cweight = zeros((old_div(len(crich),2),4))
mass_tot = []
for i in range(len(isotope_profile_cweight)): # 2*i is start, 2*i+1 is stop value
if crich[2*i] == 0:
print('C- / O-rich in first shell (core).')
else:
dumb = crich[2*i + 1]
j = crich[2*i]
mass_tmp = 0
while j <= dumb:
mass_shell = mass[j] - mass[j-1]
mass_tmp += mass_shell
for k in range(4):
isotope_profile_cweight[i][k] += isotope_profile[j][k]*mass_shell
j += 1
mass_tot.append(mass_tmp)
for i in range(len(isotope_profile_cweight)):
for j in range(4):
isotope_profile_cweight[i][j] /= mass_tot[i]
isos_to_use = [array(isotope_profile_cweight)]
elif weighting.lower() == 'all': # average all zones by mass
isos_tmp = zeros((1, len(isotope_profile[0])))
for i in range(len(isotope_profile)-1): # neglect surface effects
for j in range(len(isos_tmp[0])):
mass_shell = mass[i+1] - mass[i]
isos_tmp[0][j] += isotope_profile[i][j]*mass_shell
# weight all
isos_tmp /= sum(mass)
isos_to_use = [isos_tmp]
# change to isotope numbers from mass!
for i in range(len(isos_to_use)):
for j in range(len(isos_to_use[i])):
for k in range(len(isos_to_use[i][j])):
# here we just divide 'iso_massf' output with the mass number
# this means that in the end, the isotope ratios in number space are correc
# but have to use ratios from here on for meaningful stuff
isos_to_use[i][j][k] /= float(isotope_list[k].split('-')[1])
# do the ratios and stuff
ratiox = []
ratioy = []
for i in range(len(isos_to_use)):
ratiox_dumb = []
ratioy_dumb = []
for j in range(len(isos_to_use[i])):
ratiox_dumb.append(old_div(isos_to_use[i][j][0], isos_to_use[i][j][1]))
ratioy_dumb.append(old_div(isos_to_use[i][j][2], isos_to_use[i][j][3]))
ratiox.append(array(ratiox_dumb))
ratioy.append(array(ratioy_dumb))
# make arrays for ratiox and ratioy
ratiox = array(ratiox)
ratioy = array(ratioy)
# make number ratio out of everything
for i in range(len(ratiox)):
for j in range(len(ratiox[i])):
ratiox[i][j] *= (old_div(float(xiso[1].split('-')[1]), float(xiso[0].split('-')[1])))
ratioy[i][j] *= (old_div(float(yiso[1].split('-')[1]), float(yiso[0].split('-')[1])))
if deltax:
ratiox_tmp = []
for i in range(len(ratiox)):
ratiox_tmp_tmp = []
for j in range(len(ratiox[i])):
ratiox_tmp_tmp.append((old_div(ratiox[i][j], xrat_solsys) - 1.) * 1000.)
ratiox_tmp.append(ratiox_tmp_tmp)
ratiox = array(ratiox_tmp)
if deltay:
ratioy_tmp = []
for i in range(len(ratioy)):
ratioy_tmp_tmp = []
for j in range(len(ratioy[i])):
ratioy_tmp_tmp.append((old_div(ratioy[i][j], yrat_solsys) - 1.) * 1000.)
ratioy_tmp.append(ratioy_tmp_tmp)
ratioy = array(ratioy_tmp)
# create massrange array if necessary
plt_massrange_lst = []
if plt_massrange==True: # use == True because otherwise the list enters here too... why?
for i in range(len(ratiox)):
plt_massrange_lst.append([ratiox[i][0], ratioy[i][0], mass[crich[2*i]]])
plt_massrange_lst.append([ratiox[i][len(ratiox[i])-1], ratioy[i][len(ratioy[i])-1], mass[crich[2*i+1]]]) # start: x-ratio, y-ratio, mass label
elif plt_massrange != False:
for plt_mr_val in plt_massrange:
mrng_i = 0
while plt_mr_val > mass[mrng_i] and mrng_i < len(mass):
mrng_i += 1
if mrng_i > 0:
mrng_i -= 1
mratx = old_div(isotope_profile[mrng_i][0],isotope_profile[mrng_i][1])
mraty = old_div(isotope_profile[mrng_i][2],isotope_profile[mrng_i][3])
if deltax:
mratx = (old_div(mratx, ratiox_solsys) - 1.) * 1000.
if deltay:
mraty = (old_div(mraty, ratioy_solsys) - 1.) * 1000.
plt_massrange_lst.append([mratx,mraty,mass[mrng_i]])
# make style and prepare for plotting here
xdata = ratiox
ydata = ratioy
style_tmp0= [plt_symb + plt_lt, plt_col, plt_col, '13.', '1', legend]
style_tmp = [plt_symb + plt_lt, plt_col, plt_col, '13.', '1', None]
style = []
for i in range(len(xdata)):
if i == 0:
style.append(style_tmp0)
else:
style.append(style_tmp)
else:
print('You did not specify a useful spec argument -> abort.')
return None
### PLOTS FROM GRAIN CLASS ###
if self._classTest() == 'grain':
print('Presolar grains are cool!')
xdata,xdataerr,ydata,ydataerr,style = self.plot_ratio_return(xiso,yiso,deltax,deltay)
legend=True
plt_sparse=1 # to avoid monkey input
plt_lw = 0.
### PLOT ###
# data is prepared now, make the plots. data must be in format
# [[data1],[data2],[data3],...]
# three arrays like this, for xdata, ydata, and style.
# style format: symbol, edge color, face color, symbol size, edge width, label
# this is then compatible with grain.py style definitions
# Size of font etc.
params = {'axes.labelsize': 20,
'text.fontsize': 14,
'legend.fontsize': 14,
'xtick.labelsize': 14,
'ytick.labelsize': 14}
pl.rcParams.update(params)
pl.figure(fign)
for i in range(len(xdata)):
if errbar:
if xdataerr != None or ydataerr != None:
pl.errorbar(xdata[i],ydata[i],xerr=xdataerr[i],yerr=ydataerr[i],marker=style[i][0],
color=style[i][1],linestyle='',lw=2,markevery=plt_sparse,alpha=alpha_dum)
pl.plot(xdata[i],ydata[i],style[i][0],c=style[i][1],mfc=style[i][2],ms=float(style[i][3]),
mew=float(style[i][4]),label=style[i][5],markevery=plt_sparse,linewidth=plt_lw,alpha=alpha_dum)
# plot text labels if necessary
if plt_massrange != False:
for mrng_ind in range(len(plt_massrange_lst)):
pl.text(plt_massrange_lst[mrng_ind][0], plt_massrange_lst[mrng_ind][1],
str(round(plt_massrange_lst[mrng_ind][2], 2)), ha='right', va='bottom', color=plt_col,fontsize=15.)
# log?
if logx and logy == False:
pl.semilogx()
elif logx == False and logy:
pl.semilogy()
elif logx and logy:
pl.loglog()
# legend
if legend != None and legloc != None:
pl.legend(loc=legloc)
# title and labels
if title != None:
pl.title(title)
if deltax:
pl.xlabel('$\delta$($^{' + xiso[0].split('-')[1] + '}$' +xiso[0].split('-')[0] + '/$^{' + xiso[1].split('-')[1] + '}$' +xiso[1].split('-')[0] + ')' )
else:
pl.xlabel('$^{' + xiso[0].split('-')[1] + '}$' +xiso[0].split('-')[0] + '/$^{' + xiso[1].split('-')[1] + '}$' +xiso[1].split('-')[0])
if deltay:
pl.ylabel('$\delta$($^{' + yiso[0].split('-')[1] + '}$' +yiso[0].split('-')[0] + '/$^{' + yiso[1].split('-')[1] + '}$' +yiso[1].split('-')[0] + ')' )
else:
pl.ylabel('$^{' + yiso[0].split('-')[1] + '}$' +yiso[0].split('-')[0] + '/$^{' + yiso[1].split('-')[1] + '}$' +yiso[1].split('-')[0])
# plot horizontal and vertical lines
print(xrat_solsys, yrat_solsys)
if deltay:
pl.axhline(0,color='k')
else:
pl.axhline(yrat_solsys,color='k')
if deltax:
pl.axvline(0,color='k')
else:
pl.axvline(xrat_solsys,color='k')
# borders of plot
pl.gcf().subplots_adjust(bottom=0.15)
pl.gcf().subplots_adjust(left=0.15)
# save and show
if figsave != False:
pl.savefig(figsave)
if plt_show:
pl.show()
|
def function[plot_isoratios, parameter[self, xiso, yiso, fign, spec, deltax, deltay, logx, logy, title, legend, legloc, errbar, dcycle, addiso, co_toggle, cust_toggle, shift, weighting, zoneselect, iniabufile, plt_sparse, plt_symb, plt_col, plt_lt, plt_lw, alpha_dum, plt_massrange, plt_show, figsave]]:
constant[
This is the new routine to plot isotopic ratios for ALL input. rt, June 2014
Parameters:
-----------
xiso : np.array
x data to plot. This can be an array or a list of arrays, depending on who calls the routine
yiso : np.array
y data to plot. This can be an array or a list of arrays, depending on who calls the routine
fign : integer, optional
Figure number
spec : string, optional
What specifications do you want to do when coming from nugridse models. Choose 'surf' for
surface models or 'exp' for explosions (out files)
deltax : boolean, optional
X axis in delta values?
deltay : boolean, optional
Y axis in delta values?
logx : boolean, optional
Logarithmic x axis?
logy : boolean, optional
Logarithmic y axis?
title : string, optional
Title for your plot
legend : string, optional
Legend for your model / grains. For grains the legend is automatically taken from the
grain class
legloc : string / integer, optional
Location of the legend, use matplotlib standard. Use None to not plot legend if plotted
by default, e.g., from grain class routine.
errbar : boolean, optional
Error bars on grain data?
dcycle : integer, optional
Difference between cycles to take for thermal pulse searching, if searching is
deactivated, dcycle describes how often cycles are sampled. The default is 500.
addiso : list, optional
For explosive models. Add an isotope. Format ['C-12', 0.5 ,'N-12'] to add N12
to C12 and multiply it with a factor of 0.5. Multiple isotopes can be added, the
factor is optional and does not have to be given. Isotopes can be added to other
isotopes as well, i.e., [['C-12', 'N-12'], ['C-13', 'N-13']]. The default None.
Notice that while addiso = [['N-14','O-14'],['N-14',fractionation,'C-14']]
works, other options like addiso = [['N-14','O-14',fractionation,'C-14']] or
addiso = [['N-14',1,'O-14',fractionation,'C-14']] are not working and give Typerror.
CAREFUL, that for the option addiso = [['N-14',fractionation,'C-14','O-14']] there is
no error message, but the fractionation is applied to both O14 and C14!
co_toggle : string, optional
For explosive models, choose what shells you want to look for! Select 'c' for
selecting zones with C/O >= 1. Select 'o' for C/O <= 1. If 'a' takes the
whole star. The defalut is 'c'. See cust_toggle (below) for an alternative!
cust_toggle : list, optional
This option is like co_toggle (and overwrites it when chosen) but lets you choose
your own comparison. For example you want to find zones that have a 10 fold
overabundance of Ti-46 and Ti-47 over O-16 and Zr-96, you can choose here
[['Ti-46','Ti-47'],['O-16','Zr-96'],100.] Assuming the first list is is x, the
second list y, and the comparator number is f, the statement only plots shells
in which the condition x/y>f is fulfilled. x and y are number sums of the chosen
isotopes, f has to be given as a float. This is only for explosive shells. Please
note, if this toggle is NOT None, then co_toggle is overwritten!
shift : integer, optional
For explosive models, how much do you want to shift the models back from the
last cycle? By default (0) the last cycle is taken.
weighting : string, optional
For explosive models. If None then, plot every profile separately. If 'zone'
then, average each zone. If 'all' then average all selected zones. The
default is None.
zoneselect: string, optional
For explosive models. Select if you want to plot 'all' zones or outer most zone.
Arguments are 'all' and 'top', respectively. Default is None, then the user is
asked to provide this information during the routine as input.
iniabufile : string, optional
Initial abundance file. Use absolute path for your file or filename to choose a
given file in USEEPP. Attention: You need a standard tree checked out from SVN
plt_sparse : integer, optional
Every how many datapoints is the plot done? Not used for some routines!
plt_symb : string, optional
Symbol for the plot. In case of grains, this is handled automatically.
plt_col : string / float, optional
Color for plotted curve. In case of grains, this is handled automatically.
plt_lt : string, optional
line type for plot.
plt_lw : float, optional
Line width for plot.
alpha_dum : trasparency to apply to grains data, in case of many data are plotted.
This may be allpied also for theoretical curves.
plt_massrange : boolean, optional
For explosive models. Plot mass of shell with first and last datapoint of
each zone. If list given, label those zones. The default is False.
plt_show : boolean, optional
Do you want to show the plot or not?
figsave : string, optional
Give path and filename here, if you want to save the figure.
]
from relative_module[None] import module[utils]
if compare[call[name[len], parameter[call[name[iniabufile].split, parameter[constant[/]]]]] equal[==] constant[1]] begin[:]
variable[scriptpathtmp] assign[=] name[__file__]
if compare[call[name[len], parameter[call[name[scriptpathtmp].split, parameter[constant[/]]]]] equal[==] constant[1]] begin[:]
variable[scriptpathtmp] assign[=] binary_operation[call[name[os].path.abspath, parameter[constant[.]]] + constant[/nugridse.py]]
variable[svnpathtmp] assign[=] constant[/]
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[call[name[scriptpathtmp].split, parameter[constant[/]]]]] - constant[3]]]]] begin[:]
if compare[call[call[name[scriptpathtmp].split, parameter[constant[/]]]][name[i]] not_equal[!=] constant[]] begin[:]
<ast.AugAssign object at 0x7da1b1b16950>
variable[iniabufile] assign[=] binary_operation[binary_operation[name[svnpathtmp] + constant[frames/mppnp/USEEPP/]] + name[iniabufile]]
variable[inut] assign[=] call[name[u].iniabu, parameter[name[iniabufile]]]
<ast.Try object at 0x7da1b1b16500>
<ast.Try object at 0x7da1b1b162c0>
<ast.AugAssign object at 0x7da1b1b16080>
<ast.AugAssign object at 0x7da1b1b15c00>
variable[xdataerr] assign[=] constant[None]
variable[ydataerr] assign[=] constant[None]
if compare[call[name[self]._classTest, parameter[]] equal[==] constant[se]] begin[:]
if compare[name[spec] equal[==] constant[None]] begin[:]
variable[spec] assign[=] call[name[str], parameter[call[name[eval], parameter[call[name[input], parameter[constant[Please specify 'surf' for surface models (AGB stars) or 'exp' for explosivemodels and zone finding, etc., and press enter: ]]]]]]]
if compare[name[spec] equal[==] constant[surf]] begin[:]
call[name[print], parameter[constant[Plotting AGB star stuff]]]
<ast.Tuple object at 0x7da1b1b150f0> assign[=] call[name[self]._tp_finder, parameter[name[dcycle]]]
variable[tp_pos_tmp] assign[=] list[[]]
variable[co_return_tmp] assign[=] list[[]]
call[name[tp_pos_tmp].append, parameter[constant[1]]]
call[name[co_return_tmp].append, parameter[call[name[co_return]][constant[0]]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[tp_pos]]]]]] begin[:]
call[name[tp_pos_tmp].append, parameter[call[name[tp_pos]][name[i]]]]
call[name[co_return_tmp].append, parameter[call[name[co_return]][name[i]]]]
variable[tp_pos] assign[=] name[tp_pos_tmp]
variable[co_return] assign[=] name[co_return_tmp]
variable[iso_alldata] assign[=] call[name[self].get, parameter[name[tp_pos], list[[<ast.Subscript object at 0x7da1b1ba8e50>, <ast.Subscript object at 0x7da1b1ba8dc0>, <ast.Subscript object at 0x7da1b1ba8d30>, <ast.Subscript object at 0x7da1b1ba8ca0>]]]]
variable[xrat] assign[=] call[name[np].zeros, parameter[call[name[len], parameter[name[iso_alldata]]]]]
variable[yrat] assign[=] call[name[np].zeros, parameter[call[name[len], parameter[name[iso_alldata]]]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[iso_alldata]]]]]] begin[:]
call[name[xrat]][name[i]] assign[=] call[name[old_div], parameter[call[call[name[iso_alldata]][name[i]]][constant[0]], call[call[name[iso_alldata]][name[i]]][constant[1]]]]
call[name[yrat]][name[i]] assign[=] call[name[old_div], parameter[call[call[name[iso_alldata]][name[i]]][constant[2]], call[call[name[iso_alldata]][name[i]]][constant[3]]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[xrat]]]]]] begin[:]
<ast.AugAssign object at 0x7da1b1ba8070>
<ast.AugAssign object at 0x7da20c7956c0>
if name[deltax] begin[:]
variable[xrat] assign[=] binary_operation[binary_operation[call[name[old_div], parameter[name[xrat], name[xrat_solsys]]] - constant[1.0]] * constant[1000.0]]
if name[deltay] begin[:]
variable[yrat] assign[=] binary_operation[binary_operation[call[name[old_div], parameter[name[yrat], name[yrat_solsys]]] - constant[1.0]] * constant[1000.0]]
variable[xdata_o] assign[=] list[[]]
variable[ydata_o] assign[=] list[[]]
variable[xdata_c] assign[=] list[[]]
variable[ydata_c] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[co_return]]]]]] begin[:]
if compare[call[name[co_return]][name[i]] less_or_equal[<=] constant[1.0]] begin[:]
call[name[xdata_o].append, parameter[call[name[xrat]][name[i]]]]
call[name[ydata_o].append, parameter[call[name[yrat]][name[i]]]]
if <ast.BoolOp object at 0x7da20c796b30> begin[:]
call[name[xdata_o].append, parameter[call[name[xdata_c]][constant[0]]]]
call[name[ydata_o].append, parameter[call[name[ydata_c]][constant[0]]]]
variable[style_o] assign[=] list[[<ast.BinOp object at 0x7da18f58c310>, <ast.Name object at 0x7da18f58ebf0>, <ast.Constant object at 0x7da18f58fd00>, <ast.Constant object at 0x7da18f58f190>, <ast.Constant object at 0x7da18f58cb50>, <ast.Constant object at 0x7da18f58e1a0>]]
variable[style_c] assign[=] list[[<ast.BinOp object at 0x7da18f58c340>, <ast.Name object at 0x7da18f58f970>, <ast.Name object at 0x7da18f58d360>, <ast.Constant object at 0x7da18f58f580>, <ast.Constant object at 0x7da18f58ecb0>, <ast.Name object at 0x7da18f58dc30>]]
variable[xdata] assign[=] list[[]]
variable[ydata] assign[=] list[[]]
variable[style] assign[=] list[[]]
if compare[name[xdata_o] not_equal[!=] list[[]]] begin[:]
call[name[xdata].append, parameter[name[xdata_o]]]
call[name[ydata].append, parameter[name[ydata_o]]]
call[name[style].append, parameter[name[style_o]]]
if compare[name[xdata_c] not_equal[!=] list[[]]] begin[:]
call[name[xdata].append, parameter[name[xdata_c]]]
call[name[ydata].append, parameter[name[ydata_c]]]
call[name[style].append, parameter[name[style_c]]]
if compare[call[name[self]._classTest, parameter[]] equal[==] constant[grain]] begin[:]
call[name[print], parameter[constant[Presolar grains are cool!]]]
<ast.Tuple object at 0x7da18dc99cf0> assign[=] call[name[self].plot_ratio_return, parameter[name[xiso], name[yiso], name[deltax], name[deltay]]]
variable[legend] assign[=] constant[True]
variable[plt_sparse] assign[=] constant[1]
variable[plt_lw] assign[=] constant[0.0]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18dc9b040>, <ast.Constant object at 0x7da18dc98ac0>, <ast.Constant object at 0x7da18dc99d50>, <ast.Constant object at 0x7da18dc99810>, <ast.Constant object at 0x7da18dc98670>], [<ast.Constant object at 0x7da18dc9b610>, <ast.Constant object at 0x7da18dc99ae0>, <ast.Constant object at 0x7da18dc9a8c0>, <ast.Constant object at 0x7da18dc99f90>, <ast.Constant object at 0x7da18dc98550>]]
call[name[pl].rcParams.update, parameter[name[params]]]
call[name[pl].figure, parameter[name[fign]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[xdata]]]]]] begin[:]
if name[errbar] begin[:]
if <ast.BoolOp object at 0x7da18dc9bc70> begin[:]
call[name[pl].errorbar, parameter[call[name[xdata]][name[i]], call[name[ydata]][name[i]]]]
call[name[pl].plot, parameter[call[name[xdata]][name[i]], call[name[ydata]][name[i]], call[call[name[style]][name[i]]][constant[0]]]]
if compare[name[plt_massrange] not_equal[!=] constant[False]] begin[:]
for taget[name[mrng_ind]] in starred[call[name[range], parameter[call[name[len], parameter[name[plt_massrange_lst]]]]]] begin[:]
call[name[pl].text, parameter[call[call[name[plt_massrange_lst]][name[mrng_ind]]][constant[0]], call[call[name[plt_massrange_lst]][name[mrng_ind]]][constant[1]], call[name[str], parameter[call[name[round], parameter[call[call[name[plt_massrange_lst]][name[mrng_ind]]][constant[2]], constant[2]]]]]]]
if <ast.BoolOp object at 0x7da1b1992e60> begin[:]
call[name[pl].semilogx, parameter[]]
if <ast.BoolOp object at 0x7da20c6c6a70> begin[:]
call[name[pl].legend, parameter[]]
if compare[name[title] not_equal[!=] constant[None]] begin[:]
call[name[pl].title, parameter[name[title]]]
if name[deltax] begin[:]
call[name[pl].xlabel, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[$\delta$($^{] + call[call[call[name[xiso]][constant[0]].split, parameter[constant[-]]]][constant[1]]] + constant[}$]] + call[call[call[name[xiso]][constant[0]].split, parameter[constant[-]]]][constant[0]]] + constant[/$^{]] + call[call[call[name[xiso]][constant[1]].split, parameter[constant[-]]]][constant[1]]] + constant[}$]] + call[call[call[name[xiso]][constant[1]].split, parameter[constant[-]]]][constant[0]]] + constant[)]]]]
if name[deltay] begin[:]
call[name[pl].ylabel, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[$\delta$($^{] + call[call[call[name[yiso]][constant[0]].split, parameter[constant[-]]]][constant[1]]] + constant[}$]] + call[call[call[name[yiso]][constant[0]].split, parameter[constant[-]]]][constant[0]]] + constant[/$^{]] + call[call[call[name[yiso]][constant[1]].split, parameter[constant[-]]]][constant[1]]] + constant[}$]] + call[call[call[name[yiso]][constant[1]].split, parameter[constant[-]]]][constant[0]]] + constant[)]]]]
call[name[print], parameter[name[xrat_solsys], name[yrat_solsys]]]
if name[deltay] begin[:]
call[name[pl].axhline, parameter[constant[0]]]
if name[deltax] begin[:]
call[name[pl].axvline, parameter[constant[0]]]
call[call[name[pl].gcf, parameter[]].subplots_adjust, parameter[]]
call[call[name[pl].gcf, parameter[]].subplots_adjust, parameter[]]
if compare[name[figsave] not_equal[!=] constant[False]] begin[:]
call[name[pl].savefig, parameter[name[figsave]]]
if name[plt_show] begin[:]
call[name[pl].show, parameter[]]
|
keyword[def] identifier[plot_isoratios] ( identifier[self] , identifier[xiso] , identifier[yiso] , identifier[fign] = literal[int] , identifier[spec] = keyword[None] , identifier[deltax] = keyword[True] , identifier[deltay] = keyword[True] , identifier[logx] = keyword[False] , identifier[logy] = keyword[False] ,
identifier[title] = keyword[None] , identifier[legend] = keyword[None] , identifier[legloc] = literal[string] , identifier[errbar] = keyword[True] , identifier[dcycle] = literal[int] , identifier[addiso] = keyword[None] ,
identifier[co_toggle] = literal[string] , identifier[cust_toggle] = keyword[None] , identifier[shift] = literal[int] , identifier[weighting] = keyword[None] , identifier[zoneselect] = keyword[None] , identifier[iniabufile] = literal[string] ,
identifier[plt_sparse] = literal[int] , identifier[plt_symb] = literal[string] , identifier[plt_col] = literal[string] , identifier[plt_lt] = literal[string] , identifier[plt_lw] = literal[int] , identifier[alpha_dum] = literal[int] , identifier[plt_massrange] = keyword[False] , identifier[plt_show] = keyword[True] ,
identifier[figsave] = keyword[False] ):
literal[string]
keyword[from] . keyword[import] identifier[utils] keyword[as] identifier[u]
keyword[if] identifier[len] ( identifier[iniabufile] . identifier[split] ( literal[string] ))== literal[int] :
identifier[scriptpathtmp] = identifier[__file__]
keyword[if] identifier[len] ( identifier[scriptpathtmp] . identifier[split] ( literal[string] ))== literal[int] :
identifier[scriptpathtmp] = identifier[os] . identifier[path] . identifier[abspath] ( literal[string] )+ literal[string]
identifier[svnpathtmp] = literal[string]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[scriptpathtmp] . identifier[split] ( literal[string] ))- literal[int] ):
keyword[if] identifier[scriptpathtmp] . identifier[split] ( literal[string] )[ identifier[i] ]!= literal[string] :
identifier[svnpathtmp] += identifier[scriptpathtmp] . identifier[split] ( literal[string] )[ identifier[i] ]+ literal[string]
identifier[iniabufile] = identifier[svnpathtmp] + literal[string] + identifier[iniabufile]
identifier[inut] = identifier[u] . identifier[iniabu] ( identifier[iniabufile] )
keyword[try] :
identifier[xrat_solsys] = identifier[inut] . identifier[isoratio_init] ( identifier[xiso] )
keyword[except] identifier[KeyError] :
identifier[xrat_solsys] = literal[int]
keyword[try] :
identifier[yrat_solsys] = identifier[inut] . identifier[isoratio_init] ( identifier[yiso] )
keyword[except] identifier[KeyError] :
identifier[yrat_solsys] = literal[int]
identifier[xrat_solsys] *=( identifier[old_div] ( identifier[float] ( identifier[xiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]), identifier[float] ( identifier[xiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ])))
identifier[yrat_solsys] *=( identifier[old_div] ( identifier[float] ( identifier[yiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]), identifier[float] ( identifier[yiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ])))
identifier[xdataerr] = keyword[None]
identifier[ydataerr] = keyword[None]
keyword[if] identifier[self] . identifier[_classTest] ()== literal[string] :
keyword[if] identifier[spec] == keyword[None] :
identifier[spec] = identifier[str] ( identifier[eval] ( identifier[input] ( literal[string]
literal[string] )))
keyword[if] identifier[spec] == literal[string] :
identifier[print] ( literal[string] )
identifier[tp_pos] , identifier[co_return] = identifier[self] . identifier[_tp_finder] ( identifier[dcycle] )
identifier[tp_pos_tmp] =[]
identifier[co_return_tmp] =[]
identifier[tp_pos_tmp] . identifier[append] ( literal[int] )
identifier[co_return_tmp] . identifier[append] ( identifier[co_return] [ literal[int] ])
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[tp_pos] )):
identifier[tp_pos_tmp] . identifier[append] ( identifier[tp_pos] [ identifier[i] ])
identifier[co_return_tmp] . identifier[append] ( identifier[co_return] [ identifier[i] ])
identifier[tp_pos] = identifier[tp_pos_tmp]
identifier[co_return] = identifier[co_return_tmp]
identifier[iso_alldata] = identifier[self] . identifier[get] ( identifier[tp_pos] ,[ identifier[xiso] [ literal[int] ], identifier[xiso] [ literal[int] ], identifier[yiso] [ literal[int] ], identifier[yiso] [ literal[int] ]])
identifier[xrat] = identifier[np] . identifier[zeros] ( identifier[len] ( identifier[iso_alldata] ))
identifier[yrat] = identifier[np] . identifier[zeros] ( identifier[len] ( identifier[iso_alldata] ))
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[iso_alldata] )):
identifier[xrat] [ identifier[i] ]= identifier[old_div] ( identifier[iso_alldata] [ identifier[i] ][ literal[int] ], identifier[iso_alldata] [ identifier[i] ][ literal[int] ])
identifier[yrat] [ identifier[i] ]= identifier[old_div] ( identifier[iso_alldata] [ identifier[i] ][ literal[int] ], identifier[iso_alldata] [ identifier[i] ][ literal[int] ])
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[xrat] )):
identifier[xrat] [ identifier[i] ]*= identifier[old_div] ( identifier[float] ( identifier[xiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]), identifier[float] ( identifier[xiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]))
identifier[yrat] [ identifier[i] ]*= identifier[old_div] ( identifier[float] ( identifier[yiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]), identifier[float] ( identifier[yiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]))
keyword[if] identifier[deltax] :
identifier[xrat] =( identifier[old_div] ( identifier[xrat] , identifier[xrat_solsys] )- literal[int] )* literal[int]
keyword[if] identifier[deltay] :
identifier[yrat] =( identifier[old_div] ( identifier[yrat] , identifier[yrat_solsys] )- literal[int] )* literal[int]
identifier[xdata_o] =[]
identifier[ydata_o] =[]
identifier[xdata_c] =[]
identifier[ydata_c] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[co_return] )):
keyword[if] identifier[co_return] [ identifier[i] ]<= literal[int] :
identifier[xdata_o] . identifier[append] ( identifier[xrat] [ identifier[i] ])
identifier[ydata_o] . identifier[append] ( identifier[yrat] [ identifier[i] ])
keyword[else] :
identifier[xdata_c] . identifier[append] ( identifier[xrat] [ identifier[i] ])
identifier[ydata_c] . identifier[append] ( identifier[yrat] [ identifier[i] ])
keyword[if] identifier[xdata_o] !=[] keyword[and] identifier[xdata_c] !=[]:
identifier[xdata_o] . identifier[append] ( identifier[xdata_c] [ literal[int] ])
identifier[ydata_o] . identifier[append] ( identifier[ydata_c] [ literal[int] ])
identifier[style_o] =[ identifier[plt_symb] + literal[string] , identifier[plt_col] , literal[string] , literal[string] , literal[string] , keyword[None] ]
identifier[style_c] =[ identifier[plt_symb] + identifier[plt_lt] , identifier[plt_col] , identifier[plt_col] , literal[string] , literal[string] , identifier[legend] ]
identifier[xdata] =[]
identifier[ydata] =[]
identifier[style] =[]
keyword[if] identifier[xdata_o] !=[]:
identifier[xdata] . identifier[append] ( identifier[xdata_o] )
identifier[ydata] . identifier[append] ( identifier[ydata_o] )
identifier[style] . identifier[append] ( identifier[style_o] )
keyword[if] identifier[xdata_c] !=[]:
identifier[xdata] . identifier[append] ( identifier[xdata_c] )
identifier[ydata] . identifier[append] ( identifier[ydata_c] )
identifier[style] . identifier[append] ( identifier[style_c] )
keyword[elif] identifier[spec] == literal[string] :
identifier[print] ( literal[string] )
identifier[co_toggle] = identifier[co_toggle] . identifier[lower] ()
identifier[isotope_list] =[ identifier[xiso] [ literal[int] ], identifier[xiso] [ literal[int] ], identifier[yiso] [ literal[int] ], identifier[yiso] [ literal[int] ]]
identifier[cyc_no] = identifier[self] . identifier[se] . identifier[cycles] [ identifier[len] ( identifier[self] . identifier[se] . identifier[cycles] )- literal[int] - identifier[shift] ]
identifier[mco_data] = identifier[self] . identifier[get] ( identifier[cyc_no] ,[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , identifier[xiso] [ literal[int] ], identifier[xiso] [ literal[int] ], identifier[yiso] [ literal[int] ], identifier[yiso] [ literal[int] ]])
identifier[mass] = identifier[mco_data] [ literal[int] ]
keyword[if] identifier[cust_toggle] == keyword[None] :
identifier[c_elem] = identifier[mco_data] [ literal[int] ]+ identifier[mco_data] [ literal[int] ]
identifier[o_elem] = identifier[mco_data] [ literal[int] ]+ identifier[mco_data] [ literal[int] ]+ identifier[mco_data] [ literal[int] ]
identifier[co_ratio] = identifier[c_elem] / identifier[o_elem] *( identifier[old_div] ( literal[int] , literal[int] ))
identifier[co_comp_val] = literal[int]
keyword[else] :
identifier[co_data1] = identifier[self] . identifier[get] ( identifier[cyc_no] , identifier[cust_toggle] [ literal[int] ])
identifier[co_data2] = identifier[self] . identifier[get] ( identifier[cyc_no] , identifier[cust_toggle] [ literal[int] ])
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[co_data1] )):
keyword[if] identifier[i] == literal[int] :
identifier[c_elem] = identifier[co_data1] [ identifier[i] ]
keyword[else] :
identifier[c_elem] += identifier[co_data1] [ identifier[i] ]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[co_data2] )):
keyword[if] identifier[i] == literal[int] :
identifier[o_elem] = identifier[co_data2] [ identifier[i] ]
keyword[else] :
identifier[o_elem] += identifier[co_data2] [ identifier[i] ]
identifier[massn1] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[co_data1] )):
identifier[massn1] += identifier[sum] ( identifier[co_data1] [ identifier[i] ])* identifier[float] ( identifier[cust_toggle] [ literal[int] ][ identifier[i] ]. identifier[split] ( literal[string] )[ literal[int] ])
identifier[massn1] /= identifier[sum] ( identifier[c_elem] )
identifier[massn2] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[co_data2] )):
identifier[massn2] += identifier[sum] ( identifier[co_data2] [ identifier[i] ])* identifier[float] ( identifier[cust_toggle] [ literal[int] ][ identifier[i] ]. identifier[split] ( literal[string] )[ literal[int] ])
identifier[massn2] /= identifier[sum] ( identifier[o_elem] )
identifier[co_ratio] = identifier[c_elem] / identifier[o_elem] *( identifier[old_div] ( identifier[massn2] , identifier[massn1] ))
identifier[co_comp_val] = identifier[float] ( identifier[cust_toggle] [ literal[int] ])
identifier[isotope_profile] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , literal[int] ):
identifier[isotope_profile] . identifier[append] ( identifier[mco_data] [ identifier[i] ])
keyword[if] identifier[addiso] != keyword[None] :
keyword[if] identifier[type] ( identifier[addiso] [ literal[int] ]== identifier[list] ):
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[addiso] )):
keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] ):
keyword[if] identifier[isotope_list] [ identifier[j] ]== identifier[addiso] [ identifier[i] ][ literal[int] ]:
identifier[multiplicator_addiso] = literal[int]
keyword[try] :
identifier[multiplicator_addiso] = identifier[float] ( identifier[addiso] [ identifier[i] ][ literal[int] ])
identifier[starter] = literal[int]
keyword[except] identifier[ValueError] :
identifier[starter] = literal[int]
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[starter] , identifier[len] ( identifier[addiso] [ identifier[i] ])):
identifier[isotope_profile] [ identifier[j] ]+= identifier[array] ( identifier[self] . identifier[get] ( identifier[cyc_no] , identifier[addiso] [ identifier[i] ][ identifier[k] ]))* identifier[multiplicator_addiso]
keyword[else] :
keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] ):
keyword[if] identifier[isotope_list] [ identifier[j] ]== identifier[addiso] [ literal[int] ]:
identifier[multiplicator_addiso] = literal[int]
keyword[try] :
identifier[multiplicator_addiso] = identifier[float] ( identifier[addiso] [ literal[int] ])
identifier[starter] = literal[int]
keyword[except] identifier[ValueError] :
identifier[starter] = literal[int]
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[starter] , identifier[len] ( identifier[addiso] )):
identifier[isotope_profile] [ identifier[j] ]+= identifier[array] ( identifier[self] . identifier[get] ( identifier[cyc_no] , identifier[addiso] [ identifier[k] ]))* identifier[multiplicator_addiso]
identifier[crich] =[]
identifier[dumb] = keyword[True]
keyword[if] identifier[cust_toggle] != keyword[None] :
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[co_ratio] )):
keyword[if] identifier[dumb] :
keyword[if] identifier[co_ratio] [ identifier[i] ]>= identifier[co_comp_val] :
identifier[crich] . identifier[append] ( identifier[i] )
identifier[dumb] = keyword[False]
keyword[continue]
keyword[else] :
keyword[if] identifier[co_ratio] [ identifier[i] ]< identifier[co_comp_val] :
identifier[crich] . identifier[append] ( identifier[i] )
identifier[dumb] = keyword[True]
keyword[elif] identifier[co_toggle] != literal[string] :
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[co_ratio] )):
keyword[if] identifier[co_toggle] == literal[string] :
keyword[if] identifier[dumb] :
keyword[if] identifier[co_ratio] [ identifier[i] ]>= literal[int] :
identifier[crich] . identifier[append] ( identifier[i] )
identifier[dumb] = keyword[False]
keyword[continue]
keyword[else] :
keyword[if] identifier[co_ratio] [ identifier[i] ]< literal[int] :
identifier[crich] . identifier[append] ( identifier[i] )
identifier[dumb] = keyword[True]
keyword[elif] identifier[co_toggle] == literal[string] :
keyword[if] identifier[dumb] :
keyword[if] identifier[co_ratio] [ identifier[i] ]<= literal[int] :
identifier[crich] . identifier[append] ( identifier[i] )
identifier[dumb] = keyword[False]
keyword[continue]
keyword[else] :
keyword[if] identifier[co_ratio] [ identifier[i] ]> literal[int] :
identifier[crich] . identifier[append] ( identifier[i] )
identifier[dumb] = keyword[True]
keyword[else] :
identifier[print] ( literal[string] )
keyword[return] keyword[None]
keyword[else] :
identifier[print] ( literal[string] )
identifier[crich] . identifier[append] ( literal[int] )
identifier[crich] . identifier[append] ( identifier[len] ( identifier[co_toggle] ))
keyword[if] identifier[len] ( identifier[crich] )% literal[int] == literal[int] :
identifier[crich] . identifier[append] ( identifier[len] ( identifier[co_ratio] )- literal[int] )
keyword[if] identifier[len] ( identifier[crich] )== literal[int] :
identifier[print] ( literal[string] )
keyword[return] keyword[None]
identifier[isotope_profile] = identifier[array] ( identifier[isotope_profile] ). identifier[transpose] ()
keyword[if] identifier[co_toggle] != literal[string] :
keyword[if] identifier[cust_toggle] != keyword[None] :
identifier[print] ( literal[string] )
keyword[elif] identifier[co_toggle] == literal[string] :
identifier[print] ( literal[string] )
keyword[elif] identifier[co_toggle] == literal[string] :
identifier[print] ( literal[string] )
identifier[mass_tmp] = identifier[zeros] (( identifier[len] ( identifier[crich] )))
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[crich] )):
identifier[mass_tmp] [ identifier[i] ]= identifier[mass] [ identifier[crich] [ identifier[i] ]]
identifier[j] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[old_div] ( identifier[len] ( identifier[crich] ), literal[int] )):
identifier[print] ( literal[string] + identifier[str] ( identifier[j] )+ literal[string] + identifier[str] ( identifier[mass_tmp] [ literal[int] * identifier[i] ])+ literal[string] + identifier[str] ( identifier[mass_tmp] [ literal[int] * identifier[i] + literal[int] ]))
identifier[j] += literal[int]
identifier[print] ( literal[string] )
keyword[if] identifier[zoneselect] == literal[string] :
identifier[usr_zones] = literal[int]
keyword[elif] identifier[zoneselect] == literal[string] :
identifier[usr_zones] =[ identifier[j] - literal[int] ]
keyword[else] :
identifier[usr_zones] = identifier[eval] ( identifier[input] ( literal[string] ))
identifier[crich_dumb] = identifier[crich]
keyword[if] identifier[usr_zones] == literal[int] :
identifier[print] ( literal[string] )
keyword[elif] identifier[type] ( identifier[usr_zones] )== identifier[int] :
identifier[tmp] = identifier[int] ( identifier[usr_zones] )- literal[int]
identifier[crich] = identifier[crich_dumb] [ literal[int] * identifier[tmp] : literal[int] * identifier[tmp] + literal[int] ]
keyword[else] :
identifier[crich] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[usr_zones] )):
identifier[tmp] = identifier[int] ( identifier[usr_zones] [ identifier[i] ])- literal[int]
identifier[crich] . identifier[append] ( identifier[crich_dumb] [ literal[int] * identifier[tmp] ])
identifier[crich] . identifier[append] ( identifier[crich_dumb] [ literal[int] * identifier[tmp] + literal[int] ])
keyword[if] identifier[weighting] == keyword[None] :
identifier[isos_to_use] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[old_div] ( identifier[len] ( identifier[crich] ), literal[int] )):
identifier[isos_dumb] =[]
identifier[n] = identifier[crich] [ literal[int] * identifier[i] ]
keyword[while] identifier[n] <= identifier[crich] [ literal[int] * identifier[i] + literal[int] ]:
identifier[isos_dumb] . identifier[append] ( identifier[isotope_profile] [ identifier[n] ])
identifier[n] += literal[int]
identifier[isos_to_use] . identifier[append] ( identifier[array] ( identifier[isos_dumb] ))
keyword[elif] identifier[weighting] . identifier[lower] ()== literal[string] keyword[or] identifier[weighting] . identifier[lower] ()== literal[string] :
identifier[isotope_profile_cweight] = identifier[zeros] (( identifier[old_div] ( identifier[len] ( identifier[crich] ), literal[int] ), literal[int] ))
identifier[mass_tot] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[isotope_profile_cweight] )):
keyword[if] identifier[crich] [ literal[int] * identifier[i] ]== literal[int] :
identifier[print] ( literal[string] )
keyword[else] :
identifier[dumb] = identifier[crich] [ literal[int] * identifier[i] + literal[int] ]
identifier[j] = identifier[crich] [ literal[int] * identifier[i] ]
identifier[mass_tmp] = literal[int]
keyword[while] identifier[j] <= identifier[dumb] :
identifier[mass_shell] = identifier[mass] [ identifier[j] ]- identifier[mass] [ identifier[j] - literal[int] ]
identifier[mass_tmp] += identifier[mass_shell]
keyword[for] identifier[k] keyword[in] identifier[range] ( literal[int] ):
identifier[isotope_profile_cweight] [ identifier[i] ][ identifier[k] ]+= identifier[isotope_profile] [ identifier[j] ][ identifier[k] ]* identifier[mass_shell]
identifier[j] += literal[int]
identifier[mass_tot] . identifier[append] ( identifier[mass_tmp] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[isotope_profile_cweight] )):
keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] ):
identifier[isotope_profile_cweight] [ identifier[i] ][ identifier[j] ]/= identifier[mass_tot] [ identifier[i] ]
identifier[isos_to_use] =[ identifier[array] ( identifier[isotope_profile_cweight] )]
keyword[elif] identifier[weighting] . identifier[lower] ()== literal[string] :
identifier[isos_tmp] = identifier[zeros] (( literal[int] , identifier[len] ( identifier[isotope_profile] [ literal[int] ])))
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[isotope_profile] )- literal[int] ):
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[isos_tmp] [ literal[int] ])):
identifier[mass_shell] = identifier[mass] [ identifier[i] + literal[int] ]- identifier[mass] [ identifier[i] ]
identifier[isos_tmp] [ literal[int] ][ identifier[j] ]+= identifier[isotope_profile] [ identifier[i] ][ identifier[j] ]* identifier[mass_shell]
identifier[isos_tmp] /= identifier[sum] ( identifier[mass] )
identifier[isos_to_use] =[ identifier[isos_tmp] ]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[isos_to_use] )):
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[isos_to_use] [ identifier[i] ])):
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[len] ( identifier[isos_to_use] [ identifier[i] ][ identifier[j] ])):
identifier[isos_to_use] [ identifier[i] ][ identifier[j] ][ identifier[k] ]/= identifier[float] ( identifier[isotope_list] [ identifier[k] ]. identifier[split] ( literal[string] )[ literal[int] ])
identifier[ratiox] =[]
identifier[ratioy] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[isos_to_use] )):
identifier[ratiox_dumb] =[]
identifier[ratioy_dumb] =[]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[isos_to_use] [ identifier[i] ])):
identifier[ratiox_dumb] . identifier[append] ( identifier[old_div] ( identifier[isos_to_use] [ identifier[i] ][ identifier[j] ][ literal[int] ], identifier[isos_to_use] [ identifier[i] ][ identifier[j] ][ literal[int] ]))
identifier[ratioy_dumb] . identifier[append] ( identifier[old_div] ( identifier[isos_to_use] [ identifier[i] ][ identifier[j] ][ literal[int] ], identifier[isos_to_use] [ identifier[i] ][ identifier[j] ][ literal[int] ]))
identifier[ratiox] . identifier[append] ( identifier[array] ( identifier[ratiox_dumb] ))
identifier[ratioy] . identifier[append] ( identifier[array] ( identifier[ratioy_dumb] ))
identifier[ratiox] = identifier[array] ( identifier[ratiox] )
identifier[ratioy] = identifier[array] ( identifier[ratioy] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[ratiox] )):
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[ratiox] [ identifier[i] ])):
identifier[ratiox] [ identifier[i] ][ identifier[j] ]*=( identifier[old_div] ( identifier[float] ( identifier[xiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]), identifier[float] ( identifier[xiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ])))
identifier[ratioy] [ identifier[i] ][ identifier[j] ]*=( identifier[old_div] ( identifier[float] ( identifier[yiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]), identifier[float] ( identifier[yiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ])))
keyword[if] identifier[deltax] :
identifier[ratiox_tmp] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[ratiox] )):
identifier[ratiox_tmp_tmp] =[]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[ratiox] [ identifier[i] ])):
identifier[ratiox_tmp_tmp] . identifier[append] (( identifier[old_div] ( identifier[ratiox] [ identifier[i] ][ identifier[j] ], identifier[xrat_solsys] )- literal[int] )* literal[int] )
identifier[ratiox_tmp] . identifier[append] ( identifier[ratiox_tmp_tmp] )
identifier[ratiox] = identifier[array] ( identifier[ratiox_tmp] )
keyword[if] identifier[deltay] :
identifier[ratioy_tmp] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[ratioy] )):
identifier[ratioy_tmp_tmp] =[]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[ratioy] [ identifier[i] ])):
identifier[ratioy_tmp_tmp] . identifier[append] (( identifier[old_div] ( identifier[ratioy] [ identifier[i] ][ identifier[j] ], identifier[yrat_solsys] )- literal[int] )* literal[int] )
identifier[ratioy_tmp] . identifier[append] ( identifier[ratioy_tmp_tmp] )
identifier[ratioy] = identifier[array] ( identifier[ratioy_tmp] )
identifier[plt_massrange_lst] =[]
keyword[if] identifier[plt_massrange] == keyword[True] :
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[ratiox] )):
identifier[plt_massrange_lst] . identifier[append] ([ identifier[ratiox] [ identifier[i] ][ literal[int] ], identifier[ratioy] [ identifier[i] ][ literal[int] ], identifier[mass] [ identifier[crich] [ literal[int] * identifier[i] ]]])
identifier[plt_massrange_lst] . identifier[append] ([ identifier[ratiox] [ identifier[i] ][ identifier[len] ( identifier[ratiox] [ identifier[i] ])- literal[int] ], identifier[ratioy] [ identifier[i] ][ identifier[len] ( identifier[ratioy] [ identifier[i] ])- literal[int] ], identifier[mass] [ identifier[crich] [ literal[int] * identifier[i] + literal[int] ]]])
keyword[elif] identifier[plt_massrange] != keyword[False] :
keyword[for] identifier[plt_mr_val] keyword[in] identifier[plt_massrange] :
identifier[mrng_i] = literal[int]
keyword[while] identifier[plt_mr_val] > identifier[mass] [ identifier[mrng_i] ] keyword[and] identifier[mrng_i] < identifier[len] ( identifier[mass] ):
identifier[mrng_i] += literal[int]
keyword[if] identifier[mrng_i] > literal[int] :
identifier[mrng_i] -= literal[int]
identifier[mratx] = identifier[old_div] ( identifier[isotope_profile] [ identifier[mrng_i] ][ literal[int] ], identifier[isotope_profile] [ identifier[mrng_i] ][ literal[int] ])
identifier[mraty] = identifier[old_div] ( identifier[isotope_profile] [ identifier[mrng_i] ][ literal[int] ], identifier[isotope_profile] [ identifier[mrng_i] ][ literal[int] ])
keyword[if] identifier[deltax] :
identifier[mratx] =( identifier[old_div] ( identifier[mratx] , identifier[ratiox_solsys] )- literal[int] )* literal[int]
keyword[if] identifier[deltay] :
identifier[mraty] =( identifier[old_div] ( identifier[mraty] , identifier[ratioy_solsys] )- literal[int] )* literal[int]
identifier[plt_massrange_lst] . identifier[append] ([ identifier[mratx] , identifier[mraty] , identifier[mass] [ identifier[mrng_i] ]])
identifier[xdata] = identifier[ratiox]
identifier[ydata] = identifier[ratioy]
identifier[style_tmp0] =[ identifier[plt_symb] + identifier[plt_lt] , identifier[plt_col] , identifier[plt_col] , literal[string] , literal[string] , identifier[legend] ]
identifier[style_tmp] =[ identifier[plt_symb] + identifier[plt_lt] , identifier[plt_col] , identifier[plt_col] , literal[string] , literal[string] , keyword[None] ]
identifier[style] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[xdata] )):
keyword[if] identifier[i] == literal[int] :
identifier[style] . identifier[append] ( identifier[style_tmp0] )
keyword[else] :
identifier[style] . identifier[append] ( identifier[style_tmp] )
keyword[else] :
identifier[print] ( literal[string] )
keyword[return] keyword[None]
keyword[if] identifier[self] . identifier[_classTest] ()== literal[string] :
identifier[print] ( literal[string] )
identifier[xdata] , identifier[xdataerr] , identifier[ydata] , identifier[ydataerr] , identifier[style] = identifier[self] . identifier[plot_ratio_return] ( identifier[xiso] , identifier[yiso] , identifier[deltax] , identifier[deltay] )
identifier[legend] = keyword[True]
identifier[plt_sparse] = literal[int]
identifier[plt_lw] = literal[int]
identifier[params] ={ literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] }
identifier[pl] . identifier[rcParams] . identifier[update] ( identifier[params] )
identifier[pl] . identifier[figure] ( identifier[fign] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[xdata] )):
keyword[if] identifier[errbar] :
keyword[if] identifier[xdataerr] != keyword[None] keyword[or] identifier[ydataerr] != keyword[None] :
identifier[pl] . identifier[errorbar] ( identifier[xdata] [ identifier[i] ], identifier[ydata] [ identifier[i] ], identifier[xerr] = identifier[xdataerr] [ identifier[i] ], identifier[yerr] = identifier[ydataerr] [ identifier[i] ], identifier[marker] = identifier[style] [ identifier[i] ][ literal[int] ],
identifier[color] = identifier[style] [ identifier[i] ][ literal[int] ], identifier[linestyle] = literal[string] , identifier[lw] = literal[int] , identifier[markevery] = identifier[plt_sparse] , identifier[alpha] = identifier[alpha_dum] )
identifier[pl] . identifier[plot] ( identifier[xdata] [ identifier[i] ], identifier[ydata] [ identifier[i] ], identifier[style] [ identifier[i] ][ literal[int] ], identifier[c] = identifier[style] [ identifier[i] ][ literal[int] ], identifier[mfc] = identifier[style] [ identifier[i] ][ literal[int] ], identifier[ms] = identifier[float] ( identifier[style] [ identifier[i] ][ literal[int] ]),
identifier[mew] = identifier[float] ( identifier[style] [ identifier[i] ][ literal[int] ]), identifier[label] = identifier[style] [ identifier[i] ][ literal[int] ], identifier[markevery] = identifier[plt_sparse] , identifier[linewidth] = identifier[plt_lw] , identifier[alpha] = identifier[alpha_dum] )
keyword[if] identifier[plt_massrange] != keyword[False] :
keyword[for] identifier[mrng_ind] keyword[in] identifier[range] ( identifier[len] ( identifier[plt_massrange_lst] )):
identifier[pl] . identifier[text] ( identifier[plt_massrange_lst] [ identifier[mrng_ind] ][ literal[int] ], identifier[plt_massrange_lst] [ identifier[mrng_ind] ][ literal[int] ],
identifier[str] ( identifier[round] ( identifier[plt_massrange_lst] [ identifier[mrng_ind] ][ literal[int] ], literal[int] )), identifier[ha] = literal[string] , identifier[va] = literal[string] , identifier[color] = identifier[plt_col] , identifier[fontsize] = literal[int] )
keyword[if] identifier[logx] keyword[and] identifier[logy] == keyword[False] :
identifier[pl] . identifier[semilogx] ()
keyword[elif] identifier[logx] == keyword[False] keyword[and] identifier[logy] :
identifier[pl] . identifier[semilogy] ()
keyword[elif] identifier[logx] keyword[and] identifier[logy] :
identifier[pl] . identifier[loglog] ()
keyword[if] identifier[legend] != keyword[None] keyword[and] identifier[legloc] != keyword[None] :
identifier[pl] . identifier[legend] ( identifier[loc] = identifier[legloc] )
keyword[if] identifier[title] != keyword[None] :
identifier[pl] . identifier[title] ( identifier[title] )
keyword[if] identifier[deltax] :
identifier[pl] . identifier[xlabel] ( literal[string] + identifier[xiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]+ literal[string] + identifier[xiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]+ literal[string] + identifier[xiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]+ literal[string] + identifier[xiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]+ literal[string] )
keyword[else] :
identifier[pl] . identifier[xlabel] ( literal[string] + identifier[xiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]+ literal[string] + identifier[xiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]+ literal[string] + identifier[xiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]+ literal[string] + identifier[xiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ])
keyword[if] identifier[deltay] :
identifier[pl] . identifier[ylabel] ( literal[string] + identifier[yiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]+ literal[string] + identifier[yiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]+ literal[string] + identifier[yiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]+ literal[string] + identifier[yiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]+ literal[string] )
keyword[else] :
identifier[pl] . identifier[ylabel] ( literal[string] + identifier[yiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]+ literal[string] + identifier[yiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]+ literal[string] + identifier[yiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]+ literal[string] + identifier[yiso] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ])
identifier[print] ( identifier[xrat_solsys] , identifier[yrat_solsys] )
keyword[if] identifier[deltay] :
identifier[pl] . identifier[axhline] ( literal[int] , identifier[color] = literal[string] )
keyword[else] :
identifier[pl] . identifier[axhline] ( identifier[yrat_solsys] , identifier[color] = literal[string] )
keyword[if] identifier[deltax] :
identifier[pl] . identifier[axvline] ( literal[int] , identifier[color] = literal[string] )
keyword[else] :
identifier[pl] . identifier[axvline] ( identifier[xrat_solsys] , identifier[color] = literal[string] )
identifier[pl] . identifier[gcf] (). identifier[subplots_adjust] ( identifier[bottom] = literal[int] )
identifier[pl] . identifier[gcf] (). identifier[subplots_adjust] ( identifier[left] = literal[int] )
keyword[if] identifier[figsave] != keyword[False] :
identifier[pl] . identifier[savefig] ( identifier[figsave] )
keyword[if] identifier[plt_show] :
identifier[pl] . identifier[show] ()
|
def plot_isoratios(self, xiso, yiso, fign=1, spec=None, deltax=True, deltay=True, logx=False, logy=False, title=None, legend=None, legloc='lower right', errbar=True, dcycle=500, addiso=None, co_toggle='c', cust_toggle=None, shift=0, weighting=None, zoneselect=None, iniabufile='iniab2.0E-02GN93.ppn', plt_sparse=1, plt_symb='o', plt_col='k', plt_lt='-', plt_lw=1.0, alpha_dum=1.0, plt_massrange=False, plt_show=True, figsave=False):
"""
This is the new routine to plot isotopic ratios for ALL input. rt, June 2014
Parameters:
-----------
xiso : np.array
x data to plot. This can be an array or a list of arrays, depending on who calls the routine
yiso : np.array
y data to plot. This can be an array or a list of arrays, depending on who calls the routine
fign : integer, optional
Figure number
spec : string, optional
What specifications do you want to do when coming from nugridse models. Choose 'surf' for
surface models or 'exp' for explosions (out files)
deltax : boolean, optional
X axis in delta values?
deltay : boolean, optional
Y axis in delta values?
logx : boolean, optional
Logarithmic x axis?
logy : boolean, optional
Logarithmic y axis?
title : string, optional
Title for your plot
legend : string, optional
Legend for your model / grains. For grains the legend is automatically taken from the
grain class
legloc : string / integer, optional
Location of the legend, use matplotlib standard. Use None to not plot legend if plotted
by default, e.g., from grain class routine.
errbar : boolean, optional
Error bars on grain data?
dcycle : integer, optional
Difference between cycles to take for thermal pulse searching, if searching is
deactivated, dcycle describes how often cycles are sampled. The default is 500.
addiso : list, optional
For explosive models. Add an isotope. Format ['C-12', 0.5 ,'N-12'] to add N12
to C12 and multiply it with a factor of 0.5. Multiple isotopes can be added, the
factor is optional and does not have to be given. Isotopes can be added to other
isotopes as well, i.e., [['C-12', 'N-12'], ['C-13', 'N-13']]. The default None.
Notice that while addiso = [['N-14','O-14'],['N-14',fractionation,'C-14']]
works, other options like addiso = [['N-14','O-14',fractionation,'C-14']] or
    addiso = [['N-14',1,'O-14',fractionation,'C-14']] are not working and raise a TypeError.
    CAREFUL: for the option addiso = [['N-14',fractionation,'C-14','O-14']] there is
no error message, but the fractionation is applied to both O14 and C14!
co_toggle : string, optional
For explosive models, choose what shells you want to look for! Select 'c' for
selecting zones with C/O >= 1. Select 'o' for C/O <= 1. If 'a' takes the
    whole star. The default is 'c'. See cust_toggle (below) for an alternative!
cust_toggle : list, optional
This option is like co_toggle (and overwrites it when chosen) but lets you choose
your own comparison. For example you want to find zones that have a 10 fold
overabundance of Ti-46 and Ti-47 over O-16 and Zr-96, you can choose here
    [['Ti-46','Ti-47'],['O-16','Zr-96'],100.] Assuming the first list is x, the
second list y, and the comparator number is f, the statement only plots shells
in which the condition x/y>f is fulfilled. x and y are number sums of the chosen
isotopes, f has to be given as a float. This is only for explosive shells. Please
note, if this toggle is NOT None, then co_toggle is overwritten!
shift : integer, optional
For explosive models, how much do you want to shift the models back from the
last cycle? By default (0) the last cycle is taken.
weighting : string, optional
        For explosive models. If None, plot every profile separately. If 'zone',
        average each zone. If 'all', average all selected zones. The
default is None.
zoneselect: string, optional
For explosive models. Select if you want to plot 'all' zones or outer most zone.
Arguments are 'all' and 'top', respectively. Default is None, then the user is
asked to provide this information during the routine as input.
iniabufile : string, optional
Initial abundance file. Use absolute path for your file or filename to choose a
given file in USEEPP. Attention: You need a standard tree checked out from SVN
plt_sparse : integer, optional
Every how many datapoints is the plot done? Not used for some routines!
plt_symb : string, optional
Symbol for the plot. In case of grains, this is handled automatically.
plt_col : string / float, optional
Color for plotted curve. In case of grains, this is handled automatically.
plt_lt : string, optional
line type for plot.
plt_lw : float, optional
Line width for plot.
    alpha_dum : transparency to apply to grain data, in case many data points are plotted.
        This may also be applied to theoretical curves.
plt_massrange : boolean, optional
For explosive models. Plot mass of shell with first and last datapoint of
each zone. If list given, label those zones. The default is False.
plt_show : boolean, optional
Do you want to show the plot or not?
figsave : string, optional
Give path and filename here, if you want to save the figure.
"""
from . import utils as u
### WORK ON PATH ###
# define svn path form path where script runs, depending on standard input or not
if len(iniabufile.split('/')) == 1: # means not an absolute path!
scriptpathtmp = __file__
if len(scriptpathtmp.split('/')) == 1: # in folder where nugridse is
scriptpathtmp = os.path.abspath('.') + '/nugridse.py' # to get the current dir # depends on [control=['if'], data=[]]
svnpathtmp = '/'
for i in range(len(scriptpathtmp.split('/')) - 3): # -3 to go to folders up!
if scriptpathtmp.split('/')[i] != '':
svnpathtmp += scriptpathtmp.split('/')[i] + '/' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
iniabufile = svnpathtmp + 'frames/mppnp/USEEPP/' + iniabufile # make absolute path for iniabufile # depends on [control=['if'], data=[]]
### get solar system ratios for the isotopes that are specified in the input file ###
inut = u.iniabu(iniabufile)
try:
xrat_solsys = inut.isoratio_init(xiso) # depends on [control=['try'], data=[]]
except KeyError: # if isotope not available, e.g., if plotting Ti-44 / Ti-48 ratio
xrat_solsys = 0.0 # depends on [control=['except'], data=[]]
try:
yrat_solsys = inut.isoratio_init(yiso) # depends on [control=['try'], data=[]]
except KeyError:
yrat_solsys = 0.0 # depends on [control=['except'], data=[]]
# number ratio for solar system ratio
xrat_solsys *= old_div(float(xiso[1].split('-')[1]), float(xiso[0].split('-')[1]))
yrat_solsys *= old_div(float(yiso[1].split('-')[1]), float(yiso[0].split('-')[1]))
# initialize xdataerr and ydataerr as None
xdataerr = None
ydataerr = None
### DO PLOTS FROM NUGRIDSE CLASS ###
if self._classTest() == 'se':
if spec == None:
spec = str(eval(input("Please specify 'surf' for surface models (AGB stars) or 'exp' for explosivemodels and zone finding, etc., and press enter: "))) # depends on [control=['if'], data=['spec']]
### SURFACE MODELS - PLOT AGB STAR STUFF ###
if spec == 'surf':
print('Plotting AGB star stuff')
# read in thermal pulse position and co ratio
(tp_pos, co_return) = self._tp_finder(dcycle)
tp_pos_tmp = []
co_return_tmp = []
tp_pos_tmp.append(1)
co_return_tmp.append(co_return[0])
for i in range(len(tp_pos)):
tp_pos_tmp.append(tp_pos[i])
co_return_tmp.append(co_return[i]) # depends on [control=['for'], data=['i']]
tp_pos = tp_pos_tmp
co_return = co_return_tmp
# read in data
iso_alldata = self.get(tp_pos, [xiso[0], xiso[1], yiso[0], yiso[1]])
xrat = np.zeros(len(iso_alldata))
yrat = np.zeros(len(iso_alldata))
for i in range(len(iso_alldata)):
xrat[i] = old_div(iso_alldata[i][0], iso_alldata[i][1])
yrat[i] = old_div(iso_alldata[i][2], iso_alldata[i][3]) # depends on [control=['for'], data=['i']]
# make number ratios
for i in range(len(xrat)):
xrat[i] *= old_div(float(xiso[1].split('-')[1]), float(xiso[0].split('-')[1]))
yrat[i] *= old_div(float(yiso[1].split('-')[1]), float(yiso[0].split('-')[1])) # depends on [control=['for'], data=['i']]
# if delta values are requested, need to calculate those now
if deltax:
xrat = (old_div(xrat, xrat_solsys) - 1.0) * 1000.0 # depends on [control=['if'], data=[]]
if deltay:
yrat = (old_div(yrat, yrat_solsys) - 1.0) * 1000.0 # depends on [control=['if'], data=[]]
# now we might have o and c rich zones. prepare stuff for plotting
xdata_o = []
ydata_o = []
xdata_c = []
ydata_c = []
for i in range(len(co_return)):
if co_return[i] <= 1.0:
xdata_o.append(xrat[i])
ydata_o.append(yrat[i]) # depends on [control=['if'], data=[]]
else:
xdata_c.append(xrat[i])
ydata_c.append(yrat[i]) # depends on [control=['for'], data=['i']]
if xdata_o != [] and xdata_c != []:
xdata_o.append(xdata_c[0])
ydata_o.append(ydata_c[0]) # depends on [control=['if'], data=[]]
# now make the styles
style_o = [plt_symb + '--', plt_col, '1.', '4', '2', None]
style_c = [plt_symb + plt_lt, plt_col, plt_col, '7.', '1', legend]
# now make data for plotting
xdata = []
ydata = []
style = []
if xdata_o != []:
xdata.append(xdata_o)
ydata.append(ydata_o)
style.append(style_o) # depends on [control=['if'], data=['xdata_o']]
if xdata_c != []:
xdata.append(xdata_c)
ydata.append(ydata_c)
style.append(style_c) # depends on [control=['if'], data=['xdata_c']] # depends on [control=['if'], data=[]]
### EXPLOSIVE MODELS ###
elif spec == 'exp':
print('explosive models')
# compatibility
co_toggle = co_toggle.lower()
isotope_list = [xiso[0], xiso[1], yiso[0], yiso[1]]
# cycle
cyc_no = self.se.cycles[len(self.se.cycles) - 1 - shift]
mco_data = self.get(cyc_no, ['mass', 'C-12', 'C-13', 'O-16', 'O-17', 'O-18', xiso[0], xiso[1], yiso[0], yiso[1]])
mass = mco_data[0]
# if no custom toggle for enrichment
if cust_toggle == None:
c_elem = mco_data[1] + mco_data[2]
o_elem = mco_data[3] + mco_data[4] + mco_data[5]
co_ratio = c_elem / o_elem * old_div(16.0, 12.0)
co_comp_val = 1.0 # depends on [control=['if'], data=[]]
else:
co_data1 = self.get(cyc_no, cust_toggle[0])
co_data2 = self.get(cyc_no, cust_toggle[1])
for i in range(len(co_data1)):
if i == 0:
c_elem = co_data1[i] # depends on [control=['if'], data=['i']]
else:
c_elem += co_data1[i] # depends on [control=['for'], data=['i']]
for i in range(len(co_data2)):
if i == 0:
o_elem = co_data2[i] # depends on [control=['if'], data=['i']]
else:
o_elem += co_data2[i] # depends on [control=['for'], data=['i']]
# now we need to make the mass number of everything in here to make number ratios
massn1 = 0.0
for i in range(len(co_data1)):
massn1 += sum(co_data1[i]) * float(cust_toggle[0][i].split('-')[1]) # depends on [control=['for'], data=['i']]
massn1 /= sum(c_elem)
massn2 = 0.0
for i in range(len(co_data2)):
massn2 += sum(co_data2[i]) * float(cust_toggle[1][i].split('-')[1]) # depends on [control=['for'], data=['i']]
massn2 /= sum(o_elem)
co_ratio = c_elem / o_elem * old_div(massn2, massn1) # this has nothing to do with a C/O ratio anymore! but keep name
# comparator value
co_comp_val = float(cust_toggle[2])
# get the data now
isotope_profile = []
for i in range(6, 10): # in mco_data
isotope_profile.append(mco_data[i]) # depends on [control=['for'], data=['i']]
# add radioactive isotopes (if given)
if addiso != None:
if type(addiso[0] == list): # then list of lists
for i in range(len(addiso)):
for j in range(4):
if isotope_list[j] == addiso[i][0]:
multiplicator_addiso = 1.0
try:
multiplicator_addiso = float(addiso[i][1])
starter = 2 # depends on [control=['try'], data=[]]
except ValueError:
starter = 1 # depends on [control=['except'], data=[]]
for k in range(starter, len(addiso[i])):
isotope_profile[j] += array(self.get(cyc_no, addiso[i][k])) * multiplicator_addiso # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
else:
for j in range(4):
if isotope_list[j] == addiso[0]:
multiplicator_addiso = 1.0
try:
multiplicator_addiso = float(addiso[1])
starter = 2 # depends on [control=['try'], data=[]]
except ValueError:
starter = 1 # depends on [control=['except'], data=[]]
for k in range(starter, len(addiso)):
isotope_profile[j] += array(self.get(cyc_no, addiso[k])) * multiplicator_addiso # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']] # depends on [control=['if'], data=['addiso']]
# search for carbon / oxygen rich layers
crich = [] # alternating start stop values. if odd number, then surface is c-rich, but add stop number
dumb = True
if cust_toggle != None:
for i in range(len(co_ratio)):
if dumb:
if co_ratio[i] >= co_comp_val:
crich.append(i)
dumb = False
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif co_ratio[i] < co_comp_val:
crich.append(i)
dumb = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
elif co_toggle != 'a':
for i in range(len(co_ratio)):
if co_toggle == 'c': # carbon rich
if dumb:
if co_ratio[i] >= 1:
crich.append(i)
dumb = False
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif co_ratio[i] < 1:
crich.append(i)
dumb = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif co_toggle == 'o': # oxygen rich
if dumb:
if co_ratio[i] <= 1:
crich.append(i)
dumb = False
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif co_ratio[i] > 1:
crich.append(i)
dumb = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
print('Select your enrichment!')
return None # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=['co_toggle']]
else: # take whole star
print('Using all profiles to mix')
crich.append(0)
crich.append(len(co_toggle))
if len(crich) % 2 == 1:
crich.append(len(co_ratio) - 1) # depends on [control=['if'], data=[]]
if len(crich) == 0:
print('Star did not get rich in C or O, depending on what you specified')
return None # depends on [control=['if'], data=[]]
# make isotope_profile into array and transpose
isotope_profile = array(isotope_profile).transpose()
# Ask user which zones to use
if co_toggle != 'a':
if cust_toggle != None:
print('\n\nI have found the following zones:\n') # depends on [control=['if'], data=[]]
elif co_toggle == 'c':
print('\n\nI have found the following carbon rich zones:\n') # depends on [control=['if'], data=[]]
elif co_toggle == 'o':
print('\n\nI have found the following oxygen rich zones:\n') # depends on [control=['if'], data=[]]
mass_tmp = zeros(len(crich))
for i in range(len(crich)):
mass_tmp[i] = mass[crich[i]] # depends on [control=['for'], data=['i']]
j = 1
for i in range(old_div(len(crich), 2)):
print('Mass range (' + str(j) + '):\t' + str(mass_tmp[2 * i]) + ' - ' + str(mass_tmp[2 * i + 1]))
j += 1 # depends on [control=['for'], data=['i']]
print('\n')
if zoneselect == 'all':
usr_zones = 0 # depends on [control=['if'], data=[]]
elif zoneselect == 'top':
usr_zones = [j - 1] # depends on [control=['if'], data=[]]
else:
usr_zones = eval(input('Please select which mass range you want to use. Select 0 for all zones. Otherwise give one zone or a list of zones separated by comma (e.g.: 1, 2, 4): '))
crich_dumb = crich
if usr_zones == 0:
print('I continue w/ all zones then') # depends on [control=['if'], data=[]]
elif type(usr_zones) == int: # only one zone selected
tmp = int(usr_zones) - 1
crich = crich_dumb[2 * tmp:2 * tmp + 2] # depends on [control=['if'], data=['int']]
else:
crich = []
for i in range(len(usr_zones)):
tmp = int(usr_zones[i]) - 1
crich.append(crich_dumb[2 * tmp])
crich.append(crich_dumb[2 * tmp + 1]) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=['co_toggle']]
# weight profiles according to weighting factor using the selected crich
# define isos_to_use variable for later
if weighting == None:
isos_to_use = []
for i in range(old_div(len(crich), 2)):
isos_dumb = []
n = crich[2 * i]
while n <= crich[2 * i + 1]:
isos_dumb.append(isotope_profile[n])
n += 1 # depends on [control=['while'], data=['n']]
isos_to_use.append(array(isos_dumb)) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
elif weighting.lower() == 'zone' or weighting.lower() == 'zones':
# make array w/ mass weigted isotope ratio (4) for all mass zones
isotope_profile_cweight = zeros((old_div(len(crich), 2), 4))
mass_tot = []
for i in range(len(isotope_profile_cweight)): # 2*i is start, 2*i+1 is stop value
if crich[2 * i] == 0:
print('C- / O-rich in first shell (core).') # depends on [control=['if'], data=[]]
else:
dumb = crich[2 * i + 1]
j = crich[2 * i]
mass_tmp = 0
while j <= dumb:
mass_shell = mass[j] - mass[j - 1]
mass_tmp += mass_shell
for k in range(4):
isotope_profile_cweight[i][k] += isotope_profile[j][k] * mass_shell # depends on [control=['for'], data=['k']]
j += 1 # depends on [control=['while'], data=['j']]
mass_tot.append(mass_tmp) # depends on [control=['for'], data=['i']]
for i in range(len(isotope_profile_cweight)):
for j in range(4):
isotope_profile_cweight[i][j] /= mass_tot[i] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
isos_to_use = [array(isotope_profile_cweight)] # depends on [control=['if'], data=[]]
elif weighting.lower() == 'all': # average all zones by mass
isos_tmp = zeros((1, len(isotope_profile[0])))
for i in range(len(isotope_profile) - 1): # neglect surface effects
for j in range(len(isos_tmp[0])):
mass_shell = mass[i + 1] - mass[i]
isos_tmp[0][j] += isotope_profile[i][j] * mass_shell # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
# weight all
isos_tmp /= sum(mass)
isos_to_use = [isos_tmp] # depends on [control=['if'], data=[]]
# change to isotope numbers from mass!
for i in range(len(isos_to_use)):
for j in range(len(isos_to_use[i])):
for k in range(len(isos_to_use[i][j])):
# here we just divide 'iso_massf' output with the mass number
# this means that in the end, the isotope ratios in number space are correc
# but have to use ratios from here on for meaningful stuff
isos_to_use[i][j][k] /= float(isotope_list[k].split('-')[1]) # depends on [control=['for'], data=['k']] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
# do the ratios and stuff
ratiox = []
ratioy = []
for i in range(len(isos_to_use)):
ratiox_dumb = []
ratioy_dumb = []
for j in range(len(isos_to_use[i])):
ratiox_dumb.append(old_div(isos_to_use[i][j][0], isos_to_use[i][j][1]))
ratioy_dumb.append(old_div(isos_to_use[i][j][2], isos_to_use[i][j][3])) # depends on [control=['for'], data=['j']]
ratiox.append(array(ratiox_dumb))
ratioy.append(array(ratioy_dumb)) # depends on [control=['for'], data=['i']]
# make arrays for ratiox and ratioy
ratiox = array(ratiox)
ratioy = array(ratioy)
# make number ratio out of everything
for i in range(len(ratiox)):
for j in range(len(ratiox[i])):
ratiox[i][j] *= old_div(float(xiso[1].split('-')[1]), float(xiso[0].split('-')[1]))
ratioy[i][j] *= old_div(float(yiso[1].split('-')[1]), float(yiso[0].split('-')[1])) # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
if deltax:
ratiox_tmp = []
for i in range(len(ratiox)):
ratiox_tmp_tmp = []
for j in range(len(ratiox[i])):
ratiox_tmp_tmp.append((old_div(ratiox[i][j], xrat_solsys) - 1.0) * 1000.0) # depends on [control=['for'], data=['j']]
ratiox_tmp.append(ratiox_tmp_tmp) # depends on [control=['for'], data=['i']]
ratiox = array(ratiox_tmp) # depends on [control=['if'], data=[]]
if deltay:
ratioy_tmp = []
for i in range(len(ratioy)):
ratioy_tmp_tmp = []
for j in range(len(ratioy[i])):
ratioy_tmp_tmp.append((old_div(ratioy[i][j], yrat_solsys) - 1.0) * 1000.0) # depends on [control=['for'], data=['j']]
ratioy_tmp.append(ratioy_tmp_tmp) # depends on [control=['for'], data=['i']]
ratioy = array(ratioy_tmp) # depends on [control=['if'], data=[]]
# create massrange array if necessary
plt_massrange_lst = []
if plt_massrange == True: # use == True because otherwise the list enters here too... why?
for i in range(len(ratiox)):
plt_massrange_lst.append([ratiox[i][0], ratioy[i][0], mass[crich[2 * i]]])
plt_massrange_lst.append([ratiox[i][len(ratiox[i]) - 1], ratioy[i][len(ratioy[i]) - 1], mass[crich[2 * i + 1]]]) # start: x-ratio, y-ratio, mass label # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
elif plt_massrange != False:
for plt_mr_val in plt_massrange:
mrng_i = 0
while plt_mr_val > mass[mrng_i] and mrng_i < len(mass):
mrng_i += 1 # depends on [control=['while'], data=[]]
if mrng_i > 0:
mrng_i -= 1 # depends on [control=['if'], data=['mrng_i']]
mratx = old_div(isotope_profile[mrng_i][0], isotope_profile[mrng_i][1])
mraty = old_div(isotope_profile[mrng_i][2], isotope_profile[mrng_i][3])
if deltax:
mratx = (old_div(mratx, ratiox_solsys) - 1.0) * 1000.0 # depends on [control=['if'], data=[]]
if deltay:
mraty = (old_div(mraty, ratioy_solsys) - 1.0) * 1000.0 # depends on [control=['if'], data=[]]
plt_massrange_lst.append([mratx, mraty, mass[mrng_i]]) # depends on [control=['for'], data=['plt_mr_val']] # depends on [control=['if'], data=['plt_massrange']]
# make style and prepare for plotting here
xdata = ratiox
ydata = ratioy
style_tmp0 = [plt_symb + plt_lt, plt_col, plt_col, '13.', '1', legend]
style_tmp = [plt_symb + plt_lt, plt_col, plt_col, '13.', '1', None]
style = []
for i in range(len(xdata)):
if i == 0:
style.append(style_tmp0) # depends on [control=['if'], data=[]]
else:
style.append(style_tmp) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
else:
print('You did not specify a useful spec argument -> abort.')
return None # depends on [control=['if'], data=[]]
### PLOTS FROM GRAIN CLASS ###
if self._classTest() == 'grain':
print('Presolar grains are cool!')
(xdata, xdataerr, ydata, ydataerr, style) = self.plot_ratio_return(xiso, yiso, deltax, deltay)
legend = True
plt_sparse = 1 # to avoid monkey input
plt_lw = 0.0 # depends on [control=['if'], data=[]]
### PLOT ###
# data is prepared now, make the plots. data must be in format
# [[data1],[data2],[data3],...]
# three arrays like this, for xdata, ydata, and style.
# style format: symbol, edge color, face color, symbol size, edge width, label
# this is then compatible with grain.py style definitions
# Size of font etc.
params = {'axes.labelsize': 20, 'text.fontsize': 14, 'legend.fontsize': 14, 'xtick.labelsize': 14, 'ytick.labelsize': 14}
pl.rcParams.update(params)
pl.figure(fign)
for i in range(len(xdata)):
if errbar:
if xdataerr != None or ydataerr != None:
pl.errorbar(xdata[i], ydata[i], xerr=xdataerr[i], yerr=ydataerr[i], marker=style[i][0], color=style[i][1], linestyle='', lw=2, markevery=plt_sparse, alpha=alpha_dum) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
pl.plot(xdata[i], ydata[i], style[i][0], c=style[i][1], mfc=style[i][2], ms=float(style[i][3]), mew=float(style[i][4]), label=style[i][5], markevery=plt_sparse, linewidth=plt_lw, alpha=alpha_dum) # depends on [control=['for'], data=['i']]
# plot text labels if necessary
if plt_massrange != False:
for mrng_ind in range(len(plt_massrange_lst)):
pl.text(plt_massrange_lst[mrng_ind][0], plt_massrange_lst[mrng_ind][1], str(round(plt_massrange_lst[mrng_ind][2], 2)), ha='right', va='bottom', color=plt_col, fontsize=15.0) # depends on [control=['for'], data=['mrng_ind']] # depends on [control=['if'], data=[]]
# log?
if logx and logy == False:
pl.semilogx() # depends on [control=['if'], data=[]]
elif logx == False and logy:
pl.semilogy() # depends on [control=['if'], data=[]]
elif logx and logy:
pl.loglog() # depends on [control=['if'], data=[]]
# legend
if legend != None and legloc != None:
pl.legend(loc=legloc) # depends on [control=['if'], data=[]]
# title and labels
if title != None:
pl.title(title) # depends on [control=['if'], data=['title']]
if deltax:
pl.xlabel('$\\delta$($^{' + xiso[0].split('-')[1] + '}$' + xiso[0].split('-')[0] + '/$^{' + xiso[1].split('-')[1] + '}$' + xiso[1].split('-')[0] + ')') # depends on [control=['if'], data=[]]
else:
pl.xlabel('$^{' + xiso[0].split('-')[1] + '}$' + xiso[0].split('-')[0] + '/$^{' + xiso[1].split('-')[1] + '}$' + xiso[1].split('-')[0])
if deltay:
pl.ylabel('$\\delta$($^{' + yiso[0].split('-')[1] + '}$' + yiso[0].split('-')[0] + '/$^{' + yiso[1].split('-')[1] + '}$' + yiso[1].split('-')[0] + ')') # depends on [control=['if'], data=[]]
else:
pl.ylabel('$^{' + yiso[0].split('-')[1] + '}$' + yiso[0].split('-')[0] + '/$^{' + yiso[1].split('-')[1] + '}$' + yiso[1].split('-')[0])
# plot horizontal and vertical lines
print(xrat_solsys, yrat_solsys)
if deltay:
pl.axhline(0, color='k') # depends on [control=['if'], data=[]]
else:
pl.axhline(yrat_solsys, color='k')
if deltax:
pl.axvline(0, color='k') # depends on [control=['if'], data=[]]
else:
pl.axvline(xrat_solsys, color='k')
# borders of plot
pl.gcf().subplots_adjust(bottom=0.15)
pl.gcf().subplots_adjust(left=0.15)
# save and show
if figsave != False:
pl.savefig(figsave) # depends on [control=['if'], data=['figsave']]
if plt_show:
pl.show() # depends on [control=['if'], data=[]]
|
def picknthweekday(year, month, dayofweek, hour, minute, whichweek):
    """Return the *whichweek*-th occurrence of *dayofweek* in a month.

    ``dayofweek == 0`` means Sunday; ``whichweek == 5`` requests the last
    occurrence in the month.  Works whether *dayofweek* is an ISO weekday
    (1-7) or Microsoft-style (0-6), because ``7 % 7 == 0``.
    """
    month_start = datetime.datetime(year, month, 1, hour, minute)
    # Day-of-month (1..7) of the first matching weekday in this month.
    first_match_day = ((dayofweek - month_start.isoweekday()) % 7) + 1
    candidate = month_start.replace(day=first_match_day) + (whichweek - 1) * ONEWEEK
    # Requesting a fifth occurrence that does not exist overflows into the
    # next month; step back one week to get the last occurrence instead.
    return candidate if candidate.month == month else candidate - ONEWEEK
|
def function[picknthweekday, parameter[year, month, dayofweek, hour, minute, whichweek]]:
constant[ dayofweek == 0 means Sunday, whichweek 5 means last instance ]
variable[first] assign[=] call[name[datetime].datetime, parameter[name[year], name[month], constant[1], name[hour], name[minute]]]
variable[weekdayone] assign[=] call[name[first].replace, parameter[]]
variable[wd] assign[=] binary_operation[name[weekdayone] + binary_operation[binary_operation[name[whichweek] - constant[1]] * name[ONEWEEK]]]
if compare[name[wd].month not_equal[!=] name[month]] begin[:]
<ast.AugAssign object at 0x7da2044c2050>
return[name[wd]]
|
keyword[def] identifier[picknthweekday] ( identifier[year] , identifier[month] , identifier[dayofweek] , identifier[hour] , identifier[minute] , identifier[whichweek] ):
literal[string]
identifier[first] = identifier[datetime] . identifier[datetime] ( identifier[year] , identifier[month] , literal[int] , identifier[hour] , identifier[minute] )
identifier[weekdayone] = identifier[first] . identifier[replace] ( identifier[day] =(( identifier[dayofweek] - identifier[first] . identifier[isoweekday] ())% literal[int] )+ literal[int] )
identifier[wd] = identifier[weekdayone] +(( identifier[whichweek] - literal[int] )* identifier[ONEWEEK] )
keyword[if] ( identifier[wd] . identifier[month] != identifier[month] ):
identifier[wd] -= identifier[ONEWEEK]
keyword[return] identifier[wd]
|
def picknthweekday(year, month, dayofweek, hour, minute, whichweek):
""" dayofweek == 0 means Sunday, whichweek 5 means last instance """
first = datetime.datetime(year, month, 1, hour, minute)
# This will work if dayofweek is ISO weekday (1-7) or Microsoft-style (0-6),
# Because 7 % 7 = 0
weekdayone = first.replace(day=(dayofweek - first.isoweekday()) % 7 + 1)
wd = weekdayone + (whichweek - 1) * ONEWEEK
if wd.month != month:
wd -= ONEWEEK # depends on [control=['if'], data=[]]
return wd
|
def status():
    '''Print the manager's status and active-session count as a two-row table.'''
    with Session() as session:
        resp = session.Manager.status()
        rows = [
            ('Status', 'Active Sessions'),
            (resp['status'], resp['active_sessions']),
        ]
        print(tabulate(rows, headers='firstrow'))
|
def function[status, parameter[]]:
constant[Show the manager's current status.]
with call[name[Session], parameter[]] begin[:]
variable[resp] assign[=] call[name[session].Manager.status, parameter[]]
call[name[print], parameter[call[name[tabulate], parameter[list[[<ast.Tuple object at 0x7da204345e10>, <ast.Tuple object at 0x7da204345090>]]]]]]
|
keyword[def] identifier[status] ():
literal[string]
keyword[with] identifier[Session] () keyword[as] identifier[session] :
identifier[resp] = identifier[session] . identifier[Manager] . identifier[status] ()
identifier[print] ( identifier[tabulate] ([( literal[string] , literal[string] ),
( identifier[resp] [ literal[string] ], identifier[resp] [ literal[string] ])],
identifier[headers] = literal[string] ))
|
def status():
"""Show the manager's current status."""
with Session() as session:
resp = session.Manager.status()
print(tabulate([('Status', 'Active Sessions'), (resp['status'], resp['active_sessions'])], headers='firstrow')) # depends on [control=['with'], data=['session']]
|
def findFile(input):
    """Search a directory for full filename with optional path.

    Returns ``yes`` (true) when *input* names an existing file and -- if an
    extension specifier was appended to the name -- when that extension
    exists inside the file; returns ``no`` (false) otherwise.

    The root/extension split is delegated to ``parseFilename``; the
    extension specifier is either a bare extension number or an
    extension name with an optional ``,version`` suffix.
    """
    # If no input name is provided, default to returning 'no'(FALSE)
    if not input:
        return no
    # We use 'osfn' here to insure that any IRAF variables are
    # expanded out before splitting out the path...
    _fdir, _fname = os.path.split(osfn(input))
    if _fdir == '':
        _fdir = os.curdir
    try:
        flist = os.listdir(_fdir)
    except OSError:
        # handle when requested file in on a disconnect network store
        return no
    # Split the filename into a root and an optional extension specifier.
    _root, _extn = parseFilename(_fname)
    found = no
    for name in flist:
        if name == _root:
            # Check to see if given extension, if any, exists
            if _extn is None:
                found = yes
                continue
            else:
                # Specifier is "<number>" or "<name>[,<version>]".
                _split = _extn.split(',')
                _extnum = None
                _extver = None
                if _split[0].isdigit():
                    _extname = None
                    _extnum = int(_split[0])
                else:
                    _extname = _split[0]
                    if len(_split) > 1:
                        _extver = int(_split[1])
                    else:
                        # Default to extension version 1 when only a name is given.
                        _extver = 1
                # Open the image just to inspect its extensions.
                # NOTE(review): the handle is closed immediately but still
                # indexed below -- presumably openImage returns an object
                # (e.g. a FITS HDU list) whose structure stays readable
                # after close(); confirm against openImage's implementation.
                f = openImage(_root)
                f.close()
                if _extnum is not None:
                    # Numeric specifier: valid only if it is a legal index.
                    if _extnum < len(f):
                        found = yes
                        del f
                        continue
                    else:
                        del f
                else:
                    # Named specifier: look the extension up by name/version.
                    _fext = findExtname(f, _extname, extver=_extver)
                    if _fext is not None:
                        found = yes
                        del f
                        continue
    return found
|
def function[findFile, parameter[input]]:
constant[Search a directory for full filename with optional path.]
if <ast.UnaryOp object at 0x7da1b0ef7790> begin[:]
return[name[no]]
<ast.Tuple object at 0x7da1b0ef6080> assign[=] call[name[os].path.split, parameter[call[name[osfn], parameter[name[input]]]]]
if compare[name[_fdir] equal[==] constant[]] begin[:]
variable[_fdir] assign[=] name[os].curdir
<ast.Try object at 0x7da1b0ef6b60>
<ast.Tuple object at 0x7da1b0ef6560> assign[=] call[name[parseFilename], parameter[name[_fname]]]
variable[found] assign[=] name[no]
for taget[name[name]] in starred[name[flist]] begin[:]
if compare[name[name] equal[==] name[_root]] begin[:]
if compare[name[_extn] is constant[None]] begin[:]
variable[found] assign[=] name[yes]
continue
return[name[found]]
|
keyword[def] identifier[findFile] ( identifier[input] ):
literal[string]
keyword[if] keyword[not] identifier[input] :
keyword[return] identifier[no]
identifier[_fdir] , identifier[_fname] = identifier[os] . identifier[path] . identifier[split] ( identifier[osfn] ( identifier[input] ))
keyword[if] identifier[_fdir] == literal[string] :
identifier[_fdir] = identifier[os] . identifier[curdir]
keyword[try] :
identifier[flist] = identifier[os] . identifier[listdir] ( identifier[_fdir] )
keyword[except] identifier[OSError] :
keyword[return] identifier[no]
identifier[_root] , identifier[_extn] = identifier[parseFilename] ( identifier[_fname] )
identifier[found] = identifier[no]
keyword[for] identifier[name] keyword[in] identifier[flist] :
keyword[if] identifier[name] == identifier[_root] :
keyword[if] identifier[_extn] keyword[is] keyword[None] :
identifier[found] = identifier[yes]
keyword[continue]
keyword[else] :
identifier[_split] = identifier[_extn] . identifier[split] ( literal[string] )
identifier[_extnum] = keyword[None]
identifier[_extver] = keyword[None]
keyword[if] identifier[_split] [ literal[int] ]. identifier[isdigit] ():
identifier[_extname] = keyword[None]
identifier[_extnum] = identifier[int] ( identifier[_split] [ literal[int] ])
keyword[else] :
identifier[_extname] = identifier[_split] [ literal[int] ]
keyword[if] identifier[len] ( identifier[_split] )> literal[int] :
identifier[_extver] = identifier[int] ( identifier[_split] [ literal[int] ])
keyword[else] :
identifier[_extver] = literal[int]
identifier[f] = identifier[openImage] ( identifier[_root] )
identifier[f] . identifier[close] ()
keyword[if] identifier[_extnum] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[_extnum] < identifier[len] ( identifier[f] ):
identifier[found] = identifier[yes]
keyword[del] identifier[f]
keyword[continue]
keyword[else] :
keyword[del] identifier[f]
keyword[else] :
identifier[_fext] = identifier[findExtname] ( identifier[f] , identifier[_extname] , identifier[extver] = identifier[_extver] )
keyword[if] identifier[_fext] keyword[is] keyword[not] keyword[None] :
identifier[found] = identifier[yes]
keyword[del] identifier[f]
keyword[continue]
keyword[return] identifier[found]
|
def findFile(input):
"""Search a directory for full filename with optional path."""
# If no input name is provided, default to returning 'no'(FALSE)
if not input:
return no # depends on [control=['if'], data=[]]
# We use 'osfn' here to insure that any IRAF variables are
# expanded out before splitting out the path...
(_fdir, _fname) = os.path.split(osfn(input))
if _fdir == '':
_fdir = os.curdir # depends on [control=['if'], data=['_fdir']]
try:
flist = os.listdir(_fdir) # depends on [control=['try'], data=[]]
except OSError:
# handle when requested file in on a disconnect network store
return no # depends on [control=['except'], data=[]]
(_root, _extn) = parseFilename(_fname)
found = no
for name in flist:
if name == _root:
# Check to see if given extension, if any, exists
if _extn is None:
found = yes
continue # depends on [control=['if'], data=[]]
else:
_split = _extn.split(',')
_extnum = None
_extver = None
if _split[0].isdigit():
_extname = None
_extnum = int(_split[0]) # depends on [control=['if'], data=[]]
else:
_extname = _split[0]
if len(_split) > 1:
_extver = int(_split[1]) # depends on [control=['if'], data=[]]
else:
_extver = 1
f = openImage(_root)
f.close()
if _extnum is not None:
if _extnum < len(f):
found = yes
del f
continue # depends on [control=['if'], data=[]]
else:
del f # depends on [control=['if'], data=['_extnum']]
else:
_fext = findExtname(f, _extname, extver=_extver)
if _fext is not None:
found = yes
del f
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['_root']] # depends on [control=['for'], data=['name']]
return found
|
def iter_lines(self, warn_only=False):
    """yields stdout text, line by line.

    Chunks from ``iter_content`` are decoded with ``self.codec`` and split
    on line boundaries; a partial line at the end of one chunk is carried
    over and prepended to the first line of the next chunk.  When the
    stream is exhausted, ``self._state`` is set to ``FINISHED`` and, unless
    *warn_only* is true, ``raise_for_error()`` is invoked.
    """
    remain = ""
    for data in self.iter_content(LINE_CHUNK_SIZE, warn_only=True):
        # Indexing bytes yields an int on Python 3, so ``data[-1] in
        # (b"\n", b"\r")`` was always False; use endswith() instead.
        line_break_found = data.endswith((b"\n", b"\r"))
        lines = data.decode(self.codec).splitlines()
        lines[0] = remain + lines[0]
        if not line_break_found:
            # Last line of this chunk is incomplete; hold it back for
            # the next chunk.
            remain = lines.pop()
        else:
            # The carried-over text was consumed into lines[0]; clear it
            # so it is not prepended again to a later chunk.
            remain = ""
        for line in lines:
            yield line
    if remain:
        yield remain
    self._state = FINISHED
    if not warn_only:
        self.raise_for_error()
|
def function[iter_lines, parameter[self, warn_only]]:
constant[yields stdout text, line by line.]
variable[remain] assign[=] constant[]
for taget[name[data]] in starred[call[name[self].iter_content, parameter[name[LINE_CHUNK_SIZE]]]] begin[:]
variable[line_break_found] assign[=] compare[call[name[data]][<ast.UnaryOp object at 0x7da18bc70460>] in tuple[[<ast.Constant object at 0x7da18bc739d0>, <ast.Constant object at 0x7da18bc71720>]]]
variable[lines] assign[=] call[call[name[data].decode, parameter[name[self].codec]].splitlines, parameter[]]
call[name[lines]][constant[0]] assign[=] binary_operation[name[remain] + call[name[lines]][constant[0]]]
if <ast.UnaryOp object at 0x7da18bc73700> begin[:]
variable[remain] assign[=] call[name[lines].pop, parameter[]]
for taget[name[line]] in starred[name[lines]] begin[:]
<ast.Yield object at 0x7da18bc71300>
if name[remain] begin[:]
<ast.Yield object at 0x7da18bc73130>
name[self]._state assign[=] name[FINISHED]
if <ast.UnaryOp object at 0x7da18bc72bc0> begin[:]
call[name[self].raise_for_error, parameter[]]
|
keyword[def] identifier[iter_lines] ( identifier[self] , identifier[warn_only] = keyword[False] ):
literal[string]
identifier[remain] = literal[string]
keyword[for] identifier[data] keyword[in] identifier[self] . identifier[iter_content] ( identifier[LINE_CHUNK_SIZE] , identifier[warn_only] = keyword[True] ):
identifier[line_break_found] = identifier[data] [- literal[int] ] keyword[in] ( literal[string] , literal[string] )
identifier[lines] = identifier[data] . identifier[decode] ( identifier[self] . identifier[codec] ). identifier[splitlines] ()
identifier[lines] [ literal[int] ]= identifier[remain] + identifier[lines] [ literal[int] ]
keyword[if] keyword[not] identifier[line_break_found] :
identifier[remain] = identifier[lines] . identifier[pop] ()
keyword[for] identifier[line] keyword[in] identifier[lines] :
keyword[yield] identifier[line]
keyword[if] identifier[remain] :
keyword[yield] identifier[remain]
identifier[self] . identifier[_state] = identifier[FINISHED]
keyword[if] keyword[not] identifier[warn_only] :
identifier[self] . identifier[raise_for_error] ()
|
def iter_lines(self, warn_only=False):
"""yields stdout text, line by line."""
remain = ''
for data in self.iter_content(LINE_CHUNK_SIZE, warn_only=True):
line_break_found = data[-1] in (b'\n', b'\r')
lines = data.decode(self.codec).splitlines()
lines[0] = remain + lines[0]
if not line_break_found:
remain = lines.pop() # depends on [control=['if'], data=[]]
for line in lines:
yield line # depends on [control=['for'], data=['line']] # depends on [control=['for'], data=['data']]
if remain:
yield remain # depends on [control=['if'], data=[]]
self._state = FINISHED
if not warn_only:
self.raise_for_error() # depends on [control=['if'], data=[]]
|
def closest_point_naive(mesh, points):
    """
    Brute-force closest-point query: compare every query point
    against every triangle of the mesh.

    Builds a large intermediate array (each point tiled once per
    triangle), so this is a reference implementation with the same
    interface as `closest_point`.

    Parameters
    ----------
    mesh : Trimesh
      Takes mesh to have same interfaces as `closest_point`
    points : (m, 3) float
      Points in space

    Returns
    ----------
    closest : (m, 3) float
      Closest point on triangles for each point
    distance : (m,) float
      Distances between point and triangle
    triangle_id : (m,) int
      Index of triangle containing closest point
    """
    triangles = mesh.triangles.view(np.ndarray)
    points = np.asanyarray(points, dtype=np.float64)

    # sanity-check the shapes before doing any heavy work
    if not util.is_shape(triangles, (-1, 3, 3)):
        raise ValueError('triangles shape incorrect')
    if not util.is_shape(points, (-1, 3)):
        raise ValueError('points must be (n,3)')

    # one row per query point, the point repeated len(triangles) times
    tiled = np.tile(points, (1, len(triangles)))

    # candidate closest point on every triangle for every query point
    candidates = np.array([
        closest_point_corresponding(triangles, row.reshape((-1, 3)))
        for row in tiled])

    # squared distance from each query point to each of its candidates
    dist_sq = [((cand - pt) ** 2).sum(axis=1)
               for cand, pt in zip(candidates, points)]

    # winning triangle index per query point
    triangle_id = np.array([d.argmin() for d in dist_sq])

    # gather the winning cartesian points and take the square root
    # of the winning squared distances
    closest = np.array([cand[idx]
                        for idx, cand in zip(triangle_id, candidates)])
    distance = np.array([d[idx]
                         for idx, d in zip(triangle_id, dist_sq)]) ** .5

    return closest, distance, triangle_id
|
def function[closest_point_naive, parameter[mesh, points]]:
constant[
Given a mesh and a list of points find the closest point
on any triangle.
Does this by constructing a very large intermediate array and
comparing every point to every triangle.
Parameters
----------
mesh : Trimesh
Takes mesh to have same interfaces as `closest_point`
points : (m, 3) float
Points in space
Returns
----------
closest : (m, 3) float
Closest point on triangles for each point
distance : (m,) float
Distances between point and triangle
triangle_id : (m,) int
Index of triangle containing closest point
]
variable[triangles] assign[=] call[name[mesh].triangles.view, parameter[name[np].ndarray]]
variable[points] assign[=] call[name[np].asanyarray, parameter[name[points]]]
if <ast.UnaryOp object at 0x7da20c7c9de0> begin[:]
<ast.Raise object at 0x7da20c7cac20>
if <ast.UnaryOp object at 0x7da20c7c9a80> begin[:]
<ast.Raise object at 0x7da20c7cb880>
variable[points_tiled] assign[=] call[name[np].tile, parameter[name[points], tuple[[<ast.Constant object at 0x7da20c992290>, <ast.Call object at 0x7da20c9906a0>]]]]
variable[on_triangle] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da20c992ef0>]]
variable[distance_2] assign[=] <ast.ListComp object at 0x7da20c9922c0>
variable[triangle_id] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da20c993100>]]
variable[closest] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da20c991240>]]
variable[distance] assign[=] binary_operation[call[name[np].array, parameter[<ast.ListComp object at 0x7da20c991f00>]] ** constant[0.5]]
return[tuple[[<ast.Name object at 0x7da20c993be0>, <ast.Name object at 0x7da20c76f8b0>, <ast.Name object at 0x7da20c76e4a0>]]]
|
keyword[def] identifier[closest_point_naive] ( identifier[mesh] , identifier[points] ):
literal[string]
identifier[triangles] = identifier[mesh] . identifier[triangles] . identifier[view] ( identifier[np] . identifier[ndarray] )
identifier[points] = identifier[np] . identifier[asanyarray] ( identifier[points] , identifier[dtype] = identifier[np] . identifier[float64] )
keyword[if] keyword[not] identifier[util] . identifier[is_shape] ( identifier[triangles] ,(- literal[int] , literal[int] , literal[int] )):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[util] . identifier[is_shape] ( identifier[points] ,(- literal[int] , literal[int] )):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[points_tiled] = identifier[np] . identifier[tile] ( identifier[points] ,( literal[int] , identifier[len] ( identifier[triangles] )))
identifier[on_triangle] = identifier[np] . identifier[array] ([ identifier[closest_point_corresponding] (
identifier[triangles] , identifier[i] . identifier[reshape] ((- literal[int] , literal[int] ))) keyword[for] identifier[i] keyword[in] identifier[points_tiled] ])
identifier[distance_2] =[(( identifier[i] - identifier[q] )** literal[int] ). identifier[sum] ( identifier[axis] = literal[int] )
keyword[for] identifier[i] , identifier[q] keyword[in] identifier[zip] ( identifier[on_triangle] , identifier[points] )]
identifier[triangle_id] = identifier[np] . identifier[array] ([ identifier[i] . identifier[argmin] () keyword[for] identifier[i] keyword[in] identifier[distance_2] ])
identifier[closest] = identifier[np] . identifier[array] ([ identifier[g] [ identifier[i] ] keyword[for] identifier[i] , identifier[g] keyword[in] identifier[zip] ( identifier[triangle_id] , identifier[on_triangle] )])
identifier[distance] = identifier[np] . identifier[array] ([ identifier[g] [ identifier[i] ] keyword[for] identifier[i] , identifier[g] keyword[in] identifier[zip] ( identifier[triangle_id] , identifier[distance_2] )])** literal[int]
keyword[return] identifier[closest] , identifier[distance] , identifier[triangle_id]
|
def closest_point_naive(mesh, points):
"""
Given a mesh and a list of points find the closest point
on any triangle.
Does this by constructing a very large intermediate array and
comparing every point to every triangle.
Parameters
----------
mesh : Trimesh
Takes mesh to have same interfaces as `closest_point`
points : (m, 3) float
Points in space
Returns
----------
closest : (m, 3) float
Closest point on triangles for each point
distance : (m,) float
Distances between point and triangle
triangle_id : (m,) int
Index of triangle containing closest point
"""
# get triangles from mesh
triangles = mesh.triangles.view(np.ndarray)
# establish that input points are sane
points = np.asanyarray(points, dtype=np.float64)
if not util.is_shape(triangles, (-1, 3, 3)):
raise ValueError('triangles shape incorrect') # depends on [control=['if'], data=[]]
if not util.is_shape(points, (-1, 3)):
raise ValueError('points must be (n,3)') # depends on [control=['if'], data=[]]
# create a giant tiled array of each point tiled len(triangles) times
points_tiled = np.tile(points, (1, len(triangles)))
on_triangle = np.array([closest_point_corresponding(triangles, i.reshape((-1, 3))) for i in points_tiled])
# distance squared
distance_2 = [((i - q) ** 2).sum(axis=1) for (i, q) in zip(on_triangle, points)]
triangle_id = np.array([i.argmin() for i in distance_2])
# closest cartesian point
closest = np.array([g[i] for (i, g) in zip(triangle_id, on_triangle)])
distance = np.array([g[i] for (i, g) in zip(triangle_id, distance_2)]) ** 0.5
return (closest, distance, triangle_id)
|
def select_newest_project(dx_project_ids):
    """
    Given a list of DNAnexus project IDs, returns the one that is newest as determined by creation date.

    Args:
        dx_project_ids: `list` of DNAnexus project IDs.

    Returns:
        `str`. The project ID with the most recent creation timestamp.
    """
    if len(dx_project_ids) == 1:
        return dx_project_ids[0]
    projects = [dxpy.DXProject(x) for x in dx_project_ids]
    created_times = [x.describe()["created"] for x in projects]
    # Pair each creation time with its project ID (not the DXProject object)
    # so that sorting never falls back to comparing DXProject instances and
    # the newest entry yields an ID string, matching the docstring and the
    # single-element early return above.  (The previous version called
    # .sort() on a zip object -- which fails on Python 3 -- and returned the
    # timestamp instead of the ID.)
    paired = sorted(zip(created_times, dx_project_ids), reverse=True)
    return paired[0][1]
|
def function[select_newest_project, parameter[dx_project_ids]]:
constant[
Given a list of DNAnexus project IDs, returns the one that is newest as determined by creation date.
Args:
dx_project_ids: `list` of DNAnexus project IDs.
Returns:
`str`.
]
if compare[call[name[len], parameter[name[dx_project_ids]]] equal[==] constant[1]] begin[:]
return[call[name[dx_project_ids]][constant[0]]]
variable[projects] assign[=] <ast.ListComp object at 0x7da1b1402890>
variable[created_times] assign[=] <ast.ListComp object at 0x7da1b1403be0>
variable[paired] assign[=] call[name[zip], parameter[name[created_times], name[projects]]]
call[name[paired].sort, parameter[]]
return[call[call[name[paired]][constant[0]]][constant[0]]]
|
keyword[def] identifier[select_newest_project] ( identifier[dx_project_ids] ):
literal[string]
keyword[if] identifier[len] ( identifier[dx_project_ids] )== literal[int] :
keyword[return] identifier[dx_project_ids] [ literal[int] ]
identifier[projects] =[ identifier[dxpy] . identifier[DXProject] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[dx_project_ids] ]
identifier[created_times] =[ identifier[x] . identifier[describe] ()[ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[projects] ]
identifier[paired] = identifier[zip] ( identifier[created_times] , identifier[projects] )
identifier[paired] . identifier[sort] ( identifier[reverse] = keyword[True] )
keyword[return] identifier[paired] [ literal[int] ][ literal[int] ]
|
def select_newest_project(dx_project_ids):
"""
Given a list of DNAnexus project IDs, returns the one that is newest as determined by creation date.
Args:
dx_project_ids: `list` of DNAnexus project IDs.
Returns:
`str`.
"""
if len(dx_project_ids) == 1:
return dx_project_ids[0] # depends on [control=['if'], data=[]]
projects = [dxpy.DXProject(x) for x in dx_project_ids]
created_times = [x.describe()['created'] for x in projects]
paired = zip(created_times, projects)
paired.sort(reverse=True)
return paired[0][0]
|
def _evaluate3(nodes, x_val, y_val):
"""Helper for :func:`evaluate` when ``nodes`` is degree 3.
Args:
nodes (numpy.ndarray): ``2 x 4`` array of nodes in a curve.
x_val (float): ``x``-coordinate for evaluation.
y_val (float): ``y``-coordinate for evaluation.
Returns:
float: The computed value of :math:`f(x, y)`.
"""
# NOTE: This may be (a) slower and (b) less precise than
# hard-coding the determinant.
sylvester_mat = np.zeros((6, 6), order="F")
delta = nodes - np.asfortranarray([[x_val], [y_val]])
delta[:, 1:3] *= 3.0
# Swap rows/columns so that x-y are right next to each other.
# This will only change the determinant up to a sign.
sylvester_mat[:2, :4] = delta
sylvester_mat[2:4, 1:5] = delta
sylvester_mat[4:, 2:] = delta
return np.linalg.det(sylvester_mat)
|
def function[_evaluate3, parameter[nodes, x_val, y_val]]:
constant[Helper for :func:`evaluate` when ``nodes`` is degree 3.
Args:
nodes (numpy.ndarray): ``2 x 4`` array of nodes in a curve.
x_val (float): ``x``-coordinate for evaluation.
y_val (float): ``y``-coordinate for evaluation.
Returns:
float: The computed value of :math:`f(x, y)`.
]
variable[sylvester_mat] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Constant object at 0x7da20c6c4760>, <ast.Constant object at 0x7da20c6c6bf0>]]]]
variable[delta] assign[=] binary_operation[name[nodes] - call[name[np].asfortranarray, parameter[list[[<ast.List object at 0x7da20c6c6800>, <ast.List object at 0x7da20c6c4b80>]]]]]
<ast.AugAssign object at 0x7da20c6c7a60>
call[name[sylvester_mat]][tuple[[<ast.Slice object at 0x7da20c6c5d50>, <ast.Slice object at 0x7da20c6c7130>]]] assign[=] name[delta]
call[name[sylvester_mat]][tuple[[<ast.Slice object at 0x7da20c6c7850>, <ast.Slice object at 0x7da20c6c7790>]]] assign[=] name[delta]
call[name[sylvester_mat]][tuple[[<ast.Slice object at 0x7da20c9926e0>, <ast.Slice object at 0x7da20c991570>]]] assign[=] name[delta]
return[call[name[np].linalg.det, parameter[name[sylvester_mat]]]]
|
keyword[def] identifier[_evaluate3] ( identifier[nodes] , identifier[x_val] , identifier[y_val] ):
literal[string]
identifier[sylvester_mat] = identifier[np] . identifier[zeros] (( literal[int] , literal[int] ), identifier[order] = literal[string] )
identifier[delta] = identifier[nodes] - identifier[np] . identifier[asfortranarray] ([[ identifier[x_val] ],[ identifier[y_val] ]])
identifier[delta] [:, literal[int] : literal[int] ]*= literal[int]
identifier[sylvester_mat] [: literal[int] ,: literal[int] ]= identifier[delta]
identifier[sylvester_mat] [ literal[int] : literal[int] , literal[int] : literal[int] ]= identifier[delta]
identifier[sylvester_mat] [ literal[int] :, literal[int] :]= identifier[delta]
keyword[return] identifier[np] . identifier[linalg] . identifier[det] ( identifier[sylvester_mat] )
|
def _evaluate3(nodes, x_val, y_val):
"""Helper for :func:`evaluate` when ``nodes`` is degree 3.
Args:
nodes (numpy.ndarray): ``2 x 4`` array of nodes in a curve.
x_val (float): ``x``-coordinate for evaluation.
y_val (float): ``y``-coordinate for evaluation.
Returns:
float: The computed value of :math:`f(x, y)`.
"""
# NOTE: This may be (a) slower and (b) less precise than
# hard-coding the determinant.
sylvester_mat = np.zeros((6, 6), order='F')
delta = nodes - np.asfortranarray([[x_val], [y_val]])
delta[:, 1:3] *= 3.0
# Swap rows/columns so that x-y are right next to each other.
# This will only change the determinant up to a sign.
sylvester_mat[:2, :4] = delta
sylvester_mat[2:4, 1:5] = delta
sylvester_mat[4:, 2:] = delta
return np.linalg.det(sylvester_mat)
|
def get_settings_from_profile(profile, profile_dir=None):
# type: (str, Any)->str
""""Returns the configuration file path for the given profile.
:param profile: Profile name to be used.
:param profile_dir: The directory where the profile configuration file should reside. It
may be also a module, and then the directory of the module is used.
:return: Configuration file path.
"""
if profile_dir is None:
import settings
profile_dir = settings
if hasattr(profile_dir, '__file__'):
profile_dir = os.path.dirname(profile_dir.__file__)
return os.path.join(profile_dir, '{0}.cfg'.format(profile))
|
def function[get_settings_from_profile, parameter[profile, profile_dir]]:
constant["Returns the configuration file path for the given profile.
:param profile: Profile name to be used.
:param profile_dir: The directory where the profile configuration file should reside. It
may be also a module, and then the directory of the module is used.
:return: Configuration file path.
]
if compare[name[profile_dir] is constant[None]] begin[:]
import module[settings]
variable[profile_dir] assign[=] name[settings]
if call[name[hasattr], parameter[name[profile_dir], constant[__file__]]] begin[:]
variable[profile_dir] assign[=] call[name[os].path.dirname, parameter[name[profile_dir].__file__]]
return[call[name[os].path.join, parameter[name[profile_dir], call[constant[{0}.cfg].format, parameter[name[profile]]]]]]
|
keyword[def] identifier[get_settings_from_profile] ( identifier[profile] , identifier[profile_dir] = keyword[None] ):
literal[string]
keyword[if] identifier[profile_dir] keyword[is] keyword[None] :
keyword[import] identifier[settings]
identifier[profile_dir] = identifier[settings]
keyword[if] identifier[hasattr] ( identifier[profile_dir] , literal[string] ):
identifier[profile_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[profile_dir] . identifier[__file__] )
keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[profile_dir] , literal[string] . identifier[format] ( identifier[profile] ))
|
def get_settings_from_profile(profile, profile_dir=None):
    # type: (str, Any)->str
    """Return the configuration file path for the given profile.

    :param profile: Profile name to be used.
    :param profile_dir: Directory where the profile configuration file
        should reside.  May also be a module, in which case the module's
        containing directory is used.  Defaults to the ``settings`` module.
    :return: Configuration file path.
    """
    if profile_dir is None:
        import settings as profile_dir
    if hasattr(profile_dir, "__file__"):
        # A module was supplied -- resolve to its containing directory.
        profile_dir = os.path.dirname(profile_dir.__file__)
    return os.path.join(profile_dir, "{0}.cfg".format(profile))
|
def run(self):
"""Starts the algo
Connects to the Blotter, processes market data and passes
tick data to the ``on_tick`` function and bar data to the
``on_bar`` methods.
"""
history = pd.DataFrame()
# get history from csv dir
if self.backtest and self.backtest_csv:
kind = "TICK" if self.resolution[-1] in ("S", "K", "V") else "BAR"
dfs = []
for symbol in self.symbols:
file = "%s/%s.%s.csv" % (self.backtest_csv, symbol, kind)
if not os.path.exists(file):
self.log_algo.error(
"Can't load data for %s (%s doesn't exist)",
symbol, file)
sys.exit(0)
try:
df = pd.read_csv(file)
if "expiry" not in df.columns:
df.loc[:, "expiry"] = nan
if not validate_csv_columns(df, kind, raise_errors=False):
self.log_algo.error(
"%s isn't a QTPyLib-compatible format", file)
sys.exit(0)
if df['symbol'].values[-1] != symbol:
self.log_algo.error(
"%s Doesn't content data for %s", file, symbol)
sys.exit(0)
dfs.append(df)
except Exception as e:
self.log_algo.error(
"Error reading data for %s (%s)", symbol, file)
sys.exit(0)
history = prepare_history(
data=pd.concat(dfs, sort=True),
resolution=self.resolution,
tz=self.timezone,
continuous=self.continuous
)
history = history[history.index >= self.backtest_start]
elif not self.blotter_args["dbskip"] and (
self.backtest or self.preload):
start = self.backtest_start if self.backtest else tools.backdate(
self.preload)
end = self.backtest_end if self.backtest else None
history = self.blotter.history(
symbols=self.symbols,
start=start,
end=end,
resolution=self.resolution,
tz=self.timezone,
continuous=self.continuous
)
# history needs backfilling?
# self.blotter.backfilled = True
if not self.blotter.backfilled:
# "loan" Blotter our ibConn
self.blotter.ibConn = self.ibConn
# call the back fill
self.blotter.backfill(data=history,
resolution=self.resolution,
start=start, end=end)
# re-get history from db
history = self.blotter.history(
symbols=self.symbols,
start=start,
end=end,
resolution=self.resolution,
tz=self.timezone,
continuous=self.continuous
)
# take our ibConn back :)
self.blotter.ibConn = None
# optimize pandas
if not history.empty:
history['symbol'] = history['symbol'].astype('category')
history['symbol_group'] = history['symbol_group'].astype('category')
history['asset_class'] = history['asset_class'].astype('category')
if self.backtest:
# initiate strategy
self.on_start()
# drip history
drip_handler = self._tick_handler if self.resolution[-1] in (
"S", "K", "V") else self._bar_handler
self.blotter.drip(history, drip_handler)
else:
# place history self.bars
self.bars = history
# add instruments to blotter in case they do not exist
self.blotter.register(self.instruments)
# initiate strategy
self.on_start()
# listen for RT data
self.blotter.stream(
symbols=self.symbols,
tz=self.timezone,
quote_handler=self._quote_handler,
tick_handler=self._tick_handler,
bar_handler=self._bar_handler,
book_handler=self._book_handler
)
|
def function[run, parameter[self]]:
constant[Starts the algo
Connects to the Blotter, processes market data and passes
tick data to the ``on_tick`` function and bar data to the
``on_bar`` methods.
]
variable[history] assign[=] call[name[pd].DataFrame, parameter[]]
if <ast.BoolOp object at 0x7da1b1ce6b30> begin[:]
variable[kind] assign[=] <ast.IfExp object at 0x7da1b1ce4a30>
variable[dfs] assign[=] list[[]]
for taget[name[symbol]] in starred[name[self].symbols] begin[:]
variable[file] assign[=] binary_operation[constant[%s/%s.%s.csv] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b1ce4040>, <ast.Name object at 0x7da1b1ce4a00>, <ast.Name object at 0x7da1b1ce78b0>]]]
if <ast.UnaryOp object at 0x7da1b1ce4640> begin[:]
call[name[self].log_algo.error, parameter[constant[Can't load data for %s (%s doesn't exist)], name[symbol], name[file]]]
call[name[sys].exit, parameter[constant[0]]]
<ast.Try object at 0x7da1b1ce61d0>
variable[history] assign[=] call[name[prepare_history], parameter[]]
variable[history] assign[=] call[name[history]][compare[name[history].index greater_or_equal[>=] name[self].backtest_start]]
if <ast.UnaryOp object at 0x7da1b1b00430> begin[:]
call[name[history]][constant[symbol]] assign[=] call[call[name[history]][constant[symbol]].astype, parameter[constant[category]]]
call[name[history]][constant[symbol_group]] assign[=] call[call[name[history]][constant[symbol_group]].astype, parameter[constant[category]]]
call[name[history]][constant[asset_class]] assign[=] call[call[name[history]][constant[asset_class]].astype, parameter[constant[category]]]
if name[self].backtest begin[:]
call[name[self].on_start, parameter[]]
variable[drip_handler] assign[=] <ast.IfExp object at 0x7da1b1c6eb00>
call[name[self].blotter.drip, parameter[name[history], name[drip_handler]]]
|
keyword[def] identifier[run] ( identifier[self] ):
literal[string]
identifier[history] = identifier[pd] . identifier[DataFrame] ()
keyword[if] identifier[self] . identifier[backtest] keyword[and] identifier[self] . identifier[backtest_csv] :
identifier[kind] = literal[string] keyword[if] identifier[self] . identifier[resolution] [- literal[int] ] keyword[in] ( literal[string] , literal[string] , literal[string] ) keyword[else] literal[string]
identifier[dfs] =[]
keyword[for] identifier[symbol] keyword[in] identifier[self] . identifier[symbols] :
identifier[file] = literal[string] %( identifier[self] . identifier[backtest_csv] , identifier[symbol] , identifier[kind] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[file] ):
identifier[self] . identifier[log_algo] . identifier[error] (
literal[string] ,
identifier[symbol] , identifier[file] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[try] :
identifier[df] = identifier[pd] . identifier[read_csv] ( identifier[file] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[df] . identifier[columns] :
identifier[df] . identifier[loc] [:, literal[string] ]= identifier[nan]
keyword[if] keyword[not] identifier[validate_csv_columns] ( identifier[df] , identifier[kind] , identifier[raise_errors] = keyword[False] ):
identifier[self] . identifier[log_algo] . identifier[error] (
literal[string] , identifier[file] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[if] identifier[df] [ literal[string] ]. identifier[values] [- literal[int] ]!= identifier[symbol] :
identifier[self] . identifier[log_algo] . identifier[error] (
literal[string] , identifier[file] , identifier[symbol] )
identifier[sys] . identifier[exit] ( literal[int] )
identifier[dfs] . identifier[append] ( identifier[df] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[log_algo] . identifier[error] (
literal[string] , identifier[symbol] , identifier[file] )
identifier[sys] . identifier[exit] ( literal[int] )
identifier[history] = identifier[prepare_history] (
identifier[data] = identifier[pd] . identifier[concat] ( identifier[dfs] , identifier[sort] = keyword[True] ),
identifier[resolution] = identifier[self] . identifier[resolution] ,
identifier[tz] = identifier[self] . identifier[timezone] ,
identifier[continuous] = identifier[self] . identifier[continuous]
)
identifier[history] = identifier[history] [ identifier[history] . identifier[index] >= identifier[self] . identifier[backtest_start] ]
keyword[elif] keyword[not] identifier[self] . identifier[blotter_args] [ literal[string] ] keyword[and] (
identifier[self] . identifier[backtest] keyword[or] identifier[self] . identifier[preload] ):
identifier[start] = identifier[self] . identifier[backtest_start] keyword[if] identifier[self] . identifier[backtest] keyword[else] identifier[tools] . identifier[backdate] (
identifier[self] . identifier[preload] )
identifier[end] = identifier[self] . identifier[backtest_end] keyword[if] identifier[self] . identifier[backtest] keyword[else] keyword[None]
identifier[history] = identifier[self] . identifier[blotter] . identifier[history] (
identifier[symbols] = identifier[self] . identifier[symbols] ,
identifier[start] = identifier[start] ,
identifier[end] = identifier[end] ,
identifier[resolution] = identifier[self] . identifier[resolution] ,
identifier[tz] = identifier[self] . identifier[timezone] ,
identifier[continuous] = identifier[self] . identifier[continuous]
)
keyword[if] keyword[not] identifier[self] . identifier[blotter] . identifier[backfilled] :
identifier[self] . identifier[blotter] . identifier[ibConn] = identifier[self] . identifier[ibConn]
identifier[self] . identifier[blotter] . identifier[backfill] ( identifier[data] = identifier[history] ,
identifier[resolution] = identifier[self] . identifier[resolution] ,
identifier[start] = identifier[start] , identifier[end] = identifier[end] )
identifier[history] = identifier[self] . identifier[blotter] . identifier[history] (
identifier[symbols] = identifier[self] . identifier[symbols] ,
identifier[start] = identifier[start] ,
identifier[end] = identifier[end] ,
identifier[resolution] = identifier[self] . identifier[resolution] ,
identifier[tz] = identifier[self] . identifier[timezone] ,
identifier[continuous] = identifier[self] . identifier[continuous]
)
identifier[self] . identifier[blotter] . identifier[ibConn] = keyword[None]
keyword[if] keyword[not] identifier[history] . identifier[empty] :
identifier[history] [ literal[string] ]= identifier[history] [ literal[string] ]. identifier[astype] ( literal[string] )
identifier[history] [ literal[string] ]= identifier[history] [ literal[string] ]. identifier[astype] ( literal[string] )
identifier[history] [ literal[string] ]= identifier[history] [ literal[string] ]. identifier[astype] ( literal[string] )
keyword[if] identifier[self] . identifier[backtest] :
identifier[self] . identifier[on_start] ()
identifier[drip_handler] = identifier[self] . identifier[_tick_handler] keyword[if] identifier[self] . identifier[resolution] [- literal[int] ] keyword[in] (
literal[string] , literal[string] , literal[string] ) keyword[else] identifier[self] . identifier[_bar_handler]
identifier[self] . identifier[blotter] . identifier[drip] ( identifier[history] , identifier[drip_handler] )
keyword[else] :
identifier[self] . identifier[bars] = identifier[history]
identifier[self] . identifier[blotter] . identifier[register] ( identifier[self] . identifier[instruments] )
identifier[self] . identifier[on_start] ()
identifier[self] . identifier[blotter] . identifier[stream] (
identifier[symbols] = identifier[self] . identifier[symbols] ,
identifier[tz] = identifier[self] . identifier[timezone] ,
identifier[quote_handler] = identifier[self] . identifier[_quote_handler] ,
identifier[tick_handler] = identifier[self] . identifier[_tick_handler] ,
identifier[bar_handler] = identifier[self] . identifier[_bar_handler] ,
identifier[book_handler] = identifier[self] . identifier[_book_handler]
)
|
def run(self):
    """Starts the algo.

    Connects to the Blotter, processes market data and passes
    tick data to the ``on_tick`` function and bar data to the
    ``on_bar`` methods.

    In backtest mode the historical data (loaded from CSV files or the
    Blotter's database, backfilling via IB if needed) is "dripped" through
    the strategy's handlers; in live mode the instruments are registered
    and real-time quotes/ticks/bars/book updates are streamed in.
    """
    history = pd.DataFrame()
    # get history from csv dir
    if self.backtest and self.backtest_csv:
        # resolutions ending in S/K/V denote tick data; anything else is bars
        kind = 'TICK' if self.resolution[-1] in ('S', 'K', 'V') else 'BAR'
        dfs = []
        for symbol in self.symbols:
            # expected file layout: <backtest_csv>/<SYMBOL>.<TICK|BAR>.csv
            file = '%s/%s.%s.csv' % (self.backtest_csv, symbol, kind)
            if not os.path.exists(file):
                self.log_algo.error("Can't load data for %s (%s doesn't exist)", symbol, file)
                sys.exit(0) # depends on [control=['if'], data=[]]
            try:
                df = pd.read_csv(file)
                if 'expiry' not in df.columns:
                    df.loc[:, 'expiry'] = nan # depends on [control=['if'], data=[]]
                if not validate_csv_columns(df, kind, raise_errors=False):
                    self.log_algo.error("%s isn't a QTPyLib-compatible format", file)
                    sys.exit(0) # depends on [control=['if'], data=[]]
                if df['symbol'].values[-1] != symbol:
                    self.log_algo.error("%s Doesn't content data for %s", file, symbol)
                    sys.exit(0) # depends on [control=['if'], data=['symbol']]
                dfs.append(df) # depends on [control=['try'], data=[]]
            except Exception as e:
                self.log_algo.error('Error reading data for %s (%s)', symbol, file)
                sys.exit(0) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['symbol']]
        history = prepare_history(data=pd.concat(dfs, sort=True), resolution=self.resolution, tz=self.timezone, continuous=self.continuous)
        history = history[history.index >= self.backtest_start] # depends on [control=['if'], data=[]]
    # otherwise, load (and backfill if needed) history from the blotter's db
    elif not self.blotter_args['dbskip'] and (self.backtest or self.preload):
        start = self.backtest_start if self.backtest else tools.backdate(self.preload)
        end = self.backtest_end if self.backtest else None
        history = self.blotter.history(symbols=self.symbols, start=start, end=end, resolution=self.resolution, tz=self.timezone, continuous=self.continuous)
        # history needs backfilling?
        # self.blotter.backfilled = True
        if not self.blotter.backfilled:
            # "loan" Blotter our ibConn
            self.blotter.ibConn = self.ibConn
            # call the back fill
            self.blotter.backfill(data=history, resolution=self.resolution, start=start, end=end)
            # re-get history from db
            history = self.blotter.history(symbols=self.symbols, start=start, end=end, resolution=self.resolution, tz=self.timezone, continuous=self.continuous)
            # take our ibConn back :)
            self.blotter.ibConn = None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    # optimize pandas: categorical dtype shrinks repeated string columns
    if not history.empty:
        history['symbol'] = history['symbol'].astype('category')
        history['symbol_group'] = history['symbol_group'].astype('category')
        history['asset_class'] = history['asset_class'].astype('category') # depends on [control=['if'], data=[]]
    if self.backtest:
        # initiate strategy
        self.on_start()
        # drip history
        drip_handler = self._tick_handler if self.resolution[-1] in ('S', 'K', 'V') else self._bar_handler
        self.blotter.drip(history, drip_handler) # depends on [control=['if'], data=[]]
    else:
        # place history self.bars
        self.bars = history
        # add instruments to blotter in case they do not exist
        self.blotter.register(self.instruments)
        # initiate strategy
        self.on_start()
        # listen for RT data
        self.blotter.stream(symbols=self.symbols, tz=self.timezone, quote_handler=self._quote_handler, tick_handler=self._tick_handler, bar_handler=self._bar_handler, book_handler=self._book_handler)
|
def generate_nonce():
""" Generate nonce number """
nonce = ''.join([str(randint(0, 9)) for i in range(8)])
return HMAC(
nonce.encode(),
"secret".encode(),
sha1
).hexdigest()
|
def function[generate_nonce, parameter[]]:
constant[ Generate nonce number ]
variable[nonce] assign[=] call[constant[].join, parameter[<ast.ListComp object at 0x7da20c6abd00>]]
return[call[call[name[HMAC], parameter[call[name[nonce].encode, parameter[]], call[constant[secret].encode, parameter[]], name[sha1]]].hexdigest, parameter[]]]
|
keyword[def] identifier[generate_nonce] ():
literal[string]
identifier[nonce] = literal[string] . identifier[join] ([ identifier[str] ( identifier[randint] ( literal[int] , literal[int] )) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] )])
keyword[return] identifier[HMAC] (
identifier[nonce] . identifier[encode] (),
literal[string] . identifier[encode] (),
identifier[sha1]
). identifier[hexdigest] ()
|
def generate_nonce():
    """Generate a nonce: the hex-encoded HMAC-SHA1 digest of eight random
    decimal digits keyed... (key and message are swapped by design here --
    the random digits act as the HMAC key, "secret" as the message).

    Returns:
        str: 40-character hexadecimal digest.
    """
    # Use the ``secrets`` CSPRNG instead of ``random.randint``: nonces are
    # security-sensitive and must not be predictable.
    import secrets
    nonce = ''.join(str(secrets.randbelow(10)) for _ in range(8))
    return HMAC(
        nonce.encode(),
        "secret".encode(),
        sha1
    ).hexdigest()
|
def _parse_get_snapshot_schedule(cls, args):
"""
Parse command line arguments for updating hbase snapshot schedule or to get details.
"""
argparser = ArgumentParser(prog="cluster snapshot_schedule")
group = argparser.add_mutually_exclusive_group(required=True)
group.add_argument("--id", dest="cluster_id",
help="execute on cluster with this id")
group.add_argument("--label", dest="label",
help="execute on cluster with this label")
arguments = argparser.parse_args(args)
return arguments
|
def function[_parse_get_snapshot_schedule, parameter[cls, args]]:
constant[
Parse command line arguments for updating hbase snapshot schedule or to get details.
]
variable[argparser] assign[=] call[name[ArgumentParser], parameter[]]
variable[group] assign[=] call[name[argparser].add_mutually_exclusive_group, parameter[]]
call[name[group].add_argument, parameter[constant[--id]]]
call[name[group].add_argument, parameter[constant[--label]]]
variable[arguments] assign[=] call[name[argparser].parse_args, parameter[name[args]]]
return[name[arguments]]
|
keyword[def] identifier[_parse_get_snapshot_schedule] ( identifier[cls] , identifier[args] ):
literal[string]
identifier[argparser] = identifier[ArgumentParser] ( identifier[prog] = literal[string] )
identifier[group] = identifier[argparser] . identifier[add_mutually_exclusive_group] ( identifier[required] = keyword[True] )
identifier[group] . identifier[add_argument] ( literal[string] , identifier[dest] = literal[string] ,
identifier[help] = literal[string] )
identifier[group] . identifier[add_argument] ( literal[string] , identifier[dest] = literal[string] ,
identifier[help] = literal[string] )
identifier[arguments] = identifier[argparser] . identifier[parse_args] ( identifier[args] )
keyword[return] identifier[arguments]
|
def _parse_get_snapshot_schedule(cls, args):
"""
Parse command line arguments for updating hbase snapshot schedule or to get details.
"""
argparser = ArgumentParser(prog='cluster snapshot_schedule')
group = argparser.add_mutually_exclusive_group(required=True)
group.add_argument('--id', dest='cluster_id', help='execute on cluster with this id')
group.add_argument('--label', dest='label', help='execute on cluster with this label')
arguments = argparser.parse_args(args)
return arguments
|
def get_for_org(self, org_id, ids, skip_cache=False):
'''
Returns objects for the given identifiers
If called with a list returns a list, else returns a single entity
'''
t = type(ids)
if t != list:
ids = [ids]
objs = []
for id in ids:
if id == '' or type(id) is not str: # Empty string
objs.append(None)
continue
if skip_cache ==False:
obj = self.get_from_cache(org_id, id)
else:
obj = None
if obj is None:
xml = self.request_obj(org_id, id)
obj = self.create_obj_from_xml(id, xml)
self.cache(obj) # Will cache either a real object, or a BioCycEntityNotFound
if obj: # Found
objs.append(obj)
else: # Not found (BioCycEntityNotFound)
objs.append(None)
if t != list:
return objs[0]
else:
return objs
|
def function[get_for_org, parameter[self, org_id, ids, skip_cache]]:
constant[
Returns objects for the given identifiers
If called with a list returns a list, else returns a single entity
]
variable[t] assign[=] call[name[type], parameter[name[ids]]]
if compare[name[t] not_equal[!=] name[list]] begin[:]
variable[ids] assign[=] list[[<ast.Name object at 0x7da207f01570>]]
variable[objs] assign[=] list[[]]
for taget[name[id]] in starred[name[ids]] begin[:]
if <ast.BoolOp object at 0x7da20c7953c0> begin[:]
call[name[objs].append, parameter[constant[None]]]
continue
if compare[name[skip_cache] equal[==] constant[False]] begin[:]
variable[obj] assign[=] call[name[self].get_from_cache, parameter[name[org_id], name[id]]]
if compare[name[obj] is constant[None]] begin[:]
variable[xml] assign[=] call[name[self].request_obj, parameter[name[org_id], name[id]]]
variable[obj] assign[=] call[name[self].create_obj_from_xml, parameter[name[id], name[xml]]]
call[name[self].cache, parameter[name[obj]]]
if name[obj] begin[:]
call[name[objs].append, parameter[name[obj]]]
if compare[name[t] not_equal[!=] name[list]] begin[:]
return[call[name[objs]][constant[0]]]
|
keyword[def] identifier[get_for_org] ( identifier[self] , identifier[org_id] , identifier[ids] , identifier[skip_cache] = keyword[False] ):
literal[string]
identifier[t] = identifier[type] ( identifier[ids] )
keyword[if] identifier[t] != identifier[list] :
identifier[ids] =[ identifier[ids] ]
identifier[objs] =[]
keyword[for] identifier[id] keyword[in] identifier[ids] :
keyword[if] identifier[id] == literal[string] keyword[or] identifier[type] ( identifier[id] ) keyword[is] keyword[not] identifier[str] :
identifier[objs] . identifier[append] ( keyword[None] )
keyword[continue]
keyword[if] identifier[skip_cache] == keyword[False] :
identifier[obj] = identifier[self] . identifier[get_from_cache] ( identifier[org_id] , identifier[id] )
keyword[else] :
identifier[obj] = keyword[None]
keyword[if] identifier[obj] keyword[is] keyword[None] :
identifier[xml] = identifier[self] . identifier[request_obj] ( identifier[org_id] , identifier[id] )
identifier[obj] = identifier[self] . identifier[create_obj_from_xml] ( identifier[id] , identifier[xml] )
identifier[self] . identifier[cache] ( identifier[obj] )
keyword[if] identifier[obj] :
identifier[objs] . identifier[append] ( identifier[obj] )
keyword[else] :
identifier[objs] . identifier[append] ( keyword[None] )
keyword[if] identifier[t] != identifier[list] :
keyword[return] identifier[objs] [ literal[int] ]
keyword[else] :
keyword[return] identifier[objs]
|
def get_for_org(self, org_id, ids, skip_cache=False):
    """
    Returns objects for the given identifiers.

    If called with a list returns a list (one entry per identifier, with
    ``None`` for identifiers that are empty, non-string, or not found);
    otherwise returns a single entity or ``None``.

    :param org_id: organism/database identifier passed through to lookups.
    :param ids: a single identifier or a list of identifiers.
    :param skip_cache: when True, bypass the local cache and always fetch.
    """
    single = not isinstance(ids, list)
    if single:
        ids = [ids]
    objs = []
    for obj_id in ids:
        # Reject empty or non-string identifiers outright.
        if obj_id == '' or not isinstance(obj_id, str):
            objs.append(None)
            continue
        obj = None if skip_cache else self.get_from_cache(org_id, obj_id)
        if obj is None:
            xml = self.request_obj(org_id, obj_id)
            obj = self.create_obj_from_xml(obj_id, xml)
            # Will cache either a real object, or a BioCycEntityNotFound
            self.cache(obj)
        # A falsy cached value is the "not found" marker (BioCycEntityNotFound).
        objs.append(obj if obj else None)
    return objs[0] if single else objs
|
def _uninstall(action='remove', name=None, pkgs=None, **kwargs):
'''
remove and purge do identical things but with different pacman commands,
this function performs the common logic.
'''
try:
pkg_params = __salt__['pkg_resource.parse_targets'](name, pkgs)[0]
except MinionError as exc:
raise CommandExecutionError(exc)
old = list_pkgs()
targets = [x for x in pkg_params if x in old]
if not targets:
return {}
remove_arg = '-Rsc' if action == 'purge' else '-R'
cmd = []
if salt.utils.systemd.has_scope(__context__) \
and __salt__['config.get']('systemd.scope', True):
cmd.extend(['systemd-run', '--scope'])
cmd.extend(['pacman', remove_arg, '--noprogressbar', '--noconfirm'])
cmd.extend(targets)
if 'root' in kwargs:
cmd.extend(('-r', kwargs['root']))
out = __salt__['cmd.run_all'](
cmd,
output_loglevel='trace',
python_shell=False
)
if out['retcode'] != 0 and out['stderr']:
errors = [out['stderr']]
else:
errors = []
__context__.pop('pkg.list_pkgs', None)
new = list_pkgs()
ret = salt.utils.data.compare_dicts(old, new)
if errors:
raise CommandExecutionError(
'Problem encountered removing package(s)',
info={'errors': errors, 'changes': ret}
)
return ret
|
def function[_uninstall, parameter[action, name, pkgs]]:
constant[
remove and purge do identical things but with different pacman commands,
this function performs the common logic.
]
<ast.Try object at 0x7da1b2135330>
variable[old] assign[=] call[name[list_pkgs], parameter[]]
variable[targets] assign[=] <ast.ListComp object at 0x7da1b2135480>
if <ast.UnaryOp object at 0x7da1b1f34130> begin[:]
return[dictionary[[], []]]
variable[remove_arg] assign[=] <ast.IfExp object at 0x7da1b1f35090>
variable[cmd] assign[=] list[[]]
if <ast.BoolOp object at 0x7da1b1f35d50> begin[:]
call[name[cmd].extend, parameter[list[[<ast.Constant object at 0x7da1b1c18670>, <ast.Constant object at 0x7da1b1c19720>]]]]
call[name[cmd].extend, parameter[list[[<ast.Constant object at 0x7da1b1c19660>, <ast.Name object at 0x7da1b1c194e0>, <ast.Constant object at 0x7da1b1c195a0>, <ast.Constant object at 0x7da1b1c19570>]]]]
call[name[cmd].extend, parameter[name[targets]]]
if compare[constant[root] in name[kwargs]] begin[:]
call[name[cmd].extend, parameter[tuple[[<ast.Constant object at 0x7da1b1c197e0>, <ast.Subscript object at 0x7da1b1c1b5b0>]]]]
variable[out] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[name[cmd]]]
if <ast.BoolOp object at 0x7da1b1c18eb0> begin[:]
variable[errors] assign[=] list[[<ast.Subscript object at 0x7da1b1c1a110>]]
call[name[__context__].pop, parameter[constant[pkg.list_pkgs], constant[None]]]
variable[new] assign[=] call[name[list_pkgs], parameter[]]
variable[ret] assign[=] call[name[salt].utils.data.compare_dicts, parameter[name[old], name[new]]]
if name[errors] begin[:]
<ast.Raise object at 0x7da1b1c1bc70>
return[name[ret]]
|
keyword[def] identifier[_uninstall] ( identifier[action] = literal[string] , identifier[name] = keyword[None] , identifier[pkgs] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
identifier[pkg_params] = identifier[__salt__] [ literal[string] ]( identifier[name] , identifier[pkgs] )[ literal[int] ]
keyword[except] identifier[MinionError] keyword[as] identifier[exc] :
keyword[raise] identifier[CommandExecutionError] ( identifier[exc] )
identifier[old] = identifier[list_pkgs] ()
identifier[targets] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[pkg_params] keyword[if] identifier[x] keyword[in] identifier[old] ]
keyword[if] keyword[not] identifier[targets] :
keyword[return] {}
identifier[remove_arg] = literal[string] keyword[if] identifier[action] == literal[string] keyword[else] literal[string]
identifier[cmd] =[]
keyword[if] identifier[salt] . identifier[utils] . identifier[systemd] . identifier[has_scope] ( identifier[__context__] ) keyword[and] identifier[__salt__] [ literal[string] ]( literal[string] , keyword[True] ):
identifier[cmd] . identifier[extend] ([ literal[string] , literal[string] ])
identifier[cmd] . identifier[extend] ([ literal[string] , identifier[remove_arg] , literal[string] , literal[string] ])
identifier[cmd] . identifier[extend] ( identifier[targets] )
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[cmd] . identifier[extend] (( literal[string] , identifier[kwargs] [ literal[string] ]))
identifier[out] = identifier[__salt__] [ literal[string] ](
identifier[cmd] ,
identifier[output_loglevel] = literal[string] ,
identifier[python_shell] = keyword[False]
)
keyword[if] identifier[out] [ literal[string] ]!= literal[int] keyword[and] identifier[out] [ literal[string] ]:
identifier[errors] =[ identifier[out] [ literal[string] ]]
keyword[else] :
identifier[errors] =[]
identifier[__context__] . identifier[pop] ( literal[string] , keyword[None] )
identifier[new] = identifier[list_pkgs] ()
identifier[ret] = identifier[salt] . identifier[utils] . identifier[data] . identifier[compare_dicts] ( identifier[old] , identifier[new] )
keyword[if] identifier[errors] :
keyword[raise] identifier[CommandExecutionError] (
literal[string] ,
identifier[info] ={ literal[string] : identifier[errors] , literal[string] : identifier[ret] }
)
keyword[return] identifier[ret]
|
def _uninstall(action='remove', name=None, pkgs=None, **kwargs):
    """
    remove and purge do identical things but with different pacman commands,
    this function performs the common logic.

    :param action: either ``'remove'`` (pacman ``-R``) or ``'purge'``
        (pacman ``-Rsc``, which cascades to dependencies as well).
    :param name: a single package name to remove.
    :param pkgs: a list of package names to remove (takes precedence
        over ``name`` in ``pkg_resource.parse_targets``).
    :param kwargs: may contain ``root`` which is forwarded to pacman as
        ``-r <root>``.
    :return: dict of package changes, as built by
        ``salt.utils.data.compare_dicts(old, new)``.
    :raises CommandExecutionError: when target parsing fails, or when
        pacman exits non-zero with output on stderr.
    """
    try:
        pkg_params = __salt__['pkg_resource.parse_targets'](name, pkgs)[0]
    except MinionError as exc:
        raise CommandExecutionError(exc)
    # Snapshot the installed packages so the removal can be diffed later.
    old = list_pkgs()
    # Only act on packages that are actually installed.
    targets = [x for x in pkg_params if x in old]
    if not targets:
        return {}
    # 'purge' cascades to dependencies and their configs; 'remove' does not.
    remove_arg = '-Rsc' if action == 'purge' else '-R'
    cmd = []
    # When systemd scopes are available and enabled, wrap the command in
    # `systemd-run --scope` (presumably to detach pacman from the minion's
    # own service unit — standard salt pattern; confirm against salt docs).
    if salt.utils.systemd.has_scope(__context__) and __salt__['config.get']('systemd.scope', True):
        cmd.extend(['systemd-run', '--scope'])
    cmd.extend(['pacman', remove_arg, '--noprogressbar', '--noconfirm'])
    cmd.extend(targets)
    if 'root' in kwargs:
        cmd.extend(('-r', kwargs['root']))
    out = __salt__['cmd.run_all'](
        cmd,
        output_loglevel='trace',
        python_shell=False
    )
    # Only treat a non-zero exit as an error when stderr has content.
    if out['retcode'] != 0 and out['stderr']:
        errors = [out['stderr']]
    else:
        errors = []
    # Invalidate the cached package list before re-querying it.
    __context__.pop('pkg.list_pkgs', None)
    new = list_pkgs()
    ret = salt.utils.data.compare_dicts(old, new)
    if errors:
        raise CommandExecutionError(
            'Problem encountered removing package(s)',
            info={'errors': errors, 'changes': ret}
        )
    return ret
|
def jsonify(py_data, default=None, indent=4, sort_keys=True):
    """Convert the given Python data to a JSON string.

    :param py_data: <variant> — the data to serialize.
    :param default: callable invoked for objects ``json`` cannot serialize
        natively; falls back to :func:`py2json` when not supplied.
        (Previously this argument was accepted but silently ignored.)
    :param indent: indentation width passed through to :func:`json.dumps`.
    :param sort_keys: whether dictionary keys are sorted in the output.
    :return: the JSON-encoded string.
    """
    if default is None:
        default = py2json
    return json.dumps(py_data, default=default, indent=indent, sort_keys=sort_keys)
|
def function[jsonify, parameter[py_data, default, indent, sort_keys]]:
constant[
Converts the inputted Python data to JSON format.
:param py_data | <variant>
]
return[call[name[json].dumps, parameter[name[py_data]]]]
|
keyword[def] identifier[jsonify] ( identifier[py_data] , identifier[default] = keyword[None] , identifier[indent] = literal[int] , identifier[sort_keys] = keyword[True] ):
literal[string]
keyword[return] identifier[json] . identifier[dumps] ( identifier[py_data] , identifier[default] = identifier[py2json] , identifier[indent] = identifier[indent] , identifier[sort_keys] = identifier[sort_keys] )
|
def jsonify(py_data, default=None, indent=4, sort_keys=True):
"""
Converts the inputted Python data to JSON format.
:param py_data | <variant>
"""
return json.dumps(py_data, default=py2json, indent=indent, sort_keys=sort_keys)
|
def set_application_language(params):
    """Set the locale and translation for a pywws program.

    Reads the ``language`` entry from the ``config`` section of *params*
    and, when one is configured, applies it via :func:`set_locale` and
    :func:`set_translation`.

    :param params: a :class:`pywws.storage.params` object.
    :type params: object
    """
    lang = params.get('config', 'language', None)
    if not lang:
        return
    set_locale(lang)
    set_translation(lang)
|
def function[set_application_language, parameter[params]]:
constant[Set the locale and translation for a pywws program.
This function reads the language from the configuration file, then
calls :func:`set_locale` and :func:`set_translation`.
:param params: a :class:`pywws.storage.params` object.
:type params: object
]
variable[lang] assign[=] call[name[params].get, parameter[constant[config], constant[language], constant[None]]]
if name[lang] begin[:]
call[name[set_locale], parameter[name[lang]]]
call[name[set_translation], parameter[name[lang]]]
|
keyword[def] identifier[set_application_language] ( identifier[params] ):
literal[string]
identifier[lang] = identifier[params] . identifier[get] ( literal[string] , literal[string] , keyword[None] )
keyword[if] identifier[lang] :
identifier[set_locale] ( identifier[lang] )
identifier[set_translation] ( identifier[lang] )
|
def set_application_language(params):
"""Set the locale and translation for a pywws program.
This function reads the language from the configuration file, then
calls :func:`set_locale` and :func:`set_translation`.
:param params: a :class:`pywws.storage.params` object.
:type params: object
"""
lang = params.get('config', 'language', None)
if lang:
set_locale(lang)
set_translation(lang) # depends on [control=['if'], data=[]]
|
def update_default_rules(self):
    """Merge application security-group ingress rules into the defaults.

    Combines this application's ingress rules with the global
    ``DEFAULT_SECURITYGROUP_RULES`` (application entries win conflicts per
    the conservative merger), resolves any self-references in the merged
    result, and returns the resolved rule set.
    """
    app_rules = self.properties['security_group']['ingress']
    merged = conservative_merger.merge(DEFAULT_SECURITYGROUP_RULES, app_rules)
    resolved = self.resolve_self_references(merged)
    self.log.info('Updated default rules:\n%s', merged)
    return resolved
|
def function[update_default_rules, parameter[self]]:
constant[Concatinate application and global security group rules.]
variable[app_ingress] assign[=] call[call[name[self].properties][constant[security_group]]][constant[ingress]]
variable[ingress] assign[=] call[name[conservative_merger].merge, parameter[name[DEFAULT_SECURITYGROUP_RULES], name[app_ingress]]]
variable[resolved_ingress] assign[=] call[name[self].resolve_self_references, parameter[name[ingress]]]
call[name[self].log.info, parameter[constant[Updated default rules:
%s], name[ingress]]]
return[name[resolved_ingress]]
|
keyword[def] identifier[update_default_rules] ( identifier[self] ):
literal[string]
identifier[app_ingress] = identifier[self] . identifier[properties] [ literal[string] ][ literal[string] ]
identifier[ingress] = identifier[conservative_merger] . identifier[merge] ( identifier[DEFAULT_SECURITYGROUP_RULES] , identifier[app_ingress] )
identifier[resolved_ingress] = identifier[self] . identifier[resolve_self_references] ( identifier[ingress] )
identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[ingress] )
keyword[return] identifier[resolved_ingress]
|
def update_default_rules(self):
"""Concatinate application and global security group rules."""
app_ingress = self.properties['security_group']['ingress']
ingress = conservative_merger.merge(DEFAULT_SECURITYGROUP_RULES, app_ingress)
resolved_ingress = self.resolve_self_references(ingress)
self.log.info('Updated default rules:\n%s', ingress)
return resolved_ingress
|
def generate_sample_cfn_module(env_root, module_dir=None):
    """Generate a skeleton CloudFormation sample module.

    Creates the module skeleton at *module_dir* (defaulting to
    ``<env_root>/sampleapp.cfn``), copies the template stack/env files into
    it, and renders the tfstate stacker blueprint into its ``templates``
    subdirectory as YAML.
    """
    if module_dir is None:
        module_dir = os.path.join(env_root, 'sampleapp.cfn')
    generate_sample_module(module_dir)

    template_root = os.path.join(ROOT, 'templates')
    for fname in ('stacks.yaml', 'dev-us-east-1.env'):
        shutil.copyfile(os.path.join(template_root, 'cfn', fname),
                        os.path.join(module_dir, fname))

    templates_dir = os.path.join(module_dir, 'templates')
    os.mkdir(templates_dir)
    blueprint = os.path.join(template_root, 'stacker',
                             'tfstate_blueprints', 'tf_state.py')
    with open(os.path.join(templates_dir, 'tf_state.yml'), 'w') as stream:
        # Execute the blueprint and convert its JSON output to YAML.
        stream.write(cfn_flip.flip(check_output([sys.executable, blueprint])))
    LOGGER.info("Sample CloudFormation module created at %s", module_dir)
|
def function[generate_sample_cfn_module, parameter[env_root, module_dir]]:
constant[Generate skeleton CloudFormation sample module.]
if compare[name[module_dir] is constant[None]] begin[:]
variable[module_dir] assign[=] call[name[os].path.join, parameter[name[env_root], constant[sampleapp.cfn]]]
call[name[generate_sample_module], parameter[name[module_dir]]]
for taget[name[i]] in starred[list[[<ast.Constant object at 0x7da1b0781a80>, <ast.Constant object at 0x7da1b0781510>]]] begin[:]
call[name[shutil].copyfile, parameter[call[name[os].path.join, parameter[name[ROOT], constant[templates], constant[cfn], name[i]]], call[name[os].path.join, parameter[name[module_dir], name[i]]]]]
call[name[os].mkdir, parameter[call[name[os].path.join, parameter[name[module_dir], constant[templates]]]]]
with call[name[open], parameter[call[name[os].path.join, parameter[name[module_dir], constant[templates], constant[tf_state.yml]]], constant[w]]] begin[:]
call[name[stream].write, parameter[call[name[cfn_flip].flip, parameter[call[name[check_output], parameter[list[[<ast.Attribute object at 0x7da1b0715b70>, <ast.Call object at 0x7da1b0714580>]]]]]]]]
call[name[LOGGER].info, parameter[constant[Sample CloudFormation module created at %s], name[module_dir]]]
|
keyword[def] identifier[generate_sample_cfn_module] ( identifier[env_root] , identifier[module_dir] = keyword[None] ):
literal[string]
keyword[if] identifier[module_dir] keyword[is] keyword[None] :
identifier[module_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[env_root] , literal[string] )
identifier[generate_sample_module] ( identifier[module_dir] )
keyword[for] identifier[i] keyword[in] [ literal[string] , literal[string] ]:
identifier[shutil] . identifier[copyfile] (
identifier[os] . identifier[path] . identifier[join] ( identifier[ROOT] ,
literal[string] ,
literal[string] ,
identifier[i] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[module_dir] , identifier[i] )
)
identifier[os] . identifier[mkdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[module_dir] , literal[string] ))
keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[module_dir] ,
literal[string] ,
literal[string] ), literal[string] ) keyword[as] identifier[stream] :
identifier[stream] . identifier[write] (
identifier[cfn_flip] . identifier[flip] (
identifier[check_output] (
[ identifier[sys] . identifier[executable] ,
identifier[os] . identifier[path] . identifier[join] ( identifier[ROOT] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] )]
)
)
)
identifier[LOGGER] . identifier[info] ( literal[string] ,
identifier[module_dir] )
|
def generate_sample_cfn_module(env_root, module_dir=None):
"""Generate skeleton CloudFormation sample module."""
if module_dir is None:
module_dir = os.path.join(env_root, 'sampleapp.cfn') # depends on [control=['if'], data=['module_dir']]
generate_sample_module(module_dir)
for i in ['stacks.yaml', 'dev-us-east-1.env']:
shutil.copyfile(os.path.join(ROOT, 'templates', 'cfn', i), os.path.join(module_dir, i)) # depends on [control=['for'], data=['i']]
os.mkdir(os.path.join(module_dir, 'templates'))
with open(os.path.join(module_dir, 'templates', 'tf_state.yml'), 'w') as stream:
stream.write(cfn_flip.flip(check_output([sys.executable, os.path.join(ROOT, 'templates', 'stacker', 'tfstate_blueprints', 'tf_state.py')]))) # depends on [control=['with'], data=['stream']]
LOGGER.info('Sample CloudFormation module created at %s', module_dir)
|
def set_bounds(self, lb, ub):
    """Update the variable's lower and upper bounds.

    Raises ``ValueError`` when both bounds are given and ``lb`` exceeds
    ``ub``.  When the variable is attached to a problem, the change is
    queued on the problem's pending-modification lists.
    """
    both_given = lb is not None and ub is not None
    if both_given and lb > ub:
        raise ValueError(
            "The provided lower bound {} is larger than the provided upper bound {}".format(lb, ub)
        )
    self._lb = lb
    self._ub = ub
    problem = self.problem
    if problem is not None:
        pending = problem._pending_modifications
        pending.var_lb.append((self, lb))
        pending.var_ub.append((self, ub))
|
def function[set_bounds, parameter[self, lb, ub]]:
constant[
Change the lower and upper bounds of a variable.
]
if <ast.BoolOp object at 0x7da1b0b7f310> begin[:]
<ast.Raise object at 0x7da1b0b7fd00>
name[self]._lb assign[=] name[lb]
name[self]._ub assign[=] name[ub]
if compare[name[self].problem is_not constant[None]] begin[:]
call[name[self].problem._pending_modifications.var_lb.append, parameter[tuple[[<ast.Name object at 0x7da1b0b7c820>, <ast.Name object at 0x7da1b0b7e7d0>]]]]
call[name[self].problem._pending_modifications.var_ub.append, parameter[tuple[[<ast.Name object at 0x7da18eb56830>, <ast.Name object at 0x7da18eb55b40>]]]]
|
keyword[def] identifier[set_bounds] ( identifier[self] , identifier[lb] , identifier[ub] ):
literal[string]
keyword[if] identifier[lb] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ub] keyword[is] keyword[not] keyword[None] keyword[and] identifier[lb] > identifier[ub] :
keyword[raise] identifier[ValueError] (
literal[string] . identifier[format] ( identifier[lb] , identifier[ub] )
)
identifier[self] . identifier[_lb] = identifier[lb]
identifier[self] . identifier[_ub] = identifier[ub]
keyword[if] identifier[self] . identifier[problem] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[problem] . identifier[_pending_modifications] . identifier[var_lb] . identifier[append] (( identifier[self] , identifier[lb] ))
identifier[self] . identifier[problem] . identifier[_pending_modifications] . identifier[var_ub] . identifier[append] (( identifier[self] , identifier[ub] ))
|
def set_bounds(self, lb, ub):
"""
Change the lower and upper bounds of a variable.
"""
if lb is not None and ub is not None and (lb > ub):
raise ValueError('The provided lower bound {} is larger than the provided upper bound {}'.format(lb, ub)) # depends on [control=['if'], data=[]]
self._lb = lb
self._ub = ub
if self.problem is not None:
self.problem._pending_modifications.var_lb.append((self, lb))
self.problem._pending_modifications.var_ub.append((self, ub)) # depends on [control=['if'], data=[]]
|
def raise_402(instance, msg=None):
    """Abort the current request with HTTP 402 (Payment Required).

    When *msg* is supplied it is attached to the response body as an error
    message (converted to the requested MIME type by the framework).

    :param instance: Resource instance (used to access the response)
    :type instance: :class:`webob.resource.Resource`
    :raises: :class:`webob.exceptions.ResponseException` of status 402
    """
    response = instance.response
    response.status = 402
    if msg:
        response.body_raw = {'error': msg}
    raise ResponseException(response)
|
def function[raise_402, parameter[instance, msg]]:
constant[Abort the current request with a 402 (Payment Required) response code.
If the message is given it's output as an error message in the response
body (correctly converted to the requested MIME type).
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 402
]
name[instance].response.status assign[=] constant[402]
if name[msg] begin[:]
name[instance].response.body_raw assign[=] dictionary[[<ast.Constant object at 0x7da1b2298ee0>], [<ast.Name object at 0x7da1b2299810>]]
<ast.Raise object at 0x7da1b2298ac0>
|
keyword[def] identifier[raise_402] ( identifier[instance] , identifier[msg] = keyword[None] ):
literal[string]
identifier[instance] . identifier[response] . identifier[status] = literal[int]
keyword[if] identifier[msg] :
identifier[instance] . identifier[response] . identifier[body_raw] ={ literal[string] : identifier[msg] }
keyword[raise] identifier[ResponseException] ( identifier[instance] . identifier[response] )
|
def raise_402(instance, msg=None):
"""Abort the current request with a 402 (Payment Required) response code.
If the message is given it's output as an error message in the response
body (correctly converted to the requested MIME type).
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 402
"""
instance.response.status = 402
if msg:
instance.response.body_raw = {'error': msg} # depends on [control=['if'], data=[]]
raise ResponseException(instance.response)
|
def filter_counter(self, counter, min=2, max=100000000):
    """Filter the counted records by occurrence frequency.

    Keeps every record whose count ``c`` satisfies ``min <= c < max`` and
    records two statistics on ``self.stat``: the total number of records
    seen (``user_record_events``) and the number kept
    (``records_filtered``).

    :param counter: mapping of record -> occurrence count (e.g. a
        ``collections.Counter``).
    :param min: inclusive lower bound on the count.  (The parameter names
        shadow the builtins ``min``/``max``; kept for backward
        compatibility with keyword callers.)
    :param max: exclusive upper bound on the count.
    :return: dict mapping the surviving records to their counts.
    """
    records_filtered = {}
    total_records = 0
    for item in counter:
        total_records += 1
        if max > counter[item] >= min:
            records_filtered[item] = counter[item]
    self.stat['user_record_events'] = total_records
    self.stat['records_filtered'] = len(records_filtered)
    return records_filtered
|
def function[filter_counter, parameter[self, counter, min, max]]:
constant[
Filter the counted records.
Returns: List with record numbers.
]
variable[records_filterd] assign[=] dictionary[[], []]
variable[counter_all_records] assign[=] constant[0]
for taget[name[item]] in starred[name[counter]] begin[:]
<ast.AugAssign object at 0x7da20c6c6260>
if compare[name[max] greater[>] call[name[counter]][name[item]]] begin[:]
call[name[records_filterd]][name[item]] assign[=] call[name[counter]][name[item]]
call[name[self].stat][constant[user_record_events]] assign[=] name[counter_all_records]
call[name[self].stat][constant[records_filtered]] assign[=] call[name[len], parameter[name[records_filterd]]]
return[name[records_filterd]]
|
keyword[def] identifier[filter_counter] ( identifier[self] , identifier[counter] , identifier[min] = literal[int] , identifier[max] = literal[int] ):
literal[string]
identifier[records_filterd] ={}
identifier[counter_all_records] = literal[int]
keyword[for] identifier[item] keyword[in] identifier[counter] :
identifier[counter_all_records] += literal[int]
keyword[if] identifier[max] > identifier[counter] [ identifier[item] ]>= identifier[min] :
identifier[records_filterd] [ identifier[item] ]= identifier[counter] [ identifier[item] ]
identifier[self] . identifier[stat] [ literal[string] ]= identifier[counter_all_records]
identifier[self] . identifier[stat] [ literal[string] ]= identifier[len] ( identifier[records_filterd] )
keyword[return] identifier[records_filterd]
|
def filter_counter(self, counter, min=2, max=100000000):
"""
Filter the counted records.
Returns: List with record numbers.
"""
records_filterd = {}
counter_all_records = 0
for item in counter:
counter_all_records += 1
if max > counter[item] >= min:
records_filterd[item] = counter[item] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
self.stat['user_record_events'] = counter_all_records
self.stat['records_filtered'] = len(records_filterd)
return records_filterd
|
def from_file(cls, fmt, filename=None,
              structure_file=None, are_coops=False):
    """
    Creates a CompleteCohp object from an output file of a COHP
    calculation. Valid formats are either LMTO (for the Stuttgart
    LMTO-ASA code) or LOBSTER (for the LOBSTER code).

    Args:
        fmt: A string for the code that was used to calculate
            the COHPs so that the output file can be handled
            correctly. Can take the values "LMTO" or "LOBSTER".
        filename: Name of the COHP output file. Defaults to COPL
            for LMTO and COHPCAR.lobster/COOPCAR.lobster for LOBSTER.
        structure_file: Name of the file containing the structure.
            If no file name is given, use CTRL for LMTO and POSCAR
            for LOBSTER.
        are_coops: Indicates whether the populations are COOPs or
            COHPs. Defaults to False for COHPs.

    Returns:
        A CompleteCohp object.
    """
    fmt = fmt.upper()
    if fmt == "LMTO":
        # LMTO COOPs and orbital-resolved COHP cannot be handled yet.
        are_coops = False
        orb_res_cohp = None
        if structure_file is None:
            structure_file = "CTRL"
        if filename is None:
            filename = "COPL"
        cohp_file = LMTOCopl(filename=filename, to_eV=True)
    elif fmt == "LOBSTER":
        if structure_file is None:
            structure_file = "POSCAR"
        if filename is None:
            filename = "COOPCAR.lobster" if are_coops \
                else "COHPCAR.lobster"
        warnings.warn(
            "The bond labels are currently consistent with ICOHPLIST.lobster/ICOOPLIST.lobster, not with COHPCAR.lobster/COOPCAR.lobster. Please be aware!")
        cohp_file = Cohpcar(filename=filename, are_coops=are_coops)
        orb_res_cohp = cohp_file.orb_res_cohp
    else:
        raise ValueError("Unknown format %s. Valid formats are LMTO "
                         "and LOBSTER." % fmt)
    structure = Structure.from_file(structure_file)
    efermi = cohp_file.efermi
    cohp_data = cohp_file.cohp_data
    energies = cohp_file.energies
    # Lobster shifts the energies so that the Fermi energy is at zero.
    # Shifting should be done by the plotter object though.
    spins = [Spin.up, Spin.down] if cohp_file.is_spin_polarized \
        else [Spin.up]
    if fmt == "LOBSTER":
        energies += efermi
    if orb_res_cohp is not None:
        # If no total COHPs are present, calculate the total
        # COHPs from the single-orbital populations. Total COHPs
        # may not be present when the cohpgenerator keyword is used
        # in LOBSTER versions 2.2.0 and earlier.
        for label in orb_res_cohp:
            if cohp_file.cohp_data[label]["COHP"] is None:
                cohp_data[label]["COHP"] = {
                    sp: np.sum([orb_res_cohp[label][orbs]["COHP"][sp]
                                for orbs in orb_res_cohp[label]], axis=0)
                    for sp in spins}
            if cohp_file.cohp_data[label]["ICOHP"] is None:
                cohp_data[label]["ICOHP"] = {
                    sp: np.sum([orb_res_cohp[label][orbs]["ICOHP"][sp]
                                for orbs in orb_res_cohp[label]], axis=0)
                    for sp in spins}
    if fmt == "LMTO":
        # Calculate the average COHP for the LMTO file to be
        # consistent with LOBSTER output.
        avg_data = {"COHP": {}, "ICOHP": {}}
        for i in avg_data:
            for spin in spins:
                rows = np.array([cohp_data[label][i][spin]
                                 for label in cohp_data])
                avg = np.average(rows, axis=0)
                # LMTO COHPs have 5 significant figures
                avg_data[i].update({spin:
                                    np.array([round_to_sigfigs(a, 5)
                                              for a in avg],
                                             dtype=float)})
        avg_cohp = Cohp(efermi, energies,
                        avg_data["COHP"],
                        icohp=avg_data["ICOHP"])
    else:
        # Bug fix: the integrated populations were previously read from
        # the "COHP" entry; pass the "ICOHP" data as icohp, matching the
        # LMTO branch above.
        avg_cohp = Cohp(efermi, energies,
                        cohp_data["average"]["COHP"],
                        icohp=cohp_data["average"]["ICOHP"],
                        are_coops=are_coops)
        del cohp_data["average"]
    cohp_dict = {label: Cohp(efermi, energies,
                             cohp_data[label]["COHP"],
                             icohp=cohp_data[label]["ICOHP"],
                             are_coops=are_coops)
                 for label in cohp_data}
    bond_dict = {label: {"length": cohp_data[label]["length"],
                         "sites": [structure.sites[site]
                                   for site in cohp_data[label]["sites"]]}
                 for label in cohp_data}
    return CompleteCohp(structure, avg_cohp, cohp_dict, bonds=bond_dict,
                        are_coops=are_coops, orb_res_cohp=orb_res_cohp)
|
def function[from_file, parameter[cls, fmt, filename, structure_file, are_coops]]:
constant[
Creates a CompleteCohp object from an output file of a COHP
calculation. Valid formats are either LMTO (for the Stuttgart
LMTO-ASA code) or LOBSTER (for the LOBSTER code).
Args:
cohp_file: Name of the COHP output file. Defaults to COPL
for LMTO and COHPCAR.lobster/COOPCAR.lobster for LOBSTER.
are_coops: Indicates whether the populations are COOPs or
COHPs. Defaults to False for COHPs.
fmt: A string for the code that was used to calculate
the COHPs so that the output file can be handled
correctly. Can take the values "LMTO" or "LOBSTER".
structure_file: Name of the file containing the structure.
If no file name is given, use CTRL for LMTO and POSCAR
for LOBSTER.
Returns:
A CompleteCohp object.
]
variable[fmt] assign[=] call[name[fmt].upper, parameter[]]
if compare[name[fmt] equal[==] constant[LMTO]] begin[:]
variable[are_coops] assign[=] constant[False]
variable[orb_res_cohp] assign[=] constant[None]
if compare[name[structure_file] is constant[None]] begin[:]
variable[structure_file] assign[=] constant[CTRL]
if compare[name[filename] is constant[None]] begin[:]
variable[filename] assign[=] constant[COPL]
variable[cohp_file] assign[=] call[name[LMTOCopl], parameter[]]
variable[structure] assign[=] call[name[Structure].from_file, parameter[name[structure_file]]]
variable[efermi] assign[=] name[cohp_file].efermi
variable[cohp_data] assign[=] name[cohp_file].cohp_data
variable[energies] assign[=] name[cohp_file].energies
variable[spins] assign[=] <ast.IfExp object at 0x7da1b1cb18a0>
if compare[name[fmt] equal[==] constant[LOBSTER]] begin[:]
<ast.AugAssign object at 0x7da1b1cb1b70>
if compare[name[orb_res_cohp] is_not constant[None]] begin[:]
for taget[name[label]] in starred[name[orb_res_cohp]] begin[:]
if compare[call[call[name[cohp_file].cohp_data][name[label]]][constant[COHP]] is constant[None]] begin[:]
call[call[name[cohp_data]][name[label]]][constant[COHP]] assign[=] <ast.DictComp object at 0x7da1b1cb2020>
if compare[call[call[name[cohp_file].cohp_data][name[label]]][constant[ICOHP]] is constant[None]] begin[:]
call[call[name[cohp_data]][name[label]]][constant[ICOHP]] assign[=] <ast.DictComp object at 0x7da1b1cb2800>
if compare[name[fmt] equal[==] constant[LMTO]] begin[:]
variable[avg_data] assign[=] dictionary[[<ast.Constant object at 0x7da1b1cb2e60>, <ast.Constant object at 0x7da1b1cb2e90>], [<ast.Dict object at 0x7da1b1cb2ec0>, <ast.Dict object at 0x7da1b1cb2ef0>]]
for taget[name[i]] in starred[name[avg_data]] begin[:]
for taget[name[spin]] in starred[name[spins]] begin[:]
variable[rows] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b1cb3130>]]
variable[avg] assign[=] call[name[np].average, parameter[name[rows]]]
call[call[name[avg_data]][name[i]].update, parameter[dictionary[[<ast.Name object at 0x7da1b1cb3640>], [<ast.Call object at 0x7da1b1cb3670>]]]]
variable[avg_cohp] assign[=] call[name[Cohp], parameter[name[efermi], name[energies], call[name[avg_data]][constant[COHP]]]]
variable[cohp_dict] assign[=] <ast.DictComp object at 0x7da1b2196b30>
variable[bond_dict] assign[=] <ast.DictComp object at 0x7da1b2197010>
return[call[name[CompleteCohp], parameter[name[structure], name[avg_cohp], name[cohp_dict]]]]
|
keyword[def] identifier[from_file] ( identifier[cls] , identifier[fmt] , identifier[filename] = keyword[None] ,
identifier[structure_file] = keyword[None] , identifier[are_coops] = keyword[False] ):
literal[string]
identifier[fmt] = identifier[fmt] . identifier[upper] ()
keyword[if] identifier[fmt] == literal[string] :
identifier[are_coops] = keyword[False]
identifier[orb_res_cohp] = keyword[None]
keyword[if] identifier[structure_file] keyword[is] keyword[None] :
identifier[structure_file] = literal[string]
keyword[if] identifier[filename] keyword[is] keyword[None] :
identifier[filename] = literal[string]
identifier[cohp_file] = identifier[LMTOCopl] ( identifier[filename] = identifier[filename] , identifier[to_eV] = keyword[True] )
keyword[elif] identifier[fmt] == literal[string] :
keyword[if] identifier[structure_file] keyword[is] keyword[None] :
identifier[structure_file] = literal[string]
keyword[if] identifier[filename] keyword[is] keyword[None] :
identifier[filename] = literal[string] keyword[if] identifier[are_coops] keyword[else] literal[string]
identifier[warnings] . identifier[warn] (
literal[string] )
identifier[cohp_file] = identifier[Cohpcar] ( identifier[filename] = identifier[filename] , identifier[are_coops] = identifier[are_coops] )
identifier[orb_res_cohp] = identifier[cohp_file] . identifier[orb_res_cohp]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] % identifier[fmt] )
identifier[structure] = identifier[Structure] . identifier[from_file] ( identifier[structure_file] )
identifier[efermi] = identifier[cohp_file] . identifier[efermi]
identifier[cohp_data] = identifier[cohp_file] . identifier[cohp_data]
identifier[energies] = identifier[cohp_file] . identifier[energies]
identifier[spins] =[ identifier[Spin] . identifier[up] , identifier[Spin] . identifier[down] ] keyword[if] identifier[cohp_file] . identifier[is_spin_polarized] keyword[else] [ identifier[Spin] . identifier[up] ]
keyword[if] identifier[fmt] == literal[string] :
identifier[energies] += identifier[efermi]
keyword[if] identifier[orb_res_cohp] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[label] keyword[in] identifier[orb_res_cohp] :
keyword[if] identifier[cohp_file] . identifier[cohp_data] [ identifier[label] ][ literal[string] ] keyword[is] keyword[None] :
identifier[cohp_data] [ identifier[label] ][ literal[string] ]={
identifier[sp] : identifier[np] . identifier[sum] ([ identifier[orb_res_cohp] [ identifier[label] ][ identifier[orbs] ][ literal[string] ][ identifier[sp] ] keyword[for] identifier[orbs] keyword[in] identifier[orb_res_cohp] [ identifier[label] ]], identifier[axis] = literal[int] ) keyword[for]
identifier[sp]
keyword[in] identifier[spins] }
keyword[if] identifier[cohp_file] . identifier[cohp_data] [ identifier[label] ][ literal[string] ] keyword[is] keyword[None] :
identifier[cohp_data] [ identifier[label] ][ literal[string] ]={ identifier[sp] : identifier[np] . identifier[sum] ([ identifier[orb_res_cohp] [ identifier[label] ][ identifier[orbs] ][ literal[string] ][ identifier[sp] ]
keyword[for] identifier[orbs] keyword[in] identifier[orb_res_cohp] [ identifier[label] ]],
identifier[axis] = literal[int] ) keyword[for] identifier[sp] keyword[in] identifier[spins] }
keyword[if] identifier[fmt] == literal[string] :
identifier[avg_data] ={ literal[string] :{}, literal[string] :{}}
keyword[for] identifier[i] keyword[in] identifier[avg_data] :
keyword[for] identifier[spin] keyword[in] identifier[spins] :
identifier[rows] = identifier[np] . identifier[array] ([ identifier[cohp_data] [ identifier[label] ][ identifier[i] ][ identifier[spin] ]
keyword[for] identifier[label] keyword[in] identifier[cohp_data] ])
identifier[avg] = identifier[np] . identifier[average] ( identifier[rows] , identifier[axis] = literal[int] )
identifier[avg_data] [ identifier[i] ]. identifier[update] ({ identifier[spin] :
identifier[np] . identifier[array] ([ identifier[round_to_sigfigs] ( identifier[a] , literal[int] )
keyword[for] identifier[a] keyword[in] identifier[avg] ],
identifier[dtype] = identifier[float] )})
identifier[avg_cohp] = identifier[Cohp] ( identifier[efermi] , identifier[energies] ,
identifier[avg_data] [ literal[string] ],
identifier[icohp] = identifier[avg_data] [ literal[string] ])
keyword[else] :
identifier[avg_cohp] = identifier[Cohp] ( identifier[efermi] , identifier[energies] ,
identifier[cohp_data] [ literal[string] ][ literal[string] ],
identifier[icohp] = identifier[cohp_data] [ literal[string] ][ literal[string] ],
identifier[are_coops] = identifier[are_coops] )
keyword[del] identifier[cohp_data] [ literal[string] ]
identifier[cohp_dict] ={ identifier[label] : identifier[Cohp] ( identifier[efermi] , identifier[energies] ,
identifier[cohp_data] [ identifier[label] ][ literal[string] ],
identifier[icohp] = identifier[cohp_data] [ identifier[label] ][ literal[string] ],
identifier[are_coops] = identifier[are_coops] )
keyword[for] identifier[label] keyword[in] identifier[cohp_data] }
identifier[bond_dict] ={ identifier[label] :{ literal[string] : identifier[cohp_data] [ identifier[label] ][ literal[string] ],
literal[string] :[ identifier[structure] . identifier[sites] [ identifier[site] ]
keyword[for] identifier[site] keyword[in] identifier[cohp_data] [ identifier[label] ][ literal[string] ]]}
keyword[for] identifier[label] keyword[in] identifier[cohp_data] }
keyword[return] identifier[CompleteCohp] ( identifier[structure] , identifier[avg_cohp] , identifier[cohp_dict] , identifier[bonds] = identifier[bond_dict] ,
identifier[are_coops] = identifier[are_coops] , identifier[orb_res_cohp] = identifier[orb_res_cohp] )
|
def from_file(cls, fmt, filename=None, structure_file=None, are_coops=False):
    """Create a CompleteCohp object from an output file of a COHP calculation.

    Valid formats are either LMTO (for the Stuttgart LMTO-ASA code) or
    LOBSTER (for the LOBSTER code).

    Args:
        fmt: A string for the code that was used to calculate the COHPs so
            that the output file can be handled correctly. Can take the
            values "LMTO" or "LOBSTER".
        filename: Name of the COHP output file. Defaults to COPL for LMTO
            and COHPCAR.lobster/COOPCAR.lobster for LOBSTER.
        structure_file: Name of the file containing the structure. If no
            file name is given, use CTRL for LMTO and POSCAR for LOBSTER.
        are_coops: Indicates whether the populations are COOPs or COHPs.
            Defaults to False for COHPs.

    Returns:
        A CompleteCohp object.

    Raises:
        ValueError: If ``fmt`` is neither "LMTO" nor "LOBSTER".
    """
    fmt = fmt.upper()
    if fmt == 'LMTO':
        # LMTO COOPs and orbital-resolved COHP cannot be handled yet.
        are_coops = False
        orb_res_cohp = None
        if structure_file is None:
            structure_file = 'CTRL'
        if filename is None:
            filename = 'COPL'
        cohp_file = LMTOCopl(filename=filename, to_eV=True)
    elif fmt == 'LOBSTER':
        if structure_file is None:
            structure_file = 'POSCAR'
        if filename is None:
            filename = 'COOPCAR.lobster' if are_coops else 'COHPCAR.lobster'
        warnings.warn(
            'The bond labels are currently consistent with '
            'ICOHPLIST.lobster/ICOOPLIST.lobster, not with '
            'COHPCAR.lobster/COOPCAR.lobster. Please be aware!')
        cohp_file = Cohpcar(filename=filename, are_coops=are_coops)
        orb_res_cohp = cohp_file.orb_res_cohp
    else:
        raise ValueError(
            'Unknown format %s. Valid formats are LMTO and LOBSTER.' % fmt)
    structure = Structure.from_file(structure_file)
    efermi = cohp_file.efermi
    cohp_data = cohp_file.cohp_data
    energies = cohp_file.energies
    # Lobster shifts the energies so that the Fermi energy is at zero.
    # Shifting should be done by the plotter object though.
    spins = [Spin.up, Spin.down] if cohp_file.is_spin_polarized else [Spin.up]
    if fmt == 'LOBSTER':
        energies += efermi
    if orb_res_cohp is not None:
        # If no total COHPs are present, calculate the total COHPs from
        # the single-orbital populations. Total COHPs may not be present
        # when the cohpgenerator keyword is used in LOBSTER versions
        # 2.2.0 and earlier.
        # TODO: Test this more extensively
        for label in orb_res_cohp:
            if cohp_file.cohp_data[label]['COHP'] is None:
                cohp_data[label]['COHP'] = {
                    sp: np.sum(
                        [orb_res_cohp[label][orbs]['COHP'][sp]
                         for orbs in orb_res_cohp[label]],
                        axis=0)
                    for sp in spins}
            if cohp_file.cohp_data[label]['ICOHP'] is None:
                cohp_data[label]['ICOHP'] = {
                    sp: np.sum(
                        [orb_res_cohp[label][orbs]['ICOHP'][sp]
                         for orbs in orb_res_cohp[label]],
                        axis=0)
                    for sp in spins}
    if fmt == 'LMTO':
        # Calculate the average COHP for the LMTO file to be consistent
        # with LOBSTER output.
        avg_data = {'COHP': {}, 'ICOHP': {}}
        for i in avg_data:
            for spin in spins:
                rows = np.array([cohp_data[label][i][spin]
                                 for label in cohp_data])
                avg = np.average(rows, axis=0)
                # LMTO COHPs have 5 significant figures.
                avg_data[i].update({
                    spin: np.array([round_to_sigfigs(a, 5) for a in avg],
                                   dtype=float)})
        avg_cohp = Cohp(efermi, energies, avg_data['COHP'],
                        icohp=avg_data['ICOHP'])
    else:
        # Bug fix: the integrated COHP must come from the 'ICOHP' entry;
        # previously cohp_data['average']['COHP'] was passed for icohp as
        # well, silently duplicating the COHP curve as the ICOHP.
        avg_cohp = Cohp(efermi, energies,
                        cohp_data['average']['COHP'],
                        icohp=cohp_data['average']['ICOHP'],
                        are_coops=are_coops)
        del cohp_data['average']
    cohp_dict = {label: Cohp(efermi, energies,
                             cohp_data[label]['COHP'],
                             icohp=cohp_data[label]['ICOHP'],
                             are_coops=are_coops)
                 for label in cohp_data}
    bond_dict = {label: {'length': cohp_data[label]['length'],
                         'sites': [structure.sites[site]
                                   for site in cohp_data[label]['sites']]}
                 for label in cohp_data}
    return CompleteCohp(structure, avg_cohp, cohp_dict, bonds=bond_dict,
                        are_coops=are_coops, orb_res_cohp=orb_res_cohp)
|
def accept(self, f, *args):
    """Like 'match', but consume the token (tokenizer advances.)"""
    # Only advance the tokenizer when the lookahead actually succeeded.
    result = self.match(f, *args)
    if result is not None:
        self.tokenizer.skip(len(result.tokens))
        return result
    return None
|
def function[accept, parameter[self, f]]:
constant[Like 'match', but consume the token (tokenizer advances.)]
variable[match] assign[=] call[name[self].match, parameter[name[f], <ast.Starred object at 0x7da1b0fd17b0>]]
if compare[name[match] is constant[None]] begin[:]
return[None]
call[name[self].tokenizer.skip, parameter[call[name[len], parameter[name[match].tokens]]]]
return[name[match]]
|
keyword[def] identifier[accept] ( identifier[self] , identifier[f] ,* identifier[args] ):
literal[string]
identifier[match] = identifier[self] . identifier[match] ( identifier[f] ,* identifier[args] )
keyword[if] identifier[match] keyword[is] keyword[None] :
keyword[return]
identifier[self] . identifier[tokenizer] . identifier[skip] ( identifier[len] ( identifier[match] . identifier[tokens] ))
keyword[return] identifier[match]
|
def accept(self, f, *args):
"""Like 'match', but consume the token (tokenizer advances.)"""
match = self.match(f, *args)
if match is None:
return # depends on [control=['if'], data=[]]
self.tokenizer.skip(len(match.tokens))
return match
|
def display_leader_imbalance(cluster_topologies):
    """Display leader count and weight imbalance statistics.

    :param cluster_topologies: A dictionary mapping a string name to a
        ClusterTopology object.
    """
    topologies = list(six.itervalues(cluster_topologies))
    names = list(cluster_topologies.keys())
    broker_ids = list(topologies[0].brokers.keys())
    # All topologies must describe the exact same set of brokers.
    assert all(
        set(topology.brokers.keys()) == set(broker_ids)
        for topology in topologies
    )

    broker_leader_counts = [
        stats.get_broker_leader_counts(
            topology.brokers[broker_id] for broker_id in broker_ids
        )
        for topology in topologies
    ]
    broker_leader_weights = [
        stats.get_broker_leader_weights(
            topology.brokers[broker_id] for broker_id in broker_ids
        )
        for topology in topologies
    ]

    _display_table_title_multicolumn(
        'Leader Count',
        'Brokers',
        broker_ids,
        names,
        broker_leader_counts,
    )
    print('')
    _display_table_title_multicolumn(
        'Leader weight',
        'Brokers',
        broker_ids,
        names,
        broker_leader_weights,
    )

    for name, counts, weights in zip(
            names, broker_leader_counts, broker_leader_weights):
        # When only one topology is shown, the per-topology header is omitted.
        header = '' if len(cluster_topologies) == 1 else name + '\n'
        print(
            '\n'
            '{name}'
            'Leader count imbalance: {net_imbalance}\n'
            'Broker leader weight mean: {weight_mean}\n'
            'Broker leader weight stdev: {weight_stdev}\n'
            'Broker leader weight cv: {weight_cv}'
            .format(
                name=header,
                net_imbalance=stats.get_net_imbalance(counts),
                weight_mean=stats.mean(weights),
                weight_stdev=stats.stdevp(weights),
                weight_cv=stats.coefficient_of_variation(weights),
            )
        )
|
def function[display_leader_imbalance, parameter[cluster_topologies]]:
constant[Display leader count and weight imbalance statistics.
:param cluster_topologies: A dictionary mapping a string name to a
ClusterTopology object.
]
variable[broker_ids] assign[=] call[name[list], parameter[call[call[name[next], parameter[call[name[six].itervalues, parameter[name[cluster_topologies]]]]].brokers.keys, parameter[]]]]
assert[call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b084e380>]]]
variable[broker_leader_counts] assign[=] <ast.ListComp object at 0x7da1b084f220>
variable[broker_leader_weights] assign[=] <ast.ListComp object at 0x7da1b084c820>
call[name[_display_table_title_multicolumn], parameter[constant[Leader Count], constant[Brokers], name[broker_ids], call[name[list], parameter[call[name[cluster_topologies].keys, parameter[]]]], name[broker_leader_counts]]]
call[name[print], parameter[constant[]]]
call[name[_display_table_title_multicolumn], parameter[constant[Leader weight], constant[Brokers], name[broker_ids], call[name[list], parameter[call[name[cluster_topologies].keys, parameter[]]]], name[broker_leader_weights]]]
for taget[tuple[[<ast.Name object at 0x7da1b07b1930>, <ast.Name object at 0x7da1b07b3280>, <ast.Name object at 0x7da1b07b2aa0>]]] in starred[call[name[zip], parameter[call[name[list], parameter[call[name[cluster_topologies].keys, parameter[]]]], name[broker_leader_counts], name[broker_leader_weights]]]] begin[:]
call[name[print], parameter[call[constant[
{name}Leader count imbalance: {net_imbalance}
Broker leader weight mean: {weight_mean}
Broker leader weight stdev: {weight_stdev}
Broker leader weight cv: {weight_cv}].format, parameter[]]]]
|
keyword[def] identifier[display_leader_imbalance] ( identifier[cluster_topologies] ):
literal[string]
identifier[broker_ids] = identifier[list] ( identifier[next] ( identifier[six] . identifier[itervalues] ( identifier[cluster_topologies] )). identifier[brokers] . identifier[keys] ())
keyword[assert] identifier[all] (
identifier[set] ( identifier[broker_ids] )== identifier[set] ( identifier[cluster_topology] . identifier[brokers] . identifier[keys] ())
keyword[for] identifier[cluster_topology] keyword[in] identifier[six] . identifier[itervalues] ( identifier[cluster_topologies] )
)
identifier[broker_leader_counts] =[
identifier[stats] . identifier[get_broker_leader_counts] (
identifier[cluster_topology] . identifier[brokers] [ identifier[broker_id] ]
keyword[for] identifier[broker_id] keyword[in] identifier[broker_ids]
)
keyword[for] identifier[cluster_topology] keyword[in] identifier[six] . identifier[itervalues] ( identifier[cluster_topologies] )
]
identifier[broker_leader_weights] =[
identifier[stats] . identifier[get_broker_leader_weights] (
identifier[cluster_topology] . identifier[brokers] [ identifier[broker_id] ]
keyword[for] identifier[broker_id] keyword[in] identifier[broker_ids]
)
keyword[for] identifier[cluster_topology] keyword[in] identifier[six] . identifier[itervalues] ( identifier[cluster_topologies] )
]
identifier[_display_table_title_multicolumn] (
literal[string] ,
literal[string] ,
identifier[broker_ids] ,
identifier[list] ( identifier[cluster_topologies] . identifier[keys] ()),
identifier[broker_leader_counts] ,
)
identifier[print] ( literal[string] )
identifier[_display_table_title_multicolumn] (
literal[string] ,
literal[string] ,
identifier[broker_ids] ,
identifier[list] ( identifier[cluster_topologies] . identifier[keys] ()),
identifier[broker_leader_weights] ,
)
keyword[for] identifier[name] , identifier[blc] , identifier[blw] keyword[in] identifier[zip] (
identifier[list] ( identifier[cluster_topologies] . identifier[keys] ()),
identifier[broker_leader_counts] ,
identifier[broker_leader_weights]
):
identifier[print] (
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
. identifier[format] (
identifier[name] = literal[string] keyword[if] identifier[len] ( identifier[cluster_topologies] )== literal[int] keyword[else] identifier[name] + literal[string] ,
identifier[net_imbalance] = identifier[stats] . identifier[get_net_imbalance] ( identifier[blc] ),
identifier[weight_mean] = identifier[stats] . identifier[mean] ( identifier[blw] ),
identifier[weight_stdev] = identifier[stats] . identifier[stdevp] ( identifier[blw] ),
identifier[weight_cv] = identifier[stats] . identifier[coefficient_of_variation] ( identifier[blw] ),
)
)
|
def display_leader_imbalance(cluster_topologies):
"""Display leader count and weight imbalance statistics.
:param cluster_topologies: A dictionary mapping a string name to a
ClusterTopology object.
"""
broker_ids = list(next(six.itervalues(cluster_topologies)).brokers.keys())
assert all((set(broker_ids) == set(cluster_topology.brokers.keys()) for cluster_topology in six.itervalues(cluster_topologies)))
broker_leader_counts = [stats.get_broker_leader_counts((cluster_topology.brokers[broker_id] for broker_id in broker_ids)) for cluster_topology in six.itervalues(cluster_topologies)]
broker_leader_weights = [stats.get_broker_leader_weights((cluster_topology.brokers[broker_id] for broker_id in broker_ids)) for cluster_topology in six.itervalues(cluster_topologies)]
_display_table_title_multicolumn('Leader Count', 'Brokers', broker_ids, list(cluster_topologies.keys()), broker_leader_counts)
print('')
_display_table_title_multicolumn('Leader weight', 'Brokers', broker_ids, list(cluster_topologies.keys()), broker_leader_weights)
for (name, blc, blw) in zip(list(cluster_topologies.keys()), broker_leader_counts, broker_leader_weights):
print('\n{name}Leader count imbalance: {net_imbalance}\nBroker leader weight mean: {weight_mean}\nBroker leader weight stdev: {weight_stdev}\nBroker leader weight cv: {weight_cv}'.format(name='' if len(cluster_topologies) == 1 else name + '\n', net_imbalance=stats.get_net_imbalance(blc), weight_mean=stats.mean(blw), weight_stdev=stats.stdevp(blw), weight_cv=stats.coefficient_of_variation(blw))) # depends on [control=['for'], data=[]]
|
def output_cleaned(self, process_data):
    """
    Return verified and cleaned output.

    Parameters
    ----------
    process_data : raw process output

    Returns
    -------
    NumPy array or list of features.
    """
    data_type = self.METADATA["data_type"]
    if data_type == "vector":
        # Vector output is simply materialized into a list of features.
        return list(process_data)
    if data_type == "raster":
        if is_numpy_or_masked_array(process_data):
            return process_data
        if is_numpy_or_masked_array_with_tags(process_data):
            # Clean the array part recursively, pass the tags through.
            array, tags = process_data
            return self.output_cleaned(array), tags
|
def function[output_cleaned, parameter[self, process_data]]:
constant[
Return verified and cleaned output.
Parameters
----------
process_data : raw process output
Returns
-------
NumPy array or list of features.
]
if compare[call[name[self].METADATA][constant[data_type]] equal[==] constant[raster]] begin[:]
if call[name[is_numpy_or_masked_array], parameter[name[process_data]]] begin[:]
return[name[process_data]]
|
keyword[def] identifier[output_cleaned] ( identifier[self] , identifier[process_data] ):
literal[string]
keyword[if] identifier[self] . identifier[METADATA] [ literal[string] ]== literal[string] :
keyword[if] identifier[is_numpy_or_masked_array] ( identifier[process_data] ):
keyword[return] identifier[process_data]
keyword[elif] identifier[is_numpy_or_masked_array_with_tags] ( identifier[process_data] ):
identifier[data] , identifier[tags] = identifier[process_data]
keyword[return] identifier[self] . identifier[output_cleaned] ( identifier[data] ), identifier[tags]
keyword[elif] identifier[self] . identifier[METADATA] [ literal[string] ]== literal[string] :
keyword[return] identifier[list] ( identifier[process_data] )
|
def output_cleaned(self, process_data):
"""
Return verified and cleaned output.
Parameters
----------
process_data : raw process output
Returns
-------
NumPy array or list of features.
"""
if self.METADATA['data_type'] == 'raster':
if is_numpy_or_masked_array(process_data):
return process_data # depends on [control=['if'], data=[]]
elif is_numpy_or_masked_array_with_tags(process_data):
(data, tags) = process_data
return (self.output_cleaned(data), tags) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif self.METADATA['data_type'] == 'vector':
return list(process_data) # depends on [control=['if'], data=[]]
|
    def set_total_deposit(
            self,
            given_block_identifier: BlockSpecification,
            channel_identifier: ChannelID,
            total_deposit: TokenAmount,
            partner: Address,
    ):
        """ Set channel's total deposit.

        `total_deposit` has to be monotonically increasing, this is enforced by
        the `TokenNetwork` smart contract. This is done for the same reason why
        the balance proofs have a monotonically increasing transferred amount,
        it simplifies the analysis of bad behavior and the handling code of
        out-dated balance proofs.

        Races to `set_total_deposit` are handled by the smart contract, where
        largest total deposit wins. The end balance of the funding accounts is
        undefined. E.g.

        - Acc1 calls set_total_deposit with 10 tokens
        - Acc2 calls set_total_deposit with 13 tokens
        - If Acc2's transaction is mined first, then Acc1 token supply is left intact.
        - If Acc1's transaction is mined first, then Acc2 will only move 3 tokens.

        Races for the same account don't have any unexpected side-effect.

        Raises:
            DepositMismatch: If the new request total deposit is lower than the
                existing total deposit on-chain for the `given_block_identifier`.
            RaidenRecoverableError: If the channel was closed meanwhile the
                deposit was in transit.
            RaidenUnrecoverableError: If the transaction was successful and the
                deposit_amount is not as large as the requested value.
            RuntimeError: If the token address is empty.
            ValueError: If an argument is of the invalid type.
        """
        if not isinstance(total_deposit, int):
            raise ValueError('total_deposit needs to be an integer number.')
        token_address = self.token_address()
        token = Token(
            jsonrpc_client=self.client,
            token_address=token_address,
            contract_manager=self.contract_manager,
        )
        # Block against which estimate_gas is run below; a failed estimate
        # stops a doomed transaction from being sent.
        checking_block = self.client.get_checking_block()
        error_prefix = 'setTotalDeposit call will fail'
        with self.channel_operations_lock[partner], self.deposit_lock:
            previous_total_deposit = self._detail_participant(
                channel_identifier=channel_identifier,
                participant=self.node_address,
                partner=partner,
                block_identifier=given_block_identifier,
            ).deposit
            # The contract receives the new *total* deposit; only the delta
            # to the previous on-chain total is approved and moved.
            amount_to_deposit = TokenAmount(total_deposit - previous_total_deposit)
            log_details = {
                'token_network': pex(self.address),
                'channel_identifier': channel_identifier,
                'node': pex(self.node_address),
                'partner': pex(partner),
                'new_total_deposit': total_deposit,
                'previous_total_deposit': previous_total_deposit,
            }
            try:
                self._deposit_preconditions(
                    channel_identifier=channel_identifier,
                    total_deposit=total_deposit,
                    partner=partner,
                    token=token,
                    previous_total_deposit=previous_total_deposit,
                    log_details=log_details,
                    block_identifier=given_block_identifier,
                )
            except NoStateForBlockIdentifier:
                # If preconditions end up being on pruned state skip them. Estimate
                # gas will stop us from sending a transaction that will fail
                pass
            # If there are channels being set up concurrently either the
            # allowance must be accumulated *or* the calls to `approve` and
            # `setTotalDeposit` must be serialized. This is necessary otherwise
            # the deposit will fail.
            #
            # Calls to approve and setTotalDeposit are serialized with the
            # deposit_lock to avoid transaction failure, because with two
            # concurrent deposits, we may have the transactions executed in the
            # following order
            #
            # - approve
            # - approve
            # - setTotalDeposit
            # - setTotalDeposit
            #
            # in which case the second `approve` will overwrite the first,
            # and the first `setTotalDeposit` will consume the allowance,
            # making the second deposit fail.
            token.approve(
                allowed_address=Address(self.address),
                allowance=amount_to_deposit,
            )
            gas_limit = self.proxy.estimate_gas(
                checking_block,
                'setTotalDeposit',
                channel_identifier=channel_identifier,
                participant=self.node_address,
                total_deposit=total_deposit,
                partner=partner,
            )
            # estimate_gas returns None when the transaction is expected to
            # fail; in that case the transaction is never sent and the
            # failure-analysis branch below runs with `checking_block`.
            if gas_limit:
                gas_limit = safe_gas_limit(gas_limit, GAS_REQUIRED_FOR_SET_TOTAL_DEPOSIT)
                error_prefix = 'setTotalDeposit call failed'
                log.debug('setTotalDeposit called', **log_details)
                transaction_hash = self.proxy.transact(
                    'setTotalDeposit',
                    gas_limit,
                    channel_identifier=channel_identifier,
                    participant=self.node_address,
                    total_deposit=total_deposit,
                    partner=partner,
                )
                self.client.poll(transaction_hash)
                receipt_or_none = check_transaction_threw(self.client, transaction_hash)
            transaction_executed = gas_limit is not None
            if not transaction_executed or receipt_or_none:
                # Either the gas estimation failed (transaction never sent)
                # or the mined transaction threw: diagnose why and raise the
                # matching error type.
                if transaction_executed:
                    block = receipt_or_none['blockNumber']
                else:
                    block = checking_block
                self.proxy.jsonrpc_client.check_for_insufficient_eth(
                    transaction_name='setTotalDeposit',
                    transaction_executed=transaction_executed,
                    required_gas=GAS_REQUIRED_FOR_SET_TOTAL_DEPOSIT,
                    block_identifier=block,
                )
                error_type, msg = self._check_why_deposit_failed(
                    channel_identifier=channel_identifier,
                    partner=partner,
                    token=token,
                    amount_to_deposit=amount_to_deposit,
                    total_deposit=total_deposit,
                    transaction_executed=transaction_executed,
                    block_identifier=block,
                )
                error_msg = f'{error_prefix}. {msg}'
                # Recoverable failures are logged less severely; both paths
                # raise so the caller always observes the failure.
                if error_type == RaidenRecoverableError:
                    log.warning(error_msg, **log_details)
                else:
                    log.critical(error_msg, **log_details)
                raise error_type(error_msg)
            log.info('setTotalDeposit successful', **log_details)
|
def function[set_total_deposit, parameter[self, given_block_identifier, channel_identifier, total_deposit, partner]]:
constant[ Set channel's total deposit.
`total_deposit` has to be monotonically increasing, this is enforced by
the `TokenNetwork` smart contract. This is done for the same reason why
the balance proofs have a monotonically increasing transferred amount,
it simplifies the analysis of bad behavior and the handling code of
out-dated balance proofs.
Races to `set_total_deposit` are handled by the smart contract, where
largest total deposit wins. The end balance of the funding accounts is
undefined. E.g.
- Acc1 calls set_total_deposit with 10 tokens
- Acc2 calls set_total_deposit with 13 tokens
- If Acc2's transaction is mined first, then Acc1 token supply is left intact.
- If Acc1's transaction is mined first, then Acc2 will only move 3 tokens.
Races for the same account don't have any unexpeted side-effect.
Raises:
DepositMismatch: If the new request total deposit is lower than the
existing total deposit on-chain for the `given_block_identifier`.
RaidenRecoverableError: If the channel was closed meanwhile the
deposit was in transit.
RaidenUnrecoverableError: If the transaction was sucessful and the
deposit_amount is not as large as the requested value.
RuntimeError: If the token address is empty.
ValueError: If an argument is of the invalid type.
]
if <ast.UnaryOp object at 0x7da1b170a860> begin[:]
<ast.Raise object at 0x7da1b170a6b0>
variable[token_address] assign[=] call[name[self].token_address, parameter[]]
variable[token] assign[=] call[name[Token], parameter[]]
variable[checking_block] assign[=] call[name[self].client.get_checking_block, parameter[]]
variable[error_prefix] assign[=] constant[setTotalDeposit call will fail]
with call[name[self].channel_operations_lock][name[partner]] begin[:]
variable[previous_total_deposit] assign[=] call[name[self]._detail_participant, parameter[]].deposit
variable[amount_to_deposit] assign[=] call[name[TokenAmount], parameter[binary_operation[name[total_deposit] - name[previous_total_deposit]]]]
variable[log_details] assign[=] dictionary[[<ast.Constant object at 0x7da1b170a230>, <ast.Constant object at 0x7da1b170a200>, <ast.Constant object at 0x7da1b170a1d0>, <ast.Constant object at 0x7da1b170a1a0>, <ast.Constant object at 0x7da1b170a170>, <ast.Constant object at 0x7da1b170a140>], [<ast.Call object at 0x7da1b170a0e0>, <ast.Name object at 0x7da1b170a020>, <ast.Call object at 0x7da1b1709ff0>, <ast.Call object at 0x7da1b1709f30>, <ast.Name object at 0x7da1b1709ea0>, <ast.Name object at 0x7da1b1709e70>]]
<ast.Try object at 0x7da1b1709e40>
call[name[token].approve, parameter[]]
variable[gas_limit] assign[=] call[name[self].proxy.estimate_gas, parameter[name[checking_block], constant[setTotalDeposit]]]
if name[gas_limit] begin[:]
variable[gas_limit] assign[=] call[name[safe_gas_limit], parameter[name[gas_limit], name[GAS_REQUIRED_FOR_SET_TOTAL_DEPOSIT]]]
variable[error_prefix] assign[=] constant[setTotalDeposit call failed]
call[name[log].debug, parameter[constant[setTotalDeposit called]]]
variable[transaction_hash] assign[=] call[name[self].proxy.transact, parameter[constant[setTotalDeposit], name[gas_limit]]]
call[name[self].client.poll, parameter[name[transaction_hash]]]
variable[receipt_or_none] assign[=] call[name[check_transaction_threw], parameter[name[self].client, name[transaction_hash]]]
variable[transaction_executed] assign[=] compare[name[gas_limit] is_not constant[None]]
if <ast.BoolOp object at 0x7da1b1708d00> begin[:]
if name[transaction_executed] begin[:]
variable[block] assign[=] call[name[receipt_or_none]][constant[blockNumber]]
call[name[self].proxy.jsonrpc_client.check_for_insufficient_eth, parameter[]]
<ast.Tuple object at 0x7da1b170b190> assign[=] call[name[self]._check_why_deposit_failed, parameter[]]
variable[error_msg] assign[=] <ast.JoinedStr object at 0x7da1b170b5e0>
if compare[name[error_type] equal[==] name[RaidenRecoverableError]] begin[:]
call[name[log].warning, parameter[name[error_msg]]]
<ast.Raise object at 0x7da1b17169e0>
call[name[log].info, parameter[constant[setTotalDeposit successful]]]
|
keyword[def] identifier[set_total_deposit] (
identifier[self] ,
identifier[given_block_identifier] : identifier[BlockSpecification] ,
identifier[channel_identifier] : identifier[ChannelID] ,
identifier[total_deposit] : identifier[TokenAmount] ,
identifier[partner] : identifier[Address] ,
):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[total_deposit] , identifier[int] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[token_address] = identifier[self] . identifier[token_address] ()
identifier[token] = identifier[Token] (
identifier[jsonrpc_client] = identifier[self] . identifier[client] ,
identifier[token_address] = identifier[token_address] ,
identifier[contract_manager] = identifier[self] . identifier[contract_manager] ,
)
identifier[checking_block] = identifier[self] . identifier[client] . identifier[get_checking_block] ()
identifier[error_prefix] = literal[string]
keyword[with] identifier[self] . identifier[channel_operations_lock] [ identifier[partner] ], identifier[self] . identifier[deposit_lock] :
identifier[previous_total_deposit] = identifier[self] . identifier[_detail_participant] (
identifier[channel_identifier] = identifier[channel_identifier] ,
identifier[participant] = identifier[self] . identifier[node_address] ,
identifier[partner] = identifier[partner] ,
identifier[block_identifier] = identifier[given_block_identifier] ,
). identifier[deposit]
identifier[amount_to_deposit] = identifier[TokenAmount] ( identifier[total_deposit] - identifier[previous_total_deposit] )
identifier[log_details] ={
literal[string] : identifier[pex] ( identifier[self] . identifier[address] ),
literal[string] : identifier[channel_identifier] ,
literal[string] : identifier[pex] ( identifier[self] . identifier[node_address] ),
literal[string] : identifier[pex] ( identifier[partner] ),
literal[string] : identifier[total_deposit] ,
literal[string] : identifier[previous_total_deposit] ,
}
keyword[try] :
identifier[self] . identifier[_deposit_preconditions] (
identifier[channel_identifier] = identifier[channel_identifier] ,
identifier[total_deposit] = identifier[total_deposit] ,
identifier[partner] = identifier[partner] ,
identifier[token] = identifier[token] ,
identifier[previous_total_deposit] = identifier[previous_total_deposit] ,
identifier[log_details] = identifier[log_details] ,
identifier[block_identifier] = identifier[given_block_identifier] ,
)
keyword[except] identifier[NoStateForBlockIdentifier] :
keyword[pass]
identifier[token] . identifier[approve] (
identifier[allowed_address] = identifier[Address] ( identifier[self] . identifier[address] ),
identifier[allowance] = identifier[amount_to_deposit] ,
)
identifier[gas_limit] = identifier[self] . identifier[proxy] . identifier[estimate_gas] (
identifier[checking_block] ,
literal[string] ,
identifier[channel_identifier] = identifier[channel_identifier] ,
identifier[participant] = identifier[self] . identifier[node_address] ,
identifier[total_deposit] = identifier[total_deposit] ,
identifier[partner] = identifier[partner] ,
)
keyword[if] identifier[gas_limit] :
identifier[gas_limit] = identifier[safe_gas_limit] ( identifier[gas_limit] , identifier[GAS_REQUIRED_FOR_SET_TOTAL_DEPOSIT] )
identifier[error_prefix] = literal[string]
identifier[log] . identifier[debug] ( literal[string] ,** identifier[log_details] )
identifier[transaction_hash] = identifier[self] . identifier[proxy] . identifier[transact] (
literal[string] ,
identifier[gas_limit] ,
identifier[channel_identifier] = identifier[channel_identifier] ,
identifier[participant] = identifier[self] . identifier[node_address] ,
identifier[total_deposit] = identifier[total_deposit] ,
identifier[partner] = identifier[partner] ,
)
identifier[self] . identifier[client] . identifier[poll] ( identifier[transaction_hash] )
identifier[receipt_or_none] = identifier[check_transaction_threw] ( identifier[self] . identifier[client] , identifier[transaction_hash] )
identifier[transaction_executed] = identifier[gas_limit] keyword[is] keyword[not] keyword[None]
keyword[if] keyword[not] identifier[transaction_executed] keyword[or] identifier[receipt_or_none] :
keyword[if] identifier[transaction_executed] :
identifier[block] = identifier[receipt_or_none] [ literal[string] ]
keyword[else] :
identifier[block] = identifier[checking_block]
identifier[self] . identifier[proxy] . identifier[jsonrpc_client] . identifier[check_for_insufficient_eth] (
identifier[transaction_name] = literal[string] ,
identifier[transaction_executed] = identifier[transaction_executed] ,
identifier[required_gas] = identifier[GAS_REQUIRED_FOR_SET_TOTAL_DEPOSIT] ,
identifier[block_identifier] = identifier[block] ,
)
identifier[error_type] , identifier[msg] = identifier[self] . identifier[_check_why_deposit_failed] (
identifier[channel_identifier] = identifier[channel_identifier] ,
identifier[partner] = identifier[partner] ,
identifier[token] = identifier[token] ,
identifier[amount_to_deposit] = identifier[amount_to_deposit] ,
identifier[total_deposit] = identifier[total_deposit] ,
identifier[transaction_executed] = identifier[transaction_executed] ,
identifier[block_identifier] = identifier[block] ,
)
identifier[error_msg] = literal[string]
keyword[if] identifier[error_type] == identifier[RaidenRecoverableError] :
identifier[log] . identifier[warning] ( identifier[error_msg] ,** identifier[log_details] )
keyword[else] :
identifier[log] . identifier[critical] ( identifier[error_msg] ,** identifier[log_details] )
keyword[raise] identifier[error_type] ( identifier[error_msg] )
identifier[log] . identifier[info] ( literal[string] ,** identifier[log_details] )
|
def set_total_deposit(self, given_block_identifier: BlockSpecification, channel_identifier: ChannelID, total_deposit: TokenAmount, partner: Address):
""" Set channel's total deposit.
`total_deposit` has to be monotonically increasing, this is enforced by
the `TokenNetwork` smart contract. This is done for the same reason why
the balance proofs have a monotonically increasing transferred amount,
it simplifies the analysis of bad behavior and the handling code of
out-dated balance proofs.
Races to `set_total_deposit` are handled by the smart contract, where
largest total deposit wins. The end balance of the funding accounts is
undefined. E.g.
- Acc1 calls set_total_deposit with 10 tokens
- Acc2 calls set_total_deposit with 13 tokens
- If Acc2's transaction is mined first, then Acc1 token supply is left intact.
- If Acc1's transaction is mined first, then Acc2 will only move 3 tokens.
Races for the same account don't have any unexpeted side-effect.
Raises:
DepositMismatch: If the new request total deposit is lower than the
existing total deposit on-chain for the `given_block_identifier`.
RaidenRecoverableError: If the channel was closed meanwhile the
deposit was in transit.
RaidenUnrecoverableError: If the transaction was sucessful and the
deposit_amount is not as large as the requested value.
RuntimeError: If the token address is empty.
ValueError: If an argument is of the invalid type.
"""
if not isinstance(total_deposit, int):
raise ValueError('total_deposit needs to be an integer number.') # depends on [control=['if'], data=[]]
token_address = self.token_address()
token = Token(jsonrpc_client=self.client, token_address=token_address, contract_manager=self.contract_manager)
checking_block = self.client.get_checking_block()
error_prefix = 'setTotalDeposit call will fail'
with self.channel_operations_lock[partner], self.deposit_lock:
previous_total_deposit = self._detail_participant(channel_identifier=channel_identifier, participant=self.node_address, partner=partner, block_identifier=given_block_identifier).deposit
amount_to_deposit = TokenAmount(total_deposit - previous_total_deposit)
log_details = {'token_network': pex(self.address), 'channel_identifier': channel_identifier, 'node': pex(self.node_address), 'partner': pex(partner), 'new_total_deposit': total_deposit, 'previous_total_deposit': previous_total_deposit}
try:
self._deposit_preconditions(channel_identifier=channel_identifier, total_deposit=total_deposit, partner=partner, token=token, previous_total_deposit=previous_total_deposit, log_details=log_details, block_identifier=given_block_identifier) # depends on [control=['try'], data=[]]
except NoStateForBlockIdentifier:
# If preconditions end up being on pruned state skip them. Estimate
# gas will stop us from sending a transaction that will fail
pass # depends on [control=['except'], data=[]]
# If there are channels being set up concurrenlty either the
# allowance must be accumulated *or* the calls to `approve` and
# `setTotalDeposit` must be serialized. This is necessary otherwise
# the deposit will fail.
#
# Calls to approve and setTotalDeposit are serialized with the
# deposit_lock to avoid transaction failure, because with two
# concurrent deposits, we may have the transactions executed in the
# following order
#
# - approve
# - approve
# - setTotalDeposit
# - setTotalDeposit
#
# in which case the second `approve` will overwrite the first,
# and the first `setTotalDeposit` will consume the allowance,
# making the second deposit fail.
token.approve(allowed_address=Address(self.address), allowance=amount_to_deposit)
gas_limit = self.proxy.estimate_gas(checking_block, 'setTotalDeposit', channel_identifier=channel_identifier, participant=self.node_address, total_deposit=total_deposit, partner=partner)
if gas_limit:
gas_limit = safe_gas_limit(gas_limit, GAS_REQUIRED_FOR_SET_TOTAL_DEPOSIT)
error_prefix = 'setTotalDeposit call failed'
log.debug('setTotalDeposit called', **log_details)
transaction_hash = self.proxy.transact('setTotalDeposit', gas_limit, channel_identifier=channel_identifier, participant=self.node_address, total_deposit=total_deposit, partner=partner)
self.client.poll(transaction_hash)
receipt_or_none = check_transaction_threw(self.client, transaction_hash) # depends on [control=['if'], data=[]]
transaction_executed = gas_limit is not None
if not transaction_executed or receipt_or_none:
if transaction_executed:
block = receipt_or_none['blockNumber'] # depends on [control=['if'], data=[]]
else:
block = checking_block
self.proxy.jsonrpc_client.check_for_insufficient_eth(transaction_name='setTotalDeposit', transaction_executed=transaction_executed, required_gas=GAS_REQUIRED_FOR_SET_TOTAL_DEPOSIT, block_identifier=block)
(error_type, msg) = self._check_why_deposit_failed(channel_identifier=channel_identifier, partner=partner, token=token, amount_to_deposit=amount_to_deposit, total_deposit=total_deposit, transaction_executed=transaction_executed, block_identifier=block)
error_msg = f'{error_prefix}. {msg}'
if error_type == RaidenRecoverableError:
log.warning(error_msg, **log_details) # depends on [control=['if'], data=[]]
else:
log.critical(error_msg, **log_details)
raise error_type(error_msg) # depends on [control=['if'], data=[]]
log.info('setTotalDeposit successful', **log_details) # depends on [control=['with'], data=[]]
|
def get_win_launcher(type):
    """
    Load the Windows launcher (executable) suitable for launching a script.

    `type` should be either 'cli' or 'gui'.
    Returns the executable as a byte string.
    """
    # Pick the architecture-specific resource name: "cli-64.exe" on
    # 64-bit hosts, "cli-32.exe" otherwise.  The replace() runs on the
    # fully formatted name, exactly as the bitness-specific rename expects.
    suffix = "-64." if is_64bit() else "-32."
    launcher_fn = ('%s.exe' % type).replace(".", suffix)
    return resource_string('setuptools', launcher_fn)
|
def function[get_win_launcher, parameter[type]]:
constant[
Load the Windows launcher (executable) suitable for launching a script.
`type` should be either 'cli' or 'gui'
Returns the executable as a byte string.
]
variable[launcher_fn] assign[=] binary_operation[constant[%s.exe] <ast.Mod object at 0x7da2590d6920> name[type]]
if call[name[is_64bit], parameter[]] begin[:]
variable[launcher_fn] assign[=] call[name[launcher_fn].replace, parameter[constant[.], constant[-64.]]]
return[call[name[resource_string], parameter[constant[setuptools], name[launcher_fn]]]]
|
keyword[def] identifier[get_win_launcher] ( identifier[type] ):
literal[string]
identifier[launcher_fn] = literal[string] % identifier[type]
keyword[if] identifier[is_64bit] ():
identifier[launcher_fn] = identifier[launcher_fn] . identifier[replace] ( literal[string] , literal[string] )
keyword[else] :
identifier[launcher_fn] = identifier[launcher_fn] . identifier[replace] ( literal[string] , literal[string] )
keyword[return] identifier[resource_string] ( literal[string] , identifier[launcher_fn] )
|
def get_win_launcher(type):
"""
Load the Windows launcher (executable) suitable for launching a script.
`type` should be either 'cli' or 'gui'
Returns the executable as a byte string.
"""
launcher_fn = '%s.exe' % type
if is_64bit():
launcher_fn = launcher_fn.replace('.', '-64.') # depends on [control=['if'], data=[]]
else:
launcher_fn = launcher_fn.replace('.', '-32.')
return resource_string('setuptools', launcher_fn)
|
def image_files_download(self, image_id):
    """Get data file for image with given identifier.

    Parameters
    ----------
    image_id : string
        Unique image identifier

    Returns
    -------
    FileInfo
        Information about image file on disk or None if identifier
        is unknown
    """
    # Look the image up first; an unknown identifier yields None.
    img = self.image_files_get(image_id)
    if img is None:
        return None
    # Package the on-disk file together with the stored mimetype and the
    # original name of the uploaded file.
    props = img.properties
    return FileInfo(
        img.image_file,
        props[datastore.PROPERTY_MIMETYPE],
        props[datastore.PROPERTY_FILENAME],
    )
|
def function[image_files_download, parameter[self, image_id]]:
constant[Get data file for image with given identifier.
Parameters
----------
image_id : string
Unique image identifier
Returns
-------
FileInfo
Information about image file on disk or None if identifier
is unknown
]
variable[img] assign[=] call[name[self].image_files_get, parameter[name[image_id]]]
if compare[name[img] is constant[None]] begin[:]
return[constant[None]]
|
keyword[def] identifier[image_files_download] ( identifier[self] , identifier[image_id] ):
literal[string]
identifier[img] = identifier[self] . identifier[image_files_get] ( identifier[image_id] )
keyword[if] identifier[img] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[else] :
keyword[return] identifier[FileInfo] (
identifier[img] . identifier[image_file] ,
identifier[img] . identifier[properties] [ identifier[datastore] . identifier[PROPERTY_MIMETYPE] ],
identifier[img] . identifier[properties] [ identifier[datastore] . identifier[PROPERTY_FILENAME] ]
)
|
def image_files_download(self, image_id):
"""Get data file for image with given identifier.
Parameters
----------
image_id : string
Unique image identifier
Returns
-------
FileInfo
Information about image file on disk or None if identifier
is unknown
"""
# Retrieve image to ensure that it exist
img = self.image_files_get(image_id)
if img is None:
# Return None if image is unknown
return None # depends on [control=['if'], data=[]]
else:
# Reference and information for original uploaded file
return FileInfo(img.image_file, img.properties[datastore.PROPERTY_MIMETYPE], img.properties[datastore.PROPERTY_FILENAME])
|
def get_parchg(self, poscar, kpoint, band, spin=None, phase=False,
               scale=2):
    """
    Generates a Chgcar object, which is the charge density of the specified
    wavefunction.

    This function generates a Chgcar object with the charge density of the
    wavefunction specified by band and kpoint (and spin, if the WAVECAR
    corresponds to a spin-polarized calculation). The phase tag is a
    feature that is not present in VASP. For a real wavefunction, the phase
    tag being turned on means that the charge density is multiplied by the
    sign of the wavefunction at that point in space. A warning is generated
    if the phase tag is on and the chosen kpoint is not Gamma.

    Note: Augmentation from the PAWs is NOT included in this function. The
    maximal charge density will differ from the PARCHG from VASP, but the
    qualitative shape of the charge density will match.

    Args:
        poscar (pymatgen.io.vasp.inputs.Poscar): Poscar object that has the
            structure associated with the WAVECAR file
        kpoint (int): the index of the kpoint for the wavefunction
        band (int): the index of the band for the wavefunction
        spin (int): optional argument to specify the spin. If the
                    Wavecar has ISPIN = 2, spin == None generates a
                    Chgcar with total spin and magnetization, and
                    spin == {0, 1} specifies just the spin up or
                    down component.
        phase (bool): flag to determine if the charge density is
                      multiplied by the sign of the wavefunction.
                      Only valid for real wavefunctions.
        scale (int): scaling for the FFT grid. The default value of 2 is
                     at least as fine as the VASP default.

    Returns:
        a pymatgen.io.vasp.outputs.Chgcar object
    """
    if phase and not np.all(self.kpoints[kpoint] == 0.):
        warnings.warn('phase == True should only be used for the Gamma '
                      'kpoint! I hope you know what you\'re doing!')

    # Scale ng for the fft grid; restored in the finally block below.
    temp_ng = self.ng
    self.ng = self.ng * scale
    try:
        N = np.prod(self.ng)

        def _density(mesh, apply_phase):
            # Real-space wavefunction from the plane-wave mesh; the ifft
            # is scaled by N to undo numpy's 1/N normalization.
            wfr = np.fft.ifftn(mesh) * N
            den = np.abs(np.conj(wfr) * wfr)
            if apply_phase:
                # Multiply by the sign of the (real) wavefunction; only
                # meaningful for real wavefunctions (Gamma point).
                den = np.sign(np.real(wfr)) * den
            return den

        data = {}
        if self.spin == 2:
            if spin is not None:
                # Single requested spin channel.
                data['total'] = _density(
                    self.fft_mesh(kpoint, band, spin=spin), phase)
            else:
                # Both channels: total density and magnetization (diff).
                # Note the phase flag is not applied here, matching the
                # original behavior for the combined-spin case.
                denup = _density(self.fft_mesh(kpoint, band, spin=0), False)
                dendn = _density(self.fft_mesh(kpoint, band, spin=1), False)
                data['total'] = denup + dendn
                data['diff'] = denup - dendn
        else:
            data['total'] = _density(self.fft_mesh(kpoint, band), phase)
    finally:
        # Always restore the original FFT grid, even if fft_mesh/ifftn
        # raises, so the object is not left with a scaled ng.
        self.ng = temp_ng
    return Chgcar(poscar, data)
|
def function[get_parchg, parameter[self, poscar, kpoint, band, spin, phase, scale]]:
constant[
Generates a Chgcar object, which is the charge density of the specified
wavefunction.
This function generates a Chgcar object with the charge density of the
wavefunction specified by band and kpoint (and spin, if the WAVECAR
corresponds to a spin-polarized calculation). The phase tag is a
feature that is not present in VASP. For a real wavefunction, the phase
tag being turned on means that the charge density is multiplied by the
sign of the wavefunction at that point in space. A warning is generated
if the phase tag is on and the chosen kpoint is not Gamma.
Note: Augmentation from the PAWs is NOT included in this function. The
maximal charge density will differ from the PARCHG from VASP, but the
qualitative shape of the charge density will match.
Args:
poscar (pymatgen.io.vasp.inputs.Poscar): Poscar object that has the
structure associated with the WAVECAR file
kpoint (int): the index of the kpoint for the wavefunction
band (int): the index of the band for the wavefunction
spin (int): optional argument to specify the spin. If the
Wavecar has ISPIN = 2, spin == None generates a
Chgcar with total spin and magnetization, and
spin == {0, 1} specifies just the spin up or
down component.
phase (bool): flag to determine if the charge density is
multiplied by the sign of the wavefunction.
Only valid for real wavefunctions.
scale (int): scaling for the FFT grid. The default value of 2 is
at least as fine as the VASP default.
Returns:
a pymatgen.io.vasp.outputs.Chgcar object
]
if <ast.BoolOp object at 0x7da18eb55b40> begin[:]
call[name[warnings].warn, parameter[constant[phase == True should only be used for the Gamma kpoint! I hope you know what you're doing!]]]
variable[temp_ng] assign[=] name[self].ng
name[self].ng assign[=] binary_operation[name[self].ng * name[scale]]
variable[N] assign[=] call[name[np].prod, parameter[name[self].ng]]
variable[data] assign[=] dictionary[[], []]
if compare[name[self].spin equal[==] constant[2]] begin[:]
if compare[name[spin] is_not constant[None]] begin[:]
variable[wfr] assign[=] binary_operation[call[name[np].fft.ifftn, parameter[call[name[self].fft_mesh, parameter[name[kpoint], name[band]]]]] * name[N]]
variable[den] assign[=] call[name[np].abs, parameter[binary_operation[call[name[np].conj, parameter[name[wfr]]] * name[wfr]]]]
if name[phase] begin[:]
variable[den] assign[=] binary_operation[call[name[np].sign, parameter[call[name[np].real, parameter[name[wfr]]]]] * name[den]]
call[name[data]][constant[total]] assign[=] name[den]
name[self].ng assign[=] name[temp_ng]
return[call[name[Chgcar], parameter[name[poscar], name[data]]]]
|
keyword[def] identifier[get_parchg] ( identifier[self] , identifier[poscar] , identifier[kpoint] , identifier[band] , identifier[spin] = keyword[None] , identifier[phase] = keyword[False] ,
identifier[scale] = literal[int] ):
literal[string]
keyword[if] identifier[phase] keyword[and] keyword[not] identifier[np] . identifier[all] ( identifier[self] . identifier[kpoints] [ identifier[kpoint] ]== literal[int] ):
identifier[warnings] . identifier[warn] ( literal[string]
literal[string] )
identifier[temp_ng] = identifier[self] . identifier[ng]
identifier[self] . identifier[ng] = identifier[self] . identifier[ng] * identifier[scale]
identifier[N] = identifier[np] . identifier[prod] ( identifier[self] . identifier[ng] )
identifier[data] ={}
keyword[if] identifier[self] . identifier[spin] == literal[int] :
keyword[if] identifier[spin] keyword[is] keyword[not] keyword[None] :
identifier[wfr] = identifier[np] . identifier[fft] . identifier[ifftn] ( identifier[self] . identifier[fft_mesh] ( identifier[kpoint] , identifier[band] , identifier[spin] = identifier[spin] ))* identifier[N]
identifier[den] = identifier[np] . identifier[abs] ( identifier[np] . identifier[conj] ( identifier[wfr] )* identifier[wfr] )
keyword[if] identifier[phase] :
identifier[den] = identifier[np] . identifier[sign] ( identifier[np] . identifier[real] ( identifier[wfr] ))* identifier[den]
identifier[data] [ literal[string] ]= identifier[den]
keyword[else] :
identifier[wfr] = identifier[np] . identifier[fft] . identifier[ifftn] ( identifier[self] . identifier[fft_mesh] ( identifier[kpoint] , identifier[band] , identifier[spin] = literal[int] ))* identifier[N]
identifier[denup] = identifier[np] . identifier[abs] ( identifier[np] . identifier[conj] ( identifier[wfr] )* identifier[wfr] )
identifier[wfr] = identifier[np] . identifier[fft] . identifier[ifftn] ( identifier[self] . identifier[fft_mesh] ( identifier[kpoint] , identifier[band] , identifier[spin] = literal[int] ))* identifier[N]
identifier[dendn] = identifier[np] . identifier[abs] ( identifier[np] . identifier[conj] ( identifier[wfr] )* identifier[wfr] )
identifier[data] [ literal[string] ]= identifier[denup] + identifier[dendn]
identifier[data] [ literal[string] ]= identifier[denup] - identifier[dendn]
keyword[else] :
identifier[wfr] = identifier[np] . identifier[fft] . identifier[ifftn] ( identifier[self] . identifier[fft_mesh] ( identifier[kpoint] , identifier[band] ))* identifier[N]
identifier[den] = identifier[np] . identifier[abs] ( identifier[np] . identifier[conj] ( identifier[wfr] )* identifier[wfr] )
keyword[if] identifier[phase] :
identifier[den] = identifier[np] . identifier[sign] ( identifier[np] . identifier[real] ( identifier[wfr] ))* identifier[den]
identifier[data] [ literal[string] ]= identifier[den]
identifier[self] . identifier[ng] = identifier[temp_ng]
keyword[return] identifier[Chgcar] ( identifier[poscar] , identifier[data] )
|
def get_parchg(self, poscar, kpoint, band, spin=None, phase=False, scale=2):
"""
Generates a Chgcar object, which is the charge density of the specified
wavefunction.
This function generates a Chgcar object with the charge density of the
wavefunction specified by band and kpoint (and spin, if the WAVECAR
corresponds to a spin-polarized calculation). The phase tag is a
feature that is not present in VASP. For a real wavefunction, the phase
tag being turned on means that the charge density is multiplied by the
sign of the wavefunction at that point in space. A warning is generated
if the phase tag is on and the chosen kpoint is not Gamma.
Note: Augmentation from the PAWs is NOT included in this function. The
maximal charge density will differ from the PARCHG from VASP, but the
qualitative shape of the charge density will match.
Args:
poscar (pymatgen.io.vasp.inputs.Poscar): Poscar object that has the
structure associated with the WAVECAR file
kpoint (int): the index of the kpoint for the wavefunction
band (int): the index of the band for the wavefunction
spin (int): optional argument to specify the spin. If the
Wavecar has ISPIN = 2, spin == None generates a
Chgcar with total spin and magnetization, and
spin == {0, 1} specifies just the spin up or
down component.
phase (bool): flag to determine if the charge density is
multiplied by the sign of the wavefunction.
Only valid for real wavefunctions.
scale (int): scaling for the FFT grid. The default value of 2 is
at least as fine as the VASP default.
Returns:
a pymatgen.io.vasp.outputs.Chgcar object
"""
if phase and (not np.all(self.kpoints[kpoint] == 0.0)):
warnings.warn("phase == True should only be used for the Gamma kpoint! I hope you know what you're doing!") # depends on [control=['if'], data=[]]
# scaling of ng for the fft grid, need to restore value at the end
temp_ng = self.ng
self.ng = self.ng * scale
N = np.prod(self.ng)
data = {}
if self.spin == 2:
if spin is not None:
wfr = np.fft.ifftn(self.fft_mesh(kpoint, band, spin=spin)) * N
den = np.abs(np.conj(wfr) * wfr)
if phase:
den = np.sign(np.real(wfr)) * den # depends on [control=['if'], data=[]]
data['total'] = den # depends on [control=['if'], data=['spin']]
else:
wfr = np.fft.ifftn(self.fft_mesh(kpoint, band, spin=0)) * N
denup = np.abs(np.conj(wfr) * wfr)
wfr = np.fft.ifftn(self.fft_mesh(kpoint, band, spin=1)) * N
dendn = np.abs(np.conj(wfr) * wfr)
data['total'] = denup + dendn
data['diff'] = denup - dendn # depends on [control=['if'], data=[]]
else:
wfr = np.fft.ifftn(self.fft_mesh(kpoint, band)) * N
den = np.abs(np.conj(wfr) * wfr)
if phase:
den = np.sign(np.real(wfr)) * den # depends on [control=['if'], data=[]]
data['total'] = den
self.ng = temp_ng
return Chgcar(poscar, data)
|
def get_current_hgnc_id(hgnc_name):
    """Return the HGNC ID(s) corresponding to a current or outdate HGNC symbol.

    Parameters
    ----------
    hgnc_name : str
        The HGNC symbol to be converted, possibly an outdated symbol.

    Returns
    -------
    str or list of str or None
        If there is a single HGNC ID corresponding to the given current or
        outdated HGNC symbol, that ID is returned as a string. If the symbol
        is outdated and maps to multiple current IDs, a list of these
        IDs is returned. If the given name doesn't correspond to either
        a current or an outdated HGNC symbol, None is returned.
    """
    # Try the current symbol table first; fall back to the map of
    # previous (outdated) symbols, which may map to a list of IDs or None.
    return get_hgnc_id(hgnc_name) or prev_sym_map.get(hgnc_name)
|
def function[get_current_hgnc_id, parameter[hgnc_name]]:
constant[Return the HGNC ID(s) corresponding to a current or outdate HGNC symbol.
Parameters
----------
hgnc_name : str
The HGNC symbol to be converted, possibly an outdated symbol.
Returns
-------
str or list of str or None
If there is a single HGNC ID corresponding to the given current or
outdated HGNC symbol, that ID is returned as a string. If the symbol
is outdated and maps to multiple current IDs, a list of these
IDs is returned. If the given name doesn't correspond to either
a current or an outdated HGNC symbol, None is returned.
]
variable[hgnc_id] assign[=] call[name[get_hgnc_id], parameter[name[hgnc_name]]]
if name[hgnc_id] begin[:]
return[name[hgnc_id]]
variable[hgnc_id] assign[=] call[name[prev_sym_map].get, parameter[name[hgnc_name]]]
return[name[hgnc_id]]
|
keyword[def] identifier[get_current_hgnc_id] ( identifier[hgnc_name] ):
literal[string]
identifier[hgnc_id] = identifier[get_hgnc_id] ( identifier[hgnc_name] )
keyword[if] identifier[hgnc_id] :
keyword[return] identifier[hgnc_id]
identifier[hgnc_id] = identifier[prev_sym_map] . identifier[get] ( identifier[hgnc_name] )
keyword[return] identifier[hgnc_id]
|
def get_current_hgnc_id(hgnc_name):
"""Return the HGNC ID(s) corresponding to a current or outdate HGNC symbol.
Parameters
----------
hgnc_name : str
The HGNC symbol to be converted, possibly an outdated symbol.
Returns
-------
str or list of str or None
If there is a single HGNC ID corresponding to the given current or
outdated HGNC symbol, that ID is returned as a string. If the symbol
is outdated and maps to multiple current IDs, a list of these
IDs is returned. If the given name doesn't correspond to either
a current or an outdated HGNC symbol, None is returned.
"""
hgnc_id = get_hgnc_id(hgnc_name)
if hgnc_id:
return hgnc_id # depends on [control=['if'], data=[]]
hgnc_id = prev_sym_map.get(hgnc_name)
return hgnc_id
|
def next_token(self):
    """Lexical analyser of the raw input.

    Skips whitespace and `#` comments, then emits one token per call.
    Two-character operators (`==`, `!=`, `<=`, `>=`, `//`) are matched
    before their one-character prefixes.  Returns an EOF token once the
    input is exhausted.

    Raises:
        LexicalError: if the current character starts no known token.
    """
    while self.char is not None:
        if self.char.isspace():
            # Whitespace separates tokens but produces none.
            self.whitespace()
            continue
        elif self.char == '#':
            # `#` starts a comment that runs to end of line.
            self.advance()
            self.comment()
            continue
        elif self.char.isalpha() or self.char == '_':
            # Identifier or keyword.
            return self._id()
        elif self.char == ';':
            self.advance()
            return Token(Nature.SEMI, ';')
        elif self.char == ',':
            self.advance()
            # Bug fix: this token previously carried the lexeme ';'
            # (copy-paste from the SEMI branch above).
            return Token(Nature.COMMA, ',')
        elif self.char.isdigit():
            # Numeric literal.
            return self.number()
        elif self.char == '=' and self.peek() == '=':
            self.advance()
            self.advance()
            return Token(Nature.EQ, '==')
        elif self.char == '!' and self.peek() == '=':
            self.advance()
            self.advance()
            return Token(Nature.NE, '!=')
        elif self.char == '<' and self.peek() == '=':
            self.advance()
            self.advance()
            return Token(Nature.LE, '<=')
        elif self.char == '>' and self.peek() == '=':
            self.advance()
            self.advance()
            return Token(Nature.GE, '>=')
        elif self.char == '<':
            self.advance()
            return Token(Nature.LT, '<')
        elif self.char == '>':
            self.advance()
            return Token(Nature.GT, '>')
        elif self.char == '=':
            self.advance()
            return Token(Nature.ASSIGN, '=')
        elif self.char == '+':
            self.advance()
            return Token(Nature.PLUS, '+')
        elif self.char == '-':
            self.advance()
            return Token(Nature.MINUS, '-')
        elif self.char == '*':
            self.advance()
            return Token(Nature.MUL, '*')
        elif self.char == '/' and self.peek() == '/':
            self.advance()
            self.advance()
            return Token(Nature.INT_DIV, '//')
        elif self.char == '/':
            self.advance()
            return Token(Nature.DIV, '/')
        elif self.char == '(':
            self.advance()
            return Token(Nature.LPAREN, '(')
        elif self.char == ')':
            self.advance()
            return Token(Nature.RPAREN, ')')
        elif self.char == '{':
            self.advance()
            return Token(Nature.LBRACKET, '{')
        elif self.char == '}':
            self.advance()
            return Token(Nature.RBRACKET, '}')
        else:
            # No rule matched the current character.
            raise LexicalError(f"Invalid character `{self.char}`.")
    # End of raw input
    return Token(Nature.EOF, None)
|
def function[next_token, parameter[self]]:
constant[Lexical analyser of the raw input.]
while compare[name[self].char is_not constant[None]] begin[:]
if call[name[self].char.isspace, parameter[]] begin[:]
call[name[self].whitespace, parameter[]]
continue
return[call[name[Token], parameter[name[Nature].EOF, constant[None]]]]
|
keyword[def] identifier[next_token] ( identifier[self] ):
literal[string]
keyword[while] identifier[self] . identifier[char] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[char] . identifier[isspace] ():
identifier[self] . identifier[whitespace] ()
keyword[continue]
keyword[elif] identifier[self] . identifier[char] == literal[string] :
identifier[self] . identifier[advance] ()
identifier[self] . identifier[comment] ()
keyword[continue]
keyword[elif] identifier[self] . identifier[char] . identifier[isalpha] () keyword[or] identifier[self] . identifier[char] == literal[string] :
keyword[return] identifier[self] . identifier[_id] ()
keyword[elif] identifier[self] . identifier[char] == literal[string] :
identifier[self] . identifier[advance] ()
keyword[return] identifier[Token] ( identifier[Nature] . identifier[SEMI] , literal[string] )
keyword[elif] identifier[self] . identifier[char] == literal[string] :
identifier[self] . identifier[advance] ()
keyword[return] identifier[Token] ( identifier[Nature] . identifier[COMMA] , literal[string] )
keyword[elif] identifier[self] . identifier[char] . identifier[isdigit] ():
keyword[return] identifier[self] . identifier[number] ()
keyword[elif] identifier[self] . identifier[char] == literal[string] keyword[and] identifier[self] . identifier[peek] ()== literal[string] :
identifier[self] . identifier[advance] ()
identifier[self] . identifier[advance] ()
keyword[return] identifier[Token] ( identifier[Nature] . identifier[EQ] , literal[string] )
keyword[elif] identifier[self] . identifier[char] == literal[string] keyword[and] identifier[self] . identifier[peek] ()== literal[string] :
identifier[self] . identifier[advance] ()
identifier[self] . identifier[advance] ()
keyword[return] identifier[Token] ( identifier[Nature] . identifier[NE] , literal[string] )
keyword[elif] identifier[self] . identifier[char] == literal[string] keyword[and] identifier[self] . identifier[peek] ()== literal[string] :
identifier[self] . identifier[advance] ()
identifier[self] . identifier[advance] ()
keyword[return] identifier[Token] ( identifier[Nature] . identifier[LE] , literal[string] )
keyword[elif] identifier[self] . identifier[char] == literal[string] keyword[and] identifier[self] . identifier[peek] ()== literal[string] :
identifier[self] . identifier[advance] ()
identifier[self] . identifier[advance] ()
keyword[return] identifier[Token] ( identifier[Nature] . identifier[GE] , literal[string] )
keyword[elif] identifier[self] . identifier[char] == literal[string] :
identifier[self] . identifier[advance] ()
keyword[return] identifier[Token] ( identifier[Nature] . identifier[LT] , literal[string] )
keyword[elif] identifier[self] . identifier[char] == literal[string] :
identifier[self] . identifier[advance] ()
keyword[return] identifier[Token] ( identifier[Nature] . identifier[GT] , literal[string] )
keyword[elif] identifier[self] . identifier[char] == literal[string] :
identifier[self] . identifier[advance] ()
keyword[return] identifier[Token] ( identifier[Nature] . identifier[ASSIGN] , literal[string] )
keyword[elif] identifier[self] . identifier[char] == literal[string] :
identifier[self] . identifier[advance] ()
keyword[return] identifier[Token] ( identifier[Nature] . identifier[PLUS] , literal[string] )
keyword[elif] identifier[self] . identifier[char] == literal[string] :
identifier[self] . identifier[advance] ()
keyword[return] identifier[Token] ( identifier[Nature] . identifier[MINUS] , literal[string] )
keyword[elif] identifier[self] . identifier[char] == literal[string] :
identifier[self] . identifier[advance] ()
keyword[return] identifier[Token] ( identifier[Nature] . identifier[MUL] , literal[string] )
keyword[elif] identifier[self] . identifier[char] == literal[string] keyword[and] identifier[self] . identifier[peek] ()== literal[string] :
identifier[self] . identifier[advance] ()
identifier[self] . identifier[advance] ()
keyword[return] identifier[Token] ( identifier[Nature] . identifier[INT_DIV] , literal[string] )
keyword[elif] identifier[self] . identifier[char] == literal[string] :
identifier[self] . identifier[advance] ()
keyword[return] identifier[Token] ( identifier[Nature] . identifier[DIV] , literal[string] )
keyword[elif] identifier[self] . identifier[char] == literal[string] :
identifier[self] . identifier[advance] ()
keyword[return] identifier[Token] ( identifier[Nature] . identifier[LPAREN] , literal[string] )
keyword[elif] identifier[self] . identifier[char] == literal[string] :
identifier[self] . identifier[advance] ()
keyword[return] identifier[Token] ( identifier[Nature] . identifier[RPAREN] , literal[string] )
keyword[elif] identifier[self] . identifier[char] == literal[string] :
identifier[self] . identifier[advance] ()
keyword[return] identifier[Token] ( identifier[Nature] . identifier[LBRACKET] , literal[string] )
keyword[elif] identifier[self] . identifier[char] == literal[string] :
identifier[self] . identifier[advance] ()
keyword[return] identifier[Token] ( identifier[Nature] . identifier[RBRACKET] , literal[string] )
keyword[else] :
keyword[raise] identifier[LexicalError] ( literal[string] )
keyword[return] identifier[Token] ( identifier[Nature] . identifier[EOF] , keyword[None] )
|
def next_token(self):
"""Lexical analyser of the raw input."""
while self.char is not None:
if self.char.isspace():
# The current character is a whitespace
self.whitespace()
continue # depends on [control=['if'], data=[]]
elif self.char == '#':
# The current character is `#`
self.advance()
self.comment()
continue # depends on [control=['if'], data=[]]
elif self.char.isalpha() or self.char == '_':
# The current character is a letter or `_`
return self._id() # depends on [control=['if'], data=[]]
elif self.char == ';':
# The current character is `;`
self.advance()
return Token(Nature.SEMI, ';') # depends on [control=['if'], data=[]]
elif self.char == ',':
# The current character is `,`
self.advance()
return Token(Nature.COMMA, ';') # depends on [control=['if'], data=[]]
elif self.char.isdigit():
# The current character is a number
return self.number() # depends on [control=['if'], data=[]]
elif self.char == '=' and self.peek() == '=':
# The current character is `==`
self.advance()
self.advance()
return Token(Nature.EQ, '==') # depends on [control=['if'], data=[]]
elif self.char == '!' and self.peek() == '=':
# The current character is `!=`
self.advance()
self.advance()
return Token(Nature.NE, '!=') # depends on [control=['if'], data=[]]
elif self.char == '<' and self.peek() == '=':
# The current character is `<=`
self.advance()
self.advance()
return Token(Nature.LE, '<=') # depends on [control=['if'], data=[]]
elif self.char == '>' and self.peek() == '=':
# The current character is `>=`
self.advance()
self.advance()
return Token(Nature.GE, '>=') # depends on [control=['if'], data=[]]
elif self.char == '<':
# The current character is `<`
self.advance()
return Token(Nature.LT, '<') # depends on [control=['if'], data=[]]
elif self.char == '>':
# The current character is `>`
self.advance()
return Token(Nature.GT, '>') # depends on [control=['if'], data=[]]
elif self.char == '=':
# The current character is `=`
self.advance()
return Token(Nature.ASSIGN, '=') # depends on [control=['if'], data=[]]
elif self.char == '+':
# The current character is `+`
self.advance()
return Token(Nature.PLUS, '+') # depends on [control=['if'], data=[]]
elif self.char == '-':
# The current character is `-`
self.advance()
return Token(Nature.MINUS, '-') # depends on [control=['if'], data=[]]
elif self.char == '*':
# The current character is `*`
self.advance()
return Token(Nature.MUL, '*') # depends on [control=['if'], data=[]]
elif self.char == '/' and self.peek() == '/':
# The current character is `//`
self.advance()
self.advance()
return Token(Nature.INT_DIV, '//') # depends on [control=['if'], data=[]]
elif self.char == '/':
# The current character is `/`
self.advance()
return Token(Nature.DIV, '/') # depends on [control=['if'], data=[]]
elif self.char == '(':
# The current character is `(`
self.advance()
return Token(Nature.LPAREN, '(') # depends on [control=['if'], data=[]]
elif self.char == ')':
# The current character is `)`
self.advance()
return Token(Nature.RPAREN, ')') # depends on [control=['if'], data=[]]
elif self.char == '{':
# The current character is `{`
self.advance()
return Token(Nature.LBRACKET, '{') # depends on [control=['if'], data=[]]
elif self.char == '}':
# The current character is `}`
self.advance()
return Token(Nature.RBRACKET, '}') # depends on [control=['if'], data=[]]
else:
# The current character is unknown
raise LexicalError(f'Invalid character `{self.char}`.') # depends on [control=['while'], data=[]]
# End of raw input
return Token(Nature.EOF, None)
|
def _parse_header_id(line):
"""
Pull the transcript or protein identifier from the header line
which starts with '>'
"""
if type(line) is not binary_type:
raise TypeError("Expected header line to be of type %s but got %s" % (
binary_type, type(line)))
if len(line) <= 1:
raise ValueError("No identifier on FASTA line")
# split line at first space to get the unique identifier for
# this sequence
space_index = line.find(b" ")
if space_index >= 0:
identifier = line[1:space_index]
else:
identifier = line[1:]
# annoyingly Ensembl83 reformatted the transcript IDs of its
# cDNA FASTA to include sequence version numbers
# .e.g.
# "ENST00000448914.1" instead of "ENST00000448914"
# So now we have to parse out the identifier
dot_index = identifier.find(b".")
if dot_index >= 0:
identifier = identifier[:dot_index]
return identifier.decode("ascii")
|
def function[_parse_header_id, parameter[line]]:
constant[
Pull the transcript or protein identifier from the header line
which starts with '>'
]
if compare[call[name[type], parameter[name[line]]] is_not name[binary_type]] begin[:]
<ast.Raise object at 0x7da1b08d4a30>
if compare[call[name[len], parameter[name[line]]] less_or_equal[<=] constant[1]] begin[:]
<ast.Raise object at 0x7da1b08d7dc0>
variable[space_index] assign[=] call[name[line].find, parameter[constant[b' ']]]
if compare[name[space_index] greater_or_equal[>=] constant[0]] begin[:]
variable[identifier] assign[=] call[name[line]][<ast.Slice object at 0x7da1b08d7c70>]
variable[dot_index] assign[=] call[name[identifier].find, parameter[constant[b'.']]]
if compare[name[dot_index] greater_or_equal[>=] constant[0]] begin[:]
variable[identifier] assign[=] call[name[identifier]][<ast.Slice object at 0x7da1b08bd480>]
return[call[name[identifier].decode, parameter[constant[ascii]]]]
|
keyword[def] identifier[_parse_header_id] ( identifier[line] ):
literal[string]
keyword[if] identifier[type] ( identifier[line] ) keyword[is] keyword[not] identifier[binary_type] :
keyword[raise] identifier[TypeError] ( literal[string] %(
identifier[binary_type] , identifier[type] ( identifier[line] )))
keyword[if] identifier[len] ( identifier[line] )<= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[space_index] = identifier[line] . identifier[find] ( literal[string] )
keyword[if] identifier[space_index] >= literal[int] :
identifier[identifier] = identifier[line] [ literal[int] : identifier[space_index] ]
keyword[else] :
identifier[identifier] = identifier[line] [ literal[int] :]
identifier[dot_index] = identifier[identifier] . identifier[find] ( literal[string] )
keyword[if] identifier[dot_index] >= literal[int] :
identifier[identifier] = identifier[identifier] [: identifier[dot_index] ]
keyword[return] identifier[identifier] . identifier[decode] ( literal[string] )
|
def _parse_header_id(line):
"""
Pull the transcript or protein identifier from the header line
which starts with '>'
"""
if type(line) is not binary_type:
raise TypeError('Expected header line to be of type %s but got %s' % (binary_type, type(line))) # depends on [control=['if'], data=['binary_type']]
if len(line) <= 1:
raise ValueError('No identifier on FASTA line') # depends on [control=['if'], data=[]]
# split line at first space to get the unique identifier for
# this sequence
space_index = line.find(b' ')
if space_index >= 0:
identifier = line[1:space_index] # depends on [control=['if'], data=['space_index']]
else:
identifier = line[1:]
# annoyingly Ensembl83 reformatted the transcript IDs of its
# cDNA FASTA to include sequence version numbers
# .e.g.
# "ENST00000448914.1" instead of "ENST00000448914"
# So now we have to parse out the identifier
dot_index = identifier.find(b'.')
if dot_index >= 0:
identifier = identifier[:dot_index] # depends on [control=['if'], data=['dot_index']]
return identifier.decode('ascii')
|
def getTextFromNode(node):
"""
Scans through all children of node and gathers the
text. If node has non-text child-nodes then
NotTextNodeError is raised.
"""
t = ""
for n in node.childNodes:
if n.nodeType == n.TEXT_NODE:
t += n.nodeValue
else:
raise NotTextNodeError
return t
|
def function[getTextFromNode, parameter[node]]:
constant[
Scans through all children of node and gathers the
text. If node has non-text child-nodes then
NotTextNodeError is raised.
]
variable[t] assign[=] constant[]
for taget[name[n]] in starred[name[node].childNodes] begin[:]
if compare[name[n].nodeType equal[==] name[n].TEXT_NODE] begin[:]
<ast.AugAssign object at 0x7da2047e9990>
return[name[t]]
|
keyword[def] identifier[getTextFromNode] ( identifier[node] ):
literal[string]
identifier[t] = literal[string]
keyword[for] identifier[n] keyword[in] identifier[node] . identifier[childNodes] :
keyword[if] identifier[n] . identifier[nodeType] == identifier[n] . identifier[TEXT_NODE] :
identifier[t] += identifier[n] . identifier[nodeValue]
keyword[else] :
keyword[raise] identifier[NotTextNodeError]
keyword[return] identifier[t]
|
def getTextFromNode(node):
"""
Scans through all children of node and gathers the
text. If node has non-text child-nodes then
NotTextNodeError is raised.
"""
t = ''
for n in node.childNodes:
if n.nodeType == n.TEXT_NODE:
t += n.nodeValue # depends on [control=['if'], data=[]]
else:
raise NotTextNodeError # depends on [control=['for'], data=['n']]
return t
|
def create_session_entity_type(
self,
parent,
session_entity_type,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Creates a session entity type.
Example:
>>> import dialogflow_v2
>>>
>>> client = dialogflow_v2.SessionEntityTypesClient()
>>>
>>> parent = client.session_path('[PROJECT]', '[SESSION]')
>>>
>>> # TODO: Initialize ``session_entity_type``:
>>> session_entity_type = {}
>>>
>>> response = client.create_session_entity_type(parent, session_entity_type)
Args:
parent (str): Required. The session to create a session entity type for.
Format: ``projects/<Project ID>/agent/sessions/<Session ID>``.
session_entity_type (Union[dict, ~google.cloud.dialogflow_v2.types.SessionEntityType]): Required. The session entity type to create.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.dialogflow_v2.types.SessionEntityType`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.dialogflow_v2.types.SessionEntityType` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'create_session_entity_type' not in self._inner_api_calls:
self._inner_api_calls[
'create_session_entity_type'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_session_entity_type,
default_retry=self._method_configs[
'CreateSessionEntityType'].retry,
default_timeout=self._method_configs[
'CreateSessionEntityType'].timeout,
client_info=self._client_info,
)
request = session_entity_type_pb2.CreateSessionEntityTypeRequest(
parent=parent,
session_entity_type=session_entity_type,
)
return self._inner_api_calls['create_session_entity_type'](
request, retry=retry, timeout=timeout, metadata=metadata)
|
def function[create_session_entity_type, parameter[self, parent, session_entity_type, retry, timeout, metadata]]:
constant[
Creates a session entity type.
Example:
>>> import dialogflow_v2
>>>
>>> client = dialogflow_v2.SessionEntityTypesClient()
>>>
>>> parent = client.session_path('[PROJECT]', '[SESSION]')
>>>
>>> # TODO: Initialize ``session_entity_type``:
>>> session_entity_type = {}
>>>
>>> response = client.create_session_entity_type(parent, session_entity_type)
Args:
parent (str): Required. The session to create a session entity type for.
Format: ``projects/<Project ID>/agent/sessions/<Session ID>``.
session_entity_type (Union[dict, ~google.cloud.dialogflow_v2.types.SessionEntityType]): Required. The session entity type to create.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.dialogflow_v2.types.SessionEntityType`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.dialogflow_v2.types.SessionEntityType` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
]
if compare[constant[create_session_entity_type] <ast.NotIn object at 0x7da2590d7190> name[self]._inner_api_calls] begin[:]
call[name[self]._inner_api_calls][constant[create_session_entity_type]] assign[=] call[name[google].api_core.gapic_v1.method.wrap_method, parameter[name[self].transport.create_session_entity_type]]
variable[request] assign[=] call[name[session_entity_type_pb2].CreateSessionEntityTypeRequest, parameter[]]
return[call[call[name[self]._inner_api_calls][constant[create_session_entity_type]], parameter[name[request]]]]
|
keyword[def] identifier[create_session_entity_type] (
identifier[self] ,
identifier[parent] ,
identifier[session_entity_type] ,
identifier[retry] = identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[method] . identifier[DEFAULT] ,
identifier[timeout] = identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[method] . identifier[DEFAULT] ,
identifier[metadata] = keyword[None] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[_inner_api_calls] :
identifier[self] . identifier[_inner_api_calls] [
literal[string] ]= identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[method] . identifier[wrap_method] (
identifier[self] . identifier[transport] . identifier[create_session_entity_type] ,
identifier[default_retry] = identifier[self] . identifier[_method_configs] [
literal[string] ]. identifier[retry] ,
identifier[default_timeout] = identifier[self] . identifier[_method_configs] [
literal[string] ]. identifier[timeout] ,
identifier[client_info] = identifier[self] . identifier[_client_info] ,
)
identifier[request] = identifier[session_entity_type_pb2] . identifier[CreateSessionEntityTypeRequest] (
identifier[parent] = identifier[parent] ,
identifier[session_entity_type] = identifier[session_entity_type] ,
)
keyword[return] identifier[self] . identifier[_inner_api_calls] [ literal[string] ](
identifier[request] , identifier[retry] = identifier[retry] , identifier[timeout] = identifier[timeout] , identifier[metadata] = identifier[metadata] )
|
def create_session_entity_type(self, parent, session_entity_type, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None):
"""
Creates a session entity type.
Example:
>>> import dialogflow_v2
>>>
>>> client = dialogflow_v2.SessionEntityTypesClient()
>>>
>>> parent = client.session_path('[PROJECT]', '[SESSION]')
>>>
>>> # TODO: Initialize ``session_entity_type``:
>>> session_entity_type = {}
>>>
>>> response = client.create_session_entity_type(parent, session_entity_type)
Args:
parent (str): Required. The session to create a session entity type for.
Format: ``projects/<Project ID>/agent/sessions/<Session ID>``.
session_entity_type (Union[dict, ~google.cloud.dialogflow_v2.types.SessionEntityType]): Required. The session entity type to create.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.dialogflow_v2.types.SessionEntityType`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.dialogflow_v2.types.SessionEntityType` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'create_session_entity_type' not in self._inner_api_calls:
self._inner_api_calls['create_session_entity_type'] = google.api_core.gapic_v1.method.wrap_method(self.transport.create_session_entity_type, default_retry=self._method_configs['CreateSessionEntityType'].retry, default_timeout=self._method_configs['CreateSessionEntityType'].timeout, client_info=self._client_info) # depends on [control=['if'], data=[]]
request = session_entity_type_pb2.CreateSessionEntityTypeRequest(parent=parent, session_entity_type=session_entity_type)
return self._inner_api_calls['create_session_entity_type'](request, retry=retry, timeout=timeout, metadata=metadata)
|
def _readNamelist(currentlyIncluding, cache, namFilename, unique_glyphs):
""" Detect infinite recursion and prevent it.
This is an implementation detail of readNamelist.
Raises NamelistRecursionError if namFilename is in the process of being included
"""
# normalize
filename = os.path.abspath(os.path.normcase(namFilename))
if filename in currentlyIncluding:
raise NamelistRecursionError(filename)
currentlyIncluding.add(filename)
try:
result = __readNamelist(cache, filename, unique_glyphs)
finally:
currentlyIncluding.remove(filename)
return result
|
def function[_readNamelist, parameter[currentlyIncluding, cache, namFilename, unique_glyphs]]:
constant[ Detect infinite recursion and prevent it.
This is an implementation detail of readNamelist.
Raises NamelistRecursionError if namFilename is in the process of being included
]
variable[filename] assign[=] call[name[os].path.abspath, parameter[call[name[os].path.normcase, parameter[name[namFilename]]]]]
if compare[name[filename] in name[currentlyIncluding]] begin[:]
<ast.Raise object at 0x7da20e954f10>
call[name[currentlyIncluding].add, parameter[name[filename]]]
<ast.Try object at 0x7da20e957100>
return[name[result]]
|
keyword[def] identifier[_readNamelist] ( identifier[currentlyIncluding] , identifier[cache] , identifier[namFilename] , identifier[unique_glyphs] ):
literal[string]
identifier[filename] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[normcase] ( identifier[namFilename] ))
keyword[if] identifier[filename] keyword[in] identifier[currentlyIncluding] :
keyword[raise] identifier[NamelistRecursionError] ( identifier[filename] )
identifier[currentlyIncluding] . identifier[add] ( identifier[filename] )
keyword[try] :
identifier[result] = identifier[__readNamelist] ( identifier[cache] , identifier[filename] , identifier[unique_glyphs] )
keyword[finally] :
identifier[currentlyIncluding] . identifier[remove] ( identifier[filename] )
keyword[return] identifier[result]
|
def _readNamelist(currentlyIncluding, cache, namFilename, unique_glyphs):
""" Detect infinite recursion and prevent it.
This is an implementation detail of readNamelist.
Raises NamelistRecursionError if namFilename is in the process of being included
"""
# normalize
filename = os.path.abspath(os.path.normcase(namFilename))
if filename in currentlyIncluding:
raise NamelistRecursionError(filename) # depends on [control=['if'], data=['filename']]
currentlyIncluding.add(filename)
try:
result = __readNamelist(cache, filename, unique_glyphs) # depends on [control=['try'], data=[]]
finally:
currentlyIncluding.remove(filename)
return result
|
def record(self, tags=None):
"""records all the measures at the same time with a tag_map.
tag_map could either be explicitly passed to the method, or implicitly
read from current runtime context.
"""
if tags is None:
tags = TagContext.get()
if self._invalid:
logger.warning("Measurement map has included negative value "
"measurements, refusing to record")
return
for measure, value in self.measurement_map.items():
if value < 0:
self._invalid = True
logger.warning("Dropping values, value to record must be "
"non-negative")
logger.info("Measure '{}' has negative value ({}), refusing "
"to record measurements from {}"
.format(measure.name, value, self))
return
self.measure_to_view_map.record(
tags=tags,
measurement_map=self.measurement_map,
timestamp=utils.to_iso_str(),
attachments=self.attachments
)
|
def function[record, parameter[self, tags]]:
constant[records all the measures at the same time with a tag_map.
tag_map could either be explicitly passed to the method, or implicitly
read from current runtime context.
]
if compare[name[tags] is constant[None]] begin[:]
variable[tags] assign[=] call[name[TagContext].get, parameter[]]
if name[self]._invalid begin[:]
call[name[logger].warning, parameter[constant[Measurement map has included negative value measurements, refusing to record]]]
return[None]
for taget[tuple[[<ast.Name object at 0x7da1b17ab130>, <ast.Name object at 0x7da1b17a8dc0>]]] in starred[call[name[self].measurement_map.items, parameter[]]] begin[:]
if compare[name[value] less[<] constant[0]] begin[:]
name[self]._invalid assign[=] constant[True]
call[name[logger].warning, parameter[constant[Dropping values, value to record must be non-negative]]]
call[name[logger].info, parameter[call[constant[Measure '{}' has negative value ({}), refusing to record measurements from {}].format, parameter[name[measure].name, name[value], name[self]]]]]
return[None]
call[name[self].measure_to_view_map.record, parameter[]]
|
keyword[def] identifier[record] ( identifier[self] , identifier[tags] = keyword[None] ):
literal[string]
keyword[if] identifier[tags] keyword[is] keyword[None] :
identifier[tags] = identifier[TagContext] . identifier[get] ()
keyword[if] identifier[self] . identifier[_invalid] :
identifier[logger] . identifier[warning] ( literal[string]
literal[string] )
keyword[return]
keyword[for] identifier[measure] , identifier[value] keyword[in] identifier[self] . identifier[measurement_map] . identifier[items] ():
keyword[if] identifier[value] < literal[int] :
identifier[self] . identifier[_invalid] = keyword[True]
identifier[logger] . identifier[warning] ( literal[string]
literal[string] )
identifier[logger] . identifier[info] ( literal[string]
literal[string]
. identifier[format] ( identifier[measure] . identifier[name] , identifier[value] , identifier[self] ))
keyword[return]
identifier[self] . identifier[measure_to_view_map] . identifier[record] (
identifier[tags] = identifier[tags] ,
identifier[measurement_map] = identifier[self] . identifier[measurement_map] ,
identifier[timestamp] = identifier[utils] . identifier[to_iso_str] (),
identifier[attachments] = identifier[self] . identifier[attachments]
)
|
def record(self, tags=None):
"""records all the measures at the same time with a tag_map.
tag_map could either be explicitly passed to the method, or implicitly
read from current runtime context.
"""
if tags is None:
tags = TagContext.get() # depends on [control=['if'], data=['tags']]
if self._invalid:
logger.warning('Measurement map has included negative value measurements, refusing to record')
return # depends on [control=['if'], data=[]]
for (measure, value) in self.measurement_map.items():
if value < 0:
self._invalid = True
logger.warning('Dropping values, value to record must be non-negative')
logger.info("Measure '{}' has negative value ({}), refusing to record measurements from {}".format(measure.name, value, self))
return # depends on [control=['if'], data=['value']] # depends on [control=['for'], data=[]]
self.measure_to_view_map.record(tags=tags, measurement_map=self.measurement_map, timestamp=utils.to_iso_str(), attachments=self.attachments)
|
def add_live_points(self):
"""Add the remaining set of live points to the current set of dead
points. Instantiates a generator that will be called by
the user. Returns the same outputs as :meth:`sample`."""
# Check if the remaining live points have already been added
# to the output set of samples.
if self.added_live:
raise ValueError("The remaining live points have already "
"been added to the list of samples!")
else:
self.added_live = True
# After N samples have been taken out, the remaining volume is
# `e^(-N / nlive)`. The remaining points are distributed uniformly
# within the remaining volume so that the expected volume enclosed
# by the `i`-th worst likelihood is
# `e^(-N / nlive) * (nlive + 1 - i) / (nlive + 1)`.
logvols = self.saved_logvol[-1]
logvols += np.log(1. - (np.arange(self.nlive)+1.) / (self.nlive+1.))
logvols_pad = np.concatenate(([self.saved_logvol[-1]], logvols))
logdvols = logsumexp(a=np.c_[logvols_pad[:-1], logvols_pad[1:]],
axis=1, b=np.c_[np.ones(self.nlive),
-np.ones(self.nlive)])
logdvols += math.log(0.5)
# Defining change in `logvol` used in `logzvar` approximation.
dlvs = logvols_pad[:-1] - logvols_pad[1:]
# Sorting remaining live points.
lsort_idx = np.argsort(self.live_logl)
loglmax = max(self.live_logl)
# Grabbing relevant values from the last dead point.
logz = self.saved_logz[-1]
logzvar = self.saved_logzvar[-1]
h = self.saved_h[-1]
loglstar = self.saved_logl[-1]
if self._beyond_unit_bound(loglstar):
bounditer = self.nbound - 1
else:
bounditer = 0
# Add contributions from the remaining live points in order
# from the lowest to the highest log-likelihoods.
for i in range(self.nlive):
# Grab live point with `i`-th lowest log-likelihood along with
# ancillary quantities.
idx = lsort_idx[i]
logvol, logdvol, dlv = logvols[i], logdvols[i], dlvs[i]
ustar = np.array(self.live_u[idx])
vstar = np.array(self.live_v[idx])
loglstar_new = self.live_logl[idx]
boundidx = self.live_bound[idx]
point_it = self.live_it[idx]
# Compute relative contribution to results.
logwt = np.logaddexp(loglstar_new, loglstar) + logdvol # weight
logz_new = np.logaddexp(logz, logwt) # ln(evidence)
lzterm = (math.exp(loglstar - logz_new) * loglstar +
math.exp(loglstar_new - logz_new) * loglstar_new)
h_new = (math.exp(logdvol) * lzterm +
math.exp(logz - logz_new) * (h + logz) -
logz_new) # information
dh = h_new - h
h = h_new
logz = logz_new
logzvar += dh * dlv # var[ln(evidence)] estimate
loglstar = loglstar_new
logz_remain = loglmax + logvol # remaining ln(evidence)
delta_logz = np.logaddexp(logz, logz_remain) - logz # dlogz
# Save results.
if self.save_samples:
self.saved_id.append(idx)
self.saved_u.append(ustar)
self.saved_v.append(vstar)
self.saved_logl.append(loglstar)
self.saved_logvol.append(logvol)
self.saved_logwt.append(logwt)
self.saved_logz.append(logz)
self.saved_logzvar.append(logzvar)
self.saved_h.append(h)
self.saved_nc.append(1)
self.saved_boundidx.append(boundidx)
self.saved_it.append(point_it)
self.saved_bounditer.append(bounditer)
self.saved_scale.append(self.scale)
self.eff = 100. * (self.it + i) / self.ncall # efficiency
# Return our new "dead" point and ancillary quantities.
yield (idx, ustar, vstar, loglstar, logvol, logwt,
logz, logzvar, h, 1, point_it, boundidx, bounditer,
self.eff, delta_logz)
|
def function[add_live_points, parameter[self]]:
constant[Add the remaining set of live points to the current set of dead
points. Instantiates a generator that will be called by
the user. Returns the same outputs as :meth:`sample`.]
if name[self].added_live begin[:]
<ast.Raise object at 0x7da1b1ed71c0>
variable[logvols] assign[=] call[name[self].saved_logvol][<ast.UnaryOp object at 0x7da1b1ed4d90>]
<ast.AugAssign object at 0x7da1b1ed4bb0>
variable[logvols_pad] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.List object at 0x7da1b1ed73a0>, <ast.Name object at 0x7da1b1ed69e0>]]]]
variable[logdvols] assign[=] call[name[logsumexp], parameter[]]
<ast.AugAssign object at 0x7da1b1ed49d0>
variable[dlvs] assign[=] binary_operation[call[name[logvols_pad]][<ast.Slice object at 0x7da1b1ed4f10>] - call[name[logvols_pad]][<ast.Slice object at 0x7da1b1ed4a90>]]
variable[lsort_idx] assign[=] call[name[np].argsort, parameter[name[self].live_logl]]
variable[loglmax] assign[=] call[name[max], parameter[name[self].live_logl]]
variable[logz] assign[=] call[name[self].saved_logz][<ast.UnaryOp object at 0x7da1b1ed4eb0>]
variable[logzvar] assign[=] call[name[self].saved_logzvar][<ast.UnaryOp object at 0x7da1b1ed6200>]
variable[h] assign[=] call[name[self].saved_h][<ast.UnaryOp object at 0x7da1b1ed5cf0>]
variable[loglstar] assign[=] call[name[self].saved_logl][<ast.UnaryOp object at 0x7da1b1ed5420>]
if call[name[self]._beyond_unit_bound, parameter[name[loglstar]]] begin[:]
variable[bounditer] assign[=] binary_operation[name[self].nbound - constant[1]]
for taget[name[i]] in starred[call[name[range], parameter[name[self].nlive]]] begin[:]
variable[idx] assign[=] call[name[lsort_idx]][name[i]]
<ast.Tuple object at 0x7da1b1ed5360> assign[=] tuple[[<ast.Subscript object at 0x7da1b1ed4c10>, <ast.Subscript object at 0x7da1b1ed4a30>, <ast.Subscript object at 0x7da1b1ed5c30>]]
variable[ustar] assign[=] call[name[np].array, parameter[call[name[self].live_u][name[idx]]]]
variable[vstar] assign[=] call[name[np].array, parameter[call[name[self].live_v][name[idx]]]]
variable[loglstar_new] assign[=] call[name[self].live_logl][name[idx]]
variable[boundidx] assign[=] call[name[self].live_bound][name[idx]]
variable[point_it] assign[=] call[name[self].live_it][name[idx]]
variable[logwt] assign[=] binary_operation[call[name[np].logaddexp, parameter[name[loglstar_new], name[loglstar]]] + name[logdvol]]
variable[logz_new] assign[=] call[name[np].logaddexp, parameter[name[logz], name[logwt]]]
variable[lzterm] assign[=] binary_operation[binary_operation[call[name[math].exp, parameter[binary_operation[name[loglstar] - name[logz_new]]]] * name[loglstar]] + binary_operation[call[name[math].exp, parameter[binary_operation[name[loglstar_new] - name[logz_new]]]] * name[loglstar_new]]]
variable[h_new] assign[=] binary_operation[binary_operation[binary_operation[call[name[math].exp, parameter[name[logdvol]]] * name[lzterm]] + binary_operation[call[name[math].exp, parameter[binary_operation[name[logz] - name[logz_new]]]] * binary_operation[name[h] + name[logz]]]] - name[logz_new]]
variable[dh] assign[=] binary_operation[name[h_new] - name[h]]
variable[h] assign[=] name[h_new]
variable[logz] assign[=] name[logz_new]
<ast.AugAssign object at 0x7da1b1e09d50>
variable[loglstar] assign[=] name[loglstar_new]
variable[logz_remain] assign[=] binary_operation[name[loglmax] + name[logvol]]
variable[delta_logz] assign[=] binary_operation[call[name[np].logaddexp, parameter[name[logz], name[logz_remain]]] - name[logz]]
if name[self].save_samples begin[:]
call[name[self].saved_id.append, parameter[name[idx]]]
call[name[self].saved_u.append, parameter[name[ustar]]]
call[name[self].saved_v.append, parameter[name[vstar]]]
call[name[self].saved_logl.append, parameter[name[loglstar]]]
call[name[self].saved_logvol.append, parameter[name[logvol]]]
call[name[self].saved_logwt.append, parameter[name[logwt]]]
call[name[self].saved_logz.append, parameter[name[logz]]]
call[name[self].saved_logzvar.append, parameter[name[logzvar]]]
call[name[self].saved_h.append, parameter[name[h]]]
call[name[self].saved_nc.append, parameter[constant[1]]]
call[name[self].saved_boundidx.append, parameter[name[boundidx]]]
call[name[self].saved_it.append, parameter[name[point_it]]]
call[name[self].saved_bounditer.append, parameter[name[bounditer]]]
call[name[self].saved_scale.append, parameter[name[self].scale]]
name[self].eff assign[=] binary_operation[binary_operation[constant[100.0] * binary_operation[name[self].it + name[i]]] / name[self].ncall]
<ast.Yield object at 0x7da1b1d52470>
|
keyword[def] identifier[add_live_points] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[added_live] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[else] :
identifier[self] . identifier[added_live] = keyword[True]
identifier[logvols] = identifier[self] . identifier[saved_logvol] [- literal[int] ]
identifier[logvols] += identifier[np] . identifier[log] ( literal[int] -( identifier[np] . identifier[arange] ( identifier[self] . identifier[nlive] )+ literal[int] )/( identifier[self] . identifier[nlive] + literal[int] ))
identifier[logvols_pad] = identifier[np] . identifier[concatenate] (([ identifier[self] . identifier[saved_logvol] [- literal[int] ]], identifier[logvols] ))
identifier[logdvols] = identifier[logsumexp] ( identifier[a] = identifier[np] . identifier[c_] [ identifier[logvols_pad] [:- literal[int] ], identifier[logvols_pad] [ literal[int] :]],
identifier[axis] = literal[int] , identifier[b] = identifier[np] . identifier[c_] [ identifier[np] . identifier[ones] ( identifier[self] . identifier[nlive] ),
- identifier[np] . identifier[ones] ( identifier[self] . identifier[nlive] )])
identifier[logdvols] += identifier[math] . identifier[log] ( literal[int] )
identifier[dlvs] = identifier[logvols_pad] [:- literal[int] ]- identifier[logvols_pad] [ literal[int] :]
identifier[lsort_idx] = identifier[np] . identifier[argsort] ( identifier[self] . identifier[live_logl] )
identifier[loglmax] = identifier[max] ( identifier[self] . identifier[live_logl] )
identifier[logz] = identifier[self] . identifier[saved_logz] [- literal[int] ]
identifier[logzvar] = identifier[self] . identifier[saved_logzvar] [- literal[int] ]
identifier[h] = identifier[self] . identifier[saved_h] [- literal[int] ]
identifier[loglstar] = identifier[self] . identifier[saved_logl] [- literal[int] ]
keyword[if] identifier[self] . identifier[_beyond_unit_bound] ( identifier[loglstar] ):
identifier[bounditer] = identifier[self] . identifier[nbound] - literal[int]
keyword[else] :
identifier[bounditer] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[nlive] ):
identifier[idx] = identifier[lsort_idx] [ identifier[i] ]
identifier[logvol] , identifier[logdvol] , identifier[dlv] = identifier[logvols] [ identifier[i] ], identifier[logdvols] [ identifier[i] ], identifier[dlvs] [ identifier[i] ]
identifier[ustar] = identifier[np] . identifier[array] ( identifier[self] . identifier[live_u] [ identifier[idx] ])
identifier[vstar] = identifier[np] . identifier[array] ( identifier[self] . identifier[live_v] [ identifier[idx] ])
identifier[loglstar_new] = identifier[self] . identifier[live_logl] [ identifier[idx] ]
identifier[boundidx] = identifier[self] . identifier[live_bound] [ identifier[idx] ]
identifier[point_it] = identifier[self] . identifier[live_it] [ identifier[idx] ]
identifier[logwt] = identifier[np] . identifier[logaddexp] ( identifier[loglstar_new] , identifier[loglstar] )+ identifier[logdvol]
identifier[logz_new] = identifier[np] . identifier[logaddexp] ( identifier[logz] , identifier[logwt] )
identifier[lzterm] =( identifier[math] . identifier[exp] ( identifier[loglstar] - identifier[logz_new] )* identifier[loglstar] +
identifier[math] . identifier[exp] ( identifier[loglstar_new] - identifier[logz_new] )* identifier[loglstar_new] )
identifier[h_new] =( identifier[math] . identifier[exp] ( identifier[logdvol] )* identifier[lzterm] +
identifier[math] . identifier[exp] ( identifier[logz] - identifier[logz_new] )*( identifier[h] + identifier[logz] )-
identifier[logz_new] )
identifier[dh] = identifier[h_new] - identifier[h]
identifier[h] = identifier[h_new]
identifier[logz] = identifier[logz_new]
identifier[logzvar] += identifier[dh] * identifier[dlv]
identifier[loglstar] = identifier[loglstar_new]
identifier[logz_remain] = identifier[loglmax] + identifier[logvol]
identifier[delta_logz] = identifier[np] . identifier[logaddexp] ( identifier[logz] , identifier[logz_remain] )- identifier[logz]
keyword[if] identifier[self] . identifier[save_samples] :
identifier[self] . identifier[saved_id] . identifier[append] ( identifier[idx] )
identifier[self] . identifier[saved_u] . identifier[append] ( identifier[ustar] )
identifier[self] . identifier[saved_v] . identifier[append] ( identifier[vstar] )
identifier[self] . identifier[saved_logl] . identifier[append] ( identifier[loglstar] )
identifier[self] . identifier[saved_logvol] . identifier[append] ( identifier[logvol] )
identifier[self] . identifier[saved_logwt] . identifier[append] ( identifier[logwt] )
identifier[self] . identifier[saved_logz] . identifier[append] ( identifier[logz] )
identifier[self] . identifier[saved_logzvar] . identifier[append] ( identifier[logzvar] )
identifier[self] . identifier[saved_h] . identifier[append] ( identifier[h] )
identifier[self] . identifier[saved_nc] . identifier[append] ( literal[int] )
identifier[self] . identifier[saved_boundidx] . identifier[append] ( identifier[boundidx] )
identifier[self] . identifier[saved_it] . identifier[append] ( identifier[point_it] )
identifier[self] . identifier[saved_bounditer] . identifier[append] ( identifier[bounditer] )
identifier[self] . identifier[saved_scale] . identifier[append] ( identifier[self] . identifier[scale] )
identifier[self] . identifier[eff] = literal[int] *( identifier[self] . identifier[it] + identifier[i] )/ identifier[self] . identifier[ncall]
keyword[yield] ( identifier[idx] , identifier[ustar] , identifier[vstar] , identifier[loglstar] , identifier[logvol] , identifier[logwt] ,
identifier[logz] , identifier[logzvar] , identifier[h] , literal[int] , identifier[point_it] , identifier[boundidx] , identifier[bounditer] ,
identifier[self] . identifier[eff] , identifier[delta_logz] )
|
def add_live_points(self):
    """Add the remaining set of live points to the current set of dead
    points. Instantiates a generator that will be called by
    the user. Returns the same outputs as :meth:`sample`.

    Each iteration yields a 15-tuple
    ``(idx, ustar, vstar, loglstar, logvol, logwt, logz, logzvar, h,
    nc, point_it, boundidx, bounditer, eff, delta_logz)`` for the live
    point with the `i`-th lowest log-likelihood.

    Raises:
        ValueError: if the live points were already added
            (``self.added_live`` is True).
    """
    # Check if the remaining live points have already been added
    # to the output set of samples.
    if self.added_live:
        raise ValueError('The remaining live points have already been added to the list of samples!') # depends on [control=['if'], data=[]]
    else:
        self.added_live = True
    # After N samples have been taken out, the remaining volume is
    # `e^(-N / nlive)`. The remaining points are distributed uniformly
    # within the remaining volume so that the expected volume enclosed
    # by the `i`-th worst likelihood is
    # `e^(-N / nlive) * (nlive + 1 - i) / (nlive + 1)`.
    logvols = self.saved_logvol[-1]
    logvols += np.log(1.0 - (np.arange(self.nlive) + 1.0) / (self.nlive + 1.0))
    # Pad with the last dead point's volume so adjacent differences below
    # cover every shrinkage step, including the first live point's.
    logvols_pad = np.concatenate(([self.saved_logvol[-1]], logvols))
    # Signed logsumexp over adjacent volumes: log(X_{i-1} - X_i).
    logdvols = logsumexp(a=np.c_[logvols_pad[:-1], logvols_pad[1:]], axis=1, b=np.c_[np.ones(self.nlive), -np.ones(self.nlive)])
    # Trapezoid-rule factor of 1/2 applied to each log-width.
    logdvols += math.log(0.5)
    # Defining change in `logvol` used in `logzvar` approximation.
    dlvs = logvols_pad[:-1] - logvols_pad[1:]
    # Sorting remaining live points.
    lsort_idx = np.argsort(self.live_logl)
    loglmax = max(self.live_logl)
    # Grabbing relevant values from the last dead point.
    logz = self.saved_logz[-1]
    logzvar = self.saved_logzvar[-1]
    h = self.saved_h[-1]
    loglstar = self.saved_logl[-1]
    if self._beyond_unit_bound(loglstar):
        bounditer = self.nbound - 1 # depends on [control=['if'], data=[]]
    else:
        bounditer = 0
    # Add contributions from the remaining live points in order
    # from the lowest to the highest log-likelihoods.
    for i in range(self.nlive):
        # Grab live point with `i`-th lowest log-likelihood along with
        # ancillary quantities.
        idx = lsort_idx[i]
        (logvol, logdvol, dlv) = (logvols[i], logdvols[i], dlvs[i])
        ustar = np.array(self.live_u[idx])
        vstar = np.array(self.live_v[idx])
        loglstar_new = self.live_logl[idx]
        boundidx = self.live_bound[idx]
        point_it = self.live_it[idx]
        # Compute relative contribution to results.
        # Trapezoidal weight: (L_new + L_old) / 2 * dX, all in log space.
        logwt = np.logaddexp(loglstar_new, loglstar) + logdvol # weight
        logz_new = np.logaddexp(logz, logwt) # ln(evidence)
        lzterm = math.exp(loglstar - logz_new) * loglstar + math.exp(loglstar_new - logz_new) * loglstar_new
        h_new = math.exp(logdvol) * lzterm + math.exp(logz - logz_new) * (h + logz) - logz_new # information
        dh = h_new - h
        h = h_new
        logz = logz_new
        logzvar += dh * dlv # var[ln(evidence)] estimate
        loglstar = loglstar_new
        # Upper bound on what evidence the remaining volume could hold.
        logz_remain = loglmax + logvol # remaining ln(evidence)
        delta_logz = np.logaddexp(logz, logz_remain) - logz # dlogz
        # Save results.
        if self.save_samples:
            self.saved_id.append(idx)
            self.saved_u.append(ustar)
            self.saved_v.append(vstar)
            self.saved_logl.append(loglstar)
            self.saved_logvol.append(logvol)
            self.saved_logwt.append(logwt)
            self.saved_logz.append(logz)
            self.saved_logzvar.append(logzvar)
            self.saved_h.append(h)
            # nc = 1: each live point costs one "call" in the bookkeeping.
            self.saved_nc.append(1)
            self.saved_boundidx.append(boundidx)
            self.saved_it.append(point_it)
            self.saved_bounditer.append(bounditer)
            self.saved_scale.append(self.scale) # depends on [control=['if'], data=[]]
        self.eff = 100.0 * (self.it + i) / self.ncall # efficiency
        # Return our new "dead" point and ancillary quantities.
        yield (idx, ustar, vstar, loglstar, logvol, logwt, logz, logzvar, h, 1, point_it, boundidx, bounditer, self.eff, delta_logz) # depends on [control=['for'], data=['i']]
|
def eval_js(self, expr):
    """Evaluate a Javascript expression.

    If the widget has not been built yet, the expression is queued for
    later evaluation and ``None`` is returned; otherwise it is run in
    the page's main frame and the converted result is returned.
    """
    if not self.is_built():
        # Widget not ready: defer the expression until build time.
        self._pending_js_eval.append(expr)
        return
    logger.log(5, "Evaluate Javascript: `%s`.", expr)
    frame = self.page().mainFrame()
    result = frame.evaluateJavaScript(expr)
    return _to_py(result)
|
def function[eval_js, parameter[self, expr]]:
constant[Evaluate a Javascript expression.]
if <ast.UnaryOp object at 0x7da1b120a410> begin[:]
call[name[self]._pending_js_eval.append, parameter[name[expr]]]
return[None]
call[name[logger].log, parameter[constant[5], constant[Evaluate Javascript: `%s`.], name[expr]]]
variable[out] assign[=] call[call[call[name[self].page, parameter[]].mainFrame, parameter[]].evaluateJavaScript, parameter[name[expr]]]
return[call[name[_to_py], parameter[name[out]]]]
|
keyword[def] identifier[eval_js] ( identifier[self] , identifier[expr] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[is_built] ():
identifier[self] . identifier[_pending_js_eval] . identifier[append] ( identifier[expr] )
keyword[return]
identifier[logger] . identifier[log] ( literal[int] , literal[string] , identifier[expr] )
identifier[out] = identifier[self] . identifier[page] (). identifier[mainFrame] (). identifier[evaluateJavaScript] ( identifier[expr] )
keyword[return] identifier[_to_py] ( identifier[out] )
|
def eval_js(self, expr):
    """Evaluate a Javascript expression.

    Returns the converted result when the widget is built; otherwise the
    expression is queued in ``self._pending_js_eval`` and ``None`` is
    returned (presumably the queue is flushed once the widget is built
    -- confirm against the builder code).
    """
    if not self.is_built():
        self._pending_js_eval.append(expr)
        return # depends on [control=['if'], data=[]]
    # Level 5 is below DEBUG (10), so this only shows under very
    # verbose logging configurations.
    logger.log(5, 'Evaluate Javascript: `%s`.', expr)
    out = self.page().mainFrame().evaluateJavaScript(expr)
    # _to_py presumably converts the JS engine's return value into a
    # plain Python object.
    return _to_py(out)
|
def applet_list_projects(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /applet-xxxx/listProjects API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2FlistProjects

    :param object_id: applet object ID (e.g. "applet-xxxx")
    :param input_params: optional dict of input parameters sent as the
        request body; defaults to an empty dict
    :param always_retry: forwarded to DXHTTPRequest; presumably marks the
        request as safe to retry -- confirm against DXHTTPRequest's contract
    :returns: whatever DXHTTPRequest returns for this route
    """
    # Fix for the mutable-default-argument pitfall: a fresh dict is created
    # per call instead of one shared `{}` that the callee could mutate.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/listProjects' % object_id, input_params, always_retry=always_retry, **kwargs)
|
def function[applet_list_projects, parameter[object_id, input_params, always_retry]]:
constant[
Invokes the /applet-xxxx/listProjects API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2FlistProjects
]
return[call[name[DXHTTPRequest], parameter[binary_operation[constant[/%s/listProjects] <ast.Mod object at 0x7da2590d6920> name[object_id]], name[input_params]]]]
|
keyword[def] identifier[applet_list_projects] ( identifier[object_id] , identifier[input_params] ={}, identifier[always_retry] = keyword[True] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[DXHTTPRequest] ( literal[string] % identifier[object_id] , identifier[input_params] , identifier[always_retry] = identifier[always_retry] ,** identifier[kwargs] )
|
def applet_list_projects(object_id, input_params={}, always_retry=True, **kwargs):
    """
    Invokes the /applet-xxxx/listProjects API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2FlistProjects

    :param object_id: applet object ID (e.g. "applet-xxxx")
    :param input_params: dict of input parameters sent as the request body
    :param always_retry: forwarded to DXHTTPRequest; presumably marks the
        request as safe to retry -- confirm against DXHTTPRequest's contract
    :returns: whatever DXHTTPRequest returns for this route
    """
    # NOTE(review): the mutable default `input_params={}` is shared across
    # calls; this is safe only if DXHTTPRequest never mutates it -- verify.
    return DXHTTPRequest('/%s/listProjects' % object_id, input_params, always_retry=always_retry, **kwargs)
|
def overhang(self, tile):
    """Return the (left, right) absolute overflow of this box past `tile`.

    Conceptually ``self \\ tile`` -- the set-theoretic relative
    complement, but in a bounding-box sense.
    """
    left_excess = self.l - tile.l
    right_excess = self.r - tile.r
    # Clipping against a zero vector keeps only overflow on the relevant
    # side; np.abs turns the signed excess into a magnitude.
    left = np.abs(amin(left_excess, aN(0, dim=self.dim)))
    right = np.abs(amax(right_excess, aN(0, dim=self.dim)))
    return left, right
|
def function[overhang, parameter[self, tile]]:
constant[
Get the left and right absolute overflow -- the amount of box
overhanging `tile`, can be viewed as self \ tile (set theory relative
complement, but in a bounding sense)
]
variable[ll] assign[=] call[name[np].abs, parameter[call[name[amin], parameter[binary_operation[name[self].l - name[tile].l], call[name[aN], parameter[constant[0]]]]]]]
variable[rr] assign[=] call[name[np].abs, parameter[call[name[amax], parameter[binary_operation[name[self].r - name[tile].r], call[name[aN], parameter[constant[0]]]]]]]
return[tuple[[<ast.Name object at 0x7da18f09c4c0>, <ast.Name object at 0x7da18f09e350>]]]
|
keyword[def] identifier[overhang] ( identifier[self] , identifier[tile] ):
literal[string]
identifier[ll] = identifier[np] . identifier[abs] ( identifier[amin] ( identifier[self] . identifier[l] - identifier[tile] . identifier[l] , identifier[aN] ( literal[int] , identifier[dim] = identifier[self] . identifier[dim] )))
identifier[rr] = identifier[np] . identifier[abs] ( identifier[amax] ( identifier[self] . identifier[r] - identifier[tile] . identifier[r] , identifier[aN] ( literal[int] , identifier[dim] = identifier[self] . identifier[dim] )))
keyword[return] identifier[ll] , identifier[rr]
|
def overhang(self, tile):
    """
    Get the left and right absolute overflow -- the amount of box
    overhanging `tile`, can be viewed as self \\ tile (set theory relative
    complement, but in a bounding sense)

    Returns a ``(left, right)`` pair of non-negative overflow amounts.
    """
    # amin/amax presumably clip the signed difference elementwise against
    # a zero vector, so only overhang on the relevant side survives;
    # np.abs then turns the signed excess into a magnitude.
    ll = np.abs(amin(self.l - tile.l, aN(0, dim=self.dim)))
    rr = np.abs(amax(self.r - tile.r, aN(0, dim=self.dim)))
    return (ll, rr)
|
def _mkpda(self, nonterms, productions, productions_struct, terminals, splitstring=1):
        """
        This function generates a PDA from a CNF grammar as described in:
        - http://www.oit.edu/faculty/sherry.yang/CST229/Lectures/7_pda.pdf
        - http://www.eng.utah.edu/~cs3100/lectures/l18/pda-notes.pdf
        If all of the grammar productions are in the Chomsky Normal Form,
        then follow the template for constructing a pushdown symautomata:
        1. Start
        2. Push S
        3. Pop
        4. Case:
            Nonterminal A: For every production rule of this form: A: BC, Push C and then Push B
        Args:
            nonterms (list): Non terminals list
            productions (dict): productions in the CNF form:
                                A -> a or A -> b0b1, or S -> e
            productions_struct (dict): productions in the CNF form in structure form
                                object.a for A -> a,
                                object.b0 and object.b1 for A -> b0b1
                                and object.type where type is
                                1 for A-->a and 2 for A-->b0b1
            terminals (list): All terminals
            splitstring (bool): If enabled an extra space is added after each symbol.
        Returns:
            PDA: The generated PDA
        """
        # NOTE(review): PDAState.type codes used below appear to be
        # 1 = push `sym`, 2 = pop, 3 = read -- confirm against PDAState.
        pda = PDA(self.alphabet)
        pda.nonterminals = nonterms
        pda.terminals = terminals
        # State 0: push the '@closing' end-of-stack marker, go to state 1.
        pda.s[pda.n] = PDAState()
        pda.s[pda.n].id = pda.n
        pda.s[pda.n].sym = '@closing'
        pda.s[pda.n].type = 1
        pda.s[pda.n].trans[1] = [0]
        pda.n = pda.n + 1
        # State 1: push the start symbol (first nonterminal), go to state 2.
        pda.s[pda.n] = PDAState()
        pda.s[pda.n].id = pda.n
        pda.s[pda.n].type = 1
        pda.s[pda.n].sym = nonterms[0]
        pda.s[pda.n].trans[2] = [0]
        pda.n = pda.n + 1
        # State 2: the central pop/dispatch state; transitions to the
        # per-production states are wired up in the loops below.
        pda.s[pda.n] = PDAState()
        pda.s[pda.n].id = pda.n
        pda.s[pda.n].type = 2
        pda.s[pda.n].trans[0] = ['@closing']
        # `counter` walks productions_struct flat, in lockstep with the
        # nested (nonterminal, production) iteration -- assumes
        # productions_struct is ordered exactly like `productions`.
        counter = 0
        i = 0
        while i < len(nonterms):
            j = 0
            while j < len(productions[nonterms[i]]):
                if productions_struct[counter].type == 1:
                    # Terminal production A -> a.
                    # ADD AND CONNECT STATE
                    pda.n = pda.n + 1
                    pda.s[pda.n] = PDAState()
                    pda.s[pda.n].id = pda.n
                    if pda.n not in pda.s[2].trans:
                        pda.s[2].trans[pda.n] = []
                    # Dispatch: popping A from state 2 enters this state.
                    pda.s[2].trans[pda.n].append(nonterms[i])
                    if splitstring == 0:
                        # FILL NEW STATE READ
                        pda.s[pda.n].type = 3
                        pda.s[pda.n].trans[2] = [productions_struct[counter].a]
                    else:
                        # THE FOLLOWIN SWITCH IS DUE TO THE REQUIREMENT OF
                        # HAVING STRINGS SPLITTED TO SYMBOLS AND CAN INTERSECT
                        # WITH DFA
                        if productions_struct[counter].a not in terminals or \
                                len(productions_struct[counter].a) == 1:
                            # Single-symbol terminal: read it, then read the
                            # separator space, then return to state 2.
                            # FILL NEW STATE READ
                            pda.s[pda.n].type = 3
                            pda.s[pda.n].trans[pda.n + 1] = [productions_struct[counter].a.lower()]
                            pda.n = pda.n + 1
                            pda.s[pda.n] = PDAState()
                            pda.s[pda.n].id = pda.n
                            pda.s[pda.n].type = 3
                            pda.s[pda.n].trans[2] = [' ']
                        else:
                            # Multi-character terminal: chain one read state
                            # per character (first, middle, last), then a
                            # separator-space read back to state 2.
                            pda.s[pda.n].type = 3
                            pda.s[pda.n].trans[pda.n + 1] = \
                                [productions_struct[counter].a[0].lower()]
                            k = 1
                            while k < len(productions_struct[counter].a) - 1:
                                pda.n = pda.n + 1
                                pda.s[pda.n] = PDAState()
                                pda.s[pda.n].id = pda.n
                                pda.s[pda.n].type = 3
                                pda.s[pda.n].trans[pda.n +1] = \
                                    [productions_struct[counter].a[k].lower()]
                                k = k + 1
                            pda.n = pda.n + 1
                            pda.s[pda.n] = PDAState()
                            pda.s[pda.n].id = pda.n
                            pda.s[pda.n].type = 3
                            pda.s[pda.n].trans[pda.n + 1] = \
                                [productions_struct[counter].a[-1].lower()]
                            pda.n = pda.n + 1
                            pda.s[pda.n] = PDAState()
                            pda.s[pda.n].id = pda.n
                            pda.s[pda.n].type = 3
                            pda.s[pda.n].trans[2] = [' ']
                else:
                    # Binary production A -> b0 b1: push b1 first, then b0,
                    # so b0 ends up on top of the stack.
                    # ADD AND CONNECT PUSH STATE
                    pda.n = pda.n + 1
                    pda.s[pda.n] = PDAState()
                    pda.s[pda.n].id = pda.n
                    if pda.n not in pda.s[2].trans:
                        pda.s[2].trans[pda.n] = []
                    pda.s[2].trans[pda.n].append(nonterms[i])
                    # FILL NEW STATE
                    pda.s[pda.n].type = 1
                    pda.s[pda.n].sym = productions_struct[counter].b1
                    pda.s[pda.n].trans[(pda.n) + 1] = [0]
                    # ADD AND CONNECT PUSH STATE (ALREADY CONNECTED)
                    pda.n = pda.n + 1
                    pda.s[pda.n] = PDAState()
                    pda.s[pda.n].id = pda.n
                    # FILL NEW STATE
                    pda.s[pda.n].type = 1
                    pda.s[pda.n].sym = productions_struct[counter].b0
                    pda.s[pda.n].trans[2] = [0]
                j = j + 1
                counter = counter + 1
            i = i + 1
        return pda
|
def function[_mkpda, parameter[self, nonterms, productions, productions_struct, terminals, splitstring]]:
constant[
This function generates a PDA from a CNF grammar as described in:
- http://www.oit.edu/faculty/sherry.yang/CST229/Lectures/7_pda.pdf
- http://www.eng.utah.edu/~cs3100/lectures/l18/pda-notes.pdf
If all of the grammar productions are in the Chomsky Normal Form,
then follow the template for constructing a pushdown symautomata:
1. Start
2. Push S
3. Pop
4. Case:
Nonterminal A: For every production rule of this form: A: BC, Push C and then Push B
Args:
nonterms (list): Non terminals list
productions (dict): productions in the CNF form:
A -> a or A -> b0b1, or S -> e
productions_struct (dict): productions in the CNF form in structure form
object.a for A -> a,
object.b0 and object.b1 for A -> b0b1
and object.type where type is
1 for A-->a and 2 for A-->b0b1
terminals (list): All terminals
splitstring (bool): If enabled an extra space is added after each symbol.
Returns:
PDA: The generated PDA
]
variable[pda] assign[=] call[name[PDA], parameter[name[self].alphabet]]
name[pda].nonterminals assign[=] name[nonterms]
name[pda].terminals assign[=] name[terminals]
call[name[pda].s][name[pda].n] assign[=] call[name[PDAState], parameter[]]
call[name[pda].s][name[pda].n].id assign[=] name[pda].n
call[name[pda].s][name[pda].n].sym assign[=] constant[@closing]
call[name[pda].s][name[pda].n].type assign[=] constant[1]
call[call[name[pda].s][name[pda].n].trans][constant[1]] assign[=] list[[<ast.Constant object at 0x7da212d41a80>]]
name[pda].n assign[=] binary_operation[name[pda].n + constant[1]]
call[name[pda].s][name[pda].n] assign[=] call[name[PDAState], parameter[]]
call[name[pda].s][name[pda].n].id assign[=] name[pda].n
call[name[pda].s][name[pda].n].type assign[=] constant[1]
call[name[pda].s][name[pda].n].sym assign[=] call[name[nonterms]][constant[0]]
call[call[name[pda].s][name[pda].n].trans][constant[2]] assign[=] list[[<ast.Constant object at 0x7da20c6e72b0>]]
name[pda].n assign[=] binary_operation[name[pda].n + constant[1]]
call[name[pda].s][name[pda].n] assign[=] call[name[PDAState], parameter[]]
call[name[pda].s][name[pda].n].id assign[=] name[pda].n
call[name[pda].s][name[pda].n].type assign[=] constant[2]
call[call[name[pda].s][name[pda].n].trans][constant[0]] assign[=] list[[<ast.Constant object at 0x7da20c6e7d60>]]
variable[counter] assign[=] constant[0]
variable[i] assign[=] constant[0]
while compare[name[i] less[<] call[name[len], parameter[name[nonterms]]]] begin[:]
variable[j] assign[=] constant[0]
while compare[name[j] less[<] call[name[len], parameter[call[name[productions]][call[name[nonterms]][name[i]]]]]] begin[:]
if compare[call[name[productions_struct]][name[counter]].type equal[==] constant[1]] begin[:]
name[pda].n assign[=] binary_operation[name[pda].n + constant[1]]
call[name[pda].s][name[pda].n] assign[=] call[name[PDAState], parameter[]]
call[name[pda].s][name[pda].n].id assign[=] name[pda].n
if compare[name[pda].n <ast.NotIn object at 0x7da2590d7190> call[name[pda].s][constant[2]].trans] begin[:]
call[call[name[pda].s][constant[2]].trans][name[pda].n] assign[=] list[[]]
call[call[call[name[pda].s][constant[2]].trans][name[pda].n].append, parameter[call[name[nonterms]][name[i]]]]
if compare[name[splitstring] equal[==] constant[0]] begin[:]
call[name[pda].s][name[pda].n].type assign[=] constant[3]
call[call[name[pda].s][name[pda].n].trans][constant[2]] assign[=] list[[<ast.Attribute object at 0x7da204567ee0>]]
variable[j] assign[=] binary_operation[name[j] + constant[1]]
variable[counter] assign[=] binary_operation[name[counter] + constant[1]]
variable[i] assign[=] binary_operation[name[i] + constant[1]]
return[name[pda]]
|
keyword[def] identifier[_mkpda] ( identifier[self] , identifier[nonterms] , identifier[productions] , identifier[productions_struct] , identifier[terminals] , identifier[splitstring] = literal[int] ):
literal[string]
identifier[pda] = identifier[PDA] ( identifier[self] . identifier[alphabet] )
identifier[pda] . identifier[nonterminals] = identifier[nonterms]
identifier[pda] . identifier[terminals] = identifier[terminals]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]= identifier[PDAState] ()
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[id] = identifier[pda] . identifier[n]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[sym] = literal[string]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[type] = literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[trans] [ literal[int] ]=[ literal[int] ]
identifier[pda] . identifier[n] = identifier[pda] . identifier[n] + literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]= identifier[PDAState] ()
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[id] = identifier[pda] . identifier[n]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[type] = literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[sym] = identifier[nonterms] [ literal[int] ]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[trans] [ literal[int] ]=[ literal[int] ]
identifier[pda] . identifier[n] = identifier[pda] . identifier[n] + literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]= identifier[PDAState] ()
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[id] = identifier[pda] . identifier[n]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[type] = literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[trans] [ literal[int] ]=[ literal[string] ]
identifier[counter] = literal[int]
identifier[i] = literal[int]
keyword[while] identifier[i] < identifier[len] ( identifier[nonterms] ):
identifier[j] = literal[int]
keyword[while] identifier[j] < identifier[len] ( identifier[productions] [ identifier[nonterms] [ identifier[i] ]]):
keyword[if] identifier[productions_struct] [ identifier[counter] ]. identifier[type] == literal[int] :
identifier[pda] . identifier[n] = identifier[pda] . identifier[n] + literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]= identifier[PDAState] ()
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[id] = identifier[pda] . identifier[n]
keyword[if] identifier[pda] . identifier[n] keyword[not] keyword[in] identifier[pda] . identifier[s] [ literal[int] ]. identifier[trans] :
identifier[pda] . identifier[s] [ literal[int] ]. identifier[trans] [ identifier[pda] . identifier[n] ]=[]
identifier[pda] . identifier[s] [ literal[int] ]. identifier[trans] [ identifier[pda] . identifier[n] ]. identifier[append] ( identifier[nonterms] [ identifier[i] ])
keyword[if] identifier[splitstring] == literal[int] :
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[type] = literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[trans] [ literal[int] ]=[ identifier[productions_struct] [ identifier[counter] ]. identifier[a] ]
keyword[else] :
keyword[if] identifier[productions_struct] [ identifier[counter] ]. identifier[a] keyword[not] keyword[in] identifier[terminals] keyword[or] identifier[len] ( identifier[productions_struct] [ identifier[counter] ]. identifier[a] )== literal[int] :
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[type] = literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[trans] [ identifier[pda] . identifier[n] + literal[int] ]=[ identifier[productions_struct] [ identifier[counter] ]. identifier[a] . identifier[lower] ()]
identifier[pda] . identifier[n] = identifier[pda] . identifier[n] + literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]= identifier[PDAState] ()
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[id] = identifier[pda] . identifier[n]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[type] = literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[trans] [ literal[int] ]=[ literal[string] ]
keyword[else] :
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[type] = literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[trans] [ identifier[pda] . identifier[n] + literal[int] ]=[ identifier[productions_struct] [ identifier[counter] ]. identifier[a] [ literal[int] ]. identifier[lower] ()]
identifier[k] = literal[int]
keyword[while] identifier[k] < identifier[len] ( identifier[productions_struct] [ identifier[counter] ]. identifier[a] )- literal[int] :
identifier[pda] . identifier[n] = identifier[pda] . identifier[n] + literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]= identifier[PDAState] ()
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[id] = identifier[pda] . identifier[n]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[type] = literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[trans] [ identifier[pda] . identifier[n] + literal[int] ]=[ identifier[productions_struct] [ identifier[counter] ]. identifier[a] [ identifier[k] ]. identifier[lower] ()]
identifier[k] = identifier[k] + literal[int]
identifier[pda] . identifier[n] = identifier[pda] . identifier[n] + literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]= identifier[PDAState] ()
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[id] = identifier[pda] . identifier[n]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[type] = literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[trans] [ identifier[pda] . identifier[n] + literal[int] ]=[ identifier[productions_struct] [ identifier[counter] ]. identifier[a] [- literal[int] ]. identifier[lower] ()]
identifier[pda] . identifier[n] = identifier[pda] . identifier[n] + literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]= identifier[PDAState] ()
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[id] = identifier[pda] . identifier[n]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[type] = literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[trans] [ literal[int] ]=[ literal[string] ]
keyword[else] :
identifier[pda] . identifier[n] = identifier[pda] . identifier[n] + literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]= identifier[PDAState] ()
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[id] = identifier[pda] . identifier[n]
keyword[if] identifier[pda] . identifier[n] keyword[not] keyword[in] identifier[pda] . identifier[s] [ literal[int] ]. identifier[trans] :
identifier[pda] . identifier[s] [ literal[int] ]. identifier[trans] [ identifier[pda] . identifier[n] ]=[]
identifier[pda] . identifier[s] [ literal[int] ]. identifier[trans] [ identifier[pda] . identifier[n] ]. identifier[append] ( identifier[nonterms] [ identifier[i] ])
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[type] = literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[sym] = identifier[productions_struct] [ identifier[counter] ]. identifier[b1]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[trans] [( identifier[pda] . identifier[n] )+ literal[int] ]=[ literal[int] ]
identifier[pda] . identifier[n] = identifier[pda] . identifier[n] + literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]= identifier[PDAState] ()
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[id] = identifier[pda] . identifier[n]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[type] = literal[int]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[sym] = identifier[productions_struct] [ identifier[counter] ]. identifier[b0]
identifier[pda] . identifier[s] [ identifier[pda] . identifier[n] ]. identifier[trans] [ literal[int] ]=[ literal[int] ]
identifier[j] = identifier[j] + literal[int]
identifier[counter] = identifier[counter] + literal[int]
identifier[i] = identifier[i] + literal[int]
keyword[return] identifier[pda]
|
def _mkpda(self, nonterms, productions, productions_struct, terminals, splitstring=1):
"""
This function generates a PDA from a CNF grammar as described in:
- http://www.oit.edu/faculty/sherry.yang/CST229/Lectures/7_pda.pdf
- http://www.eng.utah.edu/~cs3100/lectures/l18/pda-notes.pdf
If all of the grammar productions are in the Chomsky Normal Form,
then follow the template for constructing a pushdown symautomata:
1. Start
2. Push S
3. Pop
4. Case:
Nonterminal A: For every production rule of this form: A: BC, Push C and then Push B
Args:
nonterms (list): Non terminals list
productions (dict): productions in the CNF form:
A -> a or A -> b0b1, or S -> e
productions_struct (dict): productions in the CNF form in structure form
object.a for A -> a,
object.b0 and object.b1 for A -> b0b1
and object.type where type is
1 for A-->a and 2 for A-->b0b1
terminals (list): All terminals
splitstring (bool): If enabled an extra space is added after each symbol.
Returns:
PDA: The generated PDA
"""
pda = PDA(self.alphabet)
pda.nonterminals = nonterms
pda.terminals = terminals
pda.s[pda.n] = PDAState()
pda.s[pda.n].id = pda.n
pda.s[pda.n].sym = '@closing'
pda.s[pda.n].type = 1
pda.s[pda.n].trans[1] = [0]
pda.n = pda.n + 1
pda.s[pda.n] = PDAState()
pda.s[pda.n].id = pda.n
pda.s[pda.n].type = 1
pda.s[pda.n].sym = nonterms[0]
pda.s[pda.n].trans[2] = [0]
pda.n = pda.n + 1
pda.s[pda.n] = PDAState()
pda.s[pda.n].id = pda.n
pda.s[pda.n].type = 2
pda.s[pda.n].trans[0] = ['@closing']
counter = 0
i = 0
while i < len(nonterms):
j = 0
while j < len(productions[nonterms[i]]):
if productions_struct[counter].type == 1:
# ADD AND CONNECT STATE
pda.n = pda.n + 1
pda.s[pda.n] = PDAState()
pda.s[pda.n].id = pda.n
if pda.n not in pda.s[2].trans:
pda.s[2].trans[pda.n] = [] # depends on [control=['if'], data=[]]
pda.s[2].trans[pda.n].append(nonterms[i])
if splitstring == 0:
# FILL NEW STATE READ
pda.s[pda.n].type = 3
pda.s[pda.n].trans[2] = [productions_struct[counter].a] # depends on [control=['if'], data=[]]
# THE FOLLOWIN SWITCH IS DUE TO THE REQUIREMENT OF
# HAVING STRINGS SPLITTED TO SYMBOLS AND CAN INTERSECT
# WITH DFA
elif productions_struct[counter].a not in terminals or len(productions_struct[counter].a) == 1:
# FILL NEW STATE READ
pda.s[pda.n].type = 3
pda.s[pda.n].trans[pda.n + 1] = [productions_struct[counter].a.lower()]
pda.n = pda.n + 1
pda.s[pda.n] = PDAState()
pda.s[pda.n].id = pda.n
pda.s[pda.n].type = 3
pda.s[pda.n].trans[2] = [' '] # depends on [control=['if'], data=[]]
else:
pda.s[pda.n].type = 3
pda.s[pda.n].trans[pda.n + 1] = [productions_struct[counter].a[0].lower()]
k = 1
while k < len(productions_struct[counter].a) - 1:
pda.n = pda.n + 1
pda.s[pda.n] = PDAState()
pda.s[pda.n].id = pda.n
pda.s[pda.n].type = 3
pda.s[pda.n].trans[pda.n + 1] = [productions_struct[counter].a[k].lower()]
k = k + 1 # depends on [control=['while'], data=['k']]
pda.n = pda.n + 1
pda.s[pda.n] = PDAState()
pda.s[pda.n].id = pda.n
pda.s[pda.n].type = 3
pda.s[pda.n].trans[pda.n + 1] = [productions_struct[counter].a[-1].lower()]
pda.n = pda.n + 1
pda.s[pda.n] = PDAState()
pda.s[pda.n].id = pda.n
pda.s[pda.n].type = 3
pda.s[pda.n].trans[2] = [' '] # depends on [control=['if'], data=[]]
else:
# ADD AND CONNECT PUSH STATE
pda.n = pda.n + 1
pda.s[pda.n] = PDAState()
pda.s[pda.n].id = pda.n
if pda.n not in pda.s[2].trans:
pda.s[2].trans[pda.n] = [] # depends on [control=['if'], data=[]]
pda.s[2].trans[pda.n].append(nonterms[i])
# FILL NEW STATE
pda.s[pda.n].type = 1
pda.s[pda.n].sym = productions_struct[counter].b1
pda.s[pda.n].trans[pda.n + 1] = [0]
# ADD AND CONNECT PUSH STATE (ALREADY CONNECTED)
pda.n = pda.n + 1
pda.s[pda.n] = PDAState()
pda.s[pda.n].id = pda.n
# FILL NEW STATE
pda.s[pda.n].type = 1
pda.s[pda.n].sym = productions_struct[counter].b0
pda.s[pda.n].trans[2] = [0]
j = j + 1
counter = counter + 1 # depends on [control=['while'], data=['j']]
i = i + 1 # depends on [control=['while'], data=['i']]
return pda
|
def insult(rest):
    """Generate a random insult from datahamster.

    Fetches an insult from autoinsult.datahamster.com in one of four
    randomly chosen styles.  If *rest* names an insultee, that nick loses
    one karma point and the insult text is rewritten to target them.

    Args:
        rest (str): optional nick to direct the insult at.

    Returns:
        str or None: the (possibly personalized) insult, or None when the
        response contains no insult markup.
    """
    # not supplying any style will automatically redirect to a random
    url = 'http://autoinsult.datahamster.com/'
    ins_type = random.randrange(4)
    # Pass the style explicitly instead of the fragile
    # str.format(**locals()) idiom, which silently couples the format
    # string to local variable names.
    ins_url = url + "?style={}".format(ins_type)
    insre = re.compile('<div class="insult" id="insult">(.*?)</div>')
    resp = requests.get(ins_url)
    resp.raise_for_status()
    # Guard against a missing insult div: the original unguarded
    # .search(...).group(1) raised AttributeError on no match.
    match = insre.search(resp.text)
    if match is None:
        return
    insult = match.group(1)
    if not insult:
        return
    if rest:
        insultee = rest.strip()
        karma.Karma.store.change(insultee, -1)
        if ins_type in (0, 2):
            # Second-person styles: rewrite "your X" as "<insultee>'s X".
            cinsre = re.compile(r'\b(your)\b', re.IGNORECASE)
            insult = cinsre.sub("%s's" % insultee, insult)
        elif ins_type in (1, 3):
            # Sentence styles: prefix the insultee and lowercase the
            # original leading "T"/"Y".
            cinsre = re.compile(r'^([TY])')
            insult = cinsre.sub(
                lambda m: "%s, %s" % (
                    insultee, m.group(1).lower()), insult)
    return insult
|
def function[insult, parameter[rest]]:
constant[Generate a random insult from datahamster]
variable[url] assign[=] constant[http://autoinsult.datahamster.com/]
variable[ins_type] assign[=] call[name[random].randrange, parameter[constant[4]]]
variable[ins_url] assign[=] binary_operation[name[url] + call[constant[?style={ins_type}].format, parameter[]]]
variable[insre] assign[=] call[name[re].compile, parameter[constant[<div class="insult" id="insult">(.*?)</div>]]]
variable[resp] assign[=] call[name[requests].get, parameter[name[ins_url]]]
call[name[resp].raise_for_status, parameter[]]
variable[insult] assign[=] call[call[name[insre].search, parameter[name[resp].text]].group, parameter[constant[1]]]
if <ast.UnaryOp object at 0x7da1b03ada50> begin[:]
return[None]
if name[rest] begin[:]
variable[insultee] assign[=] call[name[rest].strip, parameter[]]
call[name[karma].Karma.store.change, parameter[name[insultee], <ast.UnaryOp object at 0x7da1b03aef80>]]
if compare[name[ins_type] in tuple[[<ast.Constant object at 0x7da1b039b6d0>, <ast.Constant object at 0x7da1b03992a0>]]] begin[:]
variable[cinsre] assign[=] call[name[re].compile, parameter[constant[\b(your)\b], name[re].IGNORECASE]]
variable[insult] assign[=] call[name[cinsre].sub, parameter[binary_operation[constant[%s's] <ast.Mod object at 0x7da2590d6920> name[insultee]], name[insult]]]
return[name[insult]]
|
keyword[def] identifier[insult] ( identifier[rest] ):
literal[string]
identifier[url] = literal[string]
identifier[ins_type] = identifier[random] . identifier[randrange] ( literal[int] )
identifier[ins_url] = identifier[url] + literal[string] . identifier[format] (** identifier[locals] ())
identifier[insre] = identifier[re] . identifier[compile] ( literal[string] )
identifier[resp] = identifier[requests] . identifier[get] ( identifier[ins_url] )
identifier[resp] . identifier[raise_for_status] ()
identifier[insult] = identifier[insre] . identifier[search] ( identifier[resp] . identifier[text] ). identifier[group] ( literal[int] )
keyword[if] keyword[not] identifier[insult] :
keyword[return]
keyword[if] identifier[rest] :
identifier[insultee] = identifier[rest] . identifier[strip] ()
identifier[karma] . identifier[Karma] . identifier[store] . identifier[change] ( identifier[insultee] ,- literal[int] )
keyword[if] identifier[ins_type] keyword[in] ( literal[int] , literal[int] ):
identifier[cinsre] = identifier[re] . identifier[compile] ( literal[string] , identifier[re] . identifier[IGNORECASE] )
identifier[insult] = identifier[cinsre] . identifier[sub] ( literal[string] % identifier[insultee] , identifier[insult] )
keyword[elif] identifier[ins_type] keyword[in] ( literal[int] , literal[int] ):
identifier[cinsre] = identifier[re] . identifier[compile] ( literal[string] )
identifier[insult] = identifier[cinsre] . identifier[sub] (
keyword[lambda] identifier[m] : literal[string] %(
identifier[insultee] , identifier[m] . identifier[group] ( literal[int] ). identifier[lower] ()), identifier[insult] )
keyword[return] identifier[insult]
|
def insult(rest):
"""Generate a random insult from datahamster""" # not supplying any style will automatically redirect to a random
url = 'http://autoinsult.datahamster.com/'
ins_type = random.randrange(4)
ins_url = url + '?style={ins_type}'.format(**locals())
insre = re.compile('<div class="insult" id="insult">(.*?)</div>')
resp = requests.get(ins_url)
resp.raise_for_status()
insult = insre.search(resp.text).group(1)
if not insult:
return # depends on [control=['if'], data=[]]
if rest:
insultee = rest.strip()
karma.Karma.store.change(insultee, -1)
if ins_type in (0, 2):
cinsre = re.compile('\\b(your)\\b', re.IGNORECASE)
insult = cinsre.sub("%s's" % insultee, insult) # depends on [control=['if'], data=[]]
elif ins_type in (1, 3):
cinsre = re.compile('^([TY])')
insult = cinsre.sub(lambda m: '%s, %s' % (insultee, m.group(1).lower()), insult) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return insult
|
def receive_nak_rebinding(self, pkt):
    """Receive NAK in REBINDING state.

    Delegates validation to ``process_received_nak``; on a genuine NAK the
    lease is lost and the client must restart configuration, so the
    automaton's INIT state is raised.

    Args:
        pkt: the received DHCP packet to inspect.

    Raises:
        self.INIT: when *pkt* is a valid NAK for this exchange.
    """
    # Fixed log text: this handler serves the REBINDING state, but the
    # messages previously said "RENEWING" (copy-paste from the renewing
    # handler).
    logger.debug("C3.1. Received NAK?, in REBINDING state.")
    if self.process_received_nak(pkt):
        logger.debug("C3.1: T. Received NAK, in REBINDING state, "
                     "raise INIT.")
        raise self.INIT()
|
def function[receive_nak_rebinding, parameter[self, pkt]]:
constant[Receive NAK in REBINDING state.]
call[name[logger].debug, parameter[constant[C3.1. Received NAK?, in RENEWING state.]]]
if call[name[self].process_received_nak, parameter[name[pkt]]] begin[:]
call[name[logger].debug, parameter[constant[C3.1: T. Received NAK, in RENEWING state, raise INIT.]]]
<ast.Raise object at 0x7da20c6a8c40>
|
keyword[def] identifier[receive_nak_rebinding] ( identifier[self] , identifier[pkt] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] )
keyword[if] identifier[self] . identifier[process_received_nak] ( identifier[pkt] ):
identifier[logger] . identifier[debug] ( literal[string]
literal[string] )
keyword[raise] identifier[self] . identifier[INIT] ()
|
def receive_nak_rebinding(self, pkt):
"""Receive NAK in REBINDING state."""
logger.debug('C3.1. Received NAK?, in RENEWING state.')
if self.process_received_nak(pkt):
logger.debug('C3.1: T. Received NAK, in RENEWING state, raise INIT.')
raise self.INIT() # depends on [control=['if'], data=[]]
|
def do_shell(self, arg):
    """
    ! - spawn a system shell
    shell - spawn a system shell
    ! <command> [arguments...] - execute a single shell command
    shell <command> [arguments...] - execute a single shell command
    """
    if self.cmdprefix:
        raise CmdError("prefix not allowed")
    # Resolve cmd.exe through the environment; the bare filename is an
    # acceptable fallback since CreateProcess finds cmd.exe on its own.
    shell = os.getenv('ComSpec', 'cmd.exe')
    # With a command, run it under "cmd /c"; without one, drop the user
    # into an interactive shell.  Either way, block until it exits.
    command_line = '%s /c %s' % (shell, arg) if arg else shell
    child = self.debug.system.start_process(command_line, bConsole=True)
    child.wait()
|
def function[do_shell, parameter[self, arg]]:
constant[
! - spawn a system shell
shell - spawn a system shell
! <command> [arguments...] - execute a single shell command
shell <command> [arguments...] - execute a single shell command
]
if name[self].cmdprefix begin[:]
<ast.Raise object at 0x7da1b08a5d80>
variable[shell] assign[=] call[name[os].getenv, parameter[constant[ComSpec], constant[cmd.exe]]]
if name[arg] begin[:]
variable[arg] assign[=] binary_operation[constant[%s /c %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18fe93940>, <ast.Name object at 0x7da18fe936d0>]]]
variable[process] assign[=] call[name[self].debug.system.start_process, parameter[name[arg]]]
call[name[process].wait, parameter[]]
|
keyword[def] identifier[do_shell] ( identifier[self] , identifier[arg] ):
literal[string]
keyword[if] identifier[self] . identifier[cmdprefix] :
keyword[raise] identifier[CmdError] ( literal[string] )
identifier[shell] = identifier[os] . identifier[getenv] ( literal[string] , literal[string] )
keyword[if] identifier[arg] :
identifier[arg] = literal[string] %( identifier[shell] , identifier[arg] )
keyword[else] :
identifier[arg] = identifier[shell]
identifier[process] = identifier[self] . identifier[debug] . identifier[system] . identifier[start_process] ( identifier[arg] , identifier[bConsole] = keyword[True] )
identifier[process] . identifier[wait] ()
|
def do_shell(self, arg):
"""
! - spawn a system shell
shell - spawn a system shell
! <command> [arguments...] - execute a single shell command
shell <command> [arguments...] - execute a single shell command
"""
if self.cmdprefix:
raise CmdError('prefix not allowed') # depends on [control=['if'], data=[]]
# Try to use the environment to locate cmd.exe.
# If not found, it's usually OK to just use the filename,
# since cmd.exe is one of those "magic" programs that
# can be automatically found by CreateProcess.
shell = os.getenv('ComSpec', 'cmd.exe')
# When given a command, run it and return.
# When no command is given, spawn a shell.
if arg:
arg = '%s /c %s' % (shell, arg) # depends on [control=['if'], data=[]]
else:
arg = shell
process = self.debug.system.start_process(arg, bConsole=True)
process.wait()
|
def add_template_network_events(self, columns, vectors):
    """Append per-template network events to the running event record.

    Builds a structured array sized to the longest non-None vector, fills
    the named columns from *vectors*, stamps every row with the current
    template index, and appends the result to ``self.template_events``.

    Args:
        columns (list of str): field names of ``self.network_event_dtype``
            to populate, parallel to *vectors*.
        vectors: per-column value arrays; entries may be None (skipped) or
            Array instances (converted via ``.numpy()``).

    Raises:
        ValueError: if every entry of *vectors* is None, since there is no
            length to size the event array from.
    """
    # Size the record array from the longest vector that is present.
    # (The former ``new_events = None`` pre-assignment and the
    # always-true assert after numpy.zeros were dead code and removed.)
    new_events = numpy.zeros(
        max(len(v) for v in vectors if v is not None),
        dtype=self.network_event_dtype
    )
    new_events['template_id'] = self.template_index
    for c, v in zip(columns, vectors):
        if v is None:
            continue
        # Project Array types expose their data via .numpy(); plain
        # array-likes are assigned directly.
        new_events[c] = v.numpy() if isinstance(v, Array) else v
    self.template_events = numpy.append(self.template_events, new_events)
|
def function[add_template_network_events, parameter[self, columns, vectors]]:
constant[ Add a vector indexed ]
variable[new_events] assign[=] constant[None]
variable[new_events] assign[=] call[name[numpy].zeros, parameter[call[name[max], parameter[<ast.ListComp object at 0x7da2054a74c0>]]]]
assert[compare[name[new_events] is_not constant[None]]]
call[name[new_events]][constant[template_id]] assign[=] name[self].template_index
for taget[tuple[[<ast.Name object at 0x7da20e9626e0>, <ast.Name object at 0x7da20e963d30>]]] in starred[call[name[zip], parameter[name[columns], name[vectors]]]] begin[:]
if compare[name[v] is_not constant[None]] begin[:]
if call[name[isinstance], parameter[name[v], name[Array]]] begin[:]
call[name[new_events]][name[c]] assign[=] call[name[v].numpy, parameter[]]
name[self].template_events assign[=] call[name[numpy].append, parameter[name[self].template_events, name[new_events]]]
|
keyword[def] identifier[add_template_network_events] ( identifier[self] , identifier[columns] , identifier[vectors] ):
literal[string]
identifier[new_events] = keyword[None]
identifier[new_events] = identifier[numpy] . identifier[zeros] (
identifier[max] ([ identifier[len] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[vectors] keyword[if] identifier[v] keyword[is] keyword[not] keyword[None] ]),
identifier[dtype] = identifier[self] . identifier[network_event_dtype]
)
keyword[assert] identifier[new_events] keyword[is] keyword[not] keyword[None]
identifier[new_events] [ literal[string] ]= identifier[self] . identifier[template_index]
keyword[for] identifier[c] , identifier[v] keyword[in] identifier[zip] ( identifier[columns] , identifier[vectors] ):
keyword[if] identifier[v] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[v] , identifier[Array] ):
identifier[new_events] [ identifier[c] ]= identifier[v] . identifier[numpy] ()
keyword[else] :
identifier[new_events] [ identifier[c] ]= identifier[v]
identifier[self] . identifier[template_events] = identifier[numpy] . identifier[append] ( identifier[self] . identifier[template_events] , identifier[new_events] )
|
def add_template_network_events(self, columns, vectors):
""" Add a vector indexed """
# initialize with zeros - since vectors can be None, look for the
# longest one that isn't
new_events = None
new_events = numpy.zeros(max([len(v) for v in vectors if v is not None]), dtype=self.network_event_dtype)
# they shouldn't all be None
assert new_events is not None
new_events['template_id'] = self.template_index
for (c, v) in zip(columns, vectors):
if v is not None:
if isinstance(v, Array):
new_events[c] = v.numpy() # depends on [control=['if'], data=[]]
else:
new_events[c] = v # depends on [control=['if'], data=['v']] # depends on [control=['for'], data=[]]
self.template_events = numpy.append(self.template_events, new_events)
|
def _timestamp_handler(c, ctx):
    """Handles timestamp values. Entered after the year component has been completed; tokenizes the remaining
    components.

    Implemented as a coroutine state machine: each ``yield`` receives the
    next input character (plus this handler) and emits the pending
    transition.  ``nxt`` always holds the characters legal at the next
    position; ``can_terminate`` tracks whether the value may legally end
    at the current position.
    """
    assert c in _TIMESTAMP_YEAR_DELIMITERS
    ctx.set_ion_type(IonType.TIMESTAMP)
    if len(ctx.value) != 4:
        _illegal_character(c, ctx, 'Timestamp year is %d digits; expected 4.' % (len(ctx.value),))
    prev = c
    c, self = yield
    trans = ctx.immediate_transition(self)
    state = _TimestampState.YEAR
    nxt = _DIGITS
    tokens = _TimestampTokens(ctx.value)
    val = None
    can_terminate = False
    if prev == _T:
        # A bare year followed by 'T' (e.g. 2007T) is already a complete
        # timestamp, so a terminator is acceptable next.
        nxt += _VALUE_TERMINATORS
        can_terminate = True
    while True:
        is_eof = can_terminate and BufferQueue.is_eof(c)
        if c not in nxt and not is_eof:
            _illegal_character(c, ctx, 'Expected %r in state %r.' % ([_chr(x) for x in nxt], state))
        if c in _VALUE_TERMINATORS or is_eof:
            if not can_terminate:
                _illegal_character(c, ctx, 'Unexpected termination of timestamp.')
            trans = ctx.event_transition(IonThunkEvent, IonEventType.SCALAR, ctx.ion_type, _parse_timestamp(tokens))
            if c == _SLASH:
                # A slash may start a comment; defer to the slash handler.
                trans = ctx.immediate_transition(_number_slash_end_handler(c, ctx, trans))
        else:
            can_terminate = False
            if c == _Z:
                # Z implies UTC, i.e. +00:00 local offset.
                tokens.transition(_TimestampState.OFF_HOUR).append(_ZERO)
                tokens.transition(_TimestampState.OFF_MINUTE).append(_ZERO)
                nxt = _VALUE_TERMINATORS
                can_terminate = True
            elif c == _T:
                # Date-only value may end here or continue with a time.
                nxt = _VALUE_TERMINATORS + _DIGITS
                can_terminate = True
            elif c in _TIMESTAMP_DELIMITERS:
                nxt = _DIGITS
            elif c in _DIGITS:
                if prev == _PLUS or (state > _TimestampState.MONTH and prev == _HYPHEN):
                    # A sign after the time components starts the offset;
                    # '-' before/at MONTH is just a date delimiter.
                    state = _TimestampState.OFF_HOUR
                    val = tokens.transition(state)
                    if prev == _HYPHEN:
                        val.append(prev)
                elif prev in (_TIMESTAMP_DELIMITERS + (_T,)):
                    # A delimiter advances to the next component in order.
                    state = _TimestampState[state + 1]
                    val = tokens.transition(state)
                    if state == _TimestampState.FRACTIONAL:
                        nxt = _DIGITS + _TIMESTAMP_OFFSET_INDICATORS
                    # Continuing a digit run: constrain what may legally
                    # follow once the current component is complete.
                    if state == _TimestampState.MONTH:
                        nxt = _TIMESTAMP_YEAR_DELIMITERS
                    elif state == _TimestampState.DAY:
                        nxt = (_T,) + _VALUE_TERMINATORS
                        can_terminate = True
                    elif state == _TimestampState.HOUR:
                        nxt = (_COLON,)
                    elif state == _TimestampState.MINUTE:
                        nxt = _TIMESTAMP_OFFSET_INDICATORS + (_COLON,)
                    elif state == _TimestampState.SECOND:
                        nxt = _TIMESTAMP_OFFSET_INDICATORS + (_DOT,)
                    elif state == _TimestampState.FRACTIONAL:
                        nxt = _DIGITS + _TIMESTAMP_OFFSET_INDICATORS
                    elif state == _TimestampState.OFF_HOUR:
                        nxt = (_COLON,)
                    elif state == _TimestampState.OFF_MINUTE:
                        nxt = _VALUE_TERMINATORS
                        can_terminate = True
                    else:
                        raise ValueError('Unknown timestamp state %r.' % (state,))
                else:
                    # Reaching this branch would be indicative of a programming error within this state machine.
                    raise ValueError('Digit following %s in timestamp state %r.' % (_chr(prev), state))
                val.append(c)
        prev = c
        c, _ = yield trans
|
def function[_timestamp_handler, parameter[c, ctx]]:
constant[Handles timestamp values. Entered after the year component has been completed; tokenizes the remaining
components.
]
assert[compare[name[c] in name[_TIMESTAMP_YEAR_DELIMITERS]]]
call[name[ctx].set_ion_type, parameter[name[IonType].TIMESTAMP]]
if compare[call[name[len], parameter[name[ctx].value]] not_equal[!=] constant[4]] begin[:]
call[name[_illegal_character], parameter[name[c], name[ctx], binary_operation[constant[Timestamp year is %d digits; expected 4.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b15ad480>]]]]]
variable[prev] assign[=] name[c]
<ast.Tuple object at 0x7da1b15ad3f0> assign[=] <ast.Yield object at 0x7da1b15acd00>
variable[trans] assign[=] call[name[ctx].immediate_transition, parameter[name[self]]]
variable[state] assign[=] name[_TimestampState].YEAR
variable[nxt] assign[=] name[_DIGITS]
variable[tokens] assign[=] call[name[_TimestampTokens], parameter[name[ctx].value]]
variable[val] assign[=] constant[None]
variable[can_terminate] assign[=] constant[False]
if compare[name[prev] equal[==] name[_T]] begin[:]
<ast.AugAssign object at 0x7da1b15afdf0>
variable[can_terminate] assign[=] constant[True]
while constant[True] begin[:]
variable[is_eof] assign[=] <ast.BoolOp object at 0x7da1b15adcf0>
if <ast.BoolOp object at 0x7da1b15acaf0> begin[:]
call[name[_illegal_character], parameter[name[c], name[ctx], binary_operation[constant[Expected %r in state %r.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.ListComp object at 0x7da1b15ace80>, <ast.Name object at 0x7da1b15acc70>]]]]]
if <ast.BoolOp object at 0x7da1b15ac580> begin[:]
if <ast.UnaryOp object at 0x7da1b15ad510> begin[:]
call[name[_illegal_character], parameter[name[c], name[ctx], constant[Unexpected termination of timestamp.]]]
variable[trans] assign[=] call[name[ctx].event_transition, parameter[name[IonThunkEvent], name[IonEventType].SCALAR, name[ctx].ion_type, call[name[_parse_timestamp], parameter[name[tokens]]]]]
if compare[name[c] equal[==] name[_SLASH]] begin[:]
variable[trans] assign[=] call[name[ctx].immediate_transition, parameter[call[name[_number_slash_end_handler], parameter[name[c], name[ctx], name[trans]]]]]
variable[prev] assign[=] name[c]
<ast.Tuple object at 0x7da1b15cc220> assign[=] <ast.Yield object at 0x7da1b15cda50>
|
keyword[def] identifier[_timestamp_handler] ( identifier[c] , identifier[ctx] ):
literal[string]
keyword[assert] identifier[c] keyword[in] identifier[_TIMESTAMP_YEAR_DELIMITERS]
identifier[ctx] . identifier[set_ion_type] ( identifier[IonType] . identifier[TIMESTAMP] )
keyword[if] identifier[len] ( identifier[ctx] . identifier[value] )!= literal[int] :
identifier[_illegal_character] ( identifier[c] , identifier[ctx] , literal[string] %( identifier[len] ( identifier[ctx] . identifier[value] ),))
identifier[prev] = identifier[c]
identifier[c] , identifier[self] = keyword[yield]
identifier[trans] = identifier[ctx] . identifier[immediate_transition] ( identifier[self] )
identifier[state] = identifier[_TimestampState] . identifier[YEAR]
identifier[nxt] = identifier[_DIGITS]
identifier[tokens] = identifier[_TimestampTokens] ( identifier[ctx] . identifier[value] )
identifier[val] = keyword[None]
identifier[can_terminate] = keyword[False]
keyword[if] identifier[prev] == identifier[_T] :
identifier[nxt] += identifier[_VALUE_TERMINATORS]
identifier[can_terminate] = keyword[True]
keyword[while] keyword[True] :
identifier[is_eof] = identifier[can_terminate] keyword[and] identifier[BufferQueue] . identifier[is_eof] ( identifier[c] )
keyword[if] identifier[c] keyword[not] keyword[in] identifier[nxt] keyword[and] keyword[not] identifier[is_eof] :
identifier[_illegal_character] ( identifier[c] , identifier[ctx] , literal[string] %([ identifier[_chr] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[nxt] ], identifier[state] ))
keyword[if] identifier[c] keyword[in] identifier[_VALUE_TERMINATORS] keyword[or] identifier[is_eof] :
keyword[if] keyword[not] identifier[can_terminate] :
identifier[_illegal_character] ( identifier[c] , identifier[ctx] , literal[string] )
identifier[trans] = identifier[ctx] . identifier[event_transition] ( identifier[IonThunkEvent] , identifier[IonEventType] . identifier[SCALAR] , identifier[ctx] . identifier[ion_type] , identifier[_parse_timestamp] ( identifier[tokens] ))
keyword[if] identifier[c] == identifier[_SLASH] :
identifier[trans] = identifier[ctx] . identifier[immediate_transition] ( identifier[_number_slash_end_handler] ( identifier[c] , identifier[ctx] , identifier[trans] ))
keyword[else] :
identifier[can_terminate] = keyword[False]
keyword[if] identifier[c] == identifier[_Z] :
identifier[tokens] . identifier[transition] ( identifier[_TimestampState] . identifier[OFF_HOUR] ). identifier[append] ( identifier[_ZERO] )
identifier[tokens] . identifier[transition] ( identifier[_TimestampState] . identifier[OFF_MINUTE] ). identifier[append] ( identifier[_ZERO] )
identifier[nxt] = identifier[_VALUE_TERMINATORS]
identifier[can_terminate] = keyword[True]
keyword[elif] identifier[c] == identifier[_T] :
identifier[nxt] = identifier[_VALUE_TERMINATORS] + identifier[_DIGITS]
identifier[can_terminate] = keyword[True]
keyword[elif] identifier[c] keyword[in] identifier[_TIMESTAMP_DELIMITERS] :
identifier[nxt] = identifier[_DIGITS]
keyword[elif] identifier[c] keyword[in] identifier[_DIGITS] :
keyword[if] identifier[prev] == identifier[_PLUS] keyword[or] ( identifier[state] > identifier[_TimestampState] . identifier[MONTH] keyword[and] identifier[prev] == identifier[_HYPHEN] ):
identifier[state] = identifier[_TimestampState] . identifier[OFF_HOUR]
identifier[val] = identifier[tokens] . identifier[transition] ( identifier[state] )
keyword[if] identifier[prev] == identifier[_HYPHEN] :
identifier[val] . identifier[append] ( identifier[prev] )
keyword[elif] identifier[prev] keyword[in] ( identifier[_TIMESTAMP_DELIMITERS] +( identifier[_T] ,)):
identifier[state] = identifier[_TimestampState] [ identifier[state] + literal[int] ]
identifier[val] = identifier[tokens] . identifier[transition] ( identifier[state] )
keyword[if] identifier[state] == identifier[_TimestampState] . identifier[FRACTIONAL] :
identifier[nxt] = identifier[_DIGITS] + identifier[_TIMESTAMP_OFFSET_INDICATORS]
keyword[elif] identifier[prev] keyword[in] identifier[_DIGITS] :
keyword[if] identifier[state] == identifier[_TimestampState] . identifier[MONTH] :
identifier[nxt] = identifier[_TIMESTAMP_YEAR_DELIMITERS]
keyword[elif] identifier[state] == identifier[_TimestampState] . identifier[DAY] :
identifier[nxt] =( identifier[_T] ,)+ identifier[_VALUE_TERMINATORS]
identifier[can_terminate] = keyword[True]
keyword[elif] identifier[state] == identifier[_TimestampState] . identifier[HOUR] :
identifier[nxt] =( identifier[_COLON] ,)
keyword[elif] identifier[state] == identifier[_TimestampState] . identifier[MINUTE] :
identifier[nxt] = identifier[_TIMESTAMP_OFFSET_INDICATORS] +( identifier[_COLON] ,)
keyword[elif] identifier[state] == identifier[_TimestampState] . identifier[SECOND] :
identifier[nxt] = identifier[_TIMESTAMP_OFFSET_INDICATORS] +( identifier[_DOT] ,)
keyword[elif] identifier[state] == identifier[_TimestampState] . identifier[FRACTIONAL] :
identifier[nxt] = identifier[_DIGITS] + identifier[_TIMESTAMP_OFFSET_INDICATORS]
keyword[elif] identifier[state] == identifier[_TimestampState] . identifier[OFF_HOUR] :
identifier[nxt] =( identifier[_COLON] ,)
keyword[elif] identifier[state] == identifier[_TimestampState] . identifier[OFF_MINUTE] :
identifier[nxt] = identifier[_VALUE_TERMINATORS]
identifier[can_terminate] = keyword[True]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[state] ,))
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[_chr] ( identifier[prev] ), identifier[state] ))
identifier[val] . identifier[append] ( identifier[c] )
identifier[prev] = identifier[c]
identifier[c] , identifier[_] = keyword[yield] identifier[trans]
|
def _timestamp_handler(c, ctx):
"""Handles timestamp values. Entered after the year component has been completed; tokenizes the remaining
components.
"""
assert c in _TIMESTAMP_YEAR_DELIMITERS
ctx.set_ion_type(IonType.TIMESTAMP)
if len(ctx.value) != 4:
_illegal_character(c, ctx, 'Timestamp year is %d digits; expected 4.' % (len(ctx.value),)) # depends on [control=['if'], data=[]]
prev = c
(c, self) = (yield)
trans = ctx.immediate_transition(self)
state = _TimestampState.YEAR
nxt = _DIGITS
tokens = _TimestampTokens(ctx.value)
val = None
can_terminate = False
if prev == _T:
nxt += _VALUE_TERMINATORS
can_terminate = True # depends on [control=['if'], data=[]]
while True:
is_eof = can_terminate and BufferQueue.is_eof(c)
if c not in nxt and (not is_eof):
_illegal_character(c, ctx, 'Expected %r in state %r.' % ([_chr(x) for x in nxt], state)) # depends on [control=['if'], data=[]]
if c in _VALUE_TERMINATORS or is_eof:
if not can_terminate:
_illegal_character(c, ctx, 'Unexpected termination of timestamp.') # depends on [control=['if'], data=[]]
trans = ctx.event_transition(IonThunkEvent, IonEventType.SCALAR, ctx.ion_type, _parse_timestamp(tokens))
if c == _SLASH:
trans = ctx.immediate_transition(_number_slash_end_handler(c, ctx, trans)) # depends on [control=['if'], data=['c']] # depends on [control=['if'], data=[]]
else:
can_terminate = False
if c == _Z:
# Z implies UTC, i.e. +00:00 local offset.
tokens.transition(_TimestampState.OFF_HOUR).append(_ZERO)
tokens.transition(_TimestampState.OFF_MINUTE).append(_ZERO)
nxt = _VALUE_TERMINATORS
can_terminate = True # depends on [control=['if'], data=[]]
elif c == _T:
nxt = _VALUE_TERMINATORS + _DIGITS
can_terminate = True # depends on [control=['if'], data=[]]
elif c in _TIMESTAMP_DELIMITERS:
nxt = _DIGITS # depends on [control=['if'], data=[]]
elif c in _DIGITS:
if prev == _PLUS or (state > _TimestampState.MONTH and prev == _HYPHEN):
state = _TimestampState.OFF_HOUR
val = tokens.transition(state)
if prev == _HYPHEN:
val.append(prev) # depends on [control=['if'], data=['prev']] # depends on [control=['if'], data=[]]
elif prev in _TIMESTAMP_DELIMITERS + (_T,):
state = _TimestampState[state + 1]
val = tokens.transition(state)
if state == _TimestampState.FRACTIONAL:
nxt = _DIGITS + _TIMESTAMP_OFFSET_INDICATORS # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif prev in _DIGITS:
if state == _TimestampState.MONTH:
nxt = _TIMESTAMP_YEAR_DELIMITERS # depends on [control=['if'], data=[]]
elif state == _TimestampState.DAY:
nxt = (_T,) + _VALUE_TERMINATORS
can_terminate = True # depends on [control=['if'], data=[]]
elif state == _TimestampState.HOUR:
nxt = (_COLON,) # depends on [control=['if'], data=[]]
elif state == _TimestampState.MINUTE:
nxt = _TIMESTAMP_OFFSET_INDICATORS + (_COLON,) # depends on [control=['if'], data=[]]
elif state == _TimestampState.SECOND:
nxt = _TIMESTAMP_OFFSET_INDICATORS + (_DOT,) # depends on [control=['if'], data=[]]
elif state == _TimestampState.FRACTIONAL:
nxt = _DIGITS + _TIMESTAMP_OFFSET_INDICATORS # depends on [control=['if'], data=[]]
elif state == _TimestampState.OFF_HOUR:
nxt = (_COLON,) # depends on [control=['if'], data=[]]
elif state == _TimestampState.OFF_MINUTE:
nxt = _VALUE_TERMINATORS
can_terminate = True # depends on [control=['if'], data=[]]
else:
raise ValueError('Unknown timestamp state %r.' % (state,)) # depends on [control=['if'], data=['_DIGITS']]
else:
# Reaching this branch would be indicative of a programming error within this state machine.
raise ValueError('Digit following %s in timestamp state %r.' % (_chr(prev), state))
val.append(c) # depends on [control=['if'], data=['c', '_DIGITS']]
prev = c
(c, _) = (yield trans) # depends on [control=['while'], data=[]]
|
def _run_funnel(args):
    """Run funnel TES server with rabix bunny for CWL.

    Starts a local funnel server (a Task Execution Service) on
    localhost:8088, rewrites the rabix bunny configuration to use the TES
    backend, then runs the workflow through rabix against that server.
    The funnel process is always killed when the run finishes or fails.

    Args:
        args: parsed CLI arguments; uses ``args.directory`` (location of
            the CWL main/json files) and ``args.no_container``.
    """
    host = "localhost"
    port = "8088"
    main_file, json_file, project_name = _get_main_and_json(args.directory)
    work_dir = utils.safe_makedir(os.path.join(os.getcwd(), "funnel_work"))
    log_file = os.path.join(work_dir, "%s-funnel.log" % project_name)
    # Create bunny configuration directory with TES backend
    orig_config_dir = os.path.join(os.path.dirname(os.path.realpath(utils.which("rabix"))), "config")
    work_config_dir = utils.safe_makedir(os.path.join(work_dir, "rabix_config"))
    for fname in os.listdir(orig_config_dir):
        if fname == "core.properties":
            # Copy core.properties line by line, switching the embedded
            # backend type to TES.
            with open(os.path.join(orig_config_dir, fname)) as in_handle:
                with open(os.path.join(work_config_dir, fname), "w") as out_handle:
                    for line in in_handle:
                        if line.startswith("backend.embedded.types"):
                            line = "backend.embedded.types=TES\n"
                        out_handle.write(line)
        else:
            # All other configuration files are copied unchanged.
            shutil.copy(os.path.join(orig_config_dir, fname), os.path.join(work_config_dir, fname))
    flags = ["-c", work_config_dir,
             "-tes-url=http://%s:%s" % (host, port), "-tes-storage=%s" % work_dir]
    if args.no_container:
        _remove_bcbiovm_path()
        flags += ["--no-container"]
    cmd = ["rabix"] + flags + [main_file, json_file]
    # Launch the funnel TES server in the background; the rabix run below
    # talks to it over HTTP at the URL passed via -tes-url.
    funnelp = subprocess.Popen(["funnel", "server", "run",
                                "--Server.HostName", host, "--Server.HTTPPort", port,
                                "--LocalStorage.AllowedDirs", work_dir,
                                "--Worker.WorkDir", os.path.join(work_dir, "funnel-work")])
    try:
        with utils.chdir(work_dir):
            _run_tool(cmd, not args.no_container, work_dir, log_file)
    finally:
        # Ensure the background funnel server never outlives the run.
        funnelp.kill()
|
def function[_run_funnel, parameter[args]]:
constant[Run funnel TES server with rabix bunny for CWL.
]
variable[host] assign[=] constant[localhost]
variable[port] assign[=] constant[8088]
<ast.Tuple object at 0x7da1b17a5840> assign[=] call[name[_get_main_and_json], parameter[name[args].directory]]
variable[work_dir] assign[=] call[name[utils].safe_makedir, parameter[call[name[os].path.join, parameter[call[name[os].getcwd, parameter[]], constant[funnel_work]]]]]
variable[log_file] assign[=] call[name[os].path.join, parameter[name[work_dir], binary_operation[constant[%s-funnel.log] <ast.Mod object at 0x7da2590d6920> name[project_name]]]]
variable[orig_config_dir] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[call[name[os].path.realpath, parameter[call[name[utils].which, parameter[constant[rabix]]]]]]], constant[config]]]
variable[work_config_dir] assign[=] call[name[utils].safe_makedir, parameter[call[name[os].path.join, parameter[name[work_dir], constant[rabix_config]]]]]
for taget[name[fname]] in starred[call[name[os].listdir, parameter[name[orig_config_dir]]]] begin[:]
if compare[name[fname] equal[==] constant[core.properties]] begin[:]
with call[name[open], parameter[call[name[os].path.join, parameter[name[orig_config_dir], name[fname]]]]] begin[:]
with call[name[open], parameter[call[name[os].path.join, parameter[name[work_config_dir], name[fname]]], constant[w]]] begin[:]
for taget[name[line]] in starred[name[in_handle]] begin[:]
if call[name[line].startswith, parameter[constant[backend.embedded.types]]] begin[:]
variable[line] assign[=] constant[backend.embedded.types=TES
]
call[name[out_handle].write, parameter[name[line]]]
variable[flags] assign[=] list[[<ast.Constant object at 0x7da1b17a42e0>, <ast.Name object at 0x7da1b17a68c0>, <ast.BinOp object at 0x7da1b17a40d0>, <ast.BinOp object at 0x7da1b17a4160>]]
if name[args].no_container begin[:]
call[name[_remove_bcbiovm_path], parameter[]]
<ast.AugAssign object at 0x7da1b17a7f70>
variable[cmd] assign[=] binary_operation[binary_operation[list[[<ast.Constant object at 0x7da1b17a58d0>]] + name[flags]] + list[[<ast.Name object at 0x7da1b17a5db0>, <ast.Name object at 0x7da1b17a5c90>]]]
variable[funnelp] assign[=] call[name[subprocess].Popen, parameter[list[[<ast.Constant object at 0x7da1b17a4a00>, <ast.Constant object at 0x7da1b17a51e0>, <ast.Constant object at 0x7da1b17a4ee0>, <ast.Constant object at 0x7da1b17a68f0>, <ast.Name object at 0x7da1b17a50f0>, <ast.Constant object at 0x7da1b17a4b80>, <ast.Name object at 0x7da1b17a65f0>, <ast.Constant object at 0x7da1b17a5240>, <ast.Name object at 0x7da1b17a4c70>, <ast.Constant object at 0x7da1b17a56c0>, <ast.Call object at 0x7da1b17a5810>]]]]
<ast.Try object at 0x7da1b17a5270>
|
keyword[def] identifier[_run_funnel] ( identifier[args] ):
literal[string]
identifier[host] = literal[string]
identifier[port] = literal[string]
identifier[main_file] , identifier[json_file] , identifier[project_name] = identifier[_get_main_and_json] ( identifier[args] . identifier[directory] )
identifier[work_dir] = identifier[utils] . identifier[safe_makedir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[getcwd] (), literal[string] ))
identifier[log_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[work_dir] , literal[string] % identifier[project_name] )
identifier[orig_config_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[realpath] ( identifier[utils] . identifier[which] ( literal[string] ))), literal[string] )
identifier[work_config_dir] = identifier[utils] . identifier[safe_makedir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[work_dir] , literal[string] ))
keyword[for] identifier[fname] keyword[in] identifier[os] . identifier[listdir] ( identifier[orig_config_dir] ):
keyword[if] identifier[fname] == literal[string] :
keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[orig_config_dir] , identifier[fname] )) keyword[as] identifier[in_handle] :
keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[work_config_dir] , identifier[fname] ), literal[string] ) keyword[as] identifier[out_handle] :
keyword[for] identifier[line] keyword[in] identifier[in_handle] :
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[line] = literal[string]
identifier[out_handle] . identifier[write] ( identifier[line] )
keyword[else] :
identifier[shutil] . identifier[copy] ( identifier[os] . identifier[path] . identifier[join] ( identifier[orig_config_dir] , identifier[fname] ), identifier[os] . identifier[path] . identifier[join] ( identifier[work_config_dir] , identifier[fname] ))
identifier[flags] =[ literal[string] , identifier[work_config_dir] ,
literal[string] %( identifier[host] , identifier[port] ), literal[string] % identifier[work_dir] ]
keyword[if] identifier[args] . identifier[no_container] :
identifier[_remove_bcbiovm_path] ()
identifier[flags] +=[ literal[string] ]
identifier[cmd] =[ literal[string] ]+ identifier[flags] +[ identifier[main_file] , identifier[json_file] ]
identifier[funnelp] = identifier[subprocess] . identifier[Popen] ([ literal[string] , literal[string] , literal[string] ,
literal[string] , identifier[host] , literal[string] , identifier[port] ,
literal[string] , identifier[work_dir] ,
literal[string] , identifier[os] . identifier[path] . identifier[join] ( identifier[work_dir] , literal[string] )])
keyword[try] :
keyword[with] identifier[utils] . identifier[chdir] ( identifier[work_dir] ):
identifier[_run_tool] ( identifier[cmd] , keyword[not] identifier[args] . identifier[no_container] , identifier[work_dir] , identifier[log_file] )
keyword[finally] :
identifier[funnelp] . identifier[kill] ()
|
def _run_funnel(args):
"""Run funnel TES server with rabix bunny for CWL.
"""
host = 'localhost'
port = '8088'
(main_file, json_file, project_name) = _get_main_and_json(args.directory)
work_dir = utils.safe_makedir(os.path.join(os.getcwd(), 'funnel_work'))
log_file = os.path.join(work_dir, '%s-funnel.log' % project_name)
# Create bunny configuration directory with TES backend
orig_config_dir = os.path.join(os.path.dirname(os.path.realpath(utils.which('rabix'))), 'config')
work_config_dir = utils.safe_makedir(os.path.join(work_dir, 'rabix_config'))
for fname in os.listdir(orig_config_dir):
if fname == 'core.properties':
with open(os.path.join(orig_config_dir, fname)) as in_handle:
with open(os.path.join(work_config_dir, fname), 'w') as out_handle:
for line in in_handle:
if line.startswith('backend.embedded.types'):
line = 'backend.embedded.types=TES\n' # depends on [control=['if'], data=[]]
out_handle.write(line) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['out_handle']] # depends on [control=['with'], data=['open', 'in_handle']] # depends on [control=['if'], data=['fname']]
else:
shutil.copy(os.path.join(orig_config_dir, fname), os.path.join(work_config_dir, fname)) # depends on [control=['for'], data=['fname']]
flags = ['-c', work_config_dir, '-tes-url=http://%s:%s' % (host, port), '-tes-storage=%s' % work_dir]
if args.no_container:
_remove_bcbiovm_path()
flags += ['--no-container'] # depends on [control=['if'], data=[]]
cmd = ['rabix'] + flags + [main_file, json_file]
funnelp = subprocess.Popen(['funnel', 'server', 'run', '--Server.HostName', host, '--Server.HTTPPort', port, '--LocalStorage.AllowedDirs', work_dir, '--Worker.WorkDir', os.path.join(work_dir, 'funnel-work')])
try:
with utils.chdir(work_dir):
_run_tool(cmd, not args.no_container, work_dir, log_file) # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]]
finally:
funnelp.kill()
|
def handle_change(self, change):
    """Handle changes from atom ContainerLists.

    Translates an atom container-change dict into the corresponding
    mutation on this polyline's point list.

    change -- atom change dict; reads 'operation' plus the keys that
              operation carries ('value'/'item', 'index', 'items',
              'newitem').

    Raises NotImplementedError for unsupported operations.
    """
    op = change['operation']
    # NOTE: must be an equality test, not `op in 'append'` -- string
    # containment would also match '', 'a', 'app', 'pen', etc.
    if op == 'append':
        self.add(len(change['value']), LatLng(*change['item']))
    elif op == 'insert':
        self.add(change['index'], LatLng(*change['item']))
    elif op == 'extend':
        points = [LatLng(*p) for p in change['items']]
        self.addAll([bridge.encode(c) for c in points])
    elif op == '__setitem__':
        self.set(change['index'], LatLng(*change['newitem']))
    elif op == 'pop':
        self.remove(change['index'])
    else:
        raise NotImplementedError(
            "Unsupported change operation {}".format(op))
|
def function[handle_change, parameter[self, change]]:
constant[ Handle changes from atom ContainerLists ]
variable[op] assign[=] call[name[change]][constant[operation]]
if compare[name[op] in constant[append]] begin[:]
call[name[self].add, parameter[call[name[len], parameter[call[name[change]][constant[value]]]], call[name[LatLng], parameter[<ast.Starred object at 0x7da1b0035bd0>]]]]
|
keyword[def] identifier[handle_change] ( identifier[self] , identifier[change] ):
literal[string]
identifier[op] = identifier[change] [ literal[string] ]
keyword[if] identifier[op] keyword[in] literal[string] :
identifier[self] . identifier[add] ( identifier[len] ( identifier[change] [ literal[string] ]), identifier[LatLng] (* identifier[change] [ literal[string] ]))
keyword[elif] identifier[op] == literal[string] :
identifier[self] . identifier[add] ( identifier[change] [ literal[string] ], identifier[LatLng] (* identifier[change] [ literal[string] ]))
keyword[elif] identifier[op] == literal[string] :
identifier[points] =[ identifier[LatLng] (* identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[change] [ literal[string] ]]
identifier[self] . identifier[addAll] ([ identifier[bridge] . identifier[encode] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[points] ])
keyword[elif] identifier[op] == literal[string] :
identifier[self] . identifier[set] ( identifier[change] [ literal[string] ], identifier[LatLng] (* identifier[change] [ literal[string] ]))
keyword[elif] identifier[op] == literal[string] :
identifier[self] . identifier[remove] ( identifier[change] [ literal[string] ])
keyword[else] :
keyword[raise] identifier[NotImplementedError] (
literal[string] . identifier[format] ( identifier[op] ))
|
def handle_change(self, change):
""" Handle changes from atom ContainerLists """
op = change['operation']
if op in 'append':
self.add(len(change['value']), LatLng(*change['item'])) # depends on [control=['if'], data=[]]
elif op == 'insert':
self.add(change['index'], LatLng(*change['item'])) # depends on [control=['if'], data=[]]
elif op == 'extend':
points = [LatLng(*p) for p in change['items']]
self.addAll([bridge.encode(c) for c in points]) # depends on [control=['if'], data=[]]
elif op == '__setitem__':
self.set(change['index'], LatLng(*change['newitem'])) # depends on [control=['if'], data=[]]
elif op == 'pop':
self.remove(change['index']) # depends on [control=['if'], data=[]]
else:
raise NotImplementedError('Unsupported change operation {}'.format(op))
|
def get_audio_metadata(fname):
    """ collects basic MP3 metadata
    Works, once you use mutagenx (buried deep in issues page)
    ['Angels']
    ['Red Back Fever']
    ['Red Back Fever']
    {'album': ['Red Back Fever'], 'title': ['Red Back Fever'], 'artist': ['Angels']}

    fname -- path to an MP3 file readable by mutagenx's EasyID3.
    Returns a dict with 'album', 'title' and 'artist' keys; missing tags
    fall back to ''.
    """
    from mutagenx.easyid3 import EasyID3
    audio = EasyID3(fname)
    audio_dict = {}
    try:
        artist = audio["artist"]
    except KeyError:
        artist = ''
    try:
        title = audio["title"]
    except KeyError:
        # Fall back to '' so `title` is always bound; the original code
        # only printed here, which left `title` undefined and raised
        # NameError at the assignment below.
        print("Cant get title")
        title = ''
    try:
        album = audio["album"]
    except KeyError:
        album = ''
    audio_dict['album'] = album
    audio_dict['title'] = title
    audio_dict['artist'] = artist
    return audio_dict
|
def function[get_audio_metadata, parameter[fname]]:
constant[ collects basic MP3 metadata
Works, once you use mutagenx (buried deep in issues page)
['Angels']
['Red Back Fever']
['Red Back Fever']
{'album': ['Red Back Fever'], 'title': ['Red Back Fever'], 'artist': ['Angels']}
]
from relative_module[mutagenx.easyid3] import module[EasyID3]
variable[audio] assign[=] call[name[EasyID3], parameter[name[fname]]]
variable[audio_dict] assign[=] dictionary[[], []]
<ast.Try object at 0x7da204345e40>
<ast.Try object at 0x7da2043464a0>
<ast.Try object at 0x7da204345900>
call[name[audio_dict]][constant[album]] assign[=] name[album]
call[name[audio_dict]][constant[title]] assign[=] name[title]
call[name[audio_dict]][constant[artist]] assign[=] name[artist]
return[name[audio_dict]]
|
keyword[def] identifier[get_audio_metadata] ( identifier[fname] ):
literal[string]
keyword[from] identifier[mutagenx] . identifier[easyid3] keyword[import] identifier[EasyID3]
identifier[audio] = identifier[EasyID3] ( identifier[fname] )
identifier[audio_dict] ={}
keyword[try] :
identifier[artist] = identifier[audio] [ literal[string] ]
keyword[except] identifier[KeyError] :
identifier[artist] = literal[string]
keyword[try] :
identifier[title] = identifier[audio] [ literal[string] ]
keyword[except] identifier[KeyError] :
identifier[print] ( literal[string] )
keyword[try] :
identifier[album] = identifier[audio] [ literal[string] ]
keyword[except] identifier[KeyError] :
identifier[album] = literal[string]
identifier[audio_dict] [ literal[string] ]= identifier[album]
identifier[audio_dict] [ literal[string] ]= identifier[title]
identifier[audio_dict] [ literal[string] ]= identifier[artist]
keyword[return] identifier[audio_dict]
|
def get_audio_metadata(fname):
""" collects basic MP3 metadata
Works, once you use mutagenx (buried deep in issues page)
['Angels']
['Red Back Fever']
['Red Back Fever']
{'album': ['Red Back Fever'], 'title': ['Red Back Fever'], 'artist': ['Angels']}
"""
from mutagenx.easyid3 import EasyID3
audio = EasyID3(fname)
audio_dict = {}
try:
artist = audio['artist'] # depends on [control=['try'], data=[]]
except KeyError:
artist = '' # depends on [control=['except'], data=[]]
try:
title = audio['title'] # depends on [control=['try'], data=[]]
except KeyError:
print('Cant get title') # depends on [control=['except'], data=[]]
try:
album = audio['album'] # depends on [control=['try'], data=[]]
except KeyError:
album = '' # depends on [control=['except'], data=[]]
audio_dict['album'] = album
audio_dict['title'] = title
audio_dict['artist'] = artist
return audio_dict
|
def trailing_stop_loss(self, accountID, **kwargs):
    """
    Shortcut to create a Trailing Stop Loss Order in an Account
    Args:
        accountID : The ID of the Account
        kwargs : The arguments to create a TrailingStopLossOrderRequest
    Returns:
        v20.response.Response containing the results from submitting
        the request
    """
    # Build the order request from the caller-supplied keyword arguments,
    # then delegate submission to the generic create() endpoint.
    order_request = TrailingStopLossOrderRequest(**kwargs)
    return self.create(accountID, order=order_request)
|
def function[trailing_stop_loss, parameter[self, accountID]]:
constant[
Shortcut to create a Trailing Stop Loss Order in an Account
Args:
accountID : The ID of the Account
kwargs : The arguments to create a TrailingStopLossOrderRequest
Returns:
v20.response.Response containing the results from submitting
the request
]
return[call[name[self].create, parameter[name[accountID]]]]
|
keyword[def] identifier[trailing_stop_loss] ( identifier[self] , identifier[accountID] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[create] (
identifier[accountID] ,
identifier[order] = identifier[TrailingStopLossOrderRequest] (** identifier[kwargs] )
)
|
def trailing_stop_loss(self, accountID, **kwargs):
"""
Shortcut to create a Trailing Stop Loss Order in an Account
Args:
accountID : The ID of the Account
kwargs : The arguments to create a TrailingStopLossOrderRequest
Returns:
v20.response.Response containing the results from submitting
the request
"""
return self.create(accountID, order=TrailingStopLossOrderRequest(**kwargs))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.