code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def NewFromJSON(data):
"""
Create a new Shake instance from a JSON dict.
Args:
data (dict): JSON dictionary representing a Shake.
Returns:
A Shake instance.
"""
s = Shake(
id=data.get('id', None),
name=data.get('name', None),
url=data.get('url', None),
thumbnail_url=data.get('thumbnail_url', None),
description=data.get('description', None),
type=data.get('type', None),
created_at=data.get('created_at', None),
updated_at=data.get('updated_at', None)
)
if data.get('owner', None):
s.owner = User.NewFromJSON(data.get('owner', None))
return s | def function[NewFromJSON, parameter[data]]:
constant[
Create a new Shake instance from a JSON dict.
Args:
data (dict): JSON dictionary representing a Shake.
Returns:
A Shake instance.
]
variable[s] assign[=] call[name[Shake], parameter[]]
if call[name[data].get, parameter[constant[owner], constant[None]]] begin[:]
name[s].owner assign[=] call[name[User].NewFromJSON, parameter[call[name[data].get, parameter[constant[owner], constant[None]]]]]
return[name[s]] | keyword[def] identifier[NewFromJSON] ( identifier[data] ):
literal[string]
identifier[s] = identifier[Shake] (
identifier[id] = identifier[data] . identifier[get] ( literal[string] , keyword[None] ),
identifier[name] = identifier[data] . identifier[get] ( literal[string] , keyword[None] ),
identifier[url] = identifier[data] . identifier[get] ( literal[string] , keyword[None] ),
identifier[thumbnail_url] = identifier[data] . identifier[get] ( literal[string] , keyword[None] ),
identifier[description] = identifier[data] . identifier[get] ( literal[string] , keyword[None] ),
identifier[type] = identifier[data] . identifier[get] ( literal[string] , keyword[None] ),
identifier[created_at] = identifier[data] . identifier[get] ( literal[string] , keyword[None] ),
identifier[updated_at] = identifier[data] . identifier[get] ( literal[string] , keyword[None] )
)
keyword[if] identifier[data] . identifier[get] ( literal[string] , keyword[None] ):
identifier[s] . identifier[owner] = identifier[User] . identifier[NewFromJSON] ( identifier[data] . identifier[get] ( literal[string] , keyword[None] ))
keyword[return] identifier[s] | def NewFromJSON(data):
"""
Create a new Shake instance from a JSON dict.
Args:
data (dict): JSON dictionary representing a Shake.
Returns:
A Shake instance.
"""
s = Shake(id=data.get('id', None), name=data.get('name', None), url=data.get('url', None), thumbnail_url=data.get('thumbnail_url', None), description=data.get('description', None), type=data.get('type', None), created_at=data.get('created_at', None), updated_at=data.get('updated_at', None))
if data.get('owner', None):
s.owner = User.NewFromJSON(data.get('owner', None)) # depends on [control=['if'], data=[]]
return s |
def reset(self, force_flush_cache: bool = False) -> None:
"""
Reset transaction back to original state, discarding all
uncompleted transactions.
"""
super(LDAPwrapper, self).reset()
if len(self._transactions) == 0:
raise RuntimeError("reset called outside a transaction.")
self._transactions[-1] = [] | def function[reset, parameter[self, force_flush_cache]]:
constant[
Reset transaction back to original state, discarding all
uncompleted transactions.
]
call[call[name[super], parameter[name[LDAPwrapper], name[self]]].reset, parameter[]]
if compare[call[name[len], parameter[name[self]._transactions]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da18f09f2b0>
call[name[self]._transactions][<ast.UnaryOp object at 0x7da18f09cac0>] assign[=] list[[]] | keyword[def] identifier[reset] ( identifier[self] , identifier[force_flush_cache] : identifier[bool] = keyword[False] )-> keyword[None] :
literal[string]
identifier[super] ( identifier[LDAPwrapper] , identifier[self] ). identifier[reset] ()
keyword[if] identifier[len] ( identifier[self] . identifier[_transactions] )== literal[int] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[self] . identifier[_transactions] [- literal[int] ]=[] | def reset(self, force_flush_cache: bool=False) -> None:
"""
Reset transaction back to original state, discarding all
uncompleted transactions.
"""
super(LDAPwrapper, self).reset()
if len(self._transactions) == 0:
raise RuntimeError('reset called outside a transaction.') # depends on [control=['if'], data=[]]
self._transactions[-1] = [] |
def _get_by(key, val, l):
"""
Out of list *l* return all elements that have *key=val*
This comes in handy when you are working with aggregated/bucketed queries
"""
return [x for x in l if _check_value_recursively(key, val, x)] | def function[_get_by, parameter[key, val, l]]:
constant[
Out of list *l* return all elements that have *key=val*
This comes in handy when you are working with aggregated/bucketed queries
]
return[<ast.ListComp object at 0x7da18ede78e0>] | keyword[def] identifier[_get_by] ( identifier[key] , identifier[val] , identifier[l] ):
literal[string]
keyword[return] [ identifier[x] keyword[for] identifier[x] keyword[in] identifier[l] keyword[if] identifier[_check_value_recursively] ( identifier[key] , identifier[val] , identifier[x] )] | def _get_by(key, val, l):
"""
Out of list *l* return all elements that have *key=val*
This comes in handy when you are working with aggregated/bucketed queries
"""
return [x for x in l if _check_value_recursively(key, val, x)] |
async def get_zones(self) -> List[Zone]:
"""Return list of available zones."""
res = await self.services["avContent"]["getCurrentExternalTerminalsStatus"]()
zones = [Zone.make(services=self.services, **x) for x in res if 'meta:zone:output' in x['meta']]
if not zones:
raise SongpalException("Device has no zones")
return zones | <ast.AsyncFunctionDef object at 0x7da18f00c190> | keyword[async] keyword[def] identifier[get_zones] ( identifier[self] )-> identifier[List] [ identifier[Zone] ]:
literal[string]
identifier[res] = keyword[await] identifier[self] . identifier[services] [ literal[string] ][ literal[string] ]()
identifier[zones] =[ identifier[Zone] . identifier[make] ( identifier[services] = identifier[self] . identifier[services] ,** identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[res] keyword[if] literal[string] keyword[in] identifier[x] [ literal[string] ]]
keyword[if] keyword[not] identifier[zones] :
keyword[raise] identifier[SongpalException] ( literal[string] )
keyword[return] identifier[zones] | async def get_zones(self) -> List[Zone]:
"""Return list of available zones."""
res = await self.services['avContent']['getCurrentExternalTerminalsStatus']()
zones = [Zone.make(services=self.services, **x) for x in res if 'meta:zone:output' in x['meta']]
if not zones:
raise SongpalException('Device has no zones') # depends on [control=['if'], data=[]]
return zones |
def bound_symbols(self):
"""Set of bound SymPy symbols in the expression"""
if self._bound_symbols is None:
res = set.union(
set([]), # dummy arg (union fails without arguments)
*[_bound_symbols(val) for val in self.kwargs.values()])
res.update(
set([]), # dummy arg (update fails without arguments)
*[_bound_symbols(arg) for arg in self.args])
self._bound_symbols = res
return self._bound_symbols | def function[bound_symbols, parameter[self]]:
constant[Set of bound SymPy symbols in the expression]
if compare[name[self]._bound_symbols is constant[None]] begin[:]
variable[res] assign[=] call[name[set].union, parameter[call[name[set], parameter[list[[]]]], <ast.Starred object at 0x7da20c6ab3a0>]]
call[name[res].update, parameter[call[name[set], parameter[list[[]]]], <ast.Starred object at 0x7da20c6a8c70>]]
name[self]._bound_symbols assign[=] name[res]
return[name[self]._bound_symbols] | keyword[def] identifier[bound_symbols] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_bound_symbols] keyword[is] keyword[None] :
identifier[res] = identifier[set] . identifier[union] (
identifier[set] ([]),
*[ identifier[_bound_symbols] ( identifier[val] ) keyword[for] identifier[val] keyword[in] identifier[self] . identifier[kwargs] . identifier[values] ()])
identifier[res] . identifier[update] (
identifier[set] ([]),
*[ identifier[_bound_symbols] ( identifier[arg] ) keyword[for] identifier[arg] keyword[in] identifier[self] . identifier[args] ])
identifier[self] . identifier[_bound_symbols] = identifier[res]
keyword[return] identifier[self] . identifier[_bound_symbols] | def bound_symbols(self):
"""Set of bound SymPy symbols in the expression"""
if self._bound_symbols is None: # dummy arg (union fails without arguments)
res = set.union(set([]), *[_bound_symbols(val) for val in self.kwargs.values()]) # dummy arg (update fails without arguments)
res.update(set([]), *[_bound_symbols(arg) for arg in self.args])
self._bound_symbols = res # depends on [control=['if'], data=[]]
return self._bound_symbols |
def callProceeding(RepeatIndicator_presence=0,
BearerCapability_presence=0,
BearerCapability_presence1=0,
Facility_presence=0, ProgressIndicator_presence=0,
PriorityLevel_presence=0):
"""CALL PROCEEDING Section 9.3.3"""
a = TpPd(pd=0x3)
b = MessageType(mesType=0x2) # 00000010
packet = a / b
if RepeatIndicator_presence is 1:
c = RepeatIndicatorHdr(ieiRI=0xD, eightBitRI=0x0)
packet = packet / c
if BearerCapability_presence is 1:
d = BearerCapabilityHdr(ieiBC=0x04, eightBitBC=0x0)
packet = packet / d
if BearerCapability_presence1 is 1:
e = BearerCapabilityHdr(ieiBC=0x04, eightBitBC=0x0)
packet = packet / e
if Facility_presence is 1:
f = FacilityHdr(ieiF=0x1C, eightBitF=0x0)
packet = packet / f
if ProgressIndicator_presence is 1:
g = ProgressIndicatorHdr(ieiPI=0x1E, eightBitPI=0x0)
packet = packet / g
if PriorityLevel_presence is 1:
h = PriorityLevelHdr(ieiPL=0x80, eightBitPL=0x0)
packet = packet / h
return packet | def function[callProceeding, parameter[RepeatIndicator_presence, BearerCapability_presence, BearerCapability_presence1, Facility_presence, ProgressIndicator_presence, PriorityLevel_presence]]:
constant[CALL PROCEEDING Section 9.3.3]
variable[a] assign[=] call[name[TpPd], parameter[]]
variable[b] assign[=] call[name[MessageType], parameter[]]
variable[packet] assign[=] binary_operation[name[a] / name[b]]
if compare[name[RepeatIndicator_presence] is constant[1]] begin[:]
variable[c] assign[=] call[name[RepeatIndicatorHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[c]]
if compare[name[BearerCapability_presence] is constant[1]] begin[:]
variable[d] assign[=] call[name[BearerCapabilityHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[d]]
if compare[name[BearerCapability_presence1] is constant[1]] begin[:]
variable[e] assign[=] call[name[BearerCapabilityHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[e]]
if compare[name[Facility_presence] is constant[1]] begin[:]
variable[f] assign[=] call[name[FacilityHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[f]]
if compare[name[ProgressIndicator_presence] is constant[1]] begin[:]
variable[g] assign[=] call[name[ProgressIndicatorHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[g]]
if compare[name[PriorityLevel_presence] is constant[1]] begin[:]
variable[h] assign[=] call[name[PriorityLevelHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[h]]
return[name[packet]] | keyword[def] identifier[callProceeding] ( identifier[RepeatIndicator_presence] = literal[int] ,
identifier[BearerCapability_presence] = literal[int] ,
identifier[BearerCapability_presence1] = literal[int] ,
identifier[Facility_presence] = literal[int] , identifier[ProgressIndicator_presence] = literal[int] ,
identifier[PriorityLevel_presence] = literal[int] ):
literal[string]
identifier[a] = identifier[TpPd] ( identifier[pd] = literal[int] )
identifier[b] = identifier[MessageType] ( identifier[mesType] = literal[int] )
identifier[packet] = identifier[a] / identifier[b]
keyword[if] identifier[RepeatIndicator_presence] keyword[is] literal[int] :
identifier[c] = identifier[RepeatIndicatorHdr] ( identifier[ieiRI] = literal[int] , identifier[eightBitRI] = literal[int] )
identifier[packet] = identifier[packet] / identifier[c]
keyword[if] identifier[BearerCapability_presence] keyword[is] literal[int] :
identifier[d] = identifier[BearerCapabilityHdr] ( identifier[ieiBC] = literal[int] , identifier[eightBitBC] = literal[int] )
identifier[packet] = identifier[packet] / identifier[d]
keyword[if] identifier[BearerCapability_presence1] keyword[is] literal[int] :
identifier[e] = identifier[BearerCapabilityHdr] ( identifier[ieiBC] = literal[int] , identifier[eightBitBC] = literal[int] )
identifier[packet] = identifier[packet] / identifier[e]
keyword[if] identifier[Facility_presence] keyword[is] literal[int] :
identifier[f] = identifier[FacilityHdr] ( identifier[ieiF] = literal[int] , identifier[eightBitF] = literal[int] )
identifier[packet] = identifier[packet] / identifier[f]
keyword[if] identifier[ProgressIndicator_presence] keyword[is] literal[int] :
identifier[g] = identifier[ProgressIndicatorHdr] ( identifier[ieiPI] = literal[int] , identifier[eightBitPI] = literal[int] )
identifier[packet] = identifier[packet] / identifier[g]
keyword[if] identifier[PriorityLevel_presence] keyword[is] literal[int] :
identifier[h] = identifier[PriorityLevelHdr] ( identifier[ieiPL] = literal[int] , identifier[eightBitPL] = literal[int] )
identifier[packet] = identifier[packet] / identifier[h]
keyword[return] identifier[packet] | def callProceeding(RepeatIndicator_presence=0, BearerCapability_presence=0, BearerCapability_presence1=0, Facility_presence=0, ProgressIndicator_presence=0, PriorityLevel_presence=0):
"""CALL PROCEEDING Section 9.3.3"""
a = TpPd(pd=3)
b = MessageType(mesType=2) # 00000010
packet = a / b
if RepeatIndicator_presence is 1:
c = RepeatIndicatorHdr(ieiRI=13, eightBitRI=0)
packet = packet / c # depends on [control=['if'], data=[]]
if BearerCapability_presence is 1:
d = BearerCapabilityHdr(ieiBC=4, eightBitBC=0)
packet = packet / d # depends on [control=['if'], data=[]]
if BearerCapability_presence1 is 1:
e = BearerCapabilityHdr(ieiBC=4, eightBitBC=0)
packet = packet / e # depends on [control=['if'], data=[]]
if Facility_presence is 1:
f = FacilityHdr(ieiF=28, eightBitF=0)
packet = packet / f # depends on [control=['if'], data=[]]
if ProgressIndicator_presence is 1:
g = ProgressIndicatorHdr(ieiPI=30, eightBitPI=0)
packet = packet / g # depends on [control=['if'], data=[]]
if PriorityLevel_presence is 1:
h = PriorityLevelHdr(ieiPL=128, eightBitPL=0)
packet = packet / h # depends on [control=['if'], data=[]]
return packet |
def plot_sector_exposures_net(net_exposures, sector_dict=None, ax=None):
"""
Plots output of compute_sector_exposures as line graphs
Parameters
----------
net_exposures : arrays
Arrays of net sector exposures (output of compute_sector_exposures).
sector_dict : dict or OrderedDict
Dictionary of all sectors
- See full description in compute_sector_exposures
"""
if ax is None:
ax = plt.gca()
if sector_dict is None:
sector_names = SECTORS.values()
else:
sector_names = sector_dict.values()
color_list = plt.cm.gist_rainbow(np.linspace(0, 1, 11))
for i in range(len(net_exposures)):
ax.plot(net_exposures[i], color=color_list[i], alpha=0.8,
label=sector_names[i])
ax.set(title='Net exposures to sectors',
ylabel='Proportion of net exposure \n in sectors')
return ax | def function[plot_sector_exposures_net, parameter[net_exposures, sector_dict, ax]]:
constant[
Plots output of compute_sector_exposures as line graphs
Parameters
----------
net_exposures : arrays
Arrays of net sector exposures (output of compute_sector_exposures).
sector_dict : dict or OrderedDict
Dictionary of all sectors
- See full description in compute_sector_exposures
]
if compare[name[ax] is constant[None]] begin[:]
variable[ax] assign[=] call[name[plt].gca, parameter[]]
if compare[name[sector_dict] is constant[None]] begin[:]
variable[sector_names] assign[=] call[name[SECTORS].values, parameter[]]
variable[color_list] assign[=] call[name[plt].cm.gist_rainbow, parameter[call[name[np].linspace, parameter[constant[0], constant[1], constant[11]]]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[net_exposures]]]]]] begin[:]
call[name[ax].plot, parameter[call[name[net_exposures]][name[i]]]]
call[name[ax].set, parameter[]]
return[name[ax]] | keyword[def] identifier[plot_sector_exposures_net] ( identifier[net_exposures] , identifier[sector_dict] = keyword[None] , identifier[ax] = keyword[None] ):
literal[string]
keyword[if] identifier[ax] keyword[is] keyword[None] :
identifier[ax] = identifier[plt] . identifier[gca] ()
keyword[if] identifier[sector_dict] keyword[is] keyword[None] :
identifier[sector_names] = identifier[SECTORS] . identifier[values] ()
keyword[else] :
identifier[sector_names] = identifier[sector_dict] . identifier[values] ()
identifier[color_list] = identifier[plt] . identifier[cm] . identifier[gist_rainbow] ( identifier[np] . identifier[linspace] ( literal[int] , literal[int] , literal[int] ))
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[net_exposures] )):
identifier[ax] . identifier[plot] ( identifier[net_exposures] [ identifier[i] ], identifier[color] = identifier[color_list] [ identifier[i] ], identifier[alpha] = literal[int] ,
identifier[label] = identifier[sector_names] [ identifier[i] ])
identifier[ax] . identifier[set] ( identifier[title] = literal[string] ,
identifier[ylabel] = literal[string] )
keyword[return] identifier[ax] | def plot_sector_exposures_net(net_exposures, sector_dict=None, ax=None):
"""
Plots output of compute_sector_exposures as line graphs
Parameters
----------
net_exposures : arrays
Arrays of net sector exposures (output of compute_sector_exposures).
sector_dict : dict or OrderedDict
Dictionary of all sectors
- See full description in compute_sector_exposures
"""
if ax is None:
ax = plt.gca() # depends on [control=['if'], data=['ax']]
if sector_dict is None:
sector_names = SECTORS.values() # depends on [control=['if'], data=[]]
else:
sector_names = sector_dict.values()
color_list = plt.cm.gist_rainbow(np.linspace(0, 1, 11))
for i in range(len(net_exposures)):
ax.plot(net_exposures[i], color=color_list[i], alpha=0.8, label=sector_names[i]) # depends on [control=['for'], data=['i']]
ax.set(title='Net exposures to sectors', ylabel='Proportion of net exposure \n in sectors')
return ax |
def flat_map(self, flatmap_fn):
"""Applies a flatmap operator to the stream.
Attributes:
flatmap_fn (function): The user-defined logic of the flatmap
(e.g. split()).
"""
op = Operator(
_generate_uuid(),
OpType.FlatMap,
"FlatMap",
flatmap_fn,
num_instances=self.env.config.parallelism)
return self.__register(op) | def function[flat_map, parameter[self, flatmap_fn]]:
constant[Applies a flatmap operator to the stream.
Attributes:
flatmap_fn (function): The user-defined logic of the flatmap
(e.g. split()).
]
variable[op] assign[=] call[name[Operator], parameter[call[name[_generate_uuid], parameter[]], name[OpType].FlatMap, constant[FlatMap], name[flatmap_fn]]]
return[call[name[self].__register, parameter[name[op]]]] | keyword[def] identifier[flat_map] ( identifier[self] , identifier[flatmap_fn] ):
literal[string]
identifier[op] = identifier[Operator] (
identifier[_generate_uuid] (),
identifier[OpType] . identifier[FlatMap] ,
literal[string] ,
identifier[flatmap_fn] ,
identifier[num_instances] = identifier[self] . identifier[env] . identifier[config] . identifier[parallelism] )
keyword[return] identifier[self] . identifier[__register] ( identifier[op] ) | def flat_map(self, flatmap_fn):
"""Applies a flatmap operator to the stream.
Attributes:
flatmap_fn (function): The user-defined logic of the flatmap
(e.g. split()).
"""
op = Operator(_generate_uuid(), OpType.FlatMap, 'FlatMap', flatmap_fn, num_instances=self.env.config.parallelism)
return self.__register(op) |
def submit(self):
"""Submits a form."""
if self._w3c:
form = self.find_element(By.XPATH, "./ancestor-or-self::form")
self._parent.execute_script(
"var e = arguments[0].ownerDocument.createEvent('Event');"
"e.initEvent('submit', true, true);"
"if (arguments[0].dispatchEvent(e)) { arguments[0].submit() }", form)
else:
self._execute(Command.SUBMIT_ELEMENT) | def function[submit, parameter[self]]:
constant[Submits a form.]
if name[self]._w3c begin[:]
variable[form] assign[=] call[name[self].find_element, parameter[name[By].XPATH, constant[./ancestor-or-self::form]]]
call[name[self]._parent.execute_script, parameter[constant[var e = arguments[0].ownerDocument.createEvent('Event');e.initEvent('submit', true, true);if (arguments[0].dispatchEvent(e)) { arguments[0].submit() }], name[form]]] | keyword[def] identifier[submit] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_w3c] :
identifier[form] = identifier[self] . identifier[find_element] ( identifier[By] . identifier[XPATH] , literal[string] )
identifier[self] . identifier[_parent] . identifier[execute_script] (
literal[string]
literal[string]
literal[string] , identifier[form] )
keyword[else] :
identifier[self] . identifier[_execute] ( identifier[Command] . identifier[SUBMIT_ELEMENT] ) | def submit(self):
"""Submits a form."""
if self._w3c:
form = self.find_element(By.XPATH, './ancestor-or-self::form')
self._parent.execute_script("var e = arguments[0].ownerDocument.createEvent('Event');e.initEvent('submit', true, true);if (arguments[0].dispatchEvent(e)) { arguments[0].submit() }", form) # depends on [control=['if'], data=[]]
else:
self._execute(Command.SUBMIT_ELEMENT) |
def get_queryset(self, request):
"""Limit to TenantGroups that this user can access."""
qs = super(TenantGroupAdmin, self).get_queryset(request)
if not request.user.is_superuser:
qs = qs.filter(tenantrole__user=request.user,
tenantrole__role=TenantRole.ROLE_GROUP_MANAGER)
return qs | def function[get_queryset, parameter[self, request]]:
constant[Limit to TenantGroups that this user can access.]
variable[qs] assign[=] call[call[name[super], parameter[name[TenantGroupAdmin], name[self]]].get_queryset, parameter[name[request]]]
if <ast.UnaryOp object at 0x7da18fe90e20> begin[:]
variable[qs] assign[=] call[name[qs].filter, parameter[]]
return[name[qs]] | keyword[def] identifier[get_queryset] ( identifier[self] , identifier[request] ):
literal[string]
identifier[qs] = identifier[super] ( identifier[TenantGroupAdmin] , identifier[self] ). identifier[get_queryset] ( identifier[request] )
keyword[if] keyword[not] identifier[request] . identifier[user] . identifier[is_superuser] :
identifier[qs] = identifier[qs] . identifier[filter] ( identifier[tenantrole__user] = identifier[request] . identifier[user] ,
identifier[tenantrole__role] = identifier[TenantRole] . identifier[ROLE_GROUP_MANAGER] )
keyword[return] identifier[qs] | def get_queryset(self, request):
"""Limit to TenantGroups that this user can access."""
qs = super(TenantGroupAdmin, self).get_queryset(request)
if not request.user.is_superuser:
qs = qs.filter(tenantrole__user=request.user, tenantrole__role=TenantRole.ROLE_GROUP_MANAGER) # depends on [control=['if'], data=[]]
return qs |
def get_index(self, label):
"""
Find the index of the input ``label``.
Parameters
----------
labels : int
The label numbers to find.
Returns
-------
index : int
The array index.
Raises
------
ValueError
If ``label`` is invalid.
"""
self.check_labels(label)
return np.searchsorted(self.labels, label) | def function[get_index, parameter[self, label]]:
constant[
Find the index of the input ``label``.
Parameters
----------
labels : int
The label numbers to find.
Returns
-------
index : int
The array index.
Raises
------
ValueError
If ``label`` is invalid.
]
call[name[self].check_labels, parameter[name[label]]]
return[call[name[np].searchsorted, parameter[name[self].labels, name[label]]]] | keyword[def] identifier[get_index] ( identifier[self] , identifier[label] ):
literal[string]
identifier[self] . identifier[check_labels] ( identifier[label] )
keyword[return] identifier[np] . identifier[searchsorted] ( identifier[self] . identifier[labels] , identifier[label] ) | def get_index(self, label):
"""
Find the index of the input ``label``.
Parameters
----------
labels : int
The label numbers to find.
Returns
-------
index : int
The array index.
Raises
------
ValueError
If ``label`` is invalid.
"""
self.check_labels(label)
return np.searchsorted(self.labels, label) |
def get_unread_forums(self, user):
""" Returns the list of unread forums for the given user. """
return self.get_unread_forums_from_list(
user, self.perm_handler.get_readable_forums(Forum.objects.all(), user)) | def function[get_unread_forums, parameter[self, user]]:
constant[ Returns the list of unread forums for the given user. ]
return[call[name[self].get_unread_forums_from_list, parameter[name[user], call[name[self].perm_handler.get_readable_forums, parameter[call[name[Forum].objects.all, parameter[]], name[user]]]]]] | keyword[def] identifier[get_unread_forums] ( identifier[self] , identifier[user] ):
literal[string]
keyword[return] identifier[self] . identifier[get_unread_forums_from_list] (
identifier[user] , identifier[self] . identifier[perm_handler] . identifier[get_readable_forums] ( identifier[Forum] . identifier[objects] . identifier[all] (), identifier[user] )) | def get_unread_forums(self, user):
""" Returns the list of unread forums for the given user. """
return self.get_unread_forums_from_list(user, self.perm_handler.get_readable_forums(Forum.objects.all(), user)) |
def is_venv(directory, executable='python'):
"""
:param directory: base directory of python environment
"""
path=os.path.join(directory, 'bin', executable)
return os.path.isfile(path) | def function[is_venv, parameter[directory, executable]]:
constant[
:param directory: base directory of python environment
]
variable[path] assign[=] call[name[os].path.join, parameter[name[directory], constant[bin], name[executable]]]
return[call[name[os].path.isfile, parameter[name[path]]]] | keyword[def] identifier[is_venv] ( identifier[directory] , identifier[executable] = literal[string] ):
literal[string]
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[directory] , literal[string] , identifier[executable] )
keyword[return] identifier[os] . identifier[path] . identifier[isfile] ( identifier[path] ) | def is_venv(directory, executable='python'):
"""
:param directory: base directory of python environment
"""
path = os.path.join(directory, 'bin', executable)
return os.path.isfile(path) |
def copyTo(self, screen):
""" Creates a new point with the same offset on the target screen as this point has on the
current screen """
from .RegionMatching import Screen
if not isinstance(screen, Screen):
screen = RegionMatching.Screen(screen)
return screen.getTopLeft().offset(self.getScreen().getTopLeft().getOffset(self)) | def function[copyTo, parameter[self, screen]]:
constant[ Creates a new point with the same offset on the target screen as this point has on the
current screen ]
from relative_module[RegionMatching] import module[Screen]
if <ast.UnaryOp object at 0x7da1b138be50> begin[:]
variable[screen] assign[=] call[name[RegionMatching].Screen, parameter[name[screen]]]
return[call[call[name[screen].getTopLeft, parameter[]].offset, parameter[call[call[call[name[self].getScreen, parameter[]].getTopLeft, parameter[]].getOffset, parameter[name[self]]]]]] | keyword[def] identifier[copyTo] ( identifier[self] , identifier[screen] ):
literal[string]
keyword[from] . identifier[RegionMatching] keyword[import] identifier[Screen]
keyword[if] keyword[not] identifier[isinstance] ( identifier[screen] , identifier[Screen] ):
identifier[screen] = identifier[RegionMatching] . identifier[Screen] ( identifier[screen] )
keyword[return] identifier[screen] . identifier[getTopLeft] (). identifier[offset] ( identifier[self] . identifier[getScreen] (). identifier[getTopLeft] (). identifier[getOffset] ( identifier[self] )) | def copyTo(self, screen):
""" Creates a new point with the same offset on the target screen as this point has on the
current screen """
from .RegionMatching import Screen
if not isinstance(screen, Screen):
screen = RegionMatching.Screen(screen) # depends on [control=['if'], data=[]]
return screen.getTopLeft().offset(self.getScreen().getTopLeft().getOffset(self)) |
def _add_individual(self, ind_obj):
"""Add a individual to the adapter
Args:
ind_obj (puzzle.models.Individual)
"""
logger.debug("Adding individual {0} to plugin".format(ind_obj.ind_id))
self.individual_objs.append(ind_obj) | def function[_add_individual, parameter[self, ind_obj]]:
constant[Add a individual to the adapter
Args:
ind_obj (puzzle.models.Individual)
]
call[name[logger].debug, parameter[call[constant[Adding individual {0} to plugin].format, parameter[name[ind_obj].ind_id]]]]
call[name[self].individual_objs.append, parameter[name[ind_obj]]] | keyword[def] identifier[_add_individual] ( identifier[self] , identifier[ind_obj] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[ind_obj] . identifier[ind_id] ))
identifier[self] . identifier[individual_objs] . identifier[append] ( identifier[ind_obj] ) | def _add_individual(self, ind_obj):
"""Add a individual to the adapter
Args:
ind_obj (puzzle.models.Individual)
"""
logger.debug('Adding individual {0} to plugin'.format(ind_obj.ind_id))
self.individual_objs.append(ind_obj) |
def _extract_values(values_list):
"""extract values from either file or list
:param values_list: list or file name (str) with list of values
"""
values = []
# check if file or list of values to iterate
if isinstance(values_list, str):
with open(values_list) as ff:
reading = csv.reader(ff)
for j in reading:
values.append(j[0])
elif isinstance(values_list, list):
values = values_list
else:
raise Exception("input datatype not supported.")
return values | def function[_extract_values, parameter[values_list]]:
constant[extract values from either file or list
:param values_list: list or file name (str) with list of values
]
variable[values] assign[=] list[[]]
if call[name[isinstance], parameter[name[values_list], name[str]]] begin[:]
with call[name[open], parameter[name[values_list]]] begin[:]
variable[reading] assign[=] call[name[csv].reader, parameter[name[ff]]]
for taget[name[j]] in starred[name[reading]] begin[:]
call[name[values].append, parameter[call[name[j]][constant[0]]]]
return[name[values]] | keyword[def] identifier[_extract_values] ( identifier[values_list] ):
literal[string]
identifier[values] =[]
keyword[if] identifier[isinstance] ( identifier[values_list] , identifier[str] ):
keyword[with] identifier[open] ( identifier[values_list] ) keyword[as] identifier[ff] :
identifier[reading] = identifier[csv] . identifier[reader] ( identifier[ff] )
keyword[for] identifier[j] keyword[in] identifier[reading] :
identifier[values] . identifier[append] ( identifier[j] [ literal[int] ])
keyword[elif] identifier[isinstance] ( identifier[values_list] , identifier[list] ):
identifier[values] = identifier[values_list]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[return] identifier[values] | def _extract_values(values_list):
"""extract values from either file or list
:param values_list: list or file name (str) with list of values
"""
values = []
# check if file or list of values to iterate
if isinstance(values_list, str):
with open(values_list) as ff:
reading = csv.reader(ff)
for j in reading:
values.append(j[0]) # depends on [control=['for'], data=['j']] # depends on [control=['with'], data=['ff']] # depends on [control=['if'], data=[]]
elif isinstance(values_list, list):
values = values_list # depends on [control=['if'], data=[]]
else:
raise Exception('input datatype not supported.')
return values |
def run(cls, version=None):
    """
    Test runner method; is called by parent class defined in suite.py.

    Gathers NSS component versions from the device via Marionette and
    compares the reported NSS version against the expected release
    version for the given B2G version.

    :param version: B2G version string to test against
    :return: bool PASS/FAIL status
    """
    def versions_report(versions):
        # One "component: version" line per reported entry; this report
        # was previously duplicated inline in every log_status() call.
        return '\n'.join(["%s: %s" % (k, versions[k]) for k in versions])

    try:
        dumper = certdump()
        versions = dumper.nssversion_via_marionette()
    except Exception as e:  # TODO: too broad exception
        cls.log_status('FAIL', 'Failed to gather information from the device via Marionette: %s' % e)
        return False

    if version is None:
        cls.log_status('FAIL', 'NSS version check requires a B2G version.\nReported component versions:\n%s' % (
            versions_report(versions)))
        return False

    reported_version = versions['NSS_Version']
    if version not in nssversion.b2g_version_to_hginfo:
        cls.log_status('FAIL', 'No version comparison data for B2G %s.\nReported NSS component versions:\n%s' % (
            version, versions_report(versions)))
        return False

    expected_version = nssversion.b2g_version_to_hginfo[version]['release_nss_version']

    # Fail if reported version is a downgrade
    if nssversion.first_older_than_second(reported_version, expected_version):
        cls.log_status('FAIL', 'NSS downgrade detected. Expecting at least version %s.\n'
                               'Reported versions:\n%s' % (
                                   expected_version, versions_report(versions)))
        return False

    # Pass if NSS version was upgraded.
    if nssversion.first_older_than_second(expected_version, reported_version):
        cls.log_status('PASS', 'NSS more recent than release version %s. Reported component versions:\n%s' % (
            expected_version, versions_report(versions)))
        return True

    # Else device has reported the expected version.
    cls.log_status('PASS', 'NSS version reported as expected. Reported component versions:\n%s' % (
        versions_report(versions)))
    return True
constant[
Test runner method; is called by parent class defined in suite.py.
:param version: B2G version string to test against
:return: bool PASS/FAIL status
]
<ast.Try object at 0x7da2047eb700>
if compare[name[version] is constant[None]] begin[:]
call[name[cls].log_status, parameter[constant[FAIL], binary_operation[constant[NSS version check requires a B2G version.
Reported component versions:
%s] <ast.Mod object at 0x7da2590d6920> call[constant[
].join, parameter[<ast.ListComp object at 0x7da20e956170>]]]]]
return[constant[False]]
variable[reported_version] assign[=] call[name[versions]][constant[NSS_Version]]
if compare[name[version] <ast.NotIn object at 0x7da2590d7190> name[nssversion].b2g_version_to_hginfo] begin[:]
call[name[cls].log_status, parameter[constant[FAIL], binary_operation[constant[No version comparison data for B2G %s.
Reported NSS component versions:
%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c796260>, <ast.Call object at 0x7da20c795930>]]]]]
return[constant[False]]
variable[expected_version] assign[=] call[call[name[nssversion].b2g_version_to_hginfo][name[version]]][constant[release_nss_version]]
if call[name[nssversion].first_older_than_second, parameter[name[reported_version], name[expected_version]]] begin[:]
call[name[cls].log_status, parameter[constant[FAIL], binary_operation[constant[NSS downgrade detected. Expecting at least version %s.
Reported versions:
%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c795960>, <ast.Call object at 0x7da20c795270>]]]]]
return[constant[False]]
if call[name[nssversion].first_older_than_second, parameter[name[expected_version], name[reported_version]]] begin[:]
call[name[cls].log_status, parameter[constant[PASS], binary_operation[constant[NSS more recent than release version %s. Reported component versions:
%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c794790>, <ast.Call object at 0x7da20c796770>]]]]]
return[constant[True]]
call[name[cls].log_status, parameter[constant[PASS], binary_operation[constant[NSS version reported as expected. Reported component versions:
%s] <ast.Mod object at 0x7da2590d6920> call[constant[
].join, parameter[<ast.ListComp object at 0x7da20c795c30>]]]]]
return[constant[True]] | keyword[def] identifier[run] ( identifier[cls] , identifier[version] = keyword[None] ):
literal[string]
keyword[try] :
identifier[dumper] = identifier[certdump] ()
identifier[versions] = identifier[dumper] . identifier[nssversion_via_marionette] ()
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[cls] . identifier[log_status] ( literal[string] , literal[string] % identifier[e] )
keyword[return] keyword[False]
keyword[if] identifier[version] keyword[is] keyword[None] :
identifier[cls] . identifier[log_status] ( literal[string] , literal[string] %(
literal[string] . identifier[join] ([ literal[string] %( identifier[k] , identifier[versions] [ identifier[k] ]) keyword[for] identifier[k] keyword[in] identifier[versions] ])))
keyword[return] keyword[False]
identifier[reported_version] = identifier[versions] [ literal[string] ]
keyword[if] identifier[version] keyword[not] keyword[in] identifier[nssversion] . identifier[b2g_version_to_hginfo] :
identifier[cls] . identifier[log_status] ( literal[string] , literal[string] %(
identifier[version] , literal[string] . identifier[join] ([ literal[string] %( identifier[k] , identifier[versions] [ identifier[k] ]) keyword[for] identifier[k] keyword[in] identifier[versions] ])))
keyword[return] keyword[False]
identifier[expected_version] = identifier[nssversion] . identifier[b2g_version_to_hginfo] [ identifier[version] ][ literal[string] ]
keyword[if] identifier[nssversion] . identifier[first_older_than_second] ( identifier[reported_version] , identifier[expected_version] ):
identifier[cls] . identifier[log_status] ( literal[string] , literal[string]
literal[string] %(
identifier[expected_version] , literal[string] . identifier[join] ([ literal[string] %( identifier[k] , identifier[versions] [ identifier[k] ]) keyword[for] identifier[k] keyword[in] identifier[versions] ])))
keyword[return] keyword[False]
keyword[if] identifier[nssversion] . identifier[first_older_than_second] ( identifier[expected_version] , identifier[reported_version] ):
identifier[cls] . identifier[log_status] ( literal[string] , literal[string] %(
identifier[expected_version] , literal[string] . identifier[join] ([ literal[string] %( identifier[k] , identifier[versions] [ identifier[k] ]) keyword[for] identifier[k] keyword[in] identifier[versions] ])))
keyword[return] keyword[True]
identifier[cls] . identifier[log_status] ( literal[string] , literal[string] %(
literal[string] . identifier[join] ([ literal[string] %( identifier[k] , identifier[versions] [ identifier[k] ]) keyword[for] identifier[k] keyword[in] identifier[versions] ])))
keyword[return] keyword[True] | def run(cls, version=None):
"""
Test runner method; is called by parent class defined in suite.py.
:param version: B2G version string to test against
:return: bool PASS/FAIL status
"""
try:
dumper = certdump()
versions = dumper.nssversion_via_marionette() # depends on [control=['try'], data=[]]
except Exception as e: # TODO: too broad exception
cls.log_status('FAIL', 'Failed to gather information from the device via Marionette: %s' % e)
return False # depends on [control=['except'], data=['e']]
if version is None:
cls.log_status('FAIL', 'NSS version check requires a B2G version.\nReported component versions:\n%s' % '\n'.join(['%s: %s' % (k, versions[k]) for k in versions]))
return False # depends on [control=['if'], data=[]]
reported_version = versions['NSS_Version']
if version not in nssversion.b2g_version_to_hginfo:
cls.log_status('FAIL', 'No version comparison data for B2G %s.\nReported NSS component versions:\n%s' % (version, '\n'.join(['%s: %s' % (k, versions[k]) for k in versions])))
return False # depends on [control=['if'], data=['version']]
expected_version = nssversion.b2g_version_to_hginfo[version]['release_nss_version']
# Fail if reported version is a downgrade
if nssversion.first_older_than_second(reported_version, expected_version):
cls.log_status('FAIL', 'NSS downgrade detected. Expecting at least version %s.\nReported versions:\n%s' % (expected_version, '\n'.join(['%s: %s' % (k, versions[k]) for k in versions])))
return False # depends on [control=['if'], data=[]]
# Pass if NSS version was upgraded.
if nssversion.first_older_than_second(expected_version, reported_version):
cls.log_status('PASS', 'NSS more recent than release version %s. Reported component versions:\n%s' % (expected_version, '\n'.join(['%s: %s' % (k, versions[k]) for k in versions])))
return True # depends on [control=['if'], data=[]]
# Else device has reported the expected version.
cls.log_status('PASS', 'NSS version reported as expected. Reported component versions:\n%s' % '\n'.join(['%s: %s' % (k, versions[k]) for k in versions]))
return True |
def facade(factory):
    """Declare a method as a facade factory.

    Wraps *factory* in a FacadeDescriptor carrying the factory's name,
    then copies the factory's metadata (name, docstring, ...) onto the
    descriptor so it presents as the original callable.
    """
    return update_wrapper(FacadeDescriptor(factory.__name__, factory), factory)
constant[Declare a method as a facade factory.]
variable[wrapper] assign[=] call[name[FacadeDescriptor], parameter[name[factory].__name__, name[factory]]]
return[call[name[update_wrapper], parameter[name[wrapper], name[factory]]]] | keyword[def] identifier[facade] ( identifier[factory] ):
literal[string]
identifier[wrapper] = identifier[FacadeDescriptor] ( identifier[factory] . identifier[__name__] , identifier[factory] )
keyword[return] identifier[update_wrapper] ( identifier[wrapper] , identifier[factory] ) | def facade(factory):
"""Declare a method as a facade factory."""
wrapper = FacadeDescriptor(factory.__name__, factory)
return update_wrapper(wrapper, factory) |
def periodic(period=60.0, file=sys.stderr):
    """Start a daemon thread which will periodically print GC stats

    :param period: Update period in seconds
    :param file: A writable file-like object
    """
    # Import locally to keep the dependency optional until first use.
    # (The previous `import time` here was unused and has been removed.)
    import threading
    stats_worker = _StatsThread(period=period, file=file)
    reporter = threading.Thread(target=stats_worker)
    # Daemonize so the reporting thread never blocks interpreter exit.
    reporter.daemon = True
    reporter.start()
constant[Start a daemon thread which will periodically print GC stats
:param period: Update period in seconds
:param file: A writable file-like object
]
import module[threading]
import module[time]
variable[S] assign[=] call[name[_StatsThread], parameter[]]
variable[T] assign[=] call[name[threading].Thread, parameter[]]
name[T].daemon assign[=] constant[True]
call[name[T].start, parameter[]] | keyword[def] identifier[periodic] ( identifier[period] = literal[int] , identifier[file] = identifier[sys] . identifier[stderr] ):
literal[string]
keyword[import] identifier[threading]
keyword[import] identifier[time]
identifier[S] = identifier[_StatsThread] ( identifier[period] = identifier[period] , identifier[file] = identifier[file] )
identifier[T] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[S] )
identifier[T] . identifier[daemon] = keyword[True]
identifier[T] . identifier[start] () | def periodic(period=60.0, file=sys.stderr):
"""Start a daemon thread which will periodically print GC stats
:param period: Update period in seconds
:param file: A writable file-like object
"""
import threading
import time
S = _StatsThread(period=period, file=file)
T = threading.Thread(target=S)
T.daemon = True
T.start() |
def _determine_dimensions(num_of_pixels):
"""
Given a number of pixels, determines the largest width and height that define a
rectangle with such an area
"""
for x in xrange(int(math.sqrt(num_of_pixels)) + 1, 1, -1):
if num_of_pixels % x == 0:
return num_of_pixels // x, x
return 1, num_of_pixels | def function[_determine_dimensions, parameter[num_of_pixels]]:
constant[
Given a number of pixels, determines the largest width and height that define a
rectangle with such an area
]
for taget[name[x]] in starred[call[name[xrange], parameter[binary_operation[call[name[int], parameter[call[name[math].sqrt, parameter[name[num_of_pixels]]]]] + constant[1]], constant[1], <ast.UnaryOp object at 0x7da18dc99a20>]]] begin[:]
if compare[binary_operation[name[num_of_pixels] <ast.Mod object at 0x7da2590d6920> name[x]] equal[==] constant[0]] begin[:]
return[tuple[[<ast.BinOp object at 0x7da18dc9ad10>, <ast.Name object at 0x7da2054a47f0>]]]
return[tuple[[<ast.Constant object at 0x7da2054a4d30>, <ast.Name object at 0x7da2054a76d0>]]] | keyword[def] identifier[_determine_dimensions] ( identifier[num_of_pixels] ):
literal[string]
keyword[for] identifier[x] keyword[in] identifier[xrange] ( identifier[int] ( identifier[math] . identifier[sqrt] ( identifier[num_of_pixels] ))+ literal[int] , literal[int] ,- literal[int] ):
keyword[if] identifier[num_of_pixels] % identifier[x] == literal[int] :
keyword[return] identifier[num_of_pixels] // identifier[x] , identifier[x]
keyword[return] literal[int] , identifier[num_of_pixels] | def _determine_dimensions(num_of_pixels):
"""
Given a number of pixels, determines the largest width and height that define a
rectangle with such an area
"""
for x in xrange(int(math.sqrt(num_of_pixels)) + 1, 1, -1):
if num_of_pixels % x == 0:
return (num_of_pixels // x, x) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']]
return (1, num_of_pixels) |
def array(self):
    """
    return the underlying numpy array

    Materializes the geometric progression described by this object's
    start/stop/num/endpoint attributes.
    """
    return np.geomspace(
        start=self.start,
        stop=self.stop,
        num=self.num,
        endpoint=self.endpoint,
    )
constant[
return the underlying numpy array
]
return[call[name[np].geomspace, parameter[name[self].start, name[self].stop, name[self].num, name[self].endpoint]]] | keyword[def] identifier[array] ( identifier[self] ):
literal[string]
keyword[return] identifier[np] . identifier[geomspace] ( identifier[self] . identifier[start] , identifier[self] . identifier[stop] , identifier[self] . identifier[num] , identifier[self] . identifier[endpoint] ) | def array(self):
"""
return the underlying numpy array
"""
return np.geomspace(self.start, self.stop, self.num, self.endpoint) |
def data(self):
    """Metric data
    Args:
        value (:obj:`bool` or :obj:`int` or :obj:`long` or :obj:`float`
            or :obj:`basestring` or :obj:`bytes`)
    Returns:
        value
    Raises:
        :obj:`TypeError`
    """
    # Numeric values may live in one of several protobuf fields; probe
    # them in a fixed priority order and return the first one that is set.
    if self._data_type == int:
        for field in ("int64_data", "int32_data", "uint64_data", "uint32_data"):
            if self._pb.HasField(field):
                return getattr(self._pb, field)
    elif self._data_type == float:
        for field in ("float32_data", "float64_data"):
            if self._pb.HasField(field):
                return getattr(self._pb, field)
    elif self._data_type == str:
        return self._pb.string_data
    elif self._data_type == bool:
        return self._pb.bool_data
    elif self._data_type == bytes:
        return self._pb.bytes_data
    # No matching field set, or an unsupported data type.
    return None
constant[Metric data
Args:
value (:obj:`bool` or :obj:`int` or :obj:`long` or :obj:`float`
or :obj:`basestring` or :obj:`bytes`)
Returns:
value
Raises:
:obj:`TypeError`
]
if compare[name[self]._data_type equal[==] name[int]] begin[:]
if call[name[self]._pb.HasField, parameter[constant[int64_data]]] begin[:]
return[name[self]._pb.int64_data]
if call[name[self]._pb.HasField, parameter[constant[int32_data]]] begin[:]
return[name[self]._pb.int32_data]
if call[name[self]._pb.HasField, parameter[constant[uint64_data]]] begin[:]
return[name[self]._pb.uint64_data]
if call[name[self]._pb.HasField, parameter[constant[uint32_data]]] begin[:]
return[name[self]._pb.uint32_data]
return[constant[None]] | keyword[def] identifier[data] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_data_type] == identifier[int] :
keyword[if] identifier[self] . identifier[_pb] . identifier[HasField] ( literal[string] ):
keyword[return] identifier[self] . identifier[_pb] . identifier[int64_data]
keyword[if] identifier[self] . identifier[_pb] . identifier[HasField] ( literal[string] ):
keyword[return] identifier[self] . identifier[_pb] . identifier[int32_data]
keyword[if] identifier[self] . identifier[_pb] . identifier[HasField] ( literal[string] ):
keyword[return] identifier[self] . identifier[_pb] . identifier[uint64_data]
keyword[if] identifier[self] . identifier[_pb] . identifier[HasField] ( literal[string] ):
keyword[return] identifier[self] . identifier[_pb] . identifier[uint32_data]
keyword[elif] identifier[self] . identifier[_data_type] == identifier[float] :
keyword[if] identifier[self] . identifier[_pb] . identifier[HasField] ( literal[string] ):
keyword[return] identifier[self] . identifier[_pb] . identifier[float32_data]
keyword[if] identifier[self] . identifier[_pb] . identifier[HasField] ( literal[string] ):
keyword[return] identifier[self] . identifier[_pb] . identifier[float64_data]
keyword[elif] identifier[self] . identifier[_data_type] == identifier[str] :
keyword[return] identifier[self] . identifier[_pb] . identifier[string_data]
keyword[elif] identifier[self] . identifier[_data_type] == identifier[bool] :
keyword[return] identifier[self] . identifier[_pb] . identifier[bool_data]
keyword[elif] identifier[self] . identifier[_data_type] == identifier[bytes] :
keyword[return] identifier[self] . identifier[_pb] . identifier[bytes_data]
keyword[return] keyword[None] | def data(self):
"""Metric data
Args:
value (:obj:`bool` or :obj:`int` or :obj:`long` or :obj:`float`
or :obj:`basestring` or :obj:`bytes`)
Returns:
value
Raises:
:obj:`TypeError`
"""
if self._data_type == int:
if self._pb.HasField('int64_data'):
return self._pb.int64_data # depends on [control=['if'], data=[]]
if self._pb.HasField('int32_data'):
return self._pb.int32_data # depends on [control=['if'], data=[]]
if self._pb.HasField('uint64_data'):
return self._pb.uint64_data # depends on [control=['if'], data=[]]
if self._pb.HasField('uint32_data'):
return self._pb.uint32_data # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif self._data_type == float:
if self._pb.HasField('float32_data'):
return self._pb.float32_data # depends on [control=['if'], data=[]]
if self._pb.HasField('float64_data'):
return self._pb.float64_data # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif self._data_type == str:
return self._pb.string_data # depends on [control=['if'], data=[]]
elif self._data_type == bool:
return self._pb.bool_data # depends on [control=['if'], data=[]]
elif self._data_type == bytes:
return self._pb.bytes_data # depends on [control=['if'], data=[]]
return None |
def calculate_tx_fee( tx_hex, fee_per_byte ):
    """
    High-level API call (meant to be blockchain-agnostic)
    What is the fee for the transaction?

    :param tx_hex: hex-encoded serialized transaction
    :param fee_per_byte: fee rate per virtual byte
    :return: total fee (integer; uses floor division throughout, matching
             the original Python 2 `/` semantics on ints)
    """
    txobj = btc_tx_deserialize(tx_hex)

    # Two hex characters encode one byte.  `//` keeps this an integer on
    # Python 3 (plain `/` would silently produce floats here and below).
    tx_num_bytes = len(tx_hex) // 2
    num_virtual_bytes = None

    if btc_tx_is_segwit(tx_hex):
        # segwit--discount witness data
        witness_len = 0
        for inp in txobj['ins']:
            witness_len += len(inp['witness_script']) // 2

        # see https://bitcoincore.org/en/segwit_wallet_dev/#transaction-fee-estimation
        tx_num_bytes_original = tx_num_bytes - witness_len
        num_virtual_bytes = 3 * tx_num_bytes_original + tx_num_bytes

    else:
        # non-segwit
        num_virtual_bytes = tx_num_bytes * 4

    return (fee_per_byte * num_virtual_bytes) // 4
constant[
High-level API call (meant to be blockchain-agnostic)
What is the fee for the transaction?
]
variable[txobj] assign[=] call[name[btc_tx_deserialize], parameter[name[tx_hex]]]
variable[tx_num_bytes] assign[=] binary_operation[call[name[len], parameter[name[tx_hex]]] / constant[2]]
variable[num_virtual_bytes] assign[=] constant[None]
if call[name[btc_tx_is_segwit], parameter[name[tx_hex]]] begin[:]
variable[witness_len] assign[=] constant[0]
for taget[name[inp]] in starred[call[name[txobj]][constant[ins]]] begin[:]
<ast.AugAssign object at 0x7da18f58dcc0>
variable[tx_num_bytes_original] assign[=] binary_operation[name[tx_num_bytes] - name[witness_len]]
variable[num_virtual_bytes] assign[=] binary_operation[binary_operation[constant[3] * name[tx_num_bytes_original]] + name[tx_num_bytes]]
return[binary_operation[binary_operation[name[fee_per_byte] * name[num_virtual_bytes]] / constant[4]]] | keyword[def] identifier[calculate_tx_fee] ( identifier[tx_hex] , identifier[fee_per_byte] ):
literal[string]
identifier[txobj] = identifier[btc_tx_deserialize] ( identifier[tx_hex] )
identifier[tx_num_bytes] = identifier[len] ( identifier[tx_hex] )/ literal[int]
identifier[num_virtual_bytes] = keyword[None]
keyword[if] identifier[btc_tx_is_segwit] ( identifier[tx_hex] ):
identifier[witness_len] = literal[int]
keyword[for] identifier[inp] keyword[in] identifier[txobj] [ literal[string] ]:
identifier[witness_len] += identifier[len] ( identifier[inp] [ literal[string] ])/ literal[int]
identifier[tx_num_bytes_original] = identifier[tx_num_bytes] - identifier[witness_len]
identifier[num_virtual_bytes] = literal[int] * identifier[tx_num_bytes_original] + identifier[tx_num_bytes]
keyword[else] :
identifier[num_virtual_bytes] = identifier[tx_num_bytes] * literal[int]
keyword[return] ( identifier[fee_per_byte] * identifier[num_virtual_bytes] )/ literal[int] | def calculate_tx_fee(tx_hex, fee_per_byte):
"""
High-level API call (meant to be blockchain-agnostic)
What is the fee for the transaction?
"""
txobj = btc_tx_deserialize(tx_hex)
tx_num_bytes = len(tx_hex) / 2
num_virtual_bytes = None
if btc_tx_is_segwit(tx_hex): # segwit--discount witness data
witness_len = 0
for inp in txobj['ins']:
witness_len += len(inp['witness_script']) / 2 # depends on [control=['for'], data=['inp']]
# see https://bitcoincore.org/en/segwit_wallet_dev/#transaction-fee-estimation
tx_num_bytes_original = tx_num_bytes - witness_len
num_virtual_bytes = 3 * tx_num_bytes_original + tx_num_bytes # depends on [control=['if'], data=[]]
else: # non-segwit
num_virtual_bytes = tx_num_bytes * 4
return fee_per_byte * num_virtual_bytes / 4 |
def update_visited(self):
    """
    Updates exploration map visited status

    Marks the tile under the player -- and its immediate neighbours --
    as visited on the visit layer, dims each newly-visited cell, and
    grants an exploration reward per new cell.  Also grants the goal
    reward whenever the player stands on the spawn tile.
    """
    assert isinstance(self.player.cshape.center, eu.Vector2)
    pos = self.player.cshape.center

    # Helper function
    def set_visited(layer, cell):
        # Only act on cells that exist, were not seen before, and carry
        # a real tile (tile.id > 0 skips blank/empty map cells).
        if cell and not cell.properties.get('visited') and cell.tile and cell.tile.id > 0:
            cell.properties['visited'] = True

            self.reward_explore()

            # TODO: Decouple into view rendering
            # Change colour of visited cells (dim to 80% opacity)
            key = layer.get_key_at_pixel(cell.x, cell.y)
            #layer.set_cell_color(key[0], key[1], [155,155,155])
            layer.set_cell_opacity(key[0], key[1], 255*0.8)
    # End Helper

    # Get the current tile under player
    current = self.visit_layer.get_at_pixel(pos.x, pos.y)

    if current:
        # In spawn square
        if current == self.visit_layer.get_at_pixel(self.spawn.x, self.spawn.y):
            self.reward_goal()

        # Only record/reward exploration when battery is above 50%
        #if self.player.stats['battery'] > 50:
        # Mark the current cell and every neighbouring cell as visited.
        set_visited(self.visit_layer, current)
        neighbours = self.visit_layer.get_neighbors(current)
        for cell in neighbours:
            neighbour = neighbours[cell]
            set_visited(self.visit_layer, neighbour)
constant[
Updates exploration map visited status
]
assert[call[name[isinstance], parameter[name[self].player.cshape.center, name[eu].Vector2]]]
variable[pos] assign[=] name[self].player.cshape.center
def function[set_visited, parameter[layer, cell]]:
if <ast.BoolOp object at 0x7da1b25241f0> begin[:]
call[name[cell].properties][constant[visited]] assign[=] constant[True]
call[name[self].reward_explore, parameter[]]
variable[key] assign[=] call[name[layer].get_key_at_pixel, parameter[name[cell].x, name[cell].y]]
call[name[layer].set_cell_opacity, parameter[call[name[key]][constant[0]], call[name[key]][constant[1]], binary_operation[constant[255] * constant[0.8]]]]
variable[current] assign[=] call[name[self].visit_layer.get_at_pixel, parameter[name[pos].x, name[pos].y]]
if name[current] begin[:]
if compare[name[current] equal[==] call[name[self].visit_layer.get_at_pixel, parameter[name[self].spawn.x, name[self].spawn.y]]] begin[:]
call[name[self].reward_goal, parameter[]]
call[name[set_visited], parameter[name[self].visit_layer, name[current]]]
variable[neighbours] assign[=] call[name[self].visit_layer.get_neighbors, parameter[name[current]]]
for taget[name[cell]] in starred[name[neighbours]] begin[:]
variable[neighbour] assign[=] call[name[neighbours]][name[cell]]
call[name[set_visited], parameter[name[self].visit_layer, name[neighbour]]] | keyword[def] identifier[update_visited] ( identifier[self] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[self] . identifier[player] . identifier[cshape] . identifier[center] , identifier[eu] . identifier[Vector2] )
identifier[pos] = identifier[self] . identifier[player] . identifier[cshape] . identifier[center]
keyword[def] identifier[set_visited] ( identifier[layer] , identifier[cell] ):
keyword[if] identifier[cell] keyword[and] keyword[not] identifier[cell] . identifier[properties] . identifier[get] ( literal[string] ) keyword[and] identifier[cell] . identifier[tile] keyword[and] identifier[cell] . identifier[tile] . identifier[id] > literal[int] :
identifier[cell] . identifier[properties] [ literal[string] ]= keyword[True]
identifier[self] . identifier[reward_explore] ()
identifier[key] = identifier[layer] . identifier[get_key_at_pixel] ( identifier[cell] . identifier[x] , identifier[cell] . identifier[y] )
identifier[layer] . identifier[set_cell_opacity] ( identifier[key] [ literal[int] ], identifier[key] [ literal[int] ], literal[int] * literal[int] )
identifier[current] = identifier[self] . identifier[visit_layer] . identifier[get_at_pixel] ( identifier[pos] . identifier[x] , identifier[pos] . identifier[y] )
keyword[if] identifier[current] :
keyword[if] identifier[current] == identifier[self] . identifier[visit_layer] . identifier[get_at_pixel] ( identifier[self] . identifier[spawn] . identifier[x] , identifier[self] . identifier[spawn] . identifier[y] ):
identifier[self] . identifier[reward_goal] ()
identifier[set_visited] ( identifier[self] . identifier[visit_layer] , identifier[current] )
identifier[neighbours] = identifier[self] . identifier[visit_layer] . identifier[get_neighbors] ( identifier[current] )
keyword[for] identifier[cell] keyword[in] identifier[neighbours] :
identifier[neighbour] = identifier[neighbours] [ identifier[cell] ]
identifier[set_visited] ( identifier[self] . identifier[visit_layer] , identifier[neighbour] ) | def update_visited(self):
"""
Updates exploration map visited status
"""
assert isinstance(self.player.cshape.center, eu.Vector2)
pos = self.player.cshape.center
# Helper function
def set_visited(layer, cell):
if cell and (not cell.properties.get('visited')) and cell.tile and (cell.tile.id > 0):
cell.properties['visited'] = True
self.reward_explore()
# TODO: Decouple into view rendering
# Change colour of visited cells
key = layer.get_key_at_pixel(cell.x, cell.y)
#layer.set_cell_color(key[0], key[1], [155,155,155])
layer.set_cell_opacity(key[0], key[1], 255 * 0.8) # depends on [control=['if'], data=[]]
# End Helper
# Get the current tile under player
current = self.visit_layer.get_at_pixel(pos.x, pos.y)
if current:
# In spawn square
if current == self.visit_layer.get_at_pixel(self.spawn.x, self.spawn.y):
self.reward_goal() # depends on [control=['if'], data=[]]
# Only record/reward exploration when battery is above 50%
#if self.player.stats['battery'] > 50:
set_visited(self.visit_layer, current)
neighbours = self.visit_layer.get_neighbors(current)
for cell in neighbours:
neighbour = neighbours[cell]
set_visited(self.visit_layer, neighbour) # depends on [control=['for'], data=['cell']] # depends on [control=['if'], data=[]] |
def check_release_file_exists():
    """Check if the release.yaml file exists"""
    release_file = get_heron_release_file()
    # Succeed fast when the file is present; otherwise report and fail.
    if os.path.isfile(release_file):
        return True
    Log.error("Required file not found: %s" % release_file)
    return False
return True | def function[check_release_file_exists, parameter[]]:
constant[Check if the release.yaml file exists]
variable[release_file] assign[=] call[name[get_heron_release_file], parameter[]]
if <ast.UnaryOp object at 0x7da20c993f70> begin[:]
call[name[Log].error, parameter[binary_operation[constant[Required file not found: %s] <ast.Mod object at 0x7da2590d6920> name[release_file]]]]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[check_release_file_exists] ():
literal[string]
identifier[release_file] = identifier[get_heron_release_file] ()
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[release_file] ):
identifier[Log] . identifier[error] ( literal[string] % identifier[release_file] )
keyword[return] keyword[False]
keyword[return] keyword[True] | def check_release_file_exists():
"""Check if the release.yaml file exists"""
release_file = get_heron_release_file()
# if the file does not exist and is not a file
if not os.path.isfile(release_file):
Log.error('Required file not found: %s' % release_file)
return False # depends on [control=['if'], data=[]]
return True |
def inquire_property(name, doc=None):
    """Creates a property based on an inquire result

    This method creates a property that calls the
    :python:`_inquire` method, and return the value of the
    requested information.

    Args:
        name (str): the name of the 'inquire' result information
        doc (str): optional docstring for the created property

    Returns:
        property: the created property
    """
    def _get_inquire_result(self):
        if not self._started:
            msg = ("Cannot read {0} from a security context whose "
                   "establishment has not yet been started.")
            # Substitute the property name into the message template; the
            # original raised the raw template with a literal "{0}" in it.
            raise AttributeError(msg.format(name))

        # Request only the named attribute, then pull it off the result.
        return getattr(self._inquire(**{name: True}), name)

    return property(_get_inquire_result, doc=doc)
constant[Creates a property based on an inquire result
This method creates a property that calls the
:python:`_inquire` method, and return the value of the
requested information.
Args:
name (str): the name of the 'inquire' result information
Returns:
property: the created property
]
def function[inquire_property, parameter[self]]:
if <ast.UnaryOp object at 0x7da20c796080> begin[:]
variable[msg] assign[=] constant[Cannot read {0} from a security context whose establishment has not yet been started.]
<ast.Raise object at 0x7da20c796650>
return[call[name[getattr], parameter[call[name[self]._inquire, parameter[]], name[name]]]]
return[call[name[property], parameter[name[inquire_property]]]] | keyword[def] identifier[inquire_property] ( identifier[name] , identifier[doc] = keyword[None] ):
literal[string]
keyword[def] identifier[inquire_property] ( identifier[self] ):
keyword[if] keyword[not] identifier[self] . identifier[_started] :
identifier[msg] =( literal[string]
literal[string] )
keyword[raise] identifier[AttributeError] ( identifier[msg] )
keyword[return] identifier[getattr] ( identifier[self] . identifier[_inquire] (**{ identifier[name] : keyword[True] }), identifier[name] )
keyword[return] identifier[property] ( identifier[inquire_property] , identifier[doc] = identifier[doc] ) | def inquire_property(name, doc=None):
"""Creates a property based on an inquire result
This method creates a property that calls the
:python:`_inquire` method, and return the value of the
requested information.
Args:
name (str): the name of the 'inquire' result information
Returns:
property: the created property
"""
def inquire_property(self):
if not self._started:
msg = 'Cannot read {0} from a security context whose establishment has not yet been started.'
raise AttributeError(msg) # depends on [control=['if'], data=[]]
return getattr(self._inquire(**{name: True}), name)
return property(inquire_property, doc=doc) |
def rdf(coords_a, coords_b, binsize=0.002,
        cutoff=1.5, periodic=None, normalize=True):
    """Calculate the radial distribution function of *coords_a* against
    *coords_b*.
    **Parameters**
    - coords_a: np.ndarray((3, NA))
      first set of coordinates
    - coords_b: np.ndarray((3, NB))
      coordinates to calculate the RDF against
    - binsize: float
      histogram bin width, in the same unit as the coordinates
    - periodic: np.ndarray((3, 3)) or None
      orthorhombic box matrix; only its diagonal is used.
      NOTE(review): despite the default, ``None`` is not actually
      supported -- the box is always indexed below.
    - normalize: True or False
      gromacs-like normalization
    - cutoff: float
      where to cutoff the RDF
    **Returns** ``(bin_edges, hist)`` truncated at *cutoff*.
    """
    # Only the diagonal of the box matrix is used (orthorhombic cell).
    period = periodic[0, 0], periodic[1, 1], periodic[2, 2]
    distances = distances_within(coords_a, coords_b, cutoff,
                                 np.array(period, dtype=np.double))
    # NOTE(review): for the documented (3, N) layout len() yields 3,
    # not the particle count -- confirm the expected array orientation.
    n_a = len(coords_a)
    n_b = len(coords_b)
    volume = periodic[0, 0] * periodic[1, 1] * periodic[2, 2]
    # Histogram the distances on a regular grid of width `binsize`.
    int_distances = np.rint(distances/binsize).astype(int)
    # Cast to float: np.bincount returns an integer array, and the
    # in-place shell normalization below would silently truncate.
    hist = np.bincount(int_distances).astype(np.double)
    bin_edges = np.arange(len(hist)+1) * binsize
    if normalize:
        dr = binsize
        normfac = volume/(n_a*n_b)
        # Normalize each bin by the volume of its spherical shell
        for i, r in enumerate(bin_edges[1:]):
            hist[i] /= ((4.0/3.0 * np.pi * (r + 0.5*dr)**3)
                        - (4.0/3.0 * np.pi * (r - 0.5*dr)**3))
        # Normalize by density
        hist = hist * normfac
    # Cutting up to rmax value; slice bounds must be int (a float
    # index raises TypeError on Python 3).
    width = int(cutoff/binsize) + 1
    return bin_edges[0:width], hist[0:width]
constant[Calculate the radial distribution function of *coords_a* against
*coords_b*.
**Parameters**
- coords_a: np.ndarray((3, NA))
first set of coordinates
- coords_b: np.ndarray((3, NB))
coordinates to calculate the RDF against
- periodic: np.ndarray((3, 3)) or None
Wether or not include periodic images in the calculation
- normalize: True or False
gromacs-like normalization
- cutoff:
where to cutoff the RDF
]
variable[period] assign[=] tuple[[<ast.Subscript object at 0x7da2054a63b0>, <ast.Subscript object at 0x7da2054a4430>, <ast.Subscript object at 0x7da2054a57e0>]]
variable[distances] assign[=] call[name[distances_within], parameter[name[coords_a], name[coords_b], name[cutoff], call[name[np].array, parameter[name[period]]]]]
variable[n_a] assign[=] call[name[len], parameter[name[coords_a]]]
variable[n_b] assign[=] call[name[len], parameter[name[coords_b]]]
variable[volume] assign[=] binary_operation[binary_operation[call[name[periodic]][tuple[[<ast.Constant object at 0x7da2054a4070>, <ast.Constant object at 0x7da2054a57b0>]]] * call[name[periodic]][tuple[[<ast.Constant object at 0x7da2054a4cd0>, <ast.Constant object at 0x7da2054a6bf0>]]]] * call[name[periodic]][tuple[[<ast.Constant object at 0x7da2054a73a0>, <ast.Constant object at 0x7da2054a51e0>]]]]
variable[int_distances] assign[=] call[call[name[np].rint, parameter[binary_operation[name[distances] / name[binsize]]]].astype, parameter[name[int]]]
variable[hist] assign[=] call[name[np].bincount, parameter[name[int_distances]]]
variable[bin_edges] assign[=] binary_operation[call[name[np].arange, parameter[binary_operation[call[name[len], parameter[name[hist]]] + constant[1]]]] * name[binsize]]
if name[normalize] begin[:]
variable[dr] assign[=] name[binsize]
variable[normfac] assign[=] binary_operation[name[volume] / binary_operation[name[n_a] * name[n_b]]]
for taget[tuple[[<ast.Name object at 0x7da2054a5690>, <ast.Name object at 0x7da2054a44c0>]]] in starred[call[name[enumerate], parameter[call[name[bin_edges]][<ast.Slice object at 0x7da2054a6770>]]]] begin[:]
<ast.AugAssign object at 0x7da2054a6350>
variable[hist] assign[=] binary_operation[name[hist] * name[normfac]]
variable[width] assign[=] binary_operation[binary_operation[name[cutoff] / name[binsize]] + constant[1]]
return[tuple[[<ast.Subscript object at 0x7da18f810cd0>, <ast.Subscript object at 0x7da18f812410>]]] | keyword[def] identifier[rdf] ( identifier[coords_a] , identifier[coords_b] , identifier[binsize] = literal[int] ,
identifier[cutoff] = literal[int] , identifier[periodic] = keyword[None] , identifier[normalize] = keyword[True] ):
literal[string]
identifier[period] = identifier[periodic] [ literal[int] , literal[int] ], identifier[periodic] [ literal[int] , literal[int] ], identifier[periodic] [ literal[int] , literal[int] ]
identifier[distances] = identifier[distances_within] ( identifier[coords_a] , identifier[coords_b] , identifier[cutoff] ,
identifier[np] . identifier[array] ( identifier[period] , identifier[dtype] = identifier[np] . identifier[double] ))
identifier[n_a] = identifier[len] ( identifier[coords_a] )
identifier[n_b] = identifier[len] ( identifier[coords_b] )
identifier[volume] = identifier[periodic] [ literal[int] , literal[int] ]* identifier[periodic] [ literal[int] , literal[int] ]* identifier[periodic] [ literal[int] , literal[int] ]
identifier[int_distances] = identifier[np] . identifier[rint] ( identifier[distances] / identifier[binsize] ). identifier[astype] ( identifier[int] )
identifier[hist] = identifier[np] . identifier[bincount] ( identifier[int_distances] )
identifier[bin_edges] = identifier[np] . identifier[arange] ( identifier[len] ( identifier[hist] )+ literal[int] )* identifier[binsize]
keyword[if] identifier[normalize] :
identifier[dr] = identifier[binsize]
identifier[normfac] = identifier[volume] /( identifier[n_a] * identifier[n_b] )
keyword[for] identifier[i] , identifier[r] keyword[in] identifier[enumerate] ( identifier[bin_edges] [ literal[int] :]):
identifier[hist] [ identifier[i] ]/=(( literal[int] / literal[int] * identifier[np] . identifier[pi] *( identifier[r] + literal[int] * identifier[dr] )** literal[int] )
-( literal[int] / literal[int] * identifier[np] . identifier[pi] *( identifier[r] - literal[int] * identifier[dr] )** literal[int] ))
identifier[hist] = identifier[hist] * identifier[normfac]
identifier[width] = identifier[cutoff] / identifier[binsize] + literal[int]
keyword[return] identifier[bin_edges] [ literal[int] : identifier[width] ], identifier[hist] [ literal[int] : identifier[width] ] | def rdf(coords_a, coords_b, binsize=0.002, cutoff=1.5, periodic=None, normalize=True):
"""Calculate the radial distribution function of *coords_a* against
*coords_b*.
**Parameters**
- coords_a: np.ndarray((3, NA))
first set of coordinates
- coords_b: np.ndarray((3, NB))
coordinates to calculate the RDF against
- periodic: np.ndarray((3, 3)) or None
Wether or not include periodic images in the calculation
- normalize: True or False
gromacs-like normalization
- cutoff:
where to cutoff the RDF
"""
period = (periodic[0, 0], periodic[1, 1], periodic[2, 2])
distances = distances_within(coords_a, coords_b, cutoff, np.array(period, dtype=np.double))
n_a = len(coords_a)
n_b = len(coords_b)
volume = periodic[0, 0] * periodic[1, 1] * periodic[2, 2]
int_distances = np.rint(distances / binsize).astype(int)
hist = np.bincount(int_distances)
bin_edges = np.arange(len(hist) + 1) * binsize
if normalize:
dr = binsize
normfac = volume / (n_a * n_b)
# Normalize this by a sphere shell
for (i, r) in enumerate(bin_edges[1:]):
hist[i] /= 4.0 / 3.0 * np.pi * (r + 0.5 * dr) ** 3 - 4.0 / 3.0 * np.pi * (r - 0.5 * dr) ** 3 # depends on [control=['for'], data=[]]
# Normalize by density
hist = hist * normfac # depends on [control=['if'], data=[]]
# Cutting up to rmax value
width = cutoff / binsize + 1
return (bin_edges[0:width], hist[0:width]) |
def K_chol(self):
    """
    Cholesky of the prior covariance K
    """
    # Return the memoized factorization when available; otherwise
    # compute it once via jitchol and cache it on the instance.
    if self._K_chol is not None:
        return self._K_chol
    self._K_chol = jitchol(self._K)
    return self._K_chol
constant[
Cholesky of the prior covariance K
]
if compare[name[self]._K_chol is constant[None]] begin[:]
name[self]._K_chol assign[=] call[name[jitchol], parameter[name[self]._K]]
return[name[self]._K_chol] | keyword[def] identifier[K_chol] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_K_chol] keyword[is] keyword[None] :
identifier[self] . identifier[_K_chol] = identifier[jitchol] ( identifier[self] . identifier[_K] )
keyword[return] identifier[self] . identifier[_K_chol] | def K_chol(self):
"""
Cholesky of the prior covariance K
"""
if self._K_chol is None:
self._K_chol = jitchol(self._K) # depends on [control=['if'], data=[]]
return self._K_chol |
def get_main_database_path(self):
    """
    Return the file path of the connection's "main" database.
    Returns
    -------
    path : unicode
        path to the database; empty string for in-memory databases.
        Implicitly ``None`` when no "main" entry is listed (should
        not happen for a live SQLite connection).
    """
    cur = self.conn.cursor()
    cur.execute("PRAGMA database_list")
    # Rows are (seq, name, file); pick the primary ("main") database.
    # The redundant Py2-era str("main") wrapper has been dropped.
    for row in cur.fetchall():
        if row[1] == "main":
            return row[2]
constant[
Should return the path to the database
Returns
-------
path : unicode
path to the database, empty string for in-memory databases
]
variable[cur] assign[=] call[name[self].conn.cursor, parameter[]]
call[name[cur].execute, parameter[constant[PRAGMA database_list]]]
variable[rows] assign[=] call[name[cur].fetchall, parameter[]]
for taget[name[row]] in starred[name[rows]] begin[:]
if compare[call[name[row]][constant[1]] equal[==] call[name[str], parameter[constant[main]]]] begin[:]
return[call[name[row]][constant[2]]] | keyword[def] identifier[get_main_database_path] ( identifier[self] ):
literal[string]
identifier[cur] = identifier[self] . identifier[conn] . identifier[cursor] ()
identifier[cur] . identifier[execute] ( literal[string] )
identifier[rows] = identifier[cur] . identifier[fetchall] ()
keyword[for] identifier[row] keyword[in] identifier[rows] :
keyword[if] identifier[row] [ literal[int] ]== identifier[str] ( literal[string] ):
keyword[return] identifier[row] [ literal[int] ] | def get_main_database_path(self):
"""
Should return the path to the database
Returns
-------
path : unicode
path to the database, empty string for in-memory databases
"""
cur = self.conn.cursor()
cur.execute('PRAGMA database_list')
rows = cur.fetchall()
for row in rows:
if row[1] == str('main'):
return row[2] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['row']] |
def from_path_and_array(cls, path, folder, y, classes=None, val_idxs=None, test_name=None,
                        num_workers=8, tfms=(None,None), bs=64):
    """ Read in images from a sub-folder, with labels supplied as a numpy array.
    Arguments:
        path: a root path of the data (used for storing trained models, precomputed values, etc)
        folder: a name of the folder in which training images are contained.
        y: numpy array which contains target labels ordered by filenames.
        classes: class labels (optional).
        val_idxs: index of images to be used for validation. e.g. output of `get_cv_idxs`.
            If None, default arguments to get_cv_idxs are used.
        test_name: a name of the folder which contains test images.
        num_workers: number of workers
        tfms: transformations (for data augmentations). e.g. output of `tfms_from_model`
        bs: batch size
    Returns:
        ImageClassifierData
    """
    assert tfms[0] is not None and tfms[1] is not None, "please provide transformations for your train and validation sets"
    assert not os.path.isabs(folder), "folder needs to be a relative path"
    # Sort the directory listing so filenames line up with the label array,
    # then prefix every entry with the (relative) folder name.
    file_names = sorted(os.listdir(f'{path}{folder}'))
    fnames = np.core.defchararray.add(f'{folder}/', file_names)
    return cls.from_names_and_array(path, fnames, y, classes, val_idxs, test_name,
                                    num_workers=num_workers, tfms=tfms, bs=bs)
constant[ Read in images given a sub-folder and their labels given a numpy array
Arguments:
path: a root path of the data (used for storing trained models, precomputed values, etc)
folder: a name of the folder in which training images are contained.
y: numpy array which contains target labels ordered by filenames.
bs: batch size
tfms: transformations (for data augmentations). e.g. output of `tfms_from_model`
val_idxs: index of images to be used for validation. e.g. output of `get_cv_idxs`.
If None, default arguments to get_cv_idxs are used.
test_name: a name of the folder which contains test images.
num_workers: number of workers
Returns:
ImageClassifierData
]
assert[<ast.UnaryOp object at 0x7da1b1d6f250>]
assert[<ast.UnaryOp object at 0x7da1b1d6efb0>]
variable[fnames] assign[=] call[name[np].core.defchararray.add, parameter[<ast.JoinedStr object at 0x7da1b1d6cb50>, call[name[sorted], parameter[call[name[os].listdir, parameter[<ast.JoinedStr object at 0x7da20cabdf00>]]]]]]
return[call[name[cls].from_names_and_array, parameter[name[path], name[fnames], name[y], name[classes], name[val_idxs], name[test_name]]]] | keyword[def] identifier[from_path_and_array] ( identifier[cls] , identifier[path] , identifier[folder] , identifier[y] , identifier[classes] = keyword[None] , identifier[val_idxs] = keyword[None] , identifier[test_name] = keyword[None] ,
identifier[num_workers] = literal[int] , identifier[tfms] =( keyword[None] , keyword[None] ), identifier[bs] = literal[int] ):
literal[string]
keyword[assert] keyword[not] ( identifier[tfms] [ literal[int] ] keyword[is] keyword[None] keyword[or] identifier[tfms] [ literal[int] ] keyword[is] keyword[None] ), literal[string]
keyword[assert] keyword[not] ( identifier[os] . identifier[path] . identifier[isabs] ( identifier[folder] )), literal[string]
identifier[fnames] = identifier[np] . identifier[core] . identifier[defchararray] . identifier[add] ( literal[string] , identifier[sorted] ( identifier[os] . identifier[listdir] ( literal[string] )))
keyword[return] identifier[cls] . identifier[from_names_and_array] ( identifier[path] , identifier[fnames] , identifier[y] , identifier[classes] , identifier[val_idxs] , identifier[test_name] ,
identifier[num_workers] = identifier[num_workers] , identifier[tfms] = identifier[tfms] , identifier[bs] = identifier[bs] ) | def from_path_and_array(cls, path, folder, y, classes=None, val_idxs=None, test_name=None, num_workers=8, tfms=(None, None), bs=64):
""" Read in images given a sub-folder and their labels given a numpy array
Arguments:
path: a root path of the data (used for storing trained models, precomputed values, etc)
folder: a name of the folder in which training images are contained.
y: numpy array which contains target labels ordered by filenames.
bs: batch size
tfms: transformations (for data augmentations). e.g. output of `tfms_from_model`
val_idxs: index of images to be used for validation. e.g. output of `get_cv_idxs`.
If None, default arguments to get_cv_idxs are used.
test_name: a name of the folder which contains test images.
num_workers: number of workers
Returns:
ImageClassifierData
"""
assert not (tfms[0] is None or tfms[1] is None), 'please provide transformations for your train and validation sets'
assert not os.path.isabs(folder), 'folder needs to be a relative path'
fnames = np.core.defchararray.add(f'{folder}/', sorted(os.listdir(f'{path}{folder}')))
return cls.from_names_and_array(path, fnames, y, classes, val_idxs, test_name, num_workers=num_workers, tfms=tfms, bs=bs) |
def save_global(self, obj, name=None, pack=struct.pack):
    """
    Save a "global".
    The name of this method is somewhat misleading: all types get
    dispatched here.
    """
    # Builtin types (list, dict, ...) have no importable qualified
    # name, so they are reduced through the _builtin_type helper.
    if obj.__module__ == "__builtin__" or obj.__module__ == "builtins":
        if obj in _BUILTIN_TYPE_NAMES:
            return self.save_reduce(_builtin_type, (_BUILTIN_TYPE_NAMES[obj],), obj=obj)
    if name is None:
        name = obj.__name__
    # Resolve the module the object claims to live in; fall back to
    # whichmodule(), and finally to '__main__' if that fails.
    modname = getattr(obj, "__module__", None)
    if modname is None:
        try:
            # whichmodule() could fail, see
            # https://bitbucket.org/gutworth/six/issues/63/importing-six-breaks-pickling
            modname = pickle.whichmodule(obj, name)
        except Exception:
            modname = '__main__'
    if modname == '__main__':
        themodule = None
    else:
        __import__(modname)
        themodule = sys.modules[modname]
        self.modules.add(themodule)
    # If the object is reachable as <module>.<name>, pickle it by
    # reference using the stock Pickler machinery.
    if hasattr(themodule, name) and getattr(themodule, name) is obj:
        return Pickler.save_global(self, obj, name)
    typ = type(obj)
    # Otherwise only classes can be handled: pickle them by value
    # (dynamically / interactively defined classes). Anything else is
    # unpicklable here.
    if typ is not obj and isinstance(obj, (type, _class_type)):
        self.save_dynamic_class(obj)
    else:
        raise pickle.PicklingError("Can't pickle %r" % obj)
constant[
Save a "global".
The name of this method is somewhat misleading: all types get
dispatched here.
]
if <ast.BoolOp object at 0x7da207f99de0> begin[:]
if compare[name[obj] in name[_BUILTIN_TYPE_NAMES]] begin[:]
return[call[name[self].save_reduce, parameter[name[_builtin_type], tuple[[<ast.Subscript object at 0x7da1b1fc94b0>]]]]]
if compare[name[name] is constant[None]] begin[:]
variable[name] assign[=] name[obj].__name__
variable[modname] assign[=] call[name[getattr], parameter[name[obj], constant[__module__], constant[None]]]
if compare[name[modname] is constant[None]] begin[:]
<ast.Try object at 0x7da1b1fc8af0>
if compare[name[modname] equal[==] constant[__main__]] begin[:]
variable[themodule] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b1fc8d90> begin[:]
return[call[name[Pickler].save_global, parameter[name[self], name[obj], name[name]]]]
variable[typ] assign[=] call[name[type], parameter[name[obj]]]
if <ast.BoolOp object at 0x7da1b1fcaa40> begin[:]
call[name[self].save_dynamic_class, parameter[name[obj]]] | keyword[def] identifier[save_global] ( identifier[self] , identifier[obj] , identifier[name] = keyword[None] , identifier[pack] = identifier[struct] . identifier[pack] ):
literal[string]
keyword[if] identifier[obj] . identifier[__module__] == literal[string] keyword[or] identifier[obj] . identifier[__module__] == literal[string] :
keyword[if] identifier[obj] keyword[in] identifier[_BUILTIN_TYPE_NAMES] :
keyword[return] identifier[self] . identifier[save_reduce] ( identifier[_builtin_type] ,( identifier[_BUILTIN_TYPE_NAMES] [ identifier[obj] ],), identifier[obj] = identifier[obj] )
keyword[if] identifier[name] keyword[is] keyword[None] :
identifier[name] = identifier[obj] . identifier[__name__]
identifier[modname] = identifier[getattr] ( identifier[obj] , literal[string] , keyword[None] )
keyword[if] identifier[modname] keyword[is] keyword[None] :
keyword[try] :
identifier[modname] = identifier[pickle] . identifier[whichmodule] ( identifier[obj] , identifier[name] )
keyword[except] identifier[Exception] :
identifier[modname] = literal[string]
keyword[if] identifier[modname] == literal[string] :
identifier[themodule] = keyword[None]
keyword[else] :
identifier[__import__] ( identifier[modname] )
identifier[themodule] = identifier[sys] . identifier[modules] [ identifier[modname] ]
identifier[self] . identifier[modules] . identifier[add] ( identifier[themodule] )
keyword[if] identifier[hasattr] ( identifier[themodule] , identifier[name] ) keyword[and] identifier[getattr] ( identifier[themodule] , identifier[name] ) keyword[is] identifier[obj] :
keyword[return] identifier[Pickler] . identifier[save_global] ( identifier[self] , identifier[obj] , identifier[name] )
identifier[typ] = identifier[type] ( identifier[obj] )
keyword[if] identifier[typ] keyword[is] keyword[not] identifier[obj] keyword[and] identifier[isinstance] ( identifier[obj] ,( identifier[type] , identifier[_class_type] )):
identifier[self] . identifier[save_dynamic_class] ( identifier[obj] )
keyword[else] :
keyword[raise] identifier[pickle] . identifier[PicklingError] ( literal[string] % identifier[obj] ) | def save_global(self, obj, name=None, pack=struct.pack):
"""
Save a "global".
The name of this method is somewhat misleading: all types get
dispatched here.
"""
if obj.__module__ == '__builtin__' or obj.__module__ == 'builtins':
if obj in _BUILTIN_TYPE_NAMES:
return self.save_reduce(_builtin_type, (_BUILTIN_TYPE_NAMES[obj],), obj=obj) # depends on [control=['if'], data=['obj', '_BUILTIN_TYPE_NAMES']] # depends on [control=['if'], data=[]]
if name is None:
name = obj.__name__ # depends on [control=['if'], data=['name']]
modname = getattr(obj, '__module__', None)
if modname is None:
try:
# whichmodule() could fail, see
# https://bitbucket.org/gutworth/six/issues/63/importing-six-breaks-pickling
modname = pickle.whichmodule(obj, name) # depends on [control=['try'], data=[]]
except Exception:
modname = '__main__' # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['modname']]
if modname == '__main__':
themodule = None # depends on [control=['if'], data=[]]
else:
__import__(modname)
themodule = sys.modules[modname]
self.modules.add(themodule)
if hasattr(themodule, name) and getattr(themodule, name) is obj:
return Pickler.save_global(self, obj, name) # depends on [control=['if'], data=[]]
typ = type(obj)
if typ is not obj and isinstance(obj, (type, _class_type)):
self.save_dynamic_class(obj) # depends on [control=['if'], data=[]]
else:
raise pickle.PicklingError("Can't pickle %r" % obj) |
def get_prep_value(self, value):
    """Returns field's value prepared for saving into a database."""
    # Non-localized values are handed straight to the parent field.
    if not isinstance(value, LocalizedValue):
        return super().get_prep_value(value)
    prep_value = LocalizedValue()
    for lang_key, lang_value in value.__dict__.items():
        # File objects coming from a form must be stored as text;
        # a missing value collapses to the empty string.
        text = '' if lang_value is None else six.text_type(lang_value)
        prep_value.set(lang_key, text)
    return super().get_prep_value(prep_value)
constant[Returns field's value prepared for saving into a database.]
if call[name[isinstance], parameter[name[value], name[LocalizedValue]]] begin[:]
variable[prep_value] assign[=] call[name[LocalizedValue], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b084d0c0>, <ast.Name object at 0x7da1b084ef20>]]] in starred[call[name[value].__dict__.items, parameter[]]] begin[:]
if compare[name[v] is constant[None]] begin[:]
call[name[prep_value].set, parameter[name[k], constant[]]]
return[call[call[name[super], parameter[]].get_prep_value, parameter[name[prep_value]]]]
return[call[call[name[super], parameter[]].get_prep_value, parameter[name[value]]]] | keyword[def] identifier[get_prep_value] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[LocalizedValue] ):
identifier[prep_value] = identifier[LocalizedValue] ()
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[value] . identifier[__dict__] . identifier[items] ():
keyword[if] identifier[v] keyword[is] keyword[None] :
identifier[prep_value] . identifier[set] ( identifier[k] , literal[string] )
keyword[else] :
identifier[prep_value] . identifier[set] ( identifier[k] , identifier[six] . identifier[text_type] ( identifier[v] ))
keyword[return] identifier[super] (). identifier[get_prep_value] ( identifier[prep_value] )
keyword[return] identifier[super] (). identifier[get_prep_value] ( identifier[value] ) | def get_prep_value(self, value):
"""Returns field's value prepared for saving into a database."""
if isinstance(value, LocalizedValue):
prep_value = LocalizedValue()
for (k, v) in value.__dict__.items():
if v is None:
prep_value.set(k, '') # depends on [control=['if'], data=[]]
else:
# Need to convert File objects provided via a form to
# unicode for database insertion
prep_value.set(k, six.text_type(v)) # depends on [control=['for'], data=[]]
return super().get_prep_value(prep_value) # depends on [control=['if'], data=[]]
return super().get_prep_value(value) |
def get_sql_state(self, state):
    """
    Get SQLStateGraph from state.
    """
    # EAFP: attach a fresh graph the first time this state is seen.
    try:
        return state.sql_state
    except AttributeError:
        state.sql_state = SQLStateGraph()
        return state.sql_state
constant[
Get SQLStateGraph from state.
]
if <ast.UnaryOp object at 0x7da1b23365c0> begin[:]
call[name[setattr], parameter[name[state], constant[sql_state], call[name[SQLStateGraph], parameter[]]]]
return[name[state].sql_state] | keyword[def] identifier[get_sql_state] ( identifier[self] , identifier[state] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[state] , literal[string] ):
identifier[setattr] ( identifier[state] , literal[string] , identifier[SQLStateGraph] ())
keyword[return] identifier[state] . identifier[sql_state] | def get_sql_state(self, state):
"""
Get SQLStateGraph from state.
"""
if not hasattr(state, 'sql_state'):
setattr(state, 'sql_state', SQLStateGraph()) # depends on [control=['if'], data=[]]
return state.sql_state |
def delete_thumbnails(self, source_cache=None):
    """
    Delete any thumbnails generated from the source image.
    :arg source_cache: An optional argument only used for optimisation
        where the source cache instance is already known.
    :returns: The number of files deleted.
    """
    # Honour a caller-supplied cache instance; only hit the backend
    # when one was not provided. (The previous code unconditionally
    # re-fetched it, defeating the optimisation this parameter
    # documents.)
    if source_cache is None:
        source_cache = self.get_source_cache()
    deleted = 0
    if source_cache:
        thumbnail_storage_hash = utils.get_storage_hash(
            self.thumbnail_storage)
        for thumbnail_cache in source_cache.thumbnails.all():
            # Only attempt to delete the file if it was stored using the
            # same storage as is currently used.
            if thumbnail_cache.storage_hash == thumbnail_storage_hash:
                self.thumbnail_storage.delete(thumbnail_cache.name)
            # Delete the cache thumbnail instance too.
            thumbnail_cache.delete()
            deleted += 1
    return deleted
constant[
Delete any thumbnails generated from the source image.
:arg source_cache: An optional argument only used for optimisation
where the source cache instance is already known.
:returns: The number of files deleted.
]
variable[source_cache] assign[=] call[name[self].get_source_cache, parameter[]]
variable[deleted] assign[=] constant[0]
if name[source_cache] begin[:]
variable[thumbnail_storage_hash] assign[=] call[name[utils].get_storage_hash, parameter[name[self].thumbnail_storage]]
for taget[name[thumbnail_cache]] in starred[call[name[source_cache].thumbnails.all, parameter[]]] begin[:]
if compare[name[thumbnail_cache].storage_hash equal[==] name[thumbnail_storage_hash]] begin[:]
call[name[self].thumbnail_storage.delete, parameter[name[thumbnail_cache].name]]
call[name[thumbnail_cache].delete, parameter[]]
<ast.AugAssign object at 0x7da20cabf6a0>
return[name[deleted]] | keyword[def] identifier[delete_thumbnails] ( identifier[self] , identifier[source_cache] = keyword[None] ):
literal[string]
identifier[source_cache] = identifier[self] . identifier[get_source_cache] ()
identifier[deleted] = literal[int]
keyword[if] identifier[source_cache] :
identifier[thumbnail_storage_hash] = identifier[utils] . identifier[get_storage_hash] (
identifier[self] . identifier[thumbnail_storage] )
keyword[for] identifier[thumbnail_cache] keyword[in] identifier[source_cache] . identifier[thumbnails] . identifier[all] ():
keyword[if] identifier[thumbnail_cache] . identifier[storage_hash] == identifier[thumbnail_storage_hash] :
identifier[self] . identifier[thumbnail_storage] . identifier[delete] ( identifier[thumbnail_cache] . identifier[name] )
identifier[thumbnail_cache] . identifier[delete] ()
identifier[deleted] += literal[int]
keyword[return] identifier[deleted] | def delete_thumbnails(self, source_cache=None):
"""
Delete any thumbnails generated from the source image.
:arg source_cache: An optional argument only used for optimisation
where the source cache instance is already known.
:returns: The number of files deleted.
"""
source_cache = self.get_source_cache()
deleted = 0
if source_cache:
thumbnail_storage_hash = utils.get_storage_hash(self.thumbnail_storage)
for thumbnail_cache in source_cache.thumbnails.all():
# Only attempt to delete the file if it was stored using the
# same storage as is currently used.
if thumbnail_cache.storage_hash == thumbnail_storage_hash:
self.thumbnail_storage.delete(thumbnail_cache.name)
# Delete the cache thumbnail instance too.
thumbnail_cache.delete()
deleted += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['thumbnail_cache']] # depends on [control=['if'], data=[]]
return deleted |
def countrate(self,binned=True,range=None,force=False):
    """Calculate effective stimulus in count/s.
    Also see :ref:`pysynphot-formula-countrate` and
    :ref:`pysynphot-formula-effstim`.
    .. note::
        This is the calculation performed when the ETC invokes
        ``countrate``.
    Parameters
    -----------
    binned : bool
        If `True` (default), use binned data.
        Otherwise, use native data.
    range : tuple or `None`
        If not `None`, it must be a sequence with two floating-point
        elements specifying the wavelength range (*inclusive*) in the
        unit of ``self.waveunits`` in the form of ``(low, high)``;
        This is the range over which the integration will be performed.
        If the specified range does not exactly match a value in the
        wavelength set:
        * If ``binned=True``, the bin containing the range value will
          be used. This assumes ``self.binwave`` contains bin centers.
        * If ``binned=False``, native dataset will be interpolated to
          the specified values. (*Not Implemented.*)
    force : bool
        If `False` (default), partially overlapping ranges
        will raise an exception. If `True`, a partial overlap will
        return the calculated value instead. Disjoint ranges raise
        an exception regardless.
    Returns
    -------
    ans : float
        Count rate.
    Raises
    ------
    NotImplementedError
        Wavelength range is defined for unbinned data.
    pysynphot.exceptions.DisjointError
        Wavelength range does not overlap with observation.
    pysynphot.exceptions.PartialOverlap
        Wavelength range only partially overlaps with observation.
    """
    # Lazily build the binned flux and bin-edge arrays if needed.
    if self._binflux is None:
        self.initbinflux()
    # Integrate in 'counts'; the caller's flux units are restored
    # before returning.
    myfluxunits = self.fluxunits.name
    self.convert('counts')
    warn=False
    if binned:
        #No range specified - use full range
        if range is None:
            lx,ux=(None,None)
        #Range is disjoint from binwave
        elif (range[0]>self.binwave[-1] or
              range[1]<self.binwave[0]):
            raise exceptions.DisjointError("%s is disjoint from obs.binwave %s"%(range,
                              [self.binwave[0],self.binwave[-1]]))
        #Partial overlap
        else:
            # Map the requested wavelengths onto bin indices via the
            # bin edges; a range endpoint that falls outside the edges
            # means only partial overlap, flagged via `warn`.
            if range[0] < self._bin_edges[0]:
                warn=True
                lx=None
            else:
                lx=np.searchsorted(self._bin_edges,range[0])-1
            if range[1] > self._bin_edges[-1]:
                warn=True
                ux=None
            else:
                ux=np.searchsorted(self._bin_edges,range[1])
        # math.fsum gives an accurately-rounded float sum over the
        # selected bins (full range when lx/ux are None).
        ans = math.fsum(self.binflux[lx:ux])
        if warn and not force:
            raise exceptions.PartialOverlap("%s does not fully overlap binwave range %s. Countrate in overlap area is %f"%(range,[self.binwave[0],self.binwave[-1]],ans))
    else:
        if range is None:
            ans = math.fsum(self.flux)
        else:
            raise NotImplementedError("Sorry, range+binned=False not yet implemented")
    # Restore the flux units the observation had on entry.
    self.convert(myfluxunits)
    return ans
constant[Calculate effective stimulus in count/s.
Also see :ref:`pysynphot-formula-countrate` and
:ref:`pysynphot-formula-effstim`.
.. note::
This is the calculation performed when the ETC invokes
``countrate``.
Parameters
-----------
binned : bool
If `True` (default), use binned data.
Otherwise, use native data.
range : tuple or `None`
If not `None`, it must be a sequence with two floating-point
elements specifying the wavelength range (*inclusive*) in the
unit of ``self.waveunits`` in the form of ``(low, high)``;
This is the range over which the integration will be performed.
If the specified range does not exactly match a value in the
wavelength set:
* If ``binned=True``, the bin containing the range value will
be used. This assumes ``self.binwave`` contains bin centers.
* If ``binned=False``, native dataset will be interpolated to
the specified values. (*Not Implemented.*)
force : bool
If `False` (default), partially overlapping ranges
will raise an exception. If `True`, a partial overlap will
return the calculated value instead. Disjoint ranges raise
an exception regardless.
Returns
-------
ans : float
Count rate.
Raises
------
NotImplementedError
Wavelength range is defined for unbinned data.
pysynphot.exceptions.DisjointError
Wavelength range does not overlap with observation.
pysynphot.exceptions.PartialOverlap
Wavelength range only partially overlaps with observation.
]
if compare[name[self]._binflux is constant[None]] begin[:]
call[name[self].initbinflux, parameter[]]
variable[myfluxunits] assign[=] name[self].fluxunits.name
call[name[self].convert, parameter[constant[counts]]]
variable[warn] assign[=] constant[False]
if name[binned] begin[:]
if compare[name[range] is constant[None]] begin[:]
<ast.Tuple object at 0x7da1b2347e20> assign[=] tuple[[<ast.Constant object at 0x7da1b2346b90>, <ast.Constant object at 0x7da1b2346f20>]]
variable[ans] assign[=] call[name[math].fsum, parameter[call[name[self].binflux][<ast.Slice object at 0x7da1b2344ac0>]]]
if <ast.BoolOp object at 0x7da1b2346620> begin[:]
<ast.Raise object at 0x7da1b2345390>
call[name[self].convert, parameter[name[myfluxunits]]]
return[name[ans]] | keyword[def] identifier[countrate] ( identifier[self] , identifier[binned] = keyword[True] , identifier[range] = keyword[None] , identifier[force] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[_binflux] keyword[is] keyword[None] :
identifier[self] . identifier[initbinflux] ()
identifier[myfluxunits] = identifier[self] . identifier[fluxunits] . identifier[name]
identifier[self] . identifier[convert] ( literal[string] )
identifier[warn] = keyword[False]
keyword[if] identifier[binned] :
keyword[if] identifier[range] keyword[is] keyword[None] :
identifier[lx] , identifier[ux] =( keyword[None] , keyword[None] )
keyword[elif] ( identifier[range] [ literal[int] ]> identifier[self] . identifier[binwave] [- literal[int] ] keyword[or]
identifier[range] [ literal[int] ]< identifier[self] . identifier[binwave] [ literal[int] ]):
keyword[raise] identifier[exceptions] . identifier[DisjointError] ( literal[string] %( identifier[range] ,
[ identifier[self] . identifier[binwave] [ literal[int] ], identifier[self] . identifier[binwave] [- literal[int] ]]))
keyword[else] :
keyword[if] identifier[range] [ literal[int] ]< identifier[self] . identifier[_bin_edges] [ literal[int] ]:
identifier[warn] = keyword[True]
identifier[lx] = keyword[None]
keyword[else] :
identifier[lx] = identifier[np] . identifier[searchsorted] ( identifier[self] . identifier[_bin_edges] , identifier[range] [ literal[int] ])- literal[int]
keyword[if] identifier[range] [ literal[int] ]> identifier[self] . identifier[_bin_edges] [- literal[int] ]:
identifier[warn] = keyword[True]
identifier[ux] = keyword[None]
keyword[else] :
identifier[ux] = identifier[np] . identifier[searchsorted] ( identifier[self] . identifier[_bin_edges] , identifier[range] [ literal[int] ])
identifier[ans] = identifier[math] . identifier[fsum] ( identifier[self] . identifier[binflux] [ identifier[lx] : identifier[ux] ])
keyword[if] identifier[warn] keyword[and] keyword[not] identifier[force] :
keyword[raise] identifier[exceptions] . identifier[PartialOverlap] ( literal[string] %( identifier[range] ,[ identifier[self] . identifier[binwave] [ literal[int] ], identifier[self] . identifier[binwave] [- literal[int] ]], identifier[ans] ))
keyword[else] :
keyword[if] identifier[range] keyword[is] keyword[None] :
identifier[ans] = identifier[math] . identifier[fsum] ( identifier[self] . identifier[flux] )
keyword[else] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
identifier[self] . identifier[convert] ( identifier[myfluxunits] )
keyword[return] identifier[ans] | def countrate(self, binned=True, range=None, force=False):
"""Calculate effective stimulus in count/s.
Also see :ref:`pysynphot-formula-countrate` and
:ref:`pysynphot-formula-effstim`.
.. note::
This is the calculation performed when the ETC invokes
``countrate``.
Parameters
-----------
binned : bool
If `True` (default), use binned data.
Otherwise, use native data.
range : tuple or `None`
If not `None`, it must be a sequence with two floating-point
elements specifying the wavelength range (*inclusive*) in the
unit of ``self.waveunits`` in the form of ``(low, high)``;
This is the range over which the integration will be performed.
If the specified range does not exactly match a value in the
wavelength set:
* If ``binned=True``, the bin containing the range value will
be used. This assumes ``self.binwave`` contains bin centers.
* If ``binned=False``, native dataset will be interpolated to
the specified values. (*Not Implemented.*)
force : bool
If `False` (default), partially overlapping ranges
will raise an exception. If `True`, a partial overlap will
return the calculated value instead. Disjoint ranges raise
an exception regardless.
Returns
-------
ans : float
Count rate.
Raises
------
NotImplementedError
Wavelength range is defined for unbinned data.
pysynphot.exceptions.DisjointError
Wavelength range does not overlap with observation.
pysynphot.exceptions.PartialOverlap
Wavelength range only partially overlaps with observation.
"""
if self._binflux is None:
self.initbinflux() # depends on [control=['if'], data=[]]
myfluxunits = self.fluxunits.name
self.convert('counts')
warn = False
if binned:
#No range specified - use full range
if range is None:
(lx, ux) = (None, None) # depends on [control=['if'], data=[]]
#Range is disjoint from binwave
elif range[0] > self.binwave[-1] or range[1] < self.binwave[0]:
raise exceptions.DisjointError('%s is disjoint from obs.binwave %s' % (range, [self.binwave[0], self.binwave[-1]])) # depends on [control=['if'], data=[]]
else:
#Partial overlap
if range[0] < self._bin_edges[0]:
warn = True
lx = None # depends on [control=['if'], data=[]]
else:
lx = np.searchsorted(self._bin_edges, range[0]) - 1
if range[1] > self._bin_edges[-1]:
warn = True
ux = None # depends on [control=['if'], data=[]]
else:
ux = np.searchsorted(self._bin_edges, range[1])
ans = math.fsum(self.binflux[lx:ux])
if warn and (not force):
raise exceptions.PartialOverlap('%s does not fully overlap binwave range %s. Countrate in overlap area is %f' % (range, [self.binwave[0], self.binwave[-1]], ans)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif range is None:
ans = math.fsum(self.flux) # depends on [control=['if'], data=[]]
else:
raise NotImplementedError('Sorry, range+binned=False not yet implemented')
self.convert(myfluxunits)
return ans |
def process_file(path):
""" Open a single labeled image at path and get needed information, return as a dictionary"""
info = dict()
with fits.open(path) as hdu:
head = hdu[0].header
data = hdu[0].data
labels = {theme: value for value, theme in list(hdu[1].data)}
info['filename'] = os.path.basename(path)
info['trainer'] = head['expert']
info['date-label'] = dateparser.parse(head['date-lab'])
info['date-observation'] = dateparser.parse(head['date-end'])
for theme in themes:
info[theme + "_count"] = np.sum(data == labels[theme])
return info | def function[process_file, parameter[path]]:
constant[ Open a single labeled image at path and get needed information, return as a dictionary]
variable[info] assign[=] call[name[dict], parameter[]]
with call[name[fits].open, parameter[name[path]]] begin[:]
variable[head] assign[=] call[name[hdu]][constant[0]].header
variable[data] assign[=] call[name[hdu]][constant[0]].data
variable[labels] assign[=] <ast.DictComp object at 0x7da1b1fb83d0>
call[name[info]][constant[filename]] assign[=] call[name[os].path.basename, parameter[name[path]]]
call[name[info]][constant[trainer]] assign[=] call[name[head]][constant[expert]]
call[name[info]][constant[date-label]] assign[=] call[name[dateparser].parse, parameter[call[name[head]][constant[date-lab]]]]
call[name[info]][constant[date-observation]] assign[=] call[name[dateparser].parse, parameter[call[name[head]][constant[date-end]]]]
for taget[name[theme]] in starred[name[themes]] begin[:]
call[name[info]][binary_operation[name[theme] + constant[_count]]] assign[=] call[name[np].sum, parameter[compare[name[data] equal[==] call[name[labels]][name[theme]]]]]
return[name[info]] | keyword[def] identifier[process_file] ( identifier[path] ):
literal[string]
identifier[info] = identifier[dict] ()
keyword[with] identifier[fits] . identifier[open] ( identifier[path] ) keyword[as] identifier[hdu] :
identifier[head] = identifier[hdu] [ literal[int] ]. identifier[header]
identifier[data] = identifier[hdu] [ literal[int] ]. identifier[data]
identifier[labels] ={ identifier[theme] : identifier[value] keyword[for] identifier[value] , identifier[theme] keyword[in] identifier[list] ( identifier[hdu] [ literal[int] ]. identifier[data] )}
identifier[info] [ literal[string] ]= identifier[os] . identifier[path] . identifier[basename] ( identifier[path] )
identifier[info] [ literal[string] ]= identifier[head] [ literal[string] ]
identifier[info] [ literal[string] ]= identifier[dateparser] . identifier[parse] ( identifier[head] [ literal[string] ])
identifier[info] [ literal[string] ]= identifier[dateparser] . identifier[parse] ( identifier[head] [ literal[string] ])
keyword[for] identifier[theme] keyword[in] identifier[themes] :
identifier[info] [ identifier[theme] + literal[string] ]= identifier[np] . identifier[sum] ( identifier[data] == identifier[labels] [ identifier[theme] ])
keyword[return] identifier[info] | def process_file(path):
""" Open a single labeled image at path and get needed information, return as a dictionary"""
info = dict()
with fits.open(path) as hdu:
head = hdu[0].header
data = hdu[0].data
labels = {theme: value for (value, theme) in list(hdu[1].data)} # depends on [control=['with'], data=['hdu']]
info['filename'] = os.path.basename(path)
info['trainer'] = head['expert']
info['date-label'] = dateparser.parse(head['date-lab'])
info['date-observation'] = dateparser.parse(head['date-end'])
for theme in themes:
info[theme + '_count'] = np.sum(data == labels[theme]) # depends on [control=['for'], data=['theme']]
return info |
def _reconnect(self, errorState):
"""
Attempt to reconnect.
If the current back-off delay is 0, L{connect} is called. Otherwise,
it will cause a transition to the C{'waiting'} state, ultimately
causing a call to L{connect} when the delay expires.
"""
def connect():
if self.noisy:
log.msg("Reconnecting now.")
self.connect()
backOff = self.backOffs[errorState]
if self._errorState != errorState or self._delay is None:
self._errorState = errorState
self._delay = backOff['initial']
else:
self._delay = min(backOff['max'], self._delay * backOff['factor'])
if self._delay == 0:
connect()
else:
self._reconnectDelayedCall = self.reactor.callLater(self._delay,
connect)
self._toState('waiting') | def function[_reconnect, parameter[self, errorState]]:
constant[
Attempt to reconnect.
If the current back-off delay is 0, L{connect} is called. Otherwise,
it will cause a transition to the C{'waiting'} state, ultimately
causing a call to L{connect} when the delay expires.
]
def function[connect, parameter[]]:
if name[self].noisy begin[:]
call[name[log].msg, parameter[constant[Reconnecting now.]]]
call[name[self].connect, parameter[]]
variable[backOff] assign[=] call[name[self].backOffs][name[errorState]]
if <ast.BoolOp object at 0x7da1b0658d60> begin[:]
name[self]._errorState assign[=] name[errorState]
name[self]._delay assign[=] call[name[backOff]][constant[initial]]
if compare[name[self]._delay equal[==] constant[0]] begin[:]
call[name[connect], parameter[]] | keyword[def] identifier[_reconnect] ( identifier[self] , identifier[errorState] ):
literal[string]
keyword[def] identifier[connect] ():
keyword[if] identifier[self] . identifier[noisy] :
identifier[log] . identifier[msg] ( literal[string] )
identifier[self] . identifier[connect] ()
identifier[backOff] = identifier[self] . identifier[backOffs] [ identifier[errorState] ]
keyword[if] identifier[self] . identifier[_errorState] != identifier[errorState] keyword[or] identifier[self] . identifier[_delay] keyword[is] keyword[None] :
identifier[self] . identifier[_errorState] = identifier[errorState]
identifier[self] . identifier[_delay] = identifier[backOff] [ literal[string] ]
keyword[else] :
identifier[self] . identifier[_delay] = identifier[min] ( identifier[backOff] [ literal[string] ], identifier[self] . identifier[_delay] * identifier[backOff] [ literal[string] ])
keyword[if] identifier[self] . identifier[_delay] == literal[int] :
identifier[connect] ()
keyword[else] :
identifier[self] . identifier[_reconnectDelayedCall] = identifier[self] . identifier[reactor] . identifier[callLater] ( identifier[self] . identifier[_delay] ,
identifier[connect] )
identifier[self] . identifier[_toState] ( literal[string] ) | def _reconnect(self, errorState):
"""
Attempt to reconnect.
If the current back-off delay is 0, L{connect} is called. Otherwise,
it will cause a transition to the C{'waiting'} state, ultimately
causing a call to L{connect} when the delay expires.
"""
def connect():
if self.noisy:
log.msg('Reconnecting now.') # depends on [control=['if'], data=[]]
self.connect()
backOff = self.backOffs[errorState]
if self._errorState != errorState or self._delay is None:
self._errorState = errorState
self._delay = backOff['initial'] # depends on [control=['if'], data=[]]
else:
self._delay = min(backOff['max'], self._delay * backOff['factor'])
if self._delay == 0:
connect() # depends on [control=['if'], data=[]]
else:
self._reconnectDelayedCall = self.reactor.callLater(self._delay, connect)
self._toState('waiting') |
def polish(commit_indexes=None, urls=None):
'''
Apply certain behaviors to commits or URLs that need polishing before they are ready for screenshots
For example, if you have 10 commits in a row where static file links were broken, you could re-write the html
in memory as it is interpreted.
Keyword arguments:
commit_indexes -- A list of indexes to apply the wrapped function to
url -- A list of URLs to apply the wrapped function to
'''
def decorator(f):
if commit_indexes:
f.polish_commit_indexes = commit_indexes
if urls:
f.polish_urls = urls
@wraps(f)
def wrappee(*args, **kwargs):
return f(*args, **kwargs)
return wrappee
return decorator | def function[polish, parameter[commit_indexes, urls]]:
constant[
Apply certain behaviors to commits or URLs that need polishing before they are ready for screenshots
For example, if you have 10 commits in a row where static file links were broken, you could re-write the html
in memory as it is interpreted.
Keyword arguments:
commit_indexes -- A list of indexes to apply the wrapped function to
url -- A list of URLs to apply the wrapped function to
]
def function[decorator, parameter[f]]:
if name[commit_indexes] begin[:]
name[f].polish_commit_indexes assign[=] name[commit_indexes]
if name[urls] begin[:]
name[f].polish_urls assign[=] name[urls]
def function[wrappee, parameter[]]:
return[call[name[f], parameter[<ast.Starred object at 0x7da204344550>]]]
return[name[wrappee]]
return[name[decorator]] | keyword[def] identifier[polish] ( identifier[commit_indexes] = keyword[None] , identifier[urls] = keyword[None] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[f] ):
keyword[if] identifier[commit_indexes] :
identifier[f] . identifier[polish_commit_indexes] = identifier[commit_indexes]
keyword[if] identifier[urls] :
identifier[f] . identifier[polish_urls] = identifier[urls]
@ identifier[wraps] ( identifier[f] )
keyword[def] identifier[wrappee] (* identifier[args] ,** identifier[kwargs] ):
keyword[return] identifier[f] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[wrappee]
keyword[return] identifier[decorator] | def polish(commit_indexes=None, urls=None):
"""
Apply certain behaviors to commits or URLs that need polishing before they are ready for screenshots
For example, if you have 10 commits in a row where static file links were broken, you could re-write the html
in memory as it is interpreted.
Keyword arguments:
commit_indexes -- A list of indexes to apply the wrapped function to
url -- A list of URLs to apply the wrapped function to
"""
def decorator(f):
if commit_indexes:
f.polish_commit_indexes = commit_indexes # depends on [control=['if'], data=[]]
if urls:
f.polish_urls = urls # depends on [control=['if'], data=[]]
@wraps(f)
def wrappee(*args, **kwargs):
return f(*args, **kwargs)
return wrappee
return decorator |
def schedule_telegram_message(message, to, sender=None, priority=None):
"""Schedules Telegram message for delivery.
:param str message: text to send.
:param list|str|unicode to: recipients addresses or Django User model heir instances with `telegram` attributes.
:param User sender: User model heir instance
:param int priority: number describing message priority. If set overrides priority provided with message type.
"""
schedule_messages(message, recipients('telegram', to), sender=sender, priority=priority) | def function[schedule_telegram_message, parameter[message, to, sender, priority]]:
constant[Schedules Telegram message for delivery.
:param str message: text to send.
:param list|str|unicode to: recipients addresses or Django User model heir instances with `telegram` attributes.
:param User sender: User model heir instance
:param int priority: number describing message priority. If set overrides priority provided with message type.
]
call[name[schedule_messages], parameter[name[message], call[name[recipients], parameter[constant[telegram], name[to]]]]] | keyword[def] identifier[schedule_telegram_message] ( identifier[message] , identifier[to] , identifier[sender] = keyword[None] , identifier[priority] = keyword[None] ):
literal[string]
identifier[schedule_messages] ( identifier[message] , identifier[recipients] ( literal[string] , identifier[to] ), identifier[sender] = identifier[sender] , identifier[priority] = identifier[priority] ) | def schedule_telegram_message(message, to, sender=None, priority=None):
"""Schedules Telegram message for delivery.
:param str message: text to send.
:param list|str|unicode to: recipients addresses or Django User model heir instances with `telegram` attributes.
:param User sender: User model heir instance
:param int priority: number describing message priority. If set overrides priority provided with message type.
"""
schedule_messages(message, recipients('telegram', to), sender=sender, priority=priority) |
def get(cls, label='default', path=None):
"""Read a server configuration from a configuration file.
This method extends :meth:`nailgun.config.BaseServerConfig.get`. Please
read up on that method before trying to understand this one.
The entity classes rely on the requests library to be a transport
mechanism. The methods provided by that library, such as ``get`` and
``post``, accept an ``auth`` argument. That argument must be a tuple:
Auth tuple to enable Basic/Digest/Custom HTTP Auth.
However, the JSON decoder does not recognize a tuple as a type, and
represents sequences of elements as a tuple. Compensate for that by
converting ``auth`` to a two element tuple if it is a two element list.
This override is done here, and not in the base class, because the base
class may be extracted out into a separate library and used in other
contexts. In those contexts, the presence of a list may not matter or
may be desirable.
"""
config = super(ServerConfig, cls).get(label, path)
if hasattr(config, 'auth') and isinstance(config.auth, list):
config.auth = tuple(config.auth)
return config | def function[get, parameter[cls, label, path]]:
constant[Read a server configuration from a configuration file.
This method extends :meth:`nailgun.config.BaseServerConfig.get`. Please
read up on that method before trying to understand this one.
The entity classes rely on the requests library to be a transport
mechanism. The methods provided by that library, such as ``get`` and
``post``, accept an ``auth`` argument. That argument must be a tuple:
Auth tuple to enable Basic/Digest/Custom HTTP Auth.
However, the JSON decoder does not recognize a tuple as a type, and
represents sequences of elements as a tuple. Compensate for that by
converting ``auth`` to a two element tuple if it is a two element list.
This override is done here, and not in the base class, because the base
class may be extracted out into a separate library and used in other
contexts. In those contexts, the presence of a list may not matter or
may be desirable.
]
variable[config] assign[=] call[call[name[super], parameter[name[ServerConfig], name[cls]]].get, parameter[name[label], name[path]]]
if <ast.BoolOp object at 0x7da1b0678ee0> begin[:]
name[config].auth assign[=] call[name[tuple], parameter[name[config].auth]]
return[name[config]] | keyword[def] identifier[get] ( identifier[cls] , identifier[label] = literal[string] , identifier[path] = keyword[None] ):
literal[string]
identifier[config] = identifier[super] ( identifier[ServerConfig] , identifier[cls] ). identifier[get] ( identifier[label] , identifier[path] )
keyword[if] identifier[hasattr] ( identifier[config] , literal[string] ) keyword[and] identifier[isinstance] ( identifier[config] . identifier[auth] , identifier[list] ):
identifier[config] . identifier[auth] = identifier[tuple] ( identifier[config] . identifier[auth] )
keyword[return] identifier[config] | def get(cls, label='default', path=None):
"""Read a server configuration from a configuration file.
This method extends :meth:`nailgun.config.BaseServerConfig.get`. Please
read up on that method before trying to understand this one.
The entity classes rely on the requests library to be a transport
mechanism. The methods provided by that library, such as ``get`` and
``post``, accept an ``auth`` argument. That argument must be a tuple:
Auth tuple to enable Basic/Digest/Custom HTTP Auth.
However, the JSON decoder does not recognize a tuple as a type, and
represents sequences of elements as a tuple. Compensate for that by
converting ``auth`` to a two element tuple if it is a two element list.
This override is done here, and not in the base class, because the base
class may be extracted out into a separate library and used in other
contexts. In those contexts, the presence of a list may not matter or
may be desirable.
"""
config = super(ServerConfig, cls).get(label, path)
if hasattr(config, 'auth') and isinstance(config.auth, list):
config.auth = tuple(config.auth) # depends on [control=['if'], data=[]]
return config |
def clear(self, key=None):
"""
Clear a cache entry, or the entire cache if no key is given
Returns CACHE_DISABLED if the cache is disabled
Returns True on successful operation
:param key: optional key to limit the clear operation to (defaults to None)
"""
if not self.options.enabled:
return CACHE_DISABLED
logger.debug('clear(key={})'.format(repr(key)))
if key is not None and key in self._dict.keys():
del self._dict[key]
logger.info('cache cleared for key: ' + repr(key))
elif not key:
for cached_key in [k for k in self._dict.keys()]:
del self._dict[cached_key]
logger.info('cache cleared for ALL keys')
return True | def function[clear, parameter[self, key]]:
constant[
Clear a cache entry, or the entire cache if no key is given
Returns CACHE_DISABLED if the cache is disabled
Returns True on successful operation
:param key: optional key to limit the clear operation to (defaults to None)
]
if <ast.UnaryOp object at 0x7da20c76c3a0> begin[:]
return[name[CACHE_DISABLED]]
call[name[logger].debug, parameter[call[constant[clear(key={})].format, parameter[call[name[repr], parameter[name[key]]]]]]]
if <ast.BoolOp object at 0x7da20c76c3d0> begin[:]
<ast.Delete object at 0x7da20c76dba0>
call[name[logger].info, parameter[binary_operation[constant[cache cleared for key: ] + call[name[repr], parameter[name[key]]]]]]
return[constant[True]] | keyword[def] identifier[clear] ( identifier[self] , identifier[key] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[options] . identifier[enabled] :
keyword[return] identifier[CACHE_DISABLED]
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[repr] ( identifier[key] )))
keyword[if] identifier[key] keyword[is] keyword[not] keyword[None] keyword[and] identifier[key] keyword[in] identifier[self] . identifier[_dict] . identifier[keys] ():
keyword[del] identifier[self] . identifier[_dict] [ identifier[key] ]
identifier[logger] . identifier[info] ( literal[string] + identifier[repr] ( identifier[key] ))
keyword[elif] keyword[not] identifier[key] :
keyword[for] identifier[cached_key] keyword[in] [ identifier[k] keyword[for] identifier[k] keyword[in] identifier[self] . identifier[_dict] . identifier[keys] ()]:
keyword[del] identifier[self] . identifier[_dict] [ identifier[cached_key] ]
identifier[logger] . identifier[info] ( literal[string] )
keyword[return] keyword[True] | def clear(self, key=None):
"""
Clear a cache entry, or the entire cache if no key is given
Returns CACHE_DISABLED if the cache is disabled
Returns True on successful operation
:param key: optional key to limit the clear operation to (defaults to None)
"""
if not self.options.enabled:
return CACHE_DISABLED # depends on [control=['if'], data=[]]
logger.debug('clear(key={})'.format(repr(key)))
if key is not None and key in self._dict.keys():
del self._dict[key]
logger.info('cache cleared for key: ' + repr(key)) # depends on [control=['if'], data=[]]
elif not key:
for cached_key in [k for k in self._dict.keys()]:
del self._dict[cached_key] # depends on [control=['for'], data=['cached_key']]
logger.info('cache cleared for ALL keys') # depends on [control=['if'], data=[]]
return True |
def get_key_goids(self, goids):
"""Given GO IDs, return key GO IDs."""
go2obj = self.go2obj
return set(go2obj[go].id for go in goids) | def function[get_key_goids, parameter[self, goids]]:
constant[Given GO IDs, return key GO IDs.]
variable[go2obj] assign[=] name[self].go2obj
return[call[name[set], parameter[<ast.GeneratorExp object at 0x7da18f812590>]]] | keyword[def] identifier[get_key_goids] ( identifier[self] , identifier[goids] ):
literal[string]
identifier[go2obj] = identifier[self] . identifier[go2obj]
keyword[return] identifier[set] ( identifier[go2obj] [ identifier[go] ]. identifier[id] keyword[for] identifier[go] keyword[in] identifier[goids] ) | def get_key_goids(self, goids):
"""Given GO IDs, return key GO IDs."""
go2obj = self.go2obj
return set((go2obj[go].id for go in goids)) |
async def build_get_txn_fees_req(wallet_handle: int,
submitter_did: str,
payment_method: str) -> str:
"""
Builds Indy request for getting fees for transactions in the ledger
:param wallet_handle: wallet handle (created by open_wallet).
:param submitter_did : (Option) DID of request sender
:param payment_method: Payment method to use (for example, 'sov').
:return: set_txn_fees_json: Indy request for setting fees for transactions in the ledger
"""
logger = logging.getLogger(__name__)
logger.debug("build_get_txn_fees_req: >>> wallet_handle: %r, submitter_did: %r, payment_method: %r",
wallet_handle,
submitter_did,
payment_method)
if not hasattr(build_get_txn_fees_req, "cb"):
logger.debug("build_get_txn_fees_req: Creating callback")
build_get_txn_fees_req.cb = create_cb(CFUNCTYPE(None, c_int32, c_int32, c_char_p))
c_wallet_handle = c_int32(wallet_handle)
c_submitter_did = c_char_p(submitter_did.encode('utf-8')) if submitter_did is not None else None
c_payment_method = c_char_p(payment_method.encode('utf-8'))
get_txn_fees_json = await do_call('indy_build_get_txn_fees_req',
c_wallet_handle,
c_submitter_did,
c_payment_method,
build_get_txn_fees_req.cb)
res = get_txn_fees_json.decode()
logger.debug("build_get_txn_fees_req: <<< res: %r", res)
return res | <ast.AsyncFunctionDef object at 0x7da1b1f3ac20> | keyword[async] keyword[def] identifier[build_get_txn_fees_req] ( identifier[wallet_handle] : identifier[int] ,
identifier[submitter_did] : identifier[str] ,
identifier[payment_method] : identifier[str] )-> identifier[str] :
literal[string]
identifier[logger] = identifier[logging] . identifier[getLogger] ( identifier[__name__] )
identifier[logger] . identifier[debug] ( literal[string] ,
identifier[wallet_handle] ,
identifier[submitter_did] ,
identifier[payment_method] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[build_get_txn_fees_req] , literal[string] ):
identifier[logger] . identifier[debug] ( literal[string] )
identifier[build_get_txn_fees_req] . identifier[cb] = identifier[create_cb] ( identifier[CFUNCTYPE] ( keyword[None] , identifier[c_int32] , identifier[c_int32] , identifier[c_char_p] ))
identifier[c_wallet_handle] = identifier[c_int32] ( identifier[wallet_handle] )
identifier[c_submitter_did] = identifier[c_char_p] ( identifier[submitter_did] . identifier[encode] ( literal[string] )) keyword[if] identifier[submitter_did] keyword[is] keyword[not] keyword[None] keyword[else] keyword[None]
identifier[c_payment_method] = identifier[c_char_p] ( identifier[payment_method] . identifier[encode] ( literal[string] ))
identifier[get_txn_fees_json] = keyword[await] identifier[do_call] ( literal[string] ,
identifier[c_wallet_handle] ,
identifier[c_submitter_did] ,
identifier[c_payment_method] ,
identifier[build_get_txn_fees_req] . identifier[cb] )
identifier[res] = identifier[get_txn_fees_json] . identifier[decode] ()
identifier[logger] . identifier[debug] ( literal[string] , identifier[res] )
keyword[return] identifier[res] | async def build_get_txn_fees_req(wallet_handle: int, submitter_did: str, payment_method: str) -> str:
"""
Builds Indy request for getting fees for transactions in the ledger
:param wallet_handle: wallet handle (created by open_wallet).
:param submitter_did : (Option) DID of request sender
:param payment_method: Payment method to use (for example, 'sov').
:return: set_txn_fees_json: Indy request for setting fees for transactions in the ledger
"""
logger = logging.getLogger(__name__)
logger.debug('build_get_txn_fees_req: >>> wallet_handle: %r, submitter_did: %r, payment_method: %r', wallet_handle, submitter_did, payment_method)
if not hasattr(build_get_txn_fees_req, 'cb'):
logger.debug('build_get_txn_fees_req: Creating callback')
build_get_txn_fees_req.cb = create_cb(CFUNCTYPE(None, c_int32, c_int32, c_char_p)) # depends on [control=['if'], data=[]]
c_wallet_handle = c_int32(wallet_handle)
c_submitter_did = c_char_p(submitter_did.encode('utf-8')) if submitter_did is not None else None
c_payment_method = c_char_p(payment_method.encode('utf-8'))
get_txn_fees_json = await do_call('indy_build_get_txn_fees_req', c_wallet_handle, c_submitter_did, c_payment_method, build_get_txn_fees_req.cb)
res = get_txn_fees_json.decode()
logger.debug('build_get_txn_fees_req: <<< res: %r', res)
return res |
def unique_sites(self, scaled_positions, symprec=1e-3, output_mask=False):
    """Return the symmetry-unique subset of *scaled_positions*.

    Positions are first normalised with ``symmetry_normalised_sites`` and
    then deduplicated: two sites whose normalised coordinates agree within
    *symprec* in every component are considered equivalent, and only the
    first occurrence (in the original ordering) is kept.

    If *output_mask* is True, a boolean array marking the retained rows is
    returned alongside the unique positions.

    Example:

    >>> from ase.lattice.spacegroup import Spacegroup
    >>> sg = Spacegroup(225)  # fcc
    >>> sg.unique_sites([[0.0, 0.0, 0.0],
    ...                  [0.5, 0.5, 0.0],
    ...                  [1.0, 0.0, 0.0],
    ...                  [0.5, 0.0, 0.0]])
    array([[ 0. ,  0. ,  0. ],
           [ 0.5,  0. ,  0. ]])
    """
    positions = np.array(scaled_positions, ndmin=2)
    normalised = self.symmetry_normalised_sites(positions)
    # Lexicographic sort brings equivalent (duplicate) sites next to each
    # other; a row is "new" when it differs from its sorted predecessor.
    order = np.lexsort(normalised.T)
    inverse_order = order.argsort()
    is_new = np.abs(np.diff(normalised[order], axis=0)).max(axis=1) > symprec
    keep_sorted = np.concatenate(([True], is_new))
    # Map the first-occurrence flags back to the original row order.
    keep = keep_sorted[inverse_order]
    if output_mask:
        return positions[keep], keep
    return positions[keep]
constant[Returns a subset of *scaled_positions* containing only the
symmetry-unique positions. If *output_mask* is True, a boolean
array masking the subset is also returned.
Example:
>>> from ase.lattice.spacegroup import Spacegroup
>>> sg = Spacegroup(225) # fcc
>>> sg.unique_sites([[0.0, 0.0, 0.0],
... [0.5, 0.5, 0.0],
... [1.0, 0.0, 0.0],
... [0.5, 0.0, 0.0]])
array([[ 0. , 0. , 0. ],
[ 0.5, 0. , 0. ]])
]
variable[scaled] assign[=] call[name[np].array, parameter[name[scaled_positions]]]
variable[symnorm] assign[=] call[name[self].symmetry_normalised_sites, parameter[name[scaled]]]
variable[perm] assign[=] call[name[np].lexsort, parameter[name[symnorm].T]]
variable[iperm] assign[=] call[name[perm].argsort, parameter[]]
variable[xmask] assign[=] compare[call[call[name[np].abs, parameter[call[name[np].diff, parameter[call[name[symnorm]][name[perm]]]]]].max, parameter[]] greater[>] name[symprec]]
variable[mask] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.List object at 0x7da1b26ae050>, <ast.Name object at 0x7da1b26ad9c0>]]]]
variable[imask] assign[=] call[name[mask]][name[iperm]]
if name[output_mask] begin[:]
return[tuple[[<ast.Subscript object at 0x7da1b26af8b0>, <ast.Name object at 0x7da20c794850>]]] | keyword[def] identifier[unique_sites] ( identifier[self] , identifier[scaled_positions] , identifier[symprec] = literal[int] , identifier[output_mask] = keyword[False] ):
literal[string]
identifier[scaled] = identifier[np] . identifier[array] ( identifier[scaled_positions] , identifier[ndmin] = literal[int] )
identifier[symnorm] = identifier[self] . identifier[symmetry_normalised_sites] ( identifier[scaled] )
identifier[perm] = identifier[np] . identifier[lexsort] ( identifier[symnorm] . identifier[T] )
identifier[iperm] = identifier[perm] . identifier[argsort] ()
identifier[xmask] = identifier[np] . identifier[abs] ( identifier[np] . identifier[diff] ( identifier[symnorm] [ identifier[perm] ], identifier[axis] = literal[int] )). identifier[max] ( identifier[axis] = literal[int] )> identifier[symprec]
identifier[mask] = identifier[np] . identifier[concatenate] (([ keyword[True] ], identifier[xmask] ))
identifier[imask] = identifier[mask] [ identifier[iperm] ]
keyword[if] identifier[output_mask] :
keyword[return] identifier[scaled] [ identifier[imask] ], identifier[imask]
keyword[else] :
keyword[return] identifier[scaled] [ identifier[imask] ] | def unique_sites(self, scaled_positions, symprec=0.001, output_mask=False):
"""Returns a subset of *scaled_positions* containing only the
symmetry-unique positions. If *output_mask* is True, a boolean
array masking the subset is also returned.
Example:
>>> from ase.lattice.spacegroup import Spacegroup
>>> sg = Spacegroup(225) # fcc
>>> sg.unique_sites([[0.0, 0.0, 0.0],
... [0.5, 0.5, 0.0],
... [1.0, 0.0, 0.0],
... [0.5, 0.0, 0.0]])
array([[ 0. , 0. , 0. ],
[ 0.5, 0. , 0. ]])
"""
scaled = np.array(scaled_positions, ndmin=2)
symnorm = self.symmetry_normalised_sites(scaled)
perm = np.lexsort(symnorm.T)
iperm = perm.argsort()
xmask = np.abs(np.diff(symnorm[perm], axis=0)).max(axis=1) > symprec
mask = np.concatenate(([True], xmask))
imask = mask[iperm]
if output_mask:
return (scaled[imask], imask) # depends on [control=['if'], data=[]]
else:
return scaled[imask] |
async def create_scene(self, room_id, name, color_id=0, icon_id=0):
    """Create an empty scene and return the API response.

    Scene members have to be added separately once the scene exists.

    :returns: A json object including scene id.
    """
    # The API expects the scene name base64-encoded.
    payload = {
        "scene": {
            ATTR_ROOM_ID: room_id,
            ATTR_NAME: unicode_to_base64(name),
            ATTR_COLOR_ID: color_id,
            ATTR_ICON_ID: icon_id,
        }
    }
    return await self.request.post(self._base_path, data=payload)
literal[string]
identifier[name] = identifier[unicode_to_base64] ( identifier[name] )
identifier[_data] ={
literal[string] :{
identifier[ATTR_ROOM_ID] : identifier[room_id] ,
identifier[ATTR_NAME] : identifier[name] ,
identifier[ATTR_COLOR_ID] : identifier[color_id] ,
identifier[ATTR_ICON_ID] : identifier[icon_id] ,
}
}
identifier[_response] = keyword[await] identifier[self] . identifier[request] . identifier[post] ( identifier[self] . identifier[_base_path] , identifier[data] = identifier[_data] )
keyword[return] identifier[_response] | async def create_scene(self, room_id, name, color_id=0, icon_id=0):
"""Creates am empty scene.
Scenemembers need to be added after the scene has been created.
:returns: A json object including scene id.
"""
name = unicode_to_base64(name)
_data = {'scene': {ATTR_ROOM_ID: room_id, ATTR_NAME: name, ATTR_COLOR_ID: color_id, ATTR_ICON_ID: icon_id}}
_response = await self.request.post(self._base_path, data=_data)
return _response |
def translate(rect, x, y, width=1):
    """
    Given four points of a rectangle, translate the
    rectangle to the specified x and y coordinates and,
    optionally, change the width.

    :type rect: list of tuples
    :param rect: Four points describing a rectangle.
    :type x: float
    :param x: The amount to shift the rectangle along the x-axis.
    :type y: float
    :param y: The amount to shift the rectangle along the y-axis.
    :type width: float
    :param width: The amount by which to change the width of the
        rectangle.
    """
    def shift(point, extra=0):
        # Move one corner by the requested offsets; *extra* widens the
        # right-hand side of the rectangle.
        return (point[0] + x + extra, point[1] + y)

    return (shift(rect[0]), shift(rect[1]),
            shift(rect[2], width), shift(rect[3], width))
constant[
Given four points of a rectangle, translate the
rectangle to the specified x and y coordinates and,
optionally, change the width.
:type rect: list of tuples
:param rect: Four points describing a rectangle.
:type x: float
:param x: The amount to shift the rectangle along the x-axis.
:type y: float
:param y: The amount to shift the rectangle along the y-axis.
:type width: float
:param width: The amount by which to change the width of the
rectangle.
]
return[tuple[[<ast.Tuple object at 0x7da204566f80>, <ast.Tuple object at 0x7da204564e20>, <ast.Tuple object at 0x7da204566c20>, <ast.Tuple object at 0x7da204567880>]]] | keyword[def] identifier[translate] ( identifier[rect] , identifier[x] , identifier[y] , identifier[width] = literal[int] ):
literal[string]
keyword[return] (( identifier[rect] [ literal[int] ][ literal[int] ]+ identifier[x] , identifier[rect] [ literal[int] ][ literal[int] ]+ identifier[y] ),( identifier[rect] [ literal[int] ][ literal[int] ]+ identifier[x] , identifier[rect] [ literal[int] ][ literal[int] ]+ identifier[y] ),
( identifier[rect] [ literal[int] ][ literal[int] ]+ identifier[x] + identifier[width] , identifier[rect] [ literal[int] ][ literal[int] ]+ identifier[y] ),( identifier[rect] [ literal[int] ][ literal[int] ]+ identifier[x] + identifier[width] , identifier[rect] [ literal[int] ][ literal[int] ]+ identifier[y] )) | def translate(rect, x, y, width=1):
"""
Given four points of a rectangle, translate the
rectangle to the specified x and y coordinates and,
optionally, change the width.
:type rect: list of tuples
:param rect: Four points describing a rectangle.
:type x: float
:param x: The amount to shift the rectangle along the x-axis.
:type y: float
:param y: The amount to shift the rectangle along the y-axis.
:type width: float
:param width: The amount by which to change the width of the
rectangle.
"""
return ((rect[0][0] + x, rect[0][1] + y), (rect[1][0] + x, rect[1][1] + y), (rect[2][0] + x + width, rect[2][1] + y), (rect[3][0] + x + width, rect[3][1] + y)) |
def dict_of_lists_add(dictionary, key, value):
    # type: (DictUpperBound, Any, Any) -> None
    """Add value to a list in a dictionary by key

    Uses ``dict.setdefault`` so the empty list is only created (and the key
    only inserted) when *key* is not already present — a single lookup
    instead of a get-append-store round trip.

    Args:
        dictionary (DictUpperBound): Dictionary to which to add values
        key (Any): Key within dictionary
        value (Any): Value to add to list in dictionary

    Returns:
        None
    """
    dictionary.setdefault(key, []).append(value)
constant[Add value to a list in a dictionary by key
Args:
dictionary (DictUpperBound): Dictionary to which to add values
key (Any): Key within dictionary
value (Any): Value to add to list in dictionary
Returns:
None
]
variable[list_objs] assign[=] call[name[dictionary].get, parameter[name[key], call[name[list], parameter[]]]]
call[name[list_objs].append, parameter[name[value]]]
call[name[dictionary]][name[key]] assign[=] name[list_objs] | keyword[def] identifier[dict_of_lists_add] ( identifier[dictionary] , identifier[key] , identifier[value] ):
literal[string]
identifier[list_objs] = identifier[dictionary] . identifier[get] ( identifier[key] , identifier[list] ())
identifier[list_objs] . identifier[append] ( identifier[value] )
identifier[dictionary] [ identifier[key] ]= identifier[list_objs] | def dict_of_lists_add(dictionary, key, value):
# type: (DictUpperBound, Any, Any) -> None
'Add value to a list in a dictionary by key\n\n Args:\n dictionary (DictUpperBound): Dictionary to which to add values\n key (Any): Key within dictionary\n value (Any): Value to add to list in dictionary\n\n Returns:\n None\n\n '
list_objs = dictionary.get(key, list())
list_objs.append(value)
dictionary[key] = list_objs |
def get_available_urls(self, urls):
    """Return reachable urls sorted by their ping times.

    Each url is reduced to its base url before pinging, so urls sharing a
    host are probed once; servers whose ping time is at or above
    ``Pinger.UNREACHABLE`` are dropped from the result.

    :param urls: iterable of candidate cache server urls.
    :returns: list of the original urls, fastest-responding first.
    """
    # Map each base url back to the full url it came from so we can
    # recover the original url after pinging only the bases.
    baseurl_to_urls = {self._baseurl(url): url for url in urls}
    pingtimes = self._pinger.pings(list(baseurl_to_urls.keys()))  # List of pairs (host, time in ms).
    self._log.debug('Artifact cache server ping times: {}'
                    .format(', '.join(['{}: {:.6f} secs'.format(*p) for p in pingtimes])))
    # Sort ascending by ping time: fastest servers first.
    sorted_pingtimes = sorted(pingtimes, key=lambda x: x[1])
    available_urls = [baseurl_to_urls[baseurl] for baseurl, pingtime in sorted_pingtimes
                      if pingtime < Pinger.UNREACHABLE]
    self._log.debug('Available cache servers: {0}'.format(available_urls))
    return available_urls
constant[Return reachable urls sorted by their ping times.]
variable[baseurl_to_urls] assign[=] <ast.DictComp object at 0x7da1b22a5d20>
variable[pingtimes] assign[=] call[name[self]._pinger.pings, parameter[call[name[list], parameter[call[name[baseurl_to_urls].keys, parameter[]]]]]]
call[name[self]._log.debug, parameter[call[constant[Artifact cache server ping times: {}].format, parameter[call[constant[, ].join, parameter[<ast.ListComp object at 0x7da1b22a5090>]]]]]]
variable[sorted_pingtimes] assign[=] call[name[sorted], parameter[name[pingtimes]]]
variable[available_urls] assign[=] <ast.ListComp object at 0x7da1b22496f0>
call[name[self]._log.debug, parameter[call[constant[Available cache servers: {0}].format, parameter[name[available_urls]]]]]
return[name[available_urls]] | keyword[def] identifier[get_available_urls] ( identifier[self] , identifier[urls] ):
literal[string]
identifier[baseurl_to_urls] ={ identifier[self] . identifier[_baseurl] ( identifier[url] ): identifier[url] keyword[for] identifier[url] keyword[in] identifier[urls] }
identifier[pingtimes] = identifier[self] . identifier[_pinger] . identifier[pings] ( identifier[list] ( identifier[baseurl_to_urls] . identifier[keys] ()))
identifier[self] . identifier[_log] . identifier[debug] ( literal[string]
. identifier[format] ( literal[string] . identifier[join] ([ literal[string] . identifier[format] (* identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[pingtimes] ])))
identifier[sorted_pingtimes] = identifier[sorted] ( identifier[pingtimes] , identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[int] ])
identifier[available_urls] =[ identifier[baseurl_to_urls] [ identifier[baseurl] ] keyword[for] identifier[baseurl] , identifier[pingtime] keyword[in] identifier[sorted_pingtimes]
keyword[if] identifier[pingtime] < identifier[Pinger] . identifier[UNREACHABLE] ]
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[available_urls] ))
keyword[return] identifier[available_urls] | def get_available_urls(self, urls):
"""Return reachable urls sorted by their ping times."""
baseurl_to_urls = {self._baseurl(url): url for url in urls}
pingtimes = self._pinger.pings(list(baseurl_to_urls.keys())) # List of pairs (host, time in ms).
self._log.debug('Artifact cache server ping times: {}'.format(', '.join(['{}: {:.6f} secs'.format(*p) for p in pingtimes])))
sorted_pingtimes = sorted(pingtimes, key=lambda x: x[1])
available_urls = [baseurl_to_urls[baseurl] for (baseurl, pingtime) in sorted_pingtimes if pingtime < Pinger.UNREACHABLE]
self._log.debug('Available cache servers: {0}'.format(available_urls))
return available_urls |
def _complete_current(self):
    """ Perform the completion with the currently selected item.
    """
    cursor = self._current_text_cursor()
    completion = self.currentItem().text()
    cursor.insertText(completion)
    self.hide()
constant[ Perform the completion with the currently selected item.
]
call[call[name[self]._current_text_cursor, parameter[]].insertText, parameter[call[call[name[self].currentItem, parameter[]].text, parameter[]]]]
call[name[self].hide, parameter[]] | keyword[def] identifier[_complete_current] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_current_text_cursor] (). identifier[insertText] ( identifier[self] . identifier[currentItem] (). identifier[text] ())
identifier[self] . identifier[hide] () | def _complete_current(self):
""" Perform the completion with the currently selected item.
"""
self._current_text_cursor().insertText(self.currentItem().text())
self.hide() |
def configure_retrievefor(self, ns, definition):
    """
    Register a relation endpoint.

    The definition's func should be a retrieve function, which must:
    - accept kwargs for path data and optional request data
    - return an item

    The definition's request_schema will be used to process query string
    arguments, if any.

    :param ns: the namespace
    :param definition: the endpoint definition
    """
    # Fall back to an empty schema so query-string parsing is always defined.
    request_schema = definition.request_schema or Schema()
    # NOTE(review): decorator order appears load-bearing here — route
    # registration outermost, with @wraps(definition.func) preserving the
    # wrapped function's metadata; confirm before reordering.
    @self.add_route(ns.relation_path, Operation.RetrieveFor, ns)
    @qs(request_schema)
    @response(definition.response_schema)
    @wraps(definition.func)
    def retrieve(**path_data):
        # Headers dict is populated by the definition's header_func below.
        headers = dict()
        # Parse query-string arguments with the endpoint's request schema.
        request_data = load_query_string_data(request_schema)
        # Path and query data merge into one kwargs dict; a falsy/missing
        # result is rejected by require_response_data.
        response_data = require_response_data(definition.func(**merge_data(path_data, request_data)))
        definition.header_func(headers, response_data)
        # Content-negotiate among the endpoint's supported response formats.
        response_format = self.negotiate_response_content(definition.response_formats)
        return dump_response_data(
            definition.response_schema,
            response_data,
            headers=headers,
            response_format=response_format,
        )
    retrieve.__doc__ = "Retrieve {} relative to a {}".format(pluralize(ns.object_name), ns.subject_name)
constant[
Register a relation endpoint.
The definition's func should be a retrieve function, which must:
- accept kwargs for path data and optional request data
- return an item
The definition's request_schema will be used to process query string arguments, if any.
:param ns: the namespace
:param definition: the endpoint definition
]
variable[request_schema] assign[=] <ast.BoolOp object at 0x7da1b0c62a70>
def function[retrieve, parameter[]]:
variable[headers] assign[=] call[name[dict], parameter[]]
variable[request_data] assign[=] call[name[load_query_string_data], parameter[name[request_schema]]]
variable[response_data] assign[=] call[name[require_response_data], parameter[call[name[definition].func, parameter[]]]]
call[name[definition].header_func, parameter[name[headers], name[response_data]]]
variable[response_format] assign[=] call[name[self].negotiate_response_content, parameter[name[definition].response_formats]]
return[call[name[dump_response_data], parameter[name[definition].response_schema, name[response_data]]]]
name[retrieve].__doc__ assign[=] call[constant[Retrieve {} relative to a {}].format, parameter[call[name[pluralize], parameter[name[ns].object_name]], name[ns].subject_name]] | keyword[def] identifier[configure_retrievefor] ( identifier[self] , identifier[ns] , identifier[definition] ):
literal[string]
identifier[request_schema] = identifier[definition] . identifier[request_schema] keyword[or] identifier[Schema] ()
@ identifier[self] . identifier[add_route] ( identifier[ns] . identifier[relation_path] , identifier[Operation] . identifier[RetrieveFor] , identifier[ns] )
@ identifier[qs] ( identifier[request_schema] )
@ identifier[response] ( identifier[definition] . identifier[response_schema] )
@ identifier[wraps] ( identifier[definition] . identifier[func] )
keyword[def] identifier[retrieve] (** identifier[path_data] ):
identifier[headers] = identifier[dict] ()
identifier[request_data] = identifier[load_query_string_data] ( identifier[request_schema] )
identifier[response_data] = identifier[require_response_data] ( identifier[definition] . identifier[func] (** identifier[merge_data] ( identifier[path_data] , identifier[request_data] )))
identifier[definition] . identifier[header_func] ( identifier[headers] , identifier[response_data] )
identifier[response_format] = identifier[self] . identifier[negotiate_response_content] ( identifier[definition] . identifier[response_formats] )
keyword[return] identifier[dump_response_data] (
identifier[definition] . identifier[response_schema] ,
identifier[response_data] ,
identifier[headers] = identifier[headers] ,
identifier[response_format] = identifier[response_format] ,
)
identifier[retrieve] . identifier[__doc__] = literal[string] . identifier[format] ( identifier[pluralize] ( identifier[ns] . identifier[object_name] ), identifier[ns] . identifier[subject_name] ) | def configure_retrievefor(self, ns, definition):
"""
Register a relation endpoint.
The definition's func should be a retrieve function, which must:
- accept kwargs for path data and optional request data
- return an item
The definition's request_schema will be used to process query string arguments, if any.
:param ns: the namespace
:param definition: the endpoint definition
"""
request_schema = definition.request_schema or Schema()
@self.add_route(ns.relation_path, Operation.RetrieveFor, ns)
@qs(request_schema)
@response(definition.response_schema)
@wraps(definition.func)
def retrieve(**path_data):
headers = dict()
request_data = load_query_string_data(request_schema)
response_data = require_response_data(definition.func(**merge_data(path_data, request_data)))
definition.header_func(headers, response_data)
response_format = self.negotiate_response_content(definition.response_formats)
return dump_response_data(definition.response_schema, response_data, headers=headers, response_format=response_format)
retrieve.__doc__ = 'Retrieve {} relative to a {}'.format(pluralize(ns.object_name), ns.subject_name) |
def get_current_frame():
    """
    :return: current frame object (excluding this function call)
    :rtype: types.FrameType

    Uses sys._getframe if available, otherwise some trickery with
    sys.exc_info and a dummy exception.
    """
    # noinspection PyProtectedMember
    getframe = getattr(sys, "_getframe", None)
    if getframe is not None:
        return getframe(1)
    # Fallback for interpreters without sys._getframe: raise and catch a
    # throwaway exception, then walk back from the traceback's frame.
    try:
        raise ZeroDivisionError
    except ZeroDivisionError:
        return sys.exc_info()[2].tb_frame.f_back
constant[
:return: current frame object (excluding this function call)
:rtype: types.FrameType
Uses sys._getframe if available, otherwise some trickery with sys.exc_info and a dummy exception.
]
if call[name[hasattr], parameter[name[sys], constant[_getframe]]] begin[:]
return[call[name[sys]._getframe, parameter[constant[1]]]]
<ast.Try object at 0x7da1b237fa60> | keyword[def] identifier[get_current_frame] ():
literal[string]
keyword[if] identifier[hasattr] ( identifier[sys] , literal[string] ):
keyword[return] identifier[sys] . identifier[_getframe] ( literal[int] )
keyword[try] :
keyword[raise] identifier[ZeroDivisionError]
keyword[except] identifier[ZeroDivisionError] :
keyword[return] identifier[sys] . identifier[exc_info] ()[ literal[int] ]. identifier[tb_frame] . identifier[f_back] | def get_current_frame():
"""
:return: current frame object (excluding this function call)
:rtype: types.FrameType
Uses sys._getframe if available, otherwise some trickery with sys.exc_info and a dummy exception.
"""
if hasattr(sys, '_getframe'):
# noinspection PyProtectedMember
return sys._getframe(1) # depends on [control=['if'], data=[]]
try:
raise ZeroDivisionError # depends on [control=['try'], data=[]]
except ZeroDivisionError:
return sys.exc_info()[2].tb_frame.f_back # depends on [control=['except'], data=[]] |
def add_orbit(self, component=None, **kwargs):
"""
Shortcut to :meth:`add_component` but with kind='orbit'
"""
kwargs.setdefault('component', component)
return self.add_component('orbit', **kwargs) | def function[add_orbit, parameter[self, component]]:
constant[
Shortcut to :meth:`add_component` but with kind='orbit'
]
call[name[kwargs].setdefault, parameter[constant[component], name[component]]]
return[call[name[self].add_component, parameter[constant[orbit]]]] | keyword[def] identifier[add_orbit] ( identifier[self] , identifier[component] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] . identifier[setdefault] ( literal[string] , identifier[component] )
keyword[return] identifier[self] . identifier[add_component] ( literal[string] ,** identifier[kwargs] ) | def add_orbit(self, component=None, **kwargs):
"""
Shortcut to :meth:`add_component` but with kind='orbit'
"""
kwargs.setdefault('component', component)
return self.add_component('orbit', **kwargs) |
def get(self, key):
    """Return the status from a job.

    :param key: id of job
    :type document: dict or list
    :return: message with location of job
    :rtype: dict
    :raises Unauthorized: if API returns status 401
    :raises Forbidden: if API returns status 403
    :raises NotFound: if API returns status 404
    :raises ApiError: if API returns other status
    """
    return self.make_request(method='GET', uri='updates/job/{}'.format(key))
constant[Return the status from a job.
:param key: id of job
:type document: dict or list
:return: message with location of job
:rtype: dict
:raises Unauthorized: if API returns status 401
:raises Forbidden: if API returns status 403
:raises NotFound: if API returns status 404
:raises ApiError: if API returns other status
]
variable[uri] assign[=] call[constant[updates/job/{}].format, parameter[name[key]]]
return[call[name[self].make_request, parameter[]]] | keyword[def] identifier[get] ( identifier[self] , identifier[key] ):
literal[string]
identifier[uri] = literal[string] . identifier[format] ( identifier[key] )
keyword[return] identifier[self] . identifier[make_request] ( identifier[method] = literal[string] , identifier[uri] = identifier[uri] ) | def get(self, key):
"""Return the status from a job.
:param key: id of job
:type document: dict or list
:return: message with location of job
:rtype: dict
:raises Unauthorized: if API returns status 401
:raises Forbidden: if API returns status 403
:raises NotFound: if API returns status 404
:raises ApiError: if API returns other status
"""
uri = 'updates/job/{}'.format(key)
return self.make_request(method='GET', uri=uri) |
def autocomplete():
    """Entry Point for completion of main and subcommand options.

    Reads the shell completion protocol variables COMP_WORDS / COMP_CWORD
    from the environment, prints candidate completions to stdout, and
    always exits via sys.exit(1) once candidates have been emitted.
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if 'PIP_AUTO_COMPLETE' not in os.environ:
        return
    # COMP_WORDS: the words on the command line; drop the program name.
    cwords = os.environ['COMP_WORDS'].split()[1:]
    # COMP_CWORD: index of the word currently being completed.
    cword = int(os.environ['COMP_CWORD'])
    try:
        current = cwords[cword - 1]
    except IndexError:
        # Cursor is past the typed words: complete from an empty prefix.
        current = ''
    subcommands = [cmd for cmd, summary in get_summaries()]
    options = []
    # subcommand: first typed word that names a known subcommand, if any.
    try:
        subcommand_name = [w for w in cwords if w in subcommands][0]
    except IndexError:
        subcommand_name = None
    parser = create_main_parser()
    # subcommand options
    if subcommand_name:
        # special case: 'help' subcommand has no options
        if subcommand_name == 'help':
            sys.exit(1)
        # special case: list locally installed dists for show and uninstall
        should_list_installed = (
            subcommand_name in ['show', 'uninstall'] and
            not current.startswith('-')
        )
        if should_list_installed:
            installed = []
            lc = current.lower()
            for dist in get_installed_distributions(local_only=True):
                # Offer each installed dist at most once per command line.
                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
                    installed.append(dist.key)
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)
        subcommand = commands_dict[subcommand_name]()
        for opt in subcommand.parser.option_list_all:
            # Skip options hidden from --help output.
            if opt.help != optparse.SUPPRESS_HELP:
                for opt_str in opt._long_opts + opt._short_opts:
                    options.append((opt_str, opt.nargs))
        # filter out previously specified options from available options
        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        # get completion type given cwords and available subcommand options
        completion_type = get_path_completion_type(
            cwords, cword, subcommand.parser.option_list_all,
        )
        # get completion files and directories if ``completion_type`` is
        # ``<file>``, ``<dir>`` or ``<path>``
        if completion_type:
            options = auto_complete_paths(current, completion_type)
            options = ((opt, 0) for opt in options)
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1] and option[0][:2] == "--":
                opt_label += '='
            print(opt_label)
    else:
        # show main parser options only when necessary
        opts = [i.option_list for i in parser.option_groups]
        opts.append(parser.option_list)
        # Flatten the per-group option lists into one lazy stream.
        opts = (o for it in opts for o in it)
        if current.startswith('-'):
            for opt in opts:
                if opt.help != optparse.SUPPRESS_HELP:
                    subcommands += opt._long_opts + opt._short_opts
        else:
            # get completion type given cwords and all available options
            completion_type = get_path_completion_type(cwords, cword, opts)
            if completion_type:
                subcommands = auto_complete_paths(current, completion_type)
    print(' '.join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)
constant[Entry Point for completion of main and subcommand options.
]
if compare[constant[PIP_AUTO_COMPLETE] <ast.NotIn object at 0x7da2590d7190> name[os].environ] begin[:]
return[None]
variable[cwords] assign[=] call[call[call[name[os].environ][constant[COMP_WORDS]].split, parameter[]]][<ast.Slice object at 0x7da18bc71f90>]
variable[cword] assign[=] call[name[int], parameter[call[name[os].environ][constant[COMP_CWORD]]]]
<ast.Try object at 0x7da18bc71510>
variable[subcommands] assign[=] <ast.ListComp object at 0x7da18bc723e0>
variable[options] assign[=] list[[]]
<ast.Try object at 0x7da18bc70d60>
variable[parser] assign[=] call[name[create_main_parser], parameter[]]
if name[subcommand_name] begin[:]
if compare[name[subcommand_name] equal[==] constant[help]] begin[:]
call[name[sys].exit, parameter[constant[1]]]
variable[should_list_installed] assign[=] <ast.BoolOp object at 0x7da18bc71e40>
if name[should_list_installed] begin[:]
variable[installed] assign[=] list[[]]
variable[lc] assign[=] call[name[current].lower, parameter[]]
for taget[name[dist]] in starred[call[name[get_installed_distributions], parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da18bc72590> begin[:]
call[name[installed].append, parameter[name[dist].key]]
if name[installed] begin[:]
for taget[name[dist]] in starred[name[installed]] begin[:]
call[name[print], parameter[name[dist]]]
call[name[sys].exit, parameter[constant[1]]]
variable[subcommand] assign[=] call[call[name[commands_dict]][name[subcommand_name]], parameter[]]
for taget[name[opt]] in starred[name[subcommand].parser.option_list_all] begin[:]
if compare[name[opt].help not_equal[!=] name[optparse].SUPPRESS_HELP] begin[:]
for taget[name[opt_str]] in starred[binary_operation[name[opt]._long_opts + name[opt]._short_opts]] begin[:]
call[name[options].append, parameter[tuple[[<ast.Name object at 0x7da18dc07a60>, <ast.Attribute object at 0x7da18dc05750>]]]]
variable[prev_opts] assign[=] <ast.ListComp object at 0x7da18dc04880>
variable[options] assign[=] <ast.ListComp object at 0x7da18dc04700>
variable[options] assign[=] <ast.ListComp object at 0x7da18dc042e0>
variable[completion_type] assign[=] call[name[get_path_completion_type], parameter[name[cwords], name[cword], name[subcommand].parser.option_list_all]]
if name[completion_type] begin[:]
variable[options] assign[=] call[name[auto_complete_paths], parameter[name[current], name[completion_type]]]
variable[options] assign[=] <ast.GeneratorExp object at 0x7da18dc07610>
for taget[name[option]] in starred[name[options]] begin[:]
variable[opt_label] assign[=] call[name[option]][constant[0]]
if <ast.BoolOp object at 0x7da18dc07f10> begin[:]
<ast.AugAssign object at 0x7da18dc06170>
call[name[print], parameter[name[opt_label]]]
call[name[sys].exit, parameter[constant[1]]] | keyword[def] identifier[autocomplete] ():
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[os] . identifier[environ] :
keyword[return]
identifier[cwords] = identifier[os] . identifier[environ] [ literal[string] ]. identifier[split] ()[ literal[int] :]
identifier[cword] = identifier[int] ( identifier[os] . identifier[environ] [ literal[string] ])
keyword[try] :
identifier[current] = identifier[cwords] [ identifier[cword] - literal[int] ]
keyword[except] identifier[IndexError] :
identifier[current] = literal[string]
identifier[subcommands] =[ identifier[cmd] keyword[for] identifier[cmd] , identifier[summary] keyword[in] identifier[get_summaries] ()]
identifier[options] =[]
keyword[try] :
identifier[subcommand_name] =[ identifier[w] keyword[for] identifier[w] keyword[in] identifier[cwords] keyword[if] identifier[w] keyword[in] identifier[subcommands] ][ literal[int] ]
keyword[except] identifier[IndexError] :
identifier[subcommand_name] = keyword[None]
identifier[parser] = identifier[create_main_parser] ()
keyword[if] identifier[subcommand_name] :
keyword[if] identifier[subcommand_name] == literal[string] :
identifier[sys] . identifier[exit] ( literal[int] )
identifier[should_list_installed] =(
identifier[subcommand_name] keyword[in] [ literal[string] , literal[string] ] keyword[and]
keyword[not] identifier[current] . identifier[startswith] ( literal[string] )
)
keyword[if] identifier[should_list_installed] :
identifier[installed] =[]
identifier[lc] = identifier[current] . identifier[lower] ()
keyword[for] identifier[dist] keyword[in] identifier[get_installed_distributions] ( identifier[local_only] = keyword[True] ):
keyword[if] identifier[dist] . identifier[key] . identifier[startswith] ( identifier[lc] ) keyword[and] identifier[dist] . identifier[key] keyword[not] keyword[in] identifier[cwords] [ literal[int] :]:
identifier[installed] . identifier[append] ( identifier[dist] . identifier[key] )
keyword[if] identifier[installed] :
keyword[for] identifier[dist] keyword[in] identifier[installed] :
identifier[print] ( identifier[dist] )
identifier[sys] . identifier[exit] ( literal[int] )
identifier[subcommand] = identifier[commands_dict] [ identifier[subcommand_name] ]()
keyword[for] identifier[opt] keyword[in] identifier[subcommand] . identifier[parser] . identifier[option_list_all] :
keyword[if] identifier[opt] . identifier[help] != identifier[optparse] . identifier[SUPPRESS_HELP] :
keyword[for] identifier[opt_str] keyword[in] identifier[opt] . identifier[_long_opts] + identifier[opt] . identifier[_short_opts] :
identifier[options] . identifier[append] (( identifier[opt_str] , identifier[opt] . identifier[nargs] ))
identifier[prev_opts] =[ identifier[x] . identifier[split] ( literal[string] )[ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[cwords] [ literal[int] : identifier[cword] - literal[int] ]]
identifier[options] =[( identifier[x] , identifier[v] ) keyword[for] ( identifier[x] , identifier[v] ) keyword[in] identifier[options] keyword[if] identifier[x] keyword[not] keyword[in] identifier[prev_opts] ]
identifier[options] =[( identifier[k] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[options] keyword[if] identifier[k] . identifier[startswith] ( identifier[current] )]
identifier[completion_type] = identifier[get_path_completion_type] (
identifier[cwords] , identifier[cword] , identifier[subcommand] . identifier[parser] . identifier[option_list_all] ,
)
keyword[if] identifier[completion_type] :
identifier[options] = identifier[auto_complete_paths] ( identifier[current] , identifier[completion_type] )
identifier[options] =(( identifier[opt] , literal[int] ) keyword[for] identifier[opt] keyword[in] identifier[options] )
keyword[for] identifier[option] keyword[in] identifier[options] :
identifier[opt_label] = identifier[option] [ literal[int] ]
keyword[if] identifier[option] [ literal[int] ] keyword[and] identifier[option] [ literal[int] ][: literal[int] ]== literal[string] :
identifier[opt_label] += literal[string]
identifier[print] ( identifier[opt_label] )
keyword[else] :
identifier[opts] =[ identifier[i] . identifier[option_list] keyword[for] identifier[i] keyword[in] identifier[parser] . identifier[option_groups] ]
identifier[opts] . identifier[append] ( identifier[parser] . identifier[option_list] )
identifier[opts] =( identifier[o] keyword[for] identifier[it] keyword[in] identifier[opts] keyword[for] identifier[o] keyword[in] identifier[it] )
keyword[if] identifier[current] . identifier[startswith] ( literal[string] ):
keyword[for] identifier[opt] keyword[in] identifier[opts] :
keyword[if] identifier[opt] . identifier[help] != identifier[optparse] . identifier[SUPPRESS_HELP] :
identifier[subcommands] += identifier[opt] . identifier[_long_opts] + identifier[opt] . identifier[_short_opts]
keyword[else] :
identifier[completion_type] = identifier[get_path_completion_type] ( identifier[cwords] , identifier[cword] , identifier[opts] )
keyword[if] identifier[completion_type] :
identifier[subcommands] = identifier[auto_complete_paths] ( identifier[current] , identifier[completion_type] )
identifier[print] ( literal[string] . identifier[join] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[subcommands] keyword[if] identifier[x] . identifier[startswith] ( identifier[current] )]))
identifier[sys] . identifier[exit] ( literal[int] ) | def autocomplete():
"""Entry Point for completion of main and subcommand options.
"""
# Don't complete if user hasn't sourced bash_completion file.
if 'PIP_AUTO_COMPLETE' not in os.environ:
return # depends on [control=['if'], data=[]]
cwords = os.environ['COMP_WORDS'].split()[1:]
cword = int(os.environ['COMP_CWORD'])
try:
current = cwords[cword - 1] # depends on [control=['try'], data=[]]
except IndexError:
current = '' # depends on [control=['except'], data=[]]
subcommands = [cmd for (cmd, summary) in get_summaries()]
options = []
# subcommand
try:
subcommand_name = [w for w in cwords if w in subcommands][0] # depends on [control=['try'], data=[]]
except IndexError:
subcommand_name = None # depends on [control=['except'], data=[]]
parser = create_main_parser()
# subcommand options
if subcommand_name:
# special case: 'help' subcommand has no options
if subcommand_name == 'help':
sys.exit(1) # depends on [control=['if'], data=[]]
# special case: list locally installed dists for show and uninstall
should_list_installed = subcommand_name in ['show', 'uninstall'] and (not current.startswith('-'))
if should_list_installed:
installed = []
lc = current.lower()
for dist in get_installed_distributions(local_only=True):
if dist.key.startswith(lc) and dist.key not in cwords[1:]:
installed.append(dist.key) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['dist']]
# if there are no dists installed, fall back to option completion
if installed:
for dist in installed:
print(dist) # depends on [control=['for'], data=['dist']]
sys.exit(1) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
subcommand = commands_dict[subcommand_name]()
for opt in subcommand.parser.option_list_all:
if opt.help != optparse.SUPPRESS_HELP:
for opt_str in opt._long_opts + opt._short_opts:
options.append((opt_str, opt.nargs)) # depends on [control=['for'], data=['opt_str']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['opt']]
# filter out previously specified options from available options
prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
options = [(x, v) for (x, v) in options if x not in prev_opts]
# filter options by current input
options = [(k, v) for (k, v) in options if k.startswith(current)]
# get completion type given cwords and available subcommand options
completion_type = get_path_completion_type(cwords, cword, subcommand.parser.option_list_all)
# get completion files and directories if ``completion_type`` is
# ``<file>``, ``<dir>`` or ``<path>``
if completion_type:
options = auto_complete_paths(current, completion_type)
options = ((opt, 0) for opt in options) # depends on [control=['if'], data=[]]
for option in options:
opt_label = option[0]
# append '=' to options which require args
if option[1] and option[0][:2] == '--':
opt_label += '=' # depends on [control=['if'], data=[]]
print(opt_label) # depends on [control=['for'], data=['option']] # depends on [control=['if'], data=[]]
else:
# show main parser options only when necessary
opts = [i.option_list for i in parser.option_groups]
opts.append(parser.option_list)
opts = (o for it in opts for o in it)
if current.startswith('-'):
for opt in opts:
if opt.help != optparse.SUPPRESS_HELP:
subcommands += opt._long_opts + opt._short_opts # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['opt']] # depends on [control=['if'], data=[]]
else:
# get completion type given cwords and all available options
completion_type = get_path_completion_type(cwords, cword, opts)
if completion_type:
subcommands = auto_complete_paths(current, completion_type) # depends on [control=['if'], data=[]]
print(' '.join([x for x in subcommands if x.startswith(current)]))
sys.exit(1) |
def send_email(from_addr: str,
host: str,
user: str,
password: str,
port: int = None,
use_tls: bool = True,
date: str = None,
sender: str = "",
reply_to: Union[str, List[str]] = "",
to: Union[str, List[str]] = "",
cc: Union[str, List[str]] = "",
bcc: Union[str, List[str]] = "",
subject: str = "",
body: str = "",
content_type: str = CONTENT_TYPE_TEXT,
charset: str = "utf8",
attachment_filenames: Sequence[str] = None,
attachment_binaries: Sequence[bytes] = None,
attachment_binary_filenames: Sequence[str] = None,
verbose: bool = False) -> Tuple[bool, str]:
"""
Sends an e-mail in text/html format using SMTP via TLS.
Args:
host: mail server host
user: username on mail server
password: password for username on mail server
port: port to use, or ``None`` for protocol default
use_tls: use TLS, rather than plain SMTP?
date: e-mail date in RFC 2822 format, or ``None`` for "now"
from_addr: name of the sender for the "From:" field
sender: name of the sender for the "Sender:" field
reply_to: name of the sender for the "Reply-To:" field
to: e-mail address(es) of the recipients for "To:" field
cc: e-mail address(es) of the recipients for "Cc:" field
bcc: e-mail address(es) of the recipients for "Bcc:" field
subject: e-mail subject
body: e-mail body
content_type: MIME type for body content, default ``text/plain``
charset: character set for body; default ``utf8``
attachment_filenames: filenames of attachments to add
attachment_binaries: binary objects to add as attachments
attachment_binary_filenames: filenames corresponding to
``attachment_binaries``
verbose: be verbose?
Returns:
tuple: ``(success, error_or_success_message)``
See
- https://tools.ietf.org/html/rfc2822
- https://tools.ietf.org/html/rfc5322
- http://segfault.in/2010/12/sending-gmail-from-python/
- http://stackoverflow.com/questions/64505
- http://stackoverflow.com/questions/3362600
Re security:
- TLS supersedes SSL:
https://en.wikipedia.org/wiki/Transport_Layer_Security
- https://en.wikipedia.org/wiki/Email_encryption
- SMTP connections on ports 25 and 587 are commonly secured via TLS using
the ``STARTTLS`` command:
https://en.wikipedia.org/wiki/Simple_Mail_Transfer_Protocol
- https://tools.ietf.org/html/rfc8314
- "STARTTLS on port 587" is one common method. Django refers to this as
"explicit TLS" (its ``E_MAIL_USE_TLS`` setting; see
https://docs.djangoproject.com/en/2.1/ref/settings/#std:setting-EMAIL_USE_TLS).
- Port 465 is also used for "implicit TLS" (3.3 in
https://tools.ietf.org/html/rfc8314). Django refers to this as "implicit
TLS" too, or SSL; see its ``EMAIL_USE_SSL`` setting at
https://docs.djangoproject.com/en/2.1/ref/settings/#email-use-ssl). We
don't support that here.
""" # noqa
if isinstance(to, str):
to = [to]
if isinstance(cc, str):
cc = [cc]
if isinstance(bcc, str):
bcc = [bcc]
# -------------------------------------------------------------------------
# Make it
# -------------------------------------------------------------------------
try:
msg = make_email(
from_addr=from_addr,
date=date,
sender=sender,
reply_to=reply_to,
to=to,
cc=cc,
bcc=bcc,
subject=subject,
body=body,
content_type=content_type,
charset=charset,
attachment_filenames=attachment_filenames,
attachment_binaries=attachment_binaries,
attachment_binary_filenames=attachment_binary_filenames,
verbose=verbose,
)
except (AssertionError, ValueError) as e:
errmsg = str(e)
log.error("{}", errmsg)
return False, errmsg
# -------------------------------------------------------------------------
# Send it
# -------------------------------------------------------------------------
to_addrs = to + cc + bcc
try:
send_msg(
msg=msg,
from_addr=from_addr,
to_addrs=to_addrs,
host=host,
user=user,
password=password,
port=port,
use_tls=use_tls,
)
except RuntimeError as e:
errmsg = str(e)
log.error("{}", e)
return False, errmsg
return True, "Success" | def function[send_email, parameter[from_addr, host, user, password, port, use_tls, date, sender, reply_to, to, cc, bcc, subject, body, content_type, charset, attachment_filenames, attachment_binaries, attachment_binary_filenames, verbose]]:
constant[
Sends an e-mail in text/html format using SMTP via TLS.
Args:
host: mail server host
user: username on mail server
password: password for username on mail server
port: port to use, or ``None`` for protocol default
use_tls: use TLS, rather than plain SMTP?
date: e-mail date in RFC 2822 format, or ``None`` for "now"
from_addr: name of the sender for the "From:" field
sender: name of the sender for the "Sender:" field
reply_to: name of the sender for the "Reply-To:" field
to: e-mail address(es) of the recipients for "To:" field
cc: e-mail address(es) of the recipients for "Cc:" field
bcc: e-mail address(es) of the recipients for "Bcc:" field
subject: e-mail subject
body: e-mail body
content_type: MIME type for body content, default ``text/plain``
charset: character set for body; default ``utf8``
attachment_filenames: filenames of attachments to add
attachment_binaries: binary objects to add as attachments
attachment_binary_filenames: filenames corresponding to
``attachment_binaries``
verbose: be verbose?
Returns:
tuple: ``(success, error_or_success_message)``
See
- https://tools.ietf.org/html/rfc2822
- https://tools.ietf.org/html/rfc5322
- http://segfault.in/2010/12/sending-gmail-from-python/
- http://stackoverflow.com/questions/64505
- http://stackoverflow.com/questions/3362600
Re security:
- TLS supersedes SSL:
https://en.wikipedia.org/wiki/Transport_Layer_Security
- https://en.wikipedia.org/wiki/Email_encryption
- SMTP connections on ports 25 and 587 are commonly secured via TLS using
the ``STARTTLS`` command:
https://en.wikipedia.org/wiki/Simple_Mail_Transfer_Protocol
- https://tools.ietf.org/html/rfc8314
- "STARTTLS on port 587" is one common method. Django refers to this as
"explicit TLS" (its ``E_MAIL_USE_TLS`` setting; see
https://docs.djangoproject.com/en/2.1/ref/settings/#std:setting-EMAIL_USE_TLS).
- Port 465 is also used for "implicit TLS" (3.3 in
https://tools.ietf.org/html/rfc8314). Django refers to this as "implicit
TLS" too, or SSL; see its ``EMAIL_USE_SSL`` setting at
https://docs.djangoproject.com/en/2.1/ref/settings/#email-use-ssl). We
don't support that here.
]
if call[name[isinstance], parameter[name[to], name[str]]] begin[:]
variable[to] assign[=] list[[<ast.Name object at 0x7da1b189d510>]]
if call[name[isinstance], parameter[name[cc], name[str]]] begin[:]
variable[cc] assign[=] list[[<ast.Name object at 0x7da1b189c0d0>]]
if call[name[isinstance], parameter[name[bcc], name[str]]] begin[:]
variable[bcc] assign[=] list[[<ast.Name object at 0x7da1b170ab00>]]
<ast.Try object at 0x7da1b170a620>
variable[to_addrs] assign[=] binary_operation[binary_operation[name[to] + name[cc]] + name[bcc]]
<ast.Try object at 0x7da1b18e4130>
return[tuple[[<ast.Constant object at 0x7da1b18e73d0>, <ast.Constant object at 0x7da1b18e7580>]]] | keyword[def] identifier[send_email] ( identifier[from_addr] : identifier[str] ,
identifier[host] : identifier[str] ,
identifier[user] : identifier[str] ,
identifier[password] : identifier[str] ,
identifier[port] : identifier[int] = keyword[None] ,
identifier[use_tls] : identifier[bool] = keyword[True] ,
identifier[date] : identifier[str] = keyword[None] ,
identifier[sender] : identifier[str] = literal[string] ,
identifier[reply_to] : identifier[Union] [ identifier[str] , identifier[List] [ identifier[str] ]]= literal[string] ,
identifier[to] : identifier[Union] [ identifier[str] , identifier[List] [ identifier[str] ]]= literal[string] ,
identifier[cc] : identifier[Union] [ identifier[str] , identifier[List] [ identifier[str] ]]= literal[string] ,
identifier[bcc] : identifier[Union] [ identifier[str] , identifier[List] [ identifier[str] ]]= literal[string] ,
identifier[subject] : identifier[str] = literal[string] ,
identifier[body] : identifier[str] = literal[string] ,
identifier[content_type] : identifier[str] = identifier[CONTENT_TYPE_TEXT] ,
identifier[charset] : identifier[str] = literal[string] ,
identifier[attachment_filenames] : identifier[Sequence] [ identifier[str] ]= keyword[None] ,
identifier[attachment_binaries] : identifier[Sequence] [ identifier[bytes] ]= keyword[None] ,
identifier[attachment_binary_filenames] : identifier[Sequence] [ identifier[str] ]= keyword[None] ,
identifier[verbose] : identifier[bool] = keyword[False] )-> identifier[Tuple] [ identifier[bool] , identifier[str] ]:
literal[string]
keyword[if] identifier[isinstance] ( identifier[to] , identifier[str] ):
identifier[to] =[ identifier[to] ]
keyword[if] identifier[isinstance] ( identifier[cc] , identifier[str] ):
identifier[cc] =[ identifier[cc] ]
keyword[if] identifier[isinstance] ( identifier[bcc] , identifier[str] ):
identifier[bcc] =[ identifier[bcc] ]
keyword[try] :
identifier[msg] = identifier[make_email] (
identifier[from_addr] = identifier[from_addr] ,
identifier[date] = identifier[date] ,
identifier[sender] = identifier[sender] ,
identifier[reply_to] = identifier[reply_to] ,
identifier[to] = identifier[to] ,
identifier[cc] = identifier[cc] ,
identifier[bcc] = identifier[bcc] ,
identifier[subject] = identifier[subject] ,
identifier[body] = identifier[body] ,
identifier[content_type] = identifier[content_type] ,
identifier[charset] = identifier[charset] ,
identifier[attachment_filenames] = identifier[attachment_filenames] ,
identifier[attachment_binaries] = identifier[attachment_binaries] ,
identifier[attachment_binary_filenames] = identifier[attachment_binary_filenames] ,
identifier[verbose] = identifier[verbose] ,
)
keyword[except] ( identifier[AssertionError] , identifier[ValueError] ) keyword[as] identifier[e] :
identifier[errmsg] = identifier[str] ( identifier[e] )
identifier[log] . identifier[error] ( literal[string] , identifier[errmsg] )
keyword[return] keyword[False] , identifier[errmsg]
identifier[to_addrs] = identifier[to] + identifier[cc] + identifier[bcc]
keyword[try] :
identifier[send_msg] (
identifier[msg] = identifier[msg] ,
identifier[from_addr] = identifier[from_addr] ,
identifier[to_addrs] = identifier[to_addrs] ,
identifier[host] = identifier[host] ,
identifier[user] = identifier[user] ,
identifier[password] = identifier[password] ,
identifier[port] = identifier[port] ,
identifier[use_tls] = identifier[use_tls] ,
)
keyword[except] identifier[RuntimeError] keyword[as] identifier[e] :
identifier[errmsg] = identifier[str] ( identifier[e] )
identifier[log] . identifier[error] ( literal[string] , identifier[e] )
keyword[return] keyword[False] , identifier[errmsg]
keyword[return] keyword[True] , literal[string] | def send_email(from_addr: str, host: str, user: str, password: str, port: int=None, use_tls: bool=True, date: str=None, sender: str='', reply_to: Union[str, List[str]]='', to: Union[str, List[str]]='', cc: Union[str, List[str]]='', bcc: Union[str, List[str]]='', subject: str='', body: str='', content_type: str=CONTENT_TYPE_TEXT, charset: str='utf8', attachment_filenames: Sequence[str]=None, attachment_binaries: Sequence[bytes]=None, attachment_binary_filenames: Sequence[str]=None, verbose: bool=False) -> Tuple[bool, str]:
"""
Sends an e-mail in text/html format using SMTP via TLS.
Args:
host: mail server host
user: username on mail server
password: password for username on mail server
port: port to use, or ``None`` for protocol default
use_tls: use TLS, rather than plain SMTP?
date: e-mail date in RFC 2822 format, or ``None`` for "now"
from_addr: name of the sender for the "From:" field
sender: name of the sender for the "Sender:" field
reply_to: name of the sender for the "Reply-To:" field
to: e-mail address(es) of the recipients for "To:" field
cc: e-mail address(es) of the recipients for "Cc:" field
bcc: e-mail address(es) of the recipients for "Bcc:" field
subject: e-mail subject
body: e-mail body
content_type: MIME type for body content, default ``text/plain``
charset: character set for body; default ``utf8``
attachment_filenames: filenames of attachments to add
attachment_binaries: binary objects to add as attachments
attachment_binary_filenames: filenames corresponding to
``attachment_binaries``
verbose: be verbose?
Returns:
tuple: ``(success, error_or_success_message)``
See
- https://tools.ietf.org/html/rfc2822
- https://tools.ietf.org/html/rfc5322
- http://segfault.in/2010/12/sending-gmail-from-python/
- http://stackoverflow.com/questions/64505
- http://stackoverflow.com/questions/3362600
Re security:
- TLS supersedes SSL:
https://en.wikipedia.org/wiki/Transport_Layer_Security
- https://en.wikipedia.org/wiki/Email_encryption
- SMTP connections on ports 25 and 587 are commonly secured via TLS using
the ``STARTTLS`` command:
https://en.wikipedia.org/wiki/Simple_Mail_Transfer_Protocol
- https://tools.ietf.org/html/rfc8314
- "STARTTLS on port 587" is one common method. Django refers to this as
"explicit TLS" (its ``E_MAIL_USE_TLS`` setting; see
https://docs.djangoproject.com/en/2.1/ref/settings/#std:setting-EMAIL_USE_TLS).
- Port 465 is also used for "implicit TLS" (3.3 in
https://tools.ietf.org/html/rfc8314). Django refers to this as "implicit
TLS" too, or SSL; see its ``EMAIL_USE_SSL`` setting at
https://docs.djangoproject.com/en/2.1/ref/settings/#email-use-ssl). We
don't support that here.
""" # noqa
if isinstance(to, str):
to = [to] # depends on [control=['if'], data=[]]
if isinstance(cc, str):
cc = [cc] # depends on [control=['if'], data=[]]
if isinstance(bcc, str):
bcc = [bcc] # depends on [control=['if'], data=[]]
# -------------------------------------------------------------------------
# Make it
# -------------------------------------------------------------------------
try:
msg = make_email(from_addr=from_addr, date=date, sender=sender, reply_to=reply_to, to=to, cc=cc, bcc=bcc, subject=subject, body=body, content_type=content_type, charset=charset, attachment_filenames=attachment_filenames, attachment_binaries=attachment_binaries, attachment_binary_filenames=attachment_binary_filenames, verbose=verbose) # depends on [control=['try'], data=[]]
except (AssertionError, ValueError) as e:
errmsg = str(e)
log.error('{}', errmsg)
return (False, errmsg) # depends on [control=['except'], data=['e']]
# -------------------------------------------------------------------------
# Send it
# -------------------------------------------------------------------------
to_addrs = to + cc + bcc
try:
send_msg(msg=msg, from_addr=from_addr, to_addrs=to_addrs, host=host, user=user, password=password, port=port, use_tls=use_tls) # depends on [control=['try'], data=[]]
except RuntimeError as e:
errmsg = str(e)
log.error('{}', e)
return (False, errmsg) # depends on [control=['except'], data=['e']]
return (True, 'Success') |
def _wrapinstance(ptr, base=None):
"""Enable implicit cast of pointer to most suitable class
This behaviour is available in sip per default.
Based on http://nathanhorne.com/pyqtpyside-wrap-instance
Usage:
This mechanism kicks in under these circumstances.
1. Qt.py is using PySide 1 or 2.
2. A `base` argument is not provided.
See :func:`QtCompat.wrapInstance()`
Arguments:
ptr (long): Pointer to QObject in memory
base (QObject, optional): Base class to wrap with. Defaults to QObject,
which should handle anything.
"""
assert isinstance(ptr, long), "Argument 'ptr' must be of type <long>"
assert (base is None) or issubclass(base, Qt.QtCore.QObject), (
"Argument 'base' must be of type <QObject>")
if Qt.IsPyQt4 or Qt.IsPyQt5:
func = getattr(Qt, "_sip").wrapinstance
elif Qt.IsPySide2:
func = getattr(Qt, "_shiboken2").wrapInstance
elif Qt.IsPySide:
func = getattr(Qt, "_shiboken").wrapInstance
else:
raise AttributeError("'module' has no attribute 'wrapInstance'")
if base is None:
q_object = func(long(ptr), Qt.QtCore.QObject)
meta_object = q_object.metaObject()
class_name = meta_object.className()
super_class_name = meta_object.superClass().className()
if hasattr(Qt.QtWidgets, class_name):
base = getattr(Qt.QtWidgets, class_name)
elif hasattr(Qt.QtWidgets, super_class_name):
base = getattr(Qt.QtWidgets, super_class_name)
else:
base = Qt.QtCore.QObject
return func(long(ptr), base) | def function[_wrapinstance, parameter[ptr, base]]:
constant[Enable implicit cast of pointer to most suitable class
This behaviour is available in sip per default.
Based on http://nathanhorne.com/pyqtpyside-wrap-instance
Usage:
This mechanism kicks in under these circumstances.
1. Qt.py is using PySide 1 or 2.
2. A `base` argument is not provided.
See :func:`QtCompat.wrapInstance()`
Arguments:
ptr (long): Pointer to QObject in memory
base (QObject, optional): Base class to wrap with. Defaults to QObject,
which should handle anything.
]
assert[call[name[isinstance], parameter[name[ptr], name[long]]]]
assert[<ast.BoolOp object at 0x7da20c6c5e70>]
if <ast.BoolOp object at 0x7da20c6c5720> begin[:]
variable[func] assign[=] call[name[getattr], parameter[name[Qt], constant[_sip]]].wrapinstance
if compare[name[base] is constant[None]] begin[:]
variable[q_object] assign[=] call[name[func], parameter[call[name[long], parameter[name[ptr]]], name[Qt].QtCore.QObject]]
variable[meta_object] assign[=] call[name[q_object].metaObject, parameter[]]
variable[class_name] assign[=] call[name[meta_object].className, parameter[]]
variable[super_class_name] assign[=] call[call[name[meta_object].superClass, parameter[]].className, parameter[]]
if call[name[hasattr], parameter[name[Qt].QtWidgets, name[class_name]]] begin[:]
variable[base] assign[=] call[name[getattr], parameter[name[Qt].QtWidgets, name[class_name]]]
return[call[name[func], parameter[call[name[long], parameter[name[ptr]]], name[base]]]] | keyword[def] identifier[_wrapinstance] ( identifier[ptr] , identifier[base] = keyword[None] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[ptr] , identifier[long] ), literal[string]
keyword[assert] ( identifier[base] keyword[is] keyword[None] ) keyword[or] identifier[issubclass] ( identifier[base] , identifier[Qt] . identifier[QtCore] . identifier[QObject] ),(
literal[string] )
keyword[if] identifier[Qt] . identifier[IsPyQt4] keyword[or] identifier[Qt] . identifier[IsPyQt5] :
identifier[func] = identifier[getattr] ( identifier[Qt] , literal[string] ). identifier[wrapinstance]
keyword[elif] identifier[Qt] . identifier[IsPySide2] :
identifier[func] = identifier[getattr] ( identifier[Qt] , literal[string] ). identifier[wrapInstance]
keyword[elif] identifier[Qt] . identifier[IsPySide] :
identifier[func] = identifier[getattr] ( identifier[Qt] , literal[string] ). identifier[wrapInstance]
keyword[else] :
keyword[raise] identifier[AttributeError] ( literal[string] )
keyword[if] identifier[base] keyword[is] keyword[None] :
identifier[q_object] = identifier[func] ( identifier[long] ( identifier[ptr] ), identifier[Qt] . identifier[QtCore] . identifier[QObject] )
identifier[meta_object] = identifier[q_object] . identifier[metaObject] ()
identifier[class_name] = identifier[meta_object] . identifier[className] ()
identifier[super_class_name] = identifier[meta_object] . identifier[superClass] (). identifier[className] ()
keyword[if] identifier[hasattr] ( identifier[Qt] . identifier[QtWidgets] , identifier[class_name] ):
identifier[base] = identifier[getattr] ( identifier[Qt] . identifier[QtWidgets] , identifier[class_name] )
keyword[elif] identifier[hasattr] ( identifier[Qt] . identifier[QtWidgets] , identifier[super_class_name] ):
identifier[base] = identifier[getattr] ( identifier[Qt] . identifier[QtWidgets] , identifier[super_class_name] )
keyword[else] :
identifier[base] = identifier[Qt] . identifier[QtCore] . identifier[QObject]
keyword[return] identifier[func] ( identifier[long] ( identifier[ptr] ), identifier[base] ) | def _wrapinstance(ptr, base=None):
"""Enable implicit cast of pointer to most suitable class
This behaviour is available in sip per default.
Based on http://nathanhorne.com/pyqtpyside-wrap-instance
Usage:
This mechanism kicks in under these circumstances.
1. Qt.py is using PySide 1 or 2.
2. A `base` argument is not provided.
See :func:`QtCompat.wrapInstance()`
Arguments:
ptr (long): Pointer to QObject in memory
base (QObject, optional): Base class to wrap with. Defaults to QObject,
which should handle anything.
"""
assert isinstance(ptr, long), "Argument 'ptr' must be of type <long>"
assert base is None or issubclass(base, Qt.QtCore.QObject), "Argument 'base' must be of type <QObject>"
if Qt.IsPyQt4 or Qt.IsPyQt5:
func = getattr(Qt, '_sip').wrapinstance # depends on [control=['if'], data=[]]
elif Qt.IsPySide2:
func = getattr(Qt, '_shiboken2').wrapInstance # depends on [control=['if'], data=[]]
elif Qt.IsPySide:
func = getattr(Qt, '_shiboken').wrapInstance # depends on [control=['if'], data=[]]
else:
raise AttributeError("'module' has no attribute 'wrapInstance'")
if base is None:
q_object = func(long(ptr), Qt.QtCore.QObject)
meta_object = q_object.metaObject()
class_name = meta_object.className()
super_class_name = meta_object.superClass().className()
if hasattr(Qt.QtWidgets, class_name):
base = getattr(Qt.QtWidgets, class_name) # depends on [control=['if'], data=[]]
elif hasattr(Qt.QtWidgets, super_class_name):
base = getattr(Qt.QtWidgets, super_class_name) # depends on [control=['if'], data=[]]
else:
base = Qt.QtCore.QObject # depends on [control=['if'], data=['base']]
return func(long(ptr), base) |
def import_profile(self):
""" Import minimum needs from an existing json file.
The minimum needs are loaded from a file into the table. This state
is only saved if the form is accepted.
"""
# noinspection PyCallByClass,PyTypeChecker
file_name_dialog = QFileDialog(self)
file_name_dialog.setAcceptMode(QFileDialog.AcceptOpen)
file_name_dialog.setNameFilter(self.tr('JSON files (*.json *.JSON)'))
file_name_dialog.setDefaultSuffix('json')
path_name = resources_path('minimum_needs')
file_name_dialog.setDirectory(path_name)
if file_name_dialog.exec_():
file_name = file_name_dialog.selectedFiles()[0]
else:
return -1
if self.minimum_needs.read_from_file(file_name) == -1:
return -1
self.clear_resource_list()
self.populate_resource_list()
self.switch_context(self.profile_edit_page) | def function[import_profile, parameter[self]]:
constant[ Import minimum needs from an existing json file.
The minimum needs are loaded from a file into the table. This state
is only saved if the form is accepted.
]
variable[file_name_dialog] assign[=] call[name[QFileDialog], parameter[name[self]]]
call[name[file_name_dialog].setAcceptMode, parameter[name[QFileDialog].AcceptOpen]]
call[name[file_name_dialog].setNameFilter, parameter[call[name[self].tr, parameter[constant[JSON files (*.json *.JSON)]]]]]
call[name[file_name_dialog].setDefaultSuffix, parameter[constant[json]]]
variable[path_name] assign[=] call[name[resources_path], parameter[constant[minimum_needs]]]
call[name[file_name_dialog].setDirectory, parameter[name[path_name]]]
if call[name[file_name_dialog].exec_, parameter[]] begin[:]
variable[file_name] assign[=] call[call[name[file_name_dialog].selectedFiles, parameter[]]][constant[0]]
if compare[call[name[self].minimum_needs.read_from_file, parameter[name[file_name]]] equal[==] <ast.UnaryOp object at 0x7da18ede6ad0>] begin[:]
return[<ast.UnaryOp object at 0x7da18ede5570>]
call[name[self].clear_resource_list, parameter[]]
call[name[self].populate_resource_list, parameter[]]
call[name[self].switch_context, parameter[name[self].profile_edit_page]] | keyword[def] identifier[import_profile] ( identifier[self] ):
literal[string]
identifier[file_name_dialog] = identifier[QFileDialog] ( identifier[self] )
identifier[file_name_dialog] . identifier[setAcceptMode] ( identifier[QFileDialog] . identifier[AcceptOpen] )
identifier[file_name_dialog] . identifier[setNameFilter] ( identifier[self] . identifier[tr] ( literal[string] ))
identifier[file_name_dialog] . identifier[setDefaultSuffix] ( literal[string] )
identifier[path_name] = identifier[resources_path] ( literal[string] )
identifier[file_name_dialog] . identifier[setDirectory] ( identifier[path_name] )
keyword[if] identifier[file_name_dialog] . identifier[exec_] ():
identifier[file_name] = identifier[file_name_dialog] . identifier[selectedFiles] ()[ literal[int] ]
keyword[else] :
keyword[return] - literal[int]
keyword[if] identifier[self] . identifier[minimum_needs] . identifier[read_from_file] ( identifier[file_name] )==- literal[int] :
keyword[return] - literal[int]
identifier[self] . identifier[clear_resource_list] ()
identifier[self] . identifier[populate_resource_list] ()
identifier[self] . identifier[switch_context] ( identifier[self] . identifier[profile_edit_page] ) | def import_profile(self):
""" Import minimum needs from an existing json file.
The minimum needs are loaded from a file into the table. This state
is only saved if the form is accepted.
"""
# noinspection PyCallByClass,PyTypeChecker
file_name_dialog = QFileDialog(self)
file_name_dialog.setAcceptMode(QFileDialog.AcceptOpen)
file_name_dialog.setNameFilter(self.tr('JSON files (*.json *.JSON)'))
file_name_dialog.setDefaultSuffix('json')
path_name = resources_path('minimum_needs')
file_name_dialog.setDirectory(path_name)
if file_name_dialog.exec_():
file_name = file_name_dialog.selectedFiles()[0] # depends on [control=['if'], data=[]]
else:
return -1
if self.minimum_needs.read_from_file(file_name) == -1:
return -1 # depends on [control=['if'], data=[]]
self.clear_resource_list()
self.populate_resource_list()
self.switch_context(self.profile_edit_page) |
def _set_config_src(self, v, load=False):
"""
Setter method for config_src, mapped from YANG variable /brocade_tunnels_ext_rpc/get_tunnel_info/output/tunnel/config_src (config-src-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_src is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_src() directly.
YANG Description: Tunnel configuration source; indicates how
tunnel was created.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'bgp-evpn': {'value': 3}, u'vtep-controller': {'value': 1}, u'site-config': {'value': 2}},), is_leaf=True, yang_name="config-src", rest_name="config-src", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-tunnels-ext', defining_module='brocade-tunnels-ext', yang_type='config-src-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_src must be of a type compatible with config-src-type""",
'defined-type': "brocade-tunnels-ext:config-src-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'bgp-evpn': {'value': 3}, u'vtep-controller': {'value': 1}, u'site-config': {'value': 2}},), is_leaf=True, yang_name="config-src", rest_name="config-src", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-tunnels-ext', defining_module='brocade-tunnels-ext', yang_type='config-src-type', is_config=True)""",
})
self.__config_src = t
if hasattr(self, '_set'):
self._set() | def function[_set_config_src, parameter[self, v, load]]:
constant[
Setter method for config_src, mapped from YANG variable /brocade_tunnels_ext_rpc/get_tunnel_info/output/tunnel/config_src (config-src-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_src is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_src() directly.
YANG Description: Tunnel configuration source; indicates how
tunnel was created.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da1b24ba620>
name[self].__config_src assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_config_src] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[unicode] , identifier[restriction_type] = literal[string] , identifier[restriction_arg] ={ literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }},), identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[False] , identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__config_src] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_config_src(self, v, load=False):
"""
Setter method for config_src, mapped from YANG variable /brocade_tunnels_ext_rpc/get_tunnel_info/output/tunnel/config_src (config-src-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_src is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_src() directly.
YANG Description: Tunnel configuration source; indicates how
tunnel was created.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=RestrictedClassType(base_type=unicode, restriction_type='dict_key', restriction_arg={u'bgp-evpn': {'value': 3}, u'vtep-controller': {'value': 1}, u'site-config': {'value': 2}}), is_leaf=True, yang_name='config-src', rest_name='config-src', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-tunnels-ext', defining_module='brocade-tunnels-ext', yang_type='config-src-type', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'config_src must be of a type compatible with config-src-type', 'defined-type': 'brocade-tunnels-ext:config-src-type', 'generated-type': 'YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u\'bgp-evpn\': {\'value\': 3}, u\'vtep-controller\': {\'value\': 1}, u\'site-config\': {\'value\': 2}},), is_leaf=True, yang_name="config-src", rest_name="config-src", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace=\'urn:brocade.com:mgmt:brocade-tunnels-ext\', defining_module=\'brocade-tunnels-ext\', yang_type=\'config-src-type\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__config_src = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def raw(func, **func_args):
"""Decorator for eager functions checking input array
and stripping away the weld_type.
Stripping the weld_type is required to keep the same code in Series.apply and because
Numpy functions don't (all) have kwargs. Passing weld_type to NumPy functions is unexpected
and raises ValueError.
Parameters
----------
func : function
Function to execute eagerly over raw data.
func_args : kwargs
Arguments to pass to func, if any.
Returns
-------
function
"""
if len(func_args) == 0:
@wraps(func)
def wrapper(array, **kwargs):
if isinstance(array, WeldObject):
raise TypeError('Can only perform operation on raw data')
# need to not pass weld_type to whatever function
if 'weld_type' in kwargs:
del kwargs['weld_type']
return func(array, **kwargs)
return wrapper
else:
# here kwargs is only kept s.t. Series can still pass the weld_type
@wraps(func)
def wrapper(array, **kwargs):
if isinstance(array, WeldObject):
raise TypeError('Can only perform operation on raw data')
return func(array, **func_args)
return wrapper | def function[raw, parameter[func]]:
constant[Decorator for eager functions checking input array
and stripping away the weld_type.
Stripping the weld_type is required to keep the same code in Series.apply and because
Numpy functions don't (all) have kwargs. Passing weld_type to NumPy functions is unexpected
and raises ValueError.
Parameters
----------
func : function
Function to execute eagerly over raw data.
func_args : kwargs
Arguments to pass to func, if any.
Returns
-------
function
]
if compare[call[name[len], parameter[name[func_args]]] equal[==] constant[0]] begin[:]
def function[wrapper, parameter[array]]:
if call[name[isinstance], parameter[name[array], name[WeldObject]]] begin[:]
<ast.Raise object at 0x7da1b0a1d780>
if compare[constant[weld_type] in name[kwargs]] begin[:]
<ast.Delete object at 0x7da1b0a1e0e0>
return[call[name[func], parameter[name[array]]]]
return[name[wrapper]] | keyword[def] identifier[raw] ( identifier[func] ,** identifier[func_args] ):
literal[string]
keyword[if] identifier[len] ( identifier[func_args] )== literal[int] :
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapper] ( identifier[array] ,** identifier[kwargs] ):
keyword[if] identifier[isinstance] ( identifier[array] , identifier[WeldObject] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[kwargs] :
keyword[del] identifier[kwargs] [ literal[string] ]
keyword[return] identifier[func] ( identifier[array] ,** identifier[kwargs] )
keyword[return] identifier[wrapper]
keyword[else] :
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapper] ( identifier[array] ,** identifier[kwargs] ):
keyword[if] identifier[isinstance] ( identifier[array] , identifier[WeldObject] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[return] identifier[func] ( identifier[array] ,** identifier[func_args] )
keyword[return] identifier[wrapper] | def raw(func, **func_args):
"""Decorator for eager functions checking input array
and stripping away the weld_type.
Stripping the weld_type is required to keep the same code in Series.apply and because
Numpy functions don't (all) have kwargs. Passing weld_type to NumPy functions is unexpected
and raises ValueError.
Parameters
----------
func : function
Function to execute eagerly over raw data.
func_args : kwargs
Arguments to pass to func, if any.
Returns
-------
function
"""
if len(func_args) == 0:
@wraps(func)
def wrapper(array, **kwargs):
if isinstance(array, WeldObject):
raise TypeError('Can only perform operation on raw data') # depends on [control=['if'], data=[]]
# need to not pass weld_type to whatever function
if 'weld_type' in kwargs:
del kwargs['weld_type'] # depends on [control=['if'], data=['kwargs']]
return func(array, **kwargs)
return wrapper # depends on [control=['if'], data=[]]
else:
# here kwargs is only kept s.t. Series can still pass the weld_type
@wraps(func)
def wrapper(array, **kwargs):
if isinstance(array, WeldObject):
raise TypeError('Can only perform operation on raw data') # depends on [control=['if'], data=[]]
return func(array, **func_args)
return wrapper |
def handle_arguments(self, string, root, opening, closing):
"""
Handles phrase-arguments.
Sets the override and increment flags if found. Also makes
sure that the argument sequence is at the start of the phrase
and else warns about the unescaped meta characters. If the
arguments are indeed at the start but do not match the arguments
regular expression, an error is raised.
Arguments:
string (str): The string being parsed.
root (str): The current root phrase.
opening (int): The index of the opening paranthese.
closing (int): The index of the closing paranthese.
Returns:
The (possibly escaped) string, the root phrase (if no escaping,
then with arguments and flags) and the next meta match.
Raises:
errors.ParseError: If the arguments are invalid.
"""
# The actual argument string (ignore whitespace)
args = string[opening + 1 : closing].replace(" ", "")
# The argument sequence must be at the start of the phrase
# and must match the allowed argument regular expression
if opening > 0 or not self.arguments.match(args):
if opening == 0:
raise errors.ParseError("Invalid argument sequence!")
# If escape_meta does indeed escape a character and removes
# a backward slash, the positions 'opening' and 'closing' are no
# longer valid. escape_meta does a search for the next meta
# character though, which is then the closing parantheses,
# so we can use its index value (in the now escaped string)
string, meta = self.escape_meta(string, opening)
string, meta = self.escape_meta(string, meta.start())
return string, root, meta
if "!" in args:
root.override = True
args = args.replace("!", "")
if "+" in args:
root.increment = True
args = args.replace("+", "")
root.arguments = [int(i) for i in args.split(",") if i]
# Remove the argument string including parantheses
string = string[closing + 1:]
meta = self.meta.search(string)
return string, root, meta | def function[handle_arguments, parameter[self, string, root, opening, closing]]:
constant[
Handles phrase-arguments.
Sets the override and increment flags if found. Also makes
sure that the argument sequence is at the start of the phrase
and else warns about the unescaped meta characters. If the
arguments are indeed at the start but do not match the arguments
regular expression, an error is raised.
Arguments:
string (str): The string being parsed.
root (str): The current root phrase.
opening (int): The index of the opening paranthese.
closing (int): The index of the closing paranthese.
Returns:
The (possibly escaped) string, the root phrase (if no escaping,
then with arguments and flags) and the next meta match.
Raises:
errors.ParseError: If the arguments are invalid.
]
variable[args] assign[=] call[call[name[string]][<ast.Slice object at 0x7da18bcc9510>].replace, parameter[constant[ ], constant[]]]
if <ast.BoolOp object at 0x7da18bcc8160> begin[:]
if compare[name[opening] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da18bcca830>
<ast.Tuple object at 0x7da18bcca170> assign[=] call[name[self].escape_meta, parameter[name[string], name[opening]]]
<ast.Tuple object at 0x7da18bcc8670> assign[=] call[name[self].escape_meta, parameter[name[string], call[name[meta].start, parameter[]]]]
return[tuple[[<ast.Name object at 0x7da1b0ab9cc0>, <ast.Name object at 0x7da1b0ab8be0>, <ast.Name object at 0x7da1b0aba0e0>]]]
if compare[constant[!] in name[args]] begin[:]
name[root].override assign[=] constant[True]
variable[args] assign[=] call[name[args].replace, parameter[constant[!], constant[]]]
if compare[constant[+] in name[args]] begin[:]
name[root].increment assign[=] constant[True]
variable[args] assign[=] call[name[args].replace, parameter[constant[+], constant[]]]
name[root].arguments assign[=] <ast.ListComp object at 0x7da1b0a23d90>
variable[string] assign[=] call[name[string]][<ast.Slice object at 0x7da18dc9a080>]
variable[meta] assign[=] call[name[self].meta.search, parameter[name[string]]]
return[tuple[[<ast.Name object at 0x7da1b0a633d0>, <ast.Name object at 0x7da1b0a62f20>, <ast.Name object at 0x7da1b0a60640>]]] | keyword[def] identifier[handle_arguments] ( identifier[self] , identifier[string] , identifier[root] , identifier[opening] , identifier[closing] ):
literal[string]
identifier[args] = identifier[string] [ identifier[opening] + literal[int] : identifier[closing] ]. identifier[replace] ( literal[string] , literal[string] )
keyword[if] identifier[opening] > literal[int] keyword[or] keyword[not] identifier[self] . identifier[arguments] . identifier[match] ( identifier[args] ):
keyword[if] identifier[opening] == literal[int] :
keyword[raise] identifier[errors] . identifier[ParseError] ( literal[string] )
identifier[string] , identifier[meta] = identifier[self] . identifier[escape_meta] ( identifier[string] , identifier[opening] )
identifier[string] , identifier[meta] = identifier[self] . identifier[escape_meta] ( identifier[string] , identifier[meta] . identifier[start] ())
keyword[return] identifier[string] , identifier[root] , identifier[meta]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[root] . identifier[override] = keyword[True]
identifier[args] = identifier[args] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[root] . identifier[increment] = keyword[True]
identifier[args] = identifier[args] . identifier[replace] ( literal[string] , literal[string] )
identifier[root] . identifier[arguments] =[ identifier[int] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[args] . identifier[split] ( literal[string] ) keyword[if] identifier[i] ]
identifier[string] = identifier[string] [ identifier[closing] + literal[int] :]
identifier[meta] = identifier[self] . identifier[meta] . identifier[search] ( identifier[string] )
keyword[return] identifier[string] , identifier[root] , identifier[meta] | def handle_arguments(self, string, root, opening, closing):
"""
Handles phrase-arguments.
Sets the override and increment flags if found. Also makes
sure that the argument sequence is at the start of the phrase
and else warns about the unescaped meta characters. If the
arguments are indeed at the start but do not match the arguments
regular expression, an error is raised.
Arguments:
string (str): The string being parsed.
root (str): The current root phrase.
opening (int): The index of the opening paranthese.
closing (int): The index of the closing paranthese.
Returns:
The (possibly escaped) string, the root phrase (if no escaping,
then with arguments and flags) and the next meta match.
Raises:
errors.ParseError: If the arguments are invalid.
""" # The actual argument string (ignore whitespace)
args = string[opening + 1:closing].replace(' ', '') # The argument sequence must be at the start of the phrase
# and must match the allowed argument regular expression
if opening > 0 or not self.arguments.match(args):
if opening == 0:
raise errors.ParseError('Invalid argument sequence!') # depends on [control=['if'], data=[]] # If escape_meta does indeed escape a character and removes
# a backward slash, the positions 'opening' and 'closing' are no
# longer valid. escape_meta does a search for the next meta
# character though, which is then the closing parantheses,
# so we can use its index value (in the now escaped string)
(string, meta) = self.escape_meta(string, opening)
(string, meta) = self.escape_meta(string, meta.start())
return (string, root, meta) # depends on [control=['if'], data=[]]
if '!' in args:
root.override = True
args = args.replace('!', '') # depends on [control=['if'], data=['args']]
if '+' in args:
root.increment = True
args = args.replace('+', '') # depends on [control=['if'], data=['args']]
root.arguments = [int(i) for i in args.split(',') if i] # Remove the argument string including parantheses
string = string[closing + 1:]
meta = self.meta.search(string)
return (string, root, meta) |
def add_scenario(self, parameter: 'Parameter', scenario_name: str = default_scenario):
"""
Add a scenario for this parameter.
:param scenario_name:
:param parameter:
:return:
"""
self.scenarios[scenario_name] = parameter | def function[add_scenario, parameter[self, parameter, scenario_name]]:
constant[
Add a scenario for this parameter.
:param scenario_name:
:param parameter:
:return:
]
call[name[self].scenarios][name[scenario_name]] assign[=] name[parameter] | keyword[def] identifier[add_scenario] ( identifier[self] , identifier[parameter] : literal[string] , identifier[scenario_name] : identifier[str] = identifier[default_scenario] ):
literal[string]
identifier[self] . identifier[scenarios] [ identifier[scenario_name] ]= identifier[parameter] | def add_scenario(self, parameter: 'Parameter', scenario_name: str=default_scenario):
"""
Add a scenario for this parameter.
:param scenario_name:
:param parameter:
:return:
"""
self.scenarios[scenario_name] = parameter |
def iqi(ql, qs, ns=None, rc=None, ot=None, coe=None,
moc=DEFAULT_ITER_MAXOBJECTCOUNT,):
# pylint: disable=line-too-long
"""
*New in pywbem 0.10 as experimental and finalized in 0.12.*
This function is a wrapper for
:meth:`~pywbem.WBEMConnection.IterQueryInstances`.
Execute a query in a namespace,
using the corresponding pull operations if supported by the WBEM server
or otherwise the corresponding traditional operation, and using the
Python :term:`py:generator` idiom to return the result.
This method is an alternative to using the pull operations directly,
that frees the user of having to know whether the WBEM server supports
pull operations.
Other than the other i...() functions, this function does not return
a generator object directly, but as a property of the returned object.
Parameters:
ql (:term:`string`):
Name of the query language used in the `qs` parameter, e.g.
"DMTF:CQL" for CIM Query Language, and "WQL" for WBEM Query
Language. Because this is not a filter query, "DMTF:FQL" is not a
valid query language for this request.
qs (:term:`string`):
Query string in the query language specified in the `ql` parameter.
ns (:term:`string`):
Name of the CIM namespace to be used (case independent).
If `None`, defaults to the default namespace of the connection.
rc (:class:`py:bool`):
Controls whether a class definition describing the properties of the
returned instances will be returned.
`None` will cause the server to use its default of `False`.
ot (:class:`~pywbem.Uint32`):
Operation timeout in seconds. This is the minimum time the WBEM server
must keep the enumeration session open between requests on that
session.
A value of 0 indicates that the server should never time out.
The server may reject the proposed value.
`None` will cause the server to use its default timeout.
coe (:class:`py:bool`):
Continue on error flag.
`None` will cause the server to use its default of `False`.
moc (:class:`~pywbem.Uint32`):
Maximum number of instances the WBEM server may return for each of
the open and pull requests issued during the iterations over the
returned generator object.
Zero and `None` are not allowed.
Returns:
:class:`~pywbem.IterQueryInstancesReturn`: An object with the
following properties:
* **query_result_class** (:class:`~pywbem.CIMClass`):
The query result class, if requested via the `rc` parameter.
`None`, if a query result class was not requested.
* **generator** (:term:`py:generator` iterating :class:`~pywbem.CIMInstance`):
A generator object that iterates the CIM instances representing the
query result. These instances do not have an instance path set.
""" # noqa: E501
return CONN.IterQueryInstances(FilterQueryLanguage=ql,
FilterQuery=qs,
namespace=ns,
ReturnQueryResultClass=rc,
OperationTimeout=ot,
ContinueOnError=coe,
MaxObjectCount=moc) | def function[iqi, parameter[ql, qs, ns, rc, ot, coe, moc]]:
constant[
*New in pywbem 0.10 as experimental and finalized in 0.12.*
This function is a wrapper for
:meth:`~pywbem.WBEMConnection.IterQueryInstances`.
Execute a query in a namespace,
using the corresponding pull operations if supported by the WBEM server
or otherwise the corresponding traditional operation, and using the
Python :term:`py:generator` idiom to return the result.
This method is an alternative to using the pull operations directly,
that frees the user of having to know whether the WBEM server supports
pull operations.
Other than the other i...() functions, this function does not return
a generator object directly, but as a property of the returned object.
Parameters:
ql (:term:`string`):
Name of the query language used in the `qs` parameter, e.g.
"DMTF:CQL" for CIM Query Language, and "WQL" for WBEM Query
Language. Because this is not a filter query, "DMTF:FQL" is not a
valid query language for this request.
qs (:term:`string`):
Query string in the query language specified in the `ql` parameter.
ns (:term:`string`):
Name of the CIM namespace to be used (case independent).
If `None`, defaults to the default namespace of the connection.
rc (:class:`py:bool`):
Controls whether a class definition describing the properties of the
returned instances will be returned.
`None` will cause the server to use its default of `False`.
ot (:class:`~pywbem.Uint32`):
Operation timeout in seconds. This is the minimum time the WBEM server
must keep the enumeration session open between requests on that
session.
A value of 0 indicates that the server should never time out.
The server may reject the proposed value.
`None` will cause the server to use its default timeout.
coe (:class:`py:bool`):
Continue on error flag.
`None` will cause the server to use its default of `False`.
moc (:class:`~pywbem.Uint32`):
Maximum number of instances the WBEM server may return for each of
the open and pull requests issued during the iterations over the
returned generator object.
Zero and `None` are not allowed.
Returns:
:class:`~pywbem.IterQueryInstancesReturn`: An object with the
following properties:
* **query_result_class** (:class:`~pywbem.CIMClass`):
The query result class, if requested via the `rc` parameter.
`None`, if a query result class was not requested.
* **generator** (:term:`py:generator` iterating :class:`~pywbem.CIMInstance`):
A generator object that iterates the CIM instances representing the
query result. These instances do not have an instance path set.
]
return[call[name[CONN].IterQueryInstances, parameter[]]] | keyword[def] identifier[iqi] ( identifier[ql] , identifier[qs] , identifier[ns] = keyword[None] , identifier[rc] = keyword[None] , identifier[ot] = keyword[None] , identifier[coe] = keyword[None] ,
identifier[moc] = identifier[DEFAULT_ITER_MAXOBJECTCOUNT] ,):
literal[string]
keyword[return] identifier[CONN] . identifier[IterQueryInstances] ( identifier[FilterQueryLanguage] = identifier[ql] ,
identifier[FilterQuery] = identifier[qs] ,
identifier[namespace] = identifier[ns] ,
identifier[ReturnQueryResultClass] = identifier[rc] ,
identifier[OperationTimeout] = identifier[ot] ,
identifier[ContinueOnError] = identifier[coe] ,
identifier[MaxObjectCount] = identifier[moc] ) | def iqi(ql, qs, ns=None, rc=None, ot=None, coe=None, moc=DEFAULT_ITER_MAXOBJECTCOUNT):
# pylint: disable=line-too-long
'\n *New in pywbem 0.10 as experimental and finalized in 0.12.*\n\n This function is a wrapper for\n :meth:`~pywbem.WBEMConnection.IterQueryInstances`.\n\n Execute a query in a namespace,\n using the corresponding pull operations if supported by the WBEM server\n or otherwise the corresponding traditional operation, and using the\n Python :term:`py:generator` idiom to return the result.\n\n This method is an alternative to using the pull operations directly,\n that frees the user of having to know whether the WBEM server supports\n pull operations.\n\n Other than the other i...() functions, this function does not return\n a generator object directly, but as a property of the returned object.\n\n Parameters:\n\n ql (:term:`string`):\n Name of the query language used in the `qs` parameter, e.g.\n "DMTF:CQL" for CIM Query Language, and "WQL" for WBEM Query\n Language. Because this is not a filter query, "DMTF:FQL" is not a\n valid query language for this request.\n\n qs (:term:`string`):\n Query string in the query language specified in the `ql` parameter.\n\n ns (:term:`string`):\n Name of the CIM namespace to be used (case independent).\n\n If `None`, defaults to the default namespace of the connection.\n\n rc (:class:`py:bool`):\n Controls whether a class definition describing the properties of the\n returned instances will be returned.\n\n `None` will cause the server to use its default of `False`.\n\n ot (:class:`~pywbem.Uint32`):\n Operation timeout in seconds. 
This is the minimum time the WBEM server\n must keep the enumeration session open between requests on that\n session.\n\n A value of 0 indicates that the server should never time out.\n\n The server may reject the proposed value.\n\n `None` will cause the server to use its default timeout.\n\n coe (:class:`py:bool`):\n Continue on error flag.\n\n `None` will cause the server to use its default of `False`.\n\n moc (:class:`~pywbem.Uint32`):\n Maximum number of instances the WBEM server may return for each of\n the open and pull requests issued during the iterations over the\n returned generator object.\n\n Zero and `None` are not allowed.\n\n Returns:\n\n :class:`~pywbem.IterQueryInstancesReturn`: An object with the\n following properties:\n\n * **query_result_class** (:class:`~pywbem.CIMClass`):\n\n The query result class, if requested via the `rc` parameter.\n\n `None`, if a query result class was not requested.\n\n * **generator** (:term:`py:generator` iterating :class:`~pywbem.CIMInstance`):\n\n A generator object that iterates the CIM instances representing the\n query result. These instances do not have an instance path set.\n ' # noqa: E501
return CONN.IterQueryInstances(FilterQueryLanguage=ql, FilterQuery=qs, namespace=ns, ReturnQueryResultClass=rc, OperationTimeout=ot, ContinueOnError=coe, MaxObjectCount=moc) |
def get_messages(self, queue_name, num_messages=None,
visibility_timeout=None, timeout=None):
'''
Retrieves one or more messages from the front of the queue.
When a message is retrieved from the queue, the response includes the message
content and a pop_receipt value, which is required to delete the message.
The message is not automatically deleted from the queue, but after it has
been retrieved, it is not visible to other clients for the time interval
specified by the visibility_timeout parameter.
:param str queue_name:
The name of the queue to get messages from.
:param int num_messages:
A nonzero integer value that specifies the number of
messages to retrieve from the queue, up to a maximum of 32. If
fewer are visible, the visible messages are returned. By default,
a single message is retrieved from the queue with this operation.
:param int visibility_timeout:
Specifies the new visibility timeout value, in seconds, relative
to server time. The new value must be larger than or equal to 1
second, and cannot be larger than 7 days. The visibility timeout of
a message can be set to a value later than the expiry time.
:param int timeout:
The server timeout, expressed in seconds.
:return: A list of :class:`~azure.storage.queue.models.QueueMessage` objects.
:rtype: list of :class:`~azure.storage.queue.models.QueueMessage`
'''
_validate_not_none('queue_name', queue_name)
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = _get_path(queue_name, True)
request.query = [
('numofmessages', _to_str(num_messages)),
('visibilitytimeout', _to_str(visibility_timeout)),
('timeout', _int_to_str(timeout))
]
response = self._perform_request(request)
return _convert_xml_to_queue_messages(response, self.decode_function) | def function[get_messages, parameter[self, queue_name, num_messages, visibility_timeout, timeout]]:
constant[
Retrieves one or more messages from the front of the queue.
When a message is retrieved from the queue, the response includes the message
content and a pop_receipt value, which is required to delete the message.
The message is not automatically deleted from the queue, but after it has
been retrieved, it is not visible to other clients for the time interval
specified by the visibility_timeout parameter.
:param str queue_name:
The name of the queue to get messages from.
:param int num_messages:
A nonzero integer value that specifies the number of
messages to retrieve from the queue, up to a maximum of 32. If
fewer are visible, the visible messages are returned. By default,
a single message is retrieved from the queue with this operation.
:param int visibility_timeout:
Specifies the new visibility timeout value, in seconds, relative
to server time. The new value must be larger than or equal to 1
second, and cannot be larger than 7 days. The visibility timeout of
a message can be set to a value later than the expiry time.
:param int timeout:
The server timeout, expressed in seconds.
:return: A list of :class:`~azure.storage.queue.models.QueueMessage` objects.
:rtype: list of :class:`~azure.storage.queue.models.QueueMessage`
]
call[name[_validate_not_none], parameter[constant[queue_name], name[queue_name]]]
variable[request] assign[=] call[name[HTTPRequest], parameter[]]
name[request].method assign[=] constant[GET]
name[request].host assign[=] call[name[self]._get_host, parameter[]]
name[request].path assign[=] call[name[_get_path], parameter[name[queue_name], constant[True]]]
name[request].query assign[=] list[[<ast.Tuple object at 0x7da1b1a6d000>, <ast.Tuple object at 0x7da1b1a6ddb0>, <ast.Tuple object at 0x7da1b1a6d570>]]
variable[response] assign[=] call[name[self]._perform_request, parameter[name[request]]]
return[call[name[_convert_xml_to_queue_messages], parameter[name[response], name[self].decode_function]]] | keyword[def] identifier[get_messages] ( identifier[self] , identifier[queue_name] , identifier[num_messages] = keyword[None] ,
identifier[visibility_timeout] = keyword[None] , identifier[timeout] = keyword[None] ):
literal[string]
identifier[_validate_not_none] ( literal[string] , identifier[queue_name] )
identifier[request] = identifier[HTTPRequest] ()
identifier[request] . identifier[method] = literal[string]
identifier[request] . identifier[host] = identifier[self] . identifier[_get_host] ()
identifier[request] . identifier[path] = identifier[_get_path] ( identifier[queue_name] , keyword[True] )
identifier[request] . identifier[query] =[
( literal[string] , identifier[_to_str] ( identifier[num_messages] )),
( literal[string] , identifier[_to_str] ( identifier[visibility_timeout] )),
( literal[string] , identifier[_int_to_str] ( identifier[timeout] ))
]
identifier[response] = identifier[self] . identifier[_perform_request] ( identifier[request] )
keyword[return] identifier[_convert_xml_to_queue_messages] ( identifier[response] , identifier[self] . identifier[decode_function] ) | def get_messages(self, queue_name, num_messages=None, visibility_timeout=None, timeout=None):
"""
Retrieves one or more messages from the front of the queue.
When a message is retrieved from the queue, the response includes the message
content and a pop_receipt value, which is required to delete the message.
The message is not automatically deleted from the queue, but after it has
been retrieved, it is not visible to other clients for the time interval
specified by the visibility_timeout parameter.
:param str queue_name:
The name of the queue to get messages from.
:param int num_messages:
A nonzero integer value that specifies the number of
messages to retrieve from the queue, up to a maximum of 32. If
fewer are visible, the visible messages are returned. By default,
a single message is retrieved from the queue with this operation.
:param int visibility_timeout:
Specifies the new visibility timeout value, in seconds, relative
to server time. The new value must be larger than or equal to 1
second, and cannot be larger than 7 days. The visibility timeout of
a message can be set to a value later than the expiry time.
:param int timeout:
The server timeout, expressed in seconds.
:return: A list of :class:`~azure.storage.queue.models.QueueMessage` objects.
:rtype: list of :class:`~azure.storage.queue.models.QueueMessage`
"""
_validate_not_none('queue_name', queue_name)
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = _get_path(queue_name, True)
request.query = [('numofmessages', _to_str(num_messages)), ('visibilitytimeout', _to_str(visibility_timeout)), ('timeout', _int_to_str(timeout))]
response = self._perform_request(request)
return _convert_xml_to_queue_messages(response, self.decode_function) |
def create_engine(
        database,
        minsize=1,
        maxsize=10,
        loop=None,
        dialect=_dialect,
        paramstyle=None,
        **kwargs):
    """Build an Engine wrapped in an async context manager.

    The returned object resolves to an Engine instance backed by a
    connection pool holding *minsize* open sqlite3 connections (growing
    up to *maxsize* on demand). Extra keyword arguments are forwarded to
    the underlying engine factory.
    """
    creation = _create_engine(
        database=database,
        minsize=minsize,
        maxsize=maxsize,
        loop=loop,
        dialect=dialect,
        paramstyle=paramstyle,
        **kwargs
    )
    return _EngineContextManager(creation)
constant[
A coroutine for Engine creation.
Returns Engine instance with embedded connection pool.
The pool has *minsize* opened connections to sqlite3.
]
variable[coro] assign[=] call[name[_create_engine], parameter[]]
return[call[name[_EngineContextManager], parameter[name[coro]]]] | keyword[def] identifier[create_engine] (
identifier[database] ,
identifier[minsize] = literal[int] ,
identifier[maxsize] = literal[int] ,
identifier[loop] = keyword[None] ,
identifier[dialect] = identifier[_dialect] ,
identifier[paramstyle] = keyword[None] ,
** identifier[kwargs] ):
literal[string]
identifier[coro] = identifier[_create_engine] (
identifier[database] = identifier[database] ,
identifier[minsize] = identifier[minsize] ,
identifier[maxsize] = identifier[maxsize] ,
identifier[loop] = identifier[loop] ,
identifier[dialect] = identifier[dialect] ,
identifier[paramstyle] = identifier[paramstyle] ,
** identifier[kwargs]
)
keyword[return] identifier[_EngineContextManager] ( identifier[coro] ) | def create_engine(database, minsize=1, maxsize=10, loop=None, dialect=_dialect, paramstyle=None, **kwargs):
"""
A coroutine for Engine creation.
Returns Engine instance with embedded connection pool.
The pool has *minsize* opened connections to sqlite3.
"""
coro = _create_engine(database=database, minsize=minsize, maxsize=maxsize, loop=loop, dialect=dialect, paramstyle=paramstyle, **kwargs)
return _EngineContextManager(coro) |
def ttl(self, steps, relative_time=None):
    '''
    Compute the time-to-live for this series.

    Returns None when *steps* is falsy. Without *relative_time* the
    result is simply ``steps`` converted to seconds. With
    *relative_time*, the result is the number of seconds from now at
    which a record written at *relative_time* should expire — 0 when
    that moment already falls outside the retention window.
    '''
    if not steps:
        return None
    step_seconds = steps * SIMPLE_TIMES[self._step[0]]
    if not relative_time:
        return step_seconds
    # Approximate in whole days; "close enough" for expiry purposes.
    rel_bucket = self.to_bucket(relative_time)
    now_bucket = self.to_bucket(time.time())
    day_diff = (self.from_bucket(now_bucket, native=True) -
                self.from_bucket(rel_bucket, native=True)).days
    step_days = step_seconds / SIMPLE_TIMES['d']
    if day_diff > step_days:
        # relative_time is beyond our TTL cutoff
        return 0
    # relative_time is in the "recent" past or future
    return (step_days - day_diff) * SIMPLE_TIMES['d']
constant[
Return the ttl given the number of steps, None if steps is not defined
or we're otherwise unable to calculate one. If relative_time is defined,
then return a ttl that is the number of seconds from now that the
record should be expired.
]
if name[steps] begin[:]
if name[relative_time] begin[:]
variable[rtime] assign[=] call[name[self].to_bucket, parameter[name[relative_time]]]
variable[ntime] assign[=] call[name[self].to_bucket, parameter[call[name[time].time, parameter[]]]]
variable[day_diff] assign[=] binary_operation[call[name[self].from_bucket, parameter[name[ntime]]] - call[name[self].from_bucket, parameter[name[rtime]]]].days
variable[step_diff] assign[=] binary_operation[binary_operation[name[steps] * call[name[SIMPLE_TIMES]][call[name[self]._step][constant[0]]]] / call[name[SIMPLE_TIMES]][constant[d]]]
if compare[name[day_diff] greater[>] name[step_diff]] begin[:]
return[constant[0]]
return[binary_operation[name[steps] * call[name[SIMPLE_TIMES]][call[name[self]._step][constant[0]]]]]
return[constant[None]] | keyword[def] identifier[ttl] ( identifier[self] , identifier[steps] , identifier[relative_time] = keyword[None] ):
literal[string]
keyword[if] identifier[steps] :
keyword[if] identifier[relative_time] :
identifier[rtime] = identifier[self] . identifier[to_bucket] ( identifier[relative_time] )
identifier[ntime] = identifier[self] . identifier[to_bucket] ( identifier[time] . identifier[time] ())
identifier[day_diff] =( identifier[self] . identifier[from_bucket] ( identifier[ntime] , identifier[native] = keyword[True] )- identifier[self] . identifier[from_bucket] ( identifier[rtime] , identifier[native] = keyword[True] )). identifier[days]
identifier[step_diff] =( identifier[steps] * identifier[SIMPLE_TIMES] [ identifier[self] . identifier[_step] [ literal[int] ]])/ identifier[SIMPLE_TIMES] [ literal[string] ]
keyword[if] identifier[day_diff] > identifier[step_diff] :
keyword[return] literal[int]
keyword[else] :
keyword[return] ( identifier[step_diff] - identifier[day_diff] )* identifier[SIMPLE_TIMES] [ literal[string] ]
keyword[return] identifier[steps] * identifier[SIMPLE_TIMES] [ identifier[self] . identifier[_step] [ literal[int] ]]
keyword[return] keyword[None] | def ttl(self, steps, relative_time=None):
"""
Return the ttl given the number of steps, None if steps is not defined
or we're otherwise unable to calculate one. If relative_time is defined,
then return a ttl that is the number of seconds from now that the
record should be expired.
"""
if steps:
# Approximate the ttl based on number of seconds, since it's
# "close enough"
if relative_time:
rtime = self.to_bucket(relative_time)
ntime = self.to_bucket(time.time())
# Convert to number of days
day_diff = (self.from_bucket(ntime, native=True) - self.from_bucket(rtime, native=True)).days
# Convert steps to number of days as well
step_diff = steps * SIMPLE_TIMES[self._step[0]] / SIMPLE_TIMES['d']
# The relative time is beyond our TTL cutoff
if day_diff > step_diff:
return 0 # depends on [control=['if'], data=[]]
else:
# The relative time is in "recent" past or future
return (step_diff - day_diff) * SIMPLE_TIMES['d'] # depends on [control=['if'], data=[]]
return steps * SIMPLE_TIMES[self._step[0]] # depends on [control=['if'], data=[]]
return None |
def DFS(G):
    """
    Depth-first search over all vertices of a graph.

    Args:
        G: graph object exposing a ``vertices`` mapping of
           vertex -> iterable of adjacent vertices.

    Returns:
        dict: ``{vertex: (predecessor, reach, finish)}`` where
        *predecessor* is ``None`` for DFS-tree roots and *reach* /
        *finish* are the discovery / finishing timestamps.

    Raises:
        GraphInsertError: if the graph has no vertices.
    """
    if not G.vertices:
        # Fixed grammar of the original message ("graph have").
        raise GraphInsertError("This graph has no vertices.")
    color = {}   # 'white' = unvisited, 'grey' = in progress, 'black' = finished
    pred = {}    # DFS-tree predecessor of each vertex
    reach = {}   # discovery timestamp
    finish = {}  # finishing timestamp

    def DFSvisit(G, current, time):
        # Visit *current*, recursing into undiscovered neighbours;
        # returns the updated global timestamp.
        color[current] = 'grey'
        time += 1
        reach[current] = time
        for vertex in G.vertices[current]:
            if color[vertex] == 'white':
                pred[vertex] = current
                time = DFSvisit(G, vertex, time)
        color[current] = 'black'
        time += 1
        finish[current] = time
        return time

    for vertex in G.vertices:
        color[vertex] = 'white'
        pred[vertex] = None
        reach[vertex] = 0
        finish[vertex] = 0
    time = 0
    for vertex in G.vertices:
        if color[vertex] == 'white':
            time = DFSvisit(G, vertex, time)
    # Collapse the bookkeeping into one record per vertex:
    # vertex -> (predecessor, reach, finish)
    return {vertex: (pred[vertex], reach[vertex], finish[vertex])
            for vertex in G.vertices}
constant[
Algorithm for depth-first searching the vertices of a graph.
]
if <ast.UnaryOp object at 0x7da18fe91270> begin[:]
<ast.Raise object at 0x7da18fe93970>
variable[color] assign[=] dictionary[[], []]
variable[pred] assign[=] dictionary[[], []]
variable[reach] assign[=] dictionary[[], []]
variable[finish] assign[=] dictionary[[], []]
def function[DFSvisit, parameter[G, current, time]]:
call[name[color]][name[current]] assign[=] constant[grey]
<ast.AugAssign object at 0x7da18fe907c0>
call[name[reach]][name[current]] assign[=] name[time]
for taget[name[vertex]] in starred[call[name[G].vertices][name[current]]] begin[:]
if compare[call[name[color]][name[vertex]] equal[==] constant[white]] begin[:]
call[name[pred]][name[vertex]] assign[=] name[current]
variable[time] assign[=] call[name[DFSvisit], parameter[name[G], name[vertex], name[time]]]
call[name[color]][name[current]] assign[=] constant[black]
<ast.AugAssign object at 0x7da18fe92c50>
call[name[finish]][name[current]] assign[=] name[time]
return[name[time]]
for taget[name[vertex]] in starred[name[G].vertices] begin[:]
call[name[color]][name[vertex]] assign[=] constant[white]
call[name[pred]][name[vertex]] assign[=] constant[None]
call[name[reach]][name[vertex]] assign[=] constant[0]
call[name[finish]][name[vertex]] assign[=] constant[0]
variable[time] assign[=] constant[0]
for taget[name[vertex]] in starred[name[G].vertices] begin[:]
if compare[call[name[color]][name[vertex]] equal[==] constant[white]] begin[:]
variable[time] assign[=] call[name[DFSvisit], parameter[name[G], name[vertex], name[time]]]
variable[vertex_data] assign[=] dictionary[[], []]
for taget[name[vertex]] in starred[name[G].vertices] begin[:]
call[name[vertex_data]][name[vertex]] assign[=] tuple[[<ast.Subscript object at 0x7da1b14c63b0>, <ast.Subscript object at 0x7da20eb29a50>, <ast.Subscript object at 0x7da20eb29e70>]]
return[name[vertex_data]] | keyword[def] identifier[DFS] ( identifier[G] ):
literal[string]
keyword[if] keyword[not] identifier[G] . identifier[vertices] :
keyword[raise] identifier[GraphInsertError] ( literal[string] )
identifier[color] ={}
identifier[pred] ={}
identifier[reach] ={}
identifier[finish] ={}
keyword[def] identifier[DFSvisit] ( identifier[G] , identifier[current] , identifier[time] ):
identifier[color] [ identifier[current] ]= literal[string]
identifier[time] += literal[int]
identifier[reach] [ identifier[current] ]= identifier[time]
keyword[for] identifier[vertex] keyword[in] identifier[G] . identifier[vertices] [ identifier[current] ]:
keyword[if] identifier[color] [ identifier[vertex] ]== literal[string] :
identifier[pred] [ identifier[vertex] ]= identifier[current]
identifier[time] = identifier[DFSvisit] ( identifier[G] , identifier[vertex] , identifier[time] )
identifier[color] [ identifier[current] ]= literal[string]
identifier[time] += literal[int]
identifier[finish] [ identifier[current] ]= identifier[time]
keyword[return] identifier[time]
keyword[for] identifier[vertex] keyword[in] identifier[G] . identifier[vertices] :
identifier[color] [ identifier[vertex] ]= literal[string]
identifier[pred] [ identifier[vertex] ]= keyword[None]
identifier[reach] [ identifier[vertex] ]= literal[int]
identifier[finish] [ identifier[vertex] ]= literal[int]
identifier[time] = literal[int]
keyword[for] identifier[vertex] keyword[in] identifier[G] . identifier[vertices] :
keyword[if] identifier[color] [ identifier[vertex] ]== literal[string] :
identifier[time] = identifier[DFSvisit] ( identifier[G] , identifier[vertex] , identifier[time] )
identifier[vertex_data] ={}
keyword[for] identifier[vertex] keyword[in] identifier[G] . identifier[vertices] :
identifier[vertex_data] [ identifier[vertex] ]=( identifier[pred] [ identifier[vertex] ], identifier[reach] [ identifier[vertex] ], identifier[finish] [ identifier[vertex] ])
keyword[return] identifier[vertex_data] | def DFS(G):
"""
Algorithm for depth-first searching the vertices of a graph.
"""
if not G.vertices:
raise GraphInsertError('This graph have no vertices.') # depends on [control=['if'], data=[]]
color = {}
pred = {}
reach = {}
finish = {}
def DFSvisit(G, current, time):
color[current] = 'grey'
time += 1
reach[current] = time
for vertex in G.vertices[current]:
if color[vertex] == 'white':
pred[vertex] = current
time = DFSvisit(G, vertex, time) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['vertex']]
color[current] = 'black'
time += 1
finish[current] = time
return time
for vertex in G.vertices:
color[vertex] = 'white'
pred[vertex] = None
reach[vertex] = 0
finish[vertex] = 0 # depends on [control=['for'], data=['vertex']]
time = 0
for vertex in G.vertices:
if color[vertex] == 'white':
time = DFSvisit(G, vertex, time) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['vertex']] # Dictionary for vertex data after DFS
# -> vertex_data = {vertex: (predecessor, reach, finish), }
vertex_data = {}
for vertex in G.vertices:
vertex_data[vertex] = (pred[vertex], reach[vertex], finish[vertex]) # depends on [control=['for'], data=['vertex']]
return vertex_data |
def by_id(cls, id, conn=None, google_user=None,
          google_password=None):
    """Open a spreadsheet by its resource ID.

    Resource IDs identify a document unambiguously, unlike titles, so
    this is the preferred way to open one. Credentials (or an existing
    connection) are forwarded to ``Connection.connect``.
    """
    connection = Connection.connect(conn=conn, google_user=google_user,
                                    google_password=google_password)
    return cls(id=id, conn=connection)
constant[ Open a spreadsheet via its resource ID. This is more precise
than opening a document by title, and should be used with
preference. ]
variable[conn] assign[=] call[name[Connection].connect, parameter[]]
return[call[name[cls], parameter[]]] | keyword[def] identifier[by_id] ( identifier[cls] , identifier[id] , identifier[conn] = keyword[None] , identifier[google_user] = keyword[None] ,
identifier[google_password] = keyword[None] ):
literal[string]
identifier[conn] = identifier[Connection] . identifier[connect] ( identifier[conn] = identifier[conn] , identifier[google_user] = identifier[google_user] ,
identifier[google_password] = identifier[google_password] )
keyword[return] identifier[cls] ( identifier[id] = identifier[id] , identifier[conn] = identifier[conn] ) | def by_id(cls, id, conn=None, google_user=None, google_password=None):
""" Open a spreadsheet via its resource ID. This is more precise
than opening a document by title, and should be used with
preference. """
conn = Connection.connect(conn=conn, google_user=google_user, google_password=google_password)
return cls(id=id, conn=conn) |
def resume(self):
    """Restore a previously pickled exception and re-raise it; no-op if
    nothing was saved."""
    if '_saved' not in vars(self):
        return
    exc_type, exc_value = [pickle.loads(blob) for blob in self._saved]
    six.reraise(exc_type, exc_value, self._tb)
constant[restore and re-raise any exception]
if compare[constant[_saved] <ast.NotIn object at 0x7da2590d7190> call[name[vars], parameter[name[self]]]] begin[:]
return[None]
<ast.Tuple object at 0x7da1b1b11870> assign[=] call[name[map], parameter[name[pickle].loads, name[self]._saved]]
call[name[six].reraise, parameter[name[type], name[exc], name[self]._tb]] | keyword[def] identifier[resume] ( identifier[self] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[vars] ( identifier[self] ):
keyword[return]
identifier[type] , identifier[exc] = identifier[map] ( identifier[pickle] . identifier[loads] , identifier[self] . identifier[_saved] )
identifier[six] . identifier[reraise] ( identifier[type] , identifier[exc] , identifier[self] . identifier[_tb] ) | def resume(self):
"""restore and re-raise any exception"""
if '_saved' not in vars(self):
return # depends on [control=['if'], data=[]]
(type, exc) = map(pickle.loads, self._saved)
six.reraise(type, exc, self._tb) |
def rename_afw_states(afw: dict, suffix: str):
    """ Side effect on input! Prefixes every state name of the AFW
    with **suffix**, rewriting states, initial/accepting states and
    transition formulae accordingly.
    Utility used during testing so that two automata never share state
    names. Avoid suffixes that form special words like "as", "and", ...
    :param dict afw: input AFW (mutated in place).
    :param str suffix: string prepended to each state name.
    """
    # old name -> new name, used both for the state sets and for
    # rewriting the boolean formulae inside transitions.
    renamed = {state: suffix + state for state in afw['states']}
    afw['accepting_states'] = {
        new for old, new in renamed.items()
        if old in afw['accepting_states']
    }
    afw['states'] = set(renamed.values())
    afw['initial_state'] = suffix + afw['initial_state']
    afw['transitions'] = {
        (__replace_all(renamed, formula), symbol):
            __replace_all(renamed, destination)
        for (formula, symbol), destination in afw['transitions'].items()
    }
constant[ Side effect on input! Renames all the states of the AFW
adding a **suffix**.
It is an utility function used during testing to avoid automata to have
states with names in common.
Avoid suffix that can lead to special name like "as", "and",...
:param dict afw: input AFW.
:param str suffix: string to be added at beginning of each state name.
]
variable[conversion_dict] assign[=] dictionary[[], []]
variable[new_states] assign[=] call[name[set], parameter[]]
variable[new_accepting] assign[=] call[name[set], parameter[]]
for taget[name[state]] in starred[call[name[afw]][constant[states]]] begin[:]
call[name[conversion_dict]][name[state]] assign[=] binary_operation[binary_operation[constant[] + name[suffix]] + name[state]]
call[name[new_states].add, parameter[binary_operation[binary_operation[constant[] + name[suffix]] + name[state]]]]
if compare[name[state] in call[name[afw]][constant[accepting_states]]] begin[:]
call[name[new_accepting].add, parameter[binary_operation[binary_operation[constant[] + name[suffix]] + name[state]]]]
call[name[afw]][constant[states]] assign[=] name[new_states]
call[name[afw]][constant[initial_state]] assign[=] binary_operation[binary_operation[constant[] + name[suffix]] + call[name[afw]][constant[initial_state]]]
call[name[afw]][constant[accepting_states]] assign[=] name[new_accepting]
variable[new_transitions] assign[=] dictionary[[], []]
for taget[name[transition]] in starred[call[name[afw]][constant[transitions]]] begin[:]
variable[new_transition] assign[=] call[name[__replace_all], parameter[name[conversion_dict], call[name[transition]][constant[0]]]]
call[name[new_transitions]][tuple[[<ast.Name object at 0x7da1b26485e0>, <ast.Subscript object at 0x7da1b2648bb0>]]] assign[=] call[name[__replace_all], parameter[name[conversion_dict], call[call[name[afw]][constant[transitions]]][name[transition]]]]
call[name[afw]][constant[transitions]] assign[=] name[new_transitions] | keyword[def] identifier[rename_afw_states] ( identifier[afw] : identifier[dict] , identifier[suffix] : identifier[str] ):
literal[string]
identifier[conversion_dict] ={}
identifier[new_states] = identifier[set] ()
identifier[new_accepting] = identifier[set] ()
keyword[for] identifier[state] keyword[in] identifier[afw] [ literal[string] ]:
identifier[conversion_dict] [ identifier[state] ]= literal[string] + identifier[suffix] + identifier[state]
identifier[new_states] . identifier[add] ( literal[string] + identifier[suffix] + identifier[state] )
keyword[if] identifier[state] keyword[in] identifier[afw] [ literal[string] ]:
identifier[new_accepting] . identifier[add] ( literal[string] + identifier[suffix] + identifier[state] )
identifier[afw] [ literal[string] ]= identifier[new_states]
identifier[afw] [ literal[string] ]= literal[string] + identifier[suffix] + identifier[afw] [ literal[string] ]
identifier[afw] [ literal[string] ]= identifier[new_accepting]
identifier[new_transitions] ={}
keyword[for] identifier[transition] keyword[in] identifier[afw] [ literal[string] ]:
identifier[new_transition] = identifier[__replace_all] ( identifier[conversion_dict] , identifier[transition] [ literal[int] ])
identifier[new_transitions] [ identifier[new_transition] , identifier[transition] [ literal[int] ]]= identifier[__replace_all] ( identifier[conversion_dict] , identifier[afw] [ literal[string] ][ identifier[transition] ])
identifier[afw] [ literal[string] ]= identifier[new_transitions] | def rename_afw_states(afw: dict, suffix: str):
""" Side effect on input! Renames all the states of the AFW
adding a **suffix**.
It is an utility function used during testing to avoid automata to have
states with names in common.
Avoid suffix that can lead to special name like "as", "and",...
:param dict afw: input AFW.
:param str suffix: string to be added at beginning of each state name.
"""
conversion_dict = {}
new_states = set()
new_accepting = set()
for state in afw['states']:
conversion_dict[state] = '' + suffix + state
new_states.add('' + suffix + state)
if state in afw['accepting_states']:
new_accepting.add('' + suffix + state) # depends on [control=['if'], data=['state']] # depends on [control=['for'], data=['state']]
afw['states'] = new_states
afw['initial_state'] = '' + suffix + afw['initial_state']
afw['accepting_states'] = new_accepting
new_transitions = {}
for transition in afw['transitions']:
new_transition = __replace_all(conversion_dict, transition[0])
new_transitions[new_transition, transition[1]] = __replace_all(conversion_dict, afw['transitions'][transition]) # depends on [control=['for'], data=['transition']]
afw['transitions'] = new_transitions |
def monitor_run(self): # pragma: no cover
    """
    Poll the workflow until completion, displaying a console spinner.

    Returns:
        bool: True if the workflow completed successfully, False
        otherwise (the failure status is written to stdout).
    """
    spinner = itertools.cycle(['-', '/', '|', '\\'])
    while not self.complete:
        # Animate the spinner (~9 s) between completion checks.
        # Py2-only xrange/spinner.next() replaced with range/next(),
        # which work on both Python 2 and 3.
        for i in range(300):
            sys.stdout.write(next(spinner))
            sys.stdout.flush()
            sys.stdout.write('\b')
            time.sleep(0.03)
    if self.succeeded:
        sys.stdout.write("\nWorkflow completed successfully\n")
        return True
    else:
        sys.stdout.write("\nWorkflow failed: %s\n" % self.status)
        return False
constant[
Monitor the workflows events and display spinner while running.
:param workflow: the workflow object
]
variable[spinner] assign[=] call[name[itertools].cycle, parameter[list[[<ast.Constant object at 0x7da18f722ad0>, <ast.Constant object at 0x7da18f721e70>, <ast.Constant object at 0x7da18f721b70>, <ast.Constant object at 0x7da18f721780>]]]]
while <ast.UnaryOp object at 0x7da18f723850> begin[:]
for taget[name[i]] in starred[call[name[xrange], parameter[constant[300]]]] begin[:]
call[name[sys].stdout.write, parameter[call[name[spinner].next, parameter[]]]]
call[name[sys].stdout.flush, parameter[]]
call[name[sys].stdout.write, parameter[constant[]]]
call[name[time].sleep, parameter[constant[0.03]]]
if name[self].succeeded begin[:]
call[name[sys].stdout.write, parameter[constant[
Workflow completed successfully
]]]
return[constant[True]] | keyword[def] identifier[monitor_run] ( identifier[self] ):
literal[string]
identifier[spinner] = identifier[itertools] . identifier[cycle] ([ literal[string] , literal[string] , literal[string] , literal[string] ])
keyword[while] keyword[not] identifier[self] . identifier[complete] :
keyword[for] identifier[i] keyword[in] identifier[xrange] ( literal[int] ):
identifier[sys] . identifier[stdout] . identifier[write] ( identifier[spinner] . identifier[next] ())
identifier[sys] . identifier[stdout] . identifier[flush] ()
identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] )
identifier[time] . identifier[sleep] ( literal[int] )
keyword[if] identifier[self] . identifier[succeeded] :
identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] )
keyword[return] keyword[True]
keyword[else] :
identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] % identifier[self] . identifier[status] )
keyword[return] keyword[False] | def monitor_run(self): # pragma: no cover
'\n Monitor the workflows events and display spinner while running.\n :param workflow: the workflow object\n '
spinner = itertools.cycle(['-', '/', '|', '\\'])
while not self.complete:
for i in xrange(300):
sys.stdout.write(spinner.next())
sys.stdout.flush()
sys.stdout.write('\x08')
time.sleep(0.03) # depends on [control=['for'], data=[]] # depends on [control=['while'], data=[]]
if self.succeeded:
sys.stdout.write('\nWorkflow completed successfully\n')
return True # depends on [control=['if'], data=[]]
else:
sys.stdout.write('\nWorkflow failed: %s\n' % self.status)
return False |
def set_options(cls, obj, options=None, backend=None, **kwargs):
    """
    Pure Python function for customize HoloViews objects in terms of
    their style, plot and normalization options.
    The options specification is a dictionary containing the target
    for customization as a {type}.{group}.{label} keys. An example of
    such a key is 'Image' which would customize all Image components
    in the object. The key 'Image.Channel' would only customize Images
    in the object that have the group 'Channel'.
    The corresponding value is then a list of Option objects specified
    with an appropriate category ('plot', 'style' or 'norm'). For
    instance, using the keys described above, the specs could be:
    {'Image:[Options('style', cmap='jet')]}
    Or setting two types of option at once:
    {'Image.Channel':[Options('plot', size=50),
                      Options('style', cmap='Blues')]}
    Relationship to the %%opts magic
    ----------------------------------
    This function matches the functionality supplied by the %%opts
    cell magic in the IPython extension. In fact, you can use the same
    syntax as the IPython cell magic to achieve the same customization
    as shown above:
    from holoviews.util.parser import OptsSpec
    set_options(my_image, OptsSpec.parse("Image (cmap='jet')"))
    Then setting both plot and style options:
    set_options(my_image, OptsSpec.parse("Image [size=50] (cmap='Blues')"))
    """
    # Note that an alternate, more verbose and less recommended
    # syntax can also be used:
    # {'Image.Channel:{'plot': Options(size=50),
    #                  'style': Options('style', cmap='Blues')]}
    # Fold keyword shortcuts into the spec, one entry per option group
    # registered for this backend (presumably 'plot'/'style'/'norm').
    options = cls.merge_options(Store.options(backend=backend).groups.keys(), options, **kwargs)
    # NOTE(review): expand_compositor_keys appears to rewrite keys
    # produced by compositor operations into concrete specs — confirm.
    spec, compositor_applied = cls.expand_compositor_keys(options)
    # Build per-object custom option trees plus the (old id, new id)
    # pairs needed to attach them, then register the trees.
    custom_trees, id_mapping = cls.create_custom_trees(obj, spec)
    cls.update_backends(id_mapping, custom_trees, backend=backend)
    # Propagate ids to the objects
    not_used = []
    for (match_id, new_id) in id_mapping:
        applied = cls.propagate_ids(obj, match_id, new_id, compositor_applied+list(spec.keys()), backend=backend)
        if not applied:
            # Track ids that matched nothing so their trees can be freed.
            not_used.append(new_id)
    # Clean up unused custom option trees
    for new_id in set(not_used):
        cleanup_custom_options(new_id)
    return obj
constant[
Pure Python function for customize HoloViews objects in terms of
their style, plot and normalization options.
The options specification is a dictionary containing the target
for customization as a {type}.{group}.{label} keys. An example of
such a key is 'Image' which would customize all Image components
in the object. The key 'Image.Channel' would only customize Images
in the object that have the group 'Channel'.
The corresponding value is then a list of Option objects specified
with an appropriate category ('plot', 'style' or 'norm'). For
instance, using the keys described above, the specs could be:
{'Image:[Options('style', cmap='jet')]}
Or setting two types of option at once:
{'Image.Channel':[Options('plot', size=50),
Options('style', cmap='Blues')]}
Relationship to the %%opts magic
----------------------------------
This function matches the functionality supplied by the %%opts
cell magic in the IPython extension. In fact, you can use the same
syntax as the IPython cell magic to achieve the same customization
as shown above:
from holoviews.util.parser import OptsSpec
set_options(my_image, OptsSpec.parse("Image (cmap='jet')"))
Then setting both plot and style options:
set_options(my_image, OptsSpec.parse("Image [size=50] (cmap='Blues')"))
]
variable[options] assign[=] call[name[cls].merge_options, parameter[call[call[name[Store].options, parameter[]].groups.keys, parameter[]], name[options]]]
<ast.Tuple object at 0x7da20c9939a0> assign[=] call[name[cls].expand_compositor_keys, parameter[name[options]]]
<ast.Tuple object at 0x7da20c991540> assign[=] call[name[cls].create_custom_trees, parameter[name[obj], name[spec]]]
call[name[cls].update_backends, parameter[name[id_mapping], name[custom_trees]]]
variable[not_used] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c991c90>, <ast.Name object at 0x7da20c993e20>]]] in starred[name[id_mapping]] begin[:]
variable[applied] assign[=] call[name[cls].propagate_ids, parameter[name[obj], name[match_id], name[new_id], binary_operation[name[compositor_applied] + call[name[list], parameter[call[name[spec].keys, parameter[]]]]]]]
if <ast.UnaryOp object at 0x7da20c991870> begin[:]
call[name[not_used].append, parameter[name[new_id]]]
for taget[name[new_id]] in starred[call[name[set], parameter[name[not_used]]]] begin[:]
call[name[cleanup_custom_options], parameter[name[new_id]]]
return[name[obj]] | keyword[def] identifier[set_options] ( identifier[cls] , identifier[obj] , identifier[options] = keyword[None] , identifier[backend] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[options] = identifier[cls] . identifier[merge_options] ( identifier[Store] . identifier[options] ( identifier[backend] = identifier[backend] ). identifier[groups] . identifier[keys] (), identifier[options] ,** identifier[kwargs] )
identifier[spec] , identifier[compositor_applied] = identifier[cls] . identifier[expand_compositor_keys] ( identifier[options] )
identifier[custom_trees] , identifier[id_mapping] = identifier[cls] . identifier[create_custom_trees] ( identifier[obj] , identifier[spec] )
identifier[cls] . identifier[update_backends] ( identifier[id_mapping] , identifier[custom_trees] , identifier[backend] = identifier[backend] )
identifier[not_used] =[]
keyword[for] ( identifier[match_id] , identifier[new_id] ) keyword[in] identifier[id_mapping] :
identifier[applied] = identifier[cls] . identifier[propagate_ids] ( identifier[obj] , identifier[match_id] , identifier[new_id] , identifier[compositor_applied] + identifier[list] ( identifier[spec] . identifier[keys] ()), identifier[backend] = identifier[backend] )
keyword[if] keyword[not] identifier[applied] :
identifier[not_used] . identifier[append] ( identifier[new_id] )
keyword[for] identifier[new_id] keyword[in] identifier[set] ( identifier[not_used] ):
identifier[cleanup_custom_options] ( identifier[new_id] )
keyword[return] identifier[obj] | def set_options(cls, obj, options=None, backend=None, **kwargs):
"""
Pure Python function for customize HoloViews objects in terms of
their style, plot and normalization options.
The options specification is a dictionary containing the target
for customization as a {type}.{group}.{label} keys. An example of
such a key is 'Image' which would customize all Image components
in the object. The key 'Image.Channel' would only customize Images
in the object that have the group 'Channel'.
The corresponding value is then a list of Option objects specified
with an appropriate category ('plot', 'style' or 'norm'). For
instance, using the keys described above, the specs could be:
{'Image:[Options('style', cmap='jet')]}
Or setting two types of option at once:
{'Image.Channel':[Options('plot', size=50),
Options('style', cmap='Blues')]}
Relationship to the %%opts magic
----------------------------------
This function matches the functionality supplied by the %%opts
cell magic in the IPython extension. In fact, you can use the same
syntax as the IPython cell magic to achieve the same customization
as shown above:
from holoviews.util.parser import OptsSpec
set_options(my_image, OptsSpec.parse("Image (cmap='jet')"))
Then setting both plot and style options:
set_options(my_image, OptsSpec.parse("Image [size=50] (cmap='Blues')"))
"""
# Note that an alternate, more verbose and less recommended
# syntax can also be used:
# {'Image.Channel:{'plot': Options(size=50),
# 'style': Options('style', cmap='Blues')]}
options = cls.merge_options(Store.options(backend=backend).groups.keys(), options, **kwargs)
(spec, compositor_applied) = cls.expand_compositor_keys(options)
(custom_trees, id_mapping) = cls.create_custom_trees(obj, spec)
cls.update_backends(id_mapping, custom_trees, backend=backend)
# Propagate ids to the objects
not_used = []
for (match_id, new_id) in id_mapping:
applied = cls.propagate_ids(obj, match_id, new_id, compositor_applied + list(spec.keys()), backend=backend)
if not applied:
not_used.append(new_id) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# Clean up unused custom option trees
for new_id in set(not_used):
cleanup_custom_options(new_id) # depends on [control=['for'], data=['new_id']]
return obj |
def scan_ipaddr(ipaddr, line, project, split_path, apikey):
    """
    Record a suspected IP address and scan it with the VirusTotal API.

    Logs the finding, appends it to the per-project IP report file, asks
    VirusTotal for the address's DNS history, and records any malicious
    URLs the address has been known to resolve to.

    Args:
        ipaddr: Parsed IP address string to scan.
        line: Raw source line the address was found on (logged only).
        project: Project name used to build the report file name.
        split_path: Path of the file the address was found in.
        apikey: VirusTotal API key.
    """
    logger.info('Found what I believe is an IP Address: %s', line.strip())
    logger.info('File %s. Parsed IP Address: %s', split_path, ipaddr)
    # Build the report path once instead of repeating the concatenation.
    report_path = reports_dir + "ips-" + project + ".log"
    with open(report_path, "a") as gate_report:
        gate_report.write('File {} contains what I believe is an IP Address: {}\n'.format(split_path, ipaddr))
    v_api = virus_total.VirusTotal()
    scan_ip = v_api.send_ip(ipaddr, apikey)
    # Only 'detected_urls' is consumed; the response's 'response_code' and
    # 'verbose_msg' fields were previously read into unused locals.
    urls = scan_ip['detected_urls']
    with open(report_path, "a") as gate_report:
        if urls:
            logger.error('%s has been known to resolve to the following malicious urls:', ipaddr)
            gate_report.write('{} has been known to resolve to the following malicious urls:\n'.format(ipaddr))
            for url in urls:
                logger.info('%s on date: %s', url['url'], url['scan_date'])
                gate_report.write('{} on {}\n'.format(url['url'], url['scan_date']))
                # Brief pause between entries; presumably to respect the
                # VirusTotal API rate limit -- TODO confirm.
                sleep(0.2)
        else:
            logger.info('No malicious DNS history found for: %s', ipaddr)
            gate_report.write('No malicious DNS history found for: {}\n'.format(ipaddr))
constant[
If an IP Address is found, scan it
]
call[name[logger].info, parameter[constant[Found what I believe is an IP Address: %s], call[name[line].strip, parameter[]]]]
call[name[logger].info, parameter[constant[File %s. Parsed IP Address: %s], name[split_path], name[ipaddr]]]
with call[name[open], parameter[binary_operation[binary_operation[binary_operation[name[reports_dir] + constant[ips-]] + name[project]] + constant[.log]], constant[a]]] begin[:]
call[name[gate_report].write, parameter[call[constant[File {} contains what I believe is an IP Address: {}
].format, parameter[name[split_path], name[ipaddr]]]]]
variable[v_api] assign[=] call[name[virus_total].VirusTotal, parameter[]]
variable[scan_ip] assign[=] call[name[v_api].send_ip, parameter[name[ipaddr], name[apikey]]]
variable[response_code] assign[=] call[name[scan_ip]][constant[response_code]]
variable[verbose_msg] assign[=] call[name[scan_ip]][constant[verbose_msg]]
variable[urls] assign[=] call[name[scan_ip]][constant[detected_urls]]
with call[name[open], parameter[binary_operation[binary_operation[binary_operation[name[reports_dir] + constant[ips-]] + name[project]] + constant[.log]], constant[a]]] begin[:]
if name[urls] begin[:]
call[name[logger].error, parameter[constant[%s has been known to resolve to the following malicious urls:], name[ipaddr]]]
call[name[gate_report].write, parameter[call[constant[{} has been known to resolve to the following malicious urls:
].format, parameter[name[ipaddr]]]]]
for taget[name[url]] in starred[name[urls]] begin[:]
call[name[logger].info, parameter[constant[%s on date: %s], call[name[url]][constant[url]], call[name[url]][constant[scan_date]]]]
call[name[gate_report].write, parameter[call[constant[{} on {}
].format, parameter[call[name[url]][constant[url]], call[name[url]][constant[scan_date]]]]]]
call[name[sleep], parameter[constant[0.2]]] | keyword[def] identifier[scan_ipaddr] ( identifier[ipaddr] , identifier[line] , identifier[project] , identifier[split_path] , identifier[apikey] ):
literal[string]
identifier[logger] . identifier[info] ( literal[string] , identifier[line] . identifier[strip] ())
identifier[logger] . identifier[info] ( literal[string] , identifier[split_path] , identifier[ipaddr] )
keyword[with] identifier[open] ( identifier[reports_dir] + literal[string] + identifier[project] + literal[string] , literal[string] ) keyword[as] identifier[gate_report] :
identifier[gate_report] . identifier[write] ( literal[string] . identifier[format] ( identifier[split_path] , identifier[ipaddr] ))
identifier[v_api] = identifier[virus_total] . identifier[VirusTotal] ()
identifier[scan_ip] = identifier[v_api] . identifier[send_ip] ( identifier[ipaddr] , identifier[apikey] )
identifier[response_code] = identifier[scan_ip] [ literal[string] ]
identifier[verbose_msg] = identifier[scan_ip] [ literal[string] ]
identifier[urls] = identifier[scan_ip] [ literal[string] ]
keyword[with] identifier[open] ( identifier[reports_dir] + literal[string] + identifier[project] + literal[string] , literal[string] ) keyword[as] identifier[gate_report] :
keyword[if] identifier[urls] :
identifier[logger] . identifier[error] ( literal[string] , identifier[ipaddr] )
identifier[gate_report] . identifier[write] ( literal[string] . identifier[format] ( identifier[ipaddr] ))
keyword[for] identifier[url] keyword[in] identifier[urls] :
identifier[logger] . identifier[info] ( literal[string] , identifier[url] [ literal[string] ], identifier[url] [ literal[string] ])
identifier[gate_report] . identifier[write] ( literal[string] . identifier[format] ( identifier[url] [ literal[string] ], identifier[url] [ literal[string] ]))
identifier[sleep] ( literal[int] )
keyword[else] :
identifier[logger] . identifier[info] ( literal[string] , identifier[ipaddr] )
identifier[gate_report] . identifier[write] ( literal[string] . identifier[format] ( identifier[ipaddr] )) | def scan_ipaddr(ipaddr, line, project, split_path, apikey):
"""
If an IP Address is found, scan it
"""
logger.info('Found what I believe is an IP Address: %s', line.strip())
logger.info('File %s. Parsed IP Address: %s', split_path, ipaddr)
with open(reports_dir + 'ips-' + project + '.log', 'a') as gate_report:
gate_report.write('File {} contains what I believe is an IP Address: {}\n'.format(split_path, ipaddr)) # depends on [control=['with'], data=['gate_report']]
v_api = virus_total.VirusTotal()
scan_ip = v_api.send_ip(ipaddr, apikey)
response_code = scan_ip['response_code']
verbose_msg = scan_ip['verbose_msg']
urls = scan_ip['detected_urls']
with open(reports_dir + 'ips-' + project + '.log', 'a') as gate_report:
if urls:
logger.error('%s has been known to resolve to the following malicious urls:', ipaddr)
gate_report.write('{} has been known to resolve to the following malicious urls:\n'.format(ipaddr))
for url in urls:
logger.info('%s on date: %s', url['url'], url['scan_date'])
gate_report.write('{} on {}\n'.format(url['url'], url['scan_date']))
sleep(0.2) # depends on [control=['for'], data=['url']] # depends on [control=['if'], data=[]]
else:
logger.info('No malicious DNS history found for: %s', ipaddr)
gate_report.write('No malicious DNS history found for: {}\n'.format(ipaddr)) # depends on [control=['with'], data=['gate_report']] |
def get_nets_radb(self, response, is_http=False):
    """
    Parse network blocks out of RADB ASN origin data.

    Scans ``response`` for ``route:`` / ``route6:`` objects and records the
    CIDR value of each together with its position in the text.

    Args:
        response (:obj:`str`): Response body from the RADB whois or HTTP
            server.
        is_http (:obj:`bool`): True when the response came from the RADB
            HTTP endpoint rather than whois. Defaults to False.

    Returns:
        list: One dictionary per network block found::

            [{
                'cidr' (str) - The assigned CIDR
                'start' (int) - Index of the start of the parsed block
                'end' (int) - Index of the end of the parsed block
            }]
    """
    # HTTP responses terminate values with <br>; whois output is line-based.
    if is_http:
        pattern = r'route(?:6)?:[^\S\n]+(?P<val>.+?)<br>'
    else:
        pattern = r'^route(?:6)?:[^\S\n]+(?P<val>.+|.+)$'

    networks = []
    for found in re.finditer(pattern, response, re.MULTILINE):

        try:

            entry = copy.deepcopy(BASE_NET)
            entry['cidr'] = found.group(1).strip()
            entry['start'] = found.start()
            entry['end'] = found.end()
            networks.append(entry)

        except ValueError:  # pragma: no cover

            pass

    return networks
constant[
The function for parsing network blocks from ASN origin data.
Args:
response (:obj:`str`): The response from the RADB whois/http
server.
is_http (:obj:`bool`): If the query is RADB HTTP instead of whois,
set to True. Defaults to False.
Returns:
list: A list of network block dictionaries
::
[{
'cidr' (str) - The assigned CIDR
'start' (int) - The index for the start of the parsed
network block
'end' (int) - The index for the end of the parsed network
block
}]
]
variable[nets] assign[=] list[[]]
if name[is_http] begin[:]
variable[regex] assign[=] constant[route(?:6)?:[^\S\n]+(?P<val>.+?)<br>]
for taget[name[match]] in starred[call[name[re].finditer, parameter[name[regex], name[response], name[re].MULTILINE]]] begin[:]
<ast.Try object at 0x7da1b1528070>
return[name[nets]] | keyword[def] identifier[get_nets_radb] ( identifier[self] , identifier[response] , identifier[is_http] = keyword[False] ):
literal[string]
identifier[nets] =[]
keyword[if] identifier[is_http] :
identifier[regex] = literal[string]
keyword[else] :
identifier[regex] = literal[string]
keyword[for] identifier[match] keyword[in] identifier[re] . identifier[finditer] (
identifier[regex] ,
identifier[response] ,
identifier[re] . identifier[MULTILINE]
):
keyword[try] :
identifier[net] = identifier[copy] . identifier[deepcopy] ( identifier[BASE_NET] )
identifier[net] [ literal[string] ]= identifier[match] . identifier[group] ( literal[int] ). identifier[strip] ()
identifier[net] [ literal[string] ]= identifier[match] . identifier[start] ()
identifier[net] [ literal[string] ]= identifier[match] . identifier[end] ()
identifier[nets] . identifier[append] ( identifier[net] )
keyword[except] identifier[ValueError] :
keyword[pass]
keyword[return] identifier[nets] | def get_nets_radb(self, response, is_http=False):
"""
The function for parsing network blocks from ASN origin data.
Args:
response (:obj:`str`): The response from the RADB whois/http
server.
is_http (:obj:`bool`): If the query is RADB HTTP instead of whois,
set to True. Defaults to False.
Returns:
list: A list of network block dictionaries
::
[{
'cidr' (str) - The assigned CIDR
'start' (int) - The index for the start of the parsed
network block
'end' (int) - The index for the end of the parsed network
block
}]
"""
nets = []
if is_http:
regex = 'route(?:6)?:[^\\S\\n]+(?P<val>.+?)<br>' # depends on [control=['if'], data=[]]
else:
regex = '^route(?:6)?:[^\\S\\n]+(?P<val>.+|.+)$'
# Iterate through all of the networks found, storing the CIDR value
# and the start and end positions.
for match in re.finditer(regex, response, re.MULTILINE):
try:
net = copy.deepcopy(BASE_NET)
net['cidr'] = match.group(1).strip()
net['start'] = match.start()
net['end'] = match.end()
nets.append(net) # depends on [control=['try'], data=[]]
except ValueError: # pragma: no cover
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['match']]
return nets |
def fancy_SCAT(points, low_bound, high_bound, x_max, y_max):
    """
    Run the SCAT test over every point series and return 'Pass' or 'Fail'.

    Each point in each series of ``points`` (e.g. 'points_arai',
    'points_ptrm', 'points_tail') is checked against the SCAT box; a point
    falling outside marks both the overall result and that series' result
    as 'Fail'.
    """
    overall = 'Pass'
    per_type = {'SCAT_arai': 'Pass', 'SCAT_ptrm': 'Pass', 'SCAT_tail': 'Pass'}
    for series_name in points:
        for pt in points[series_name]:
            inside = in_SCAT_box(pt[0], pt[1], low_bound, high_bound, x_max, y_max)
            if not inside:
                # Map e.g. 'points_arai' -> 'SCAT_arai'.
                per_type['SCAT' + series_name[6:]] = 'Fail'
                overall = 'Fail'
    return overall, per_type
constant[
runs SCAT test and returns 'Pass' or 'Fail'
]
variable[SCAT] assign[=] constant[Pass]
variable[SCATs] assign[=] dictionary[[<ast.Constant object at 0x7da18f00e560>, <ast.Constant object at 0x7da18f00e320>, <ast.Constant object at 0x7da18f00f3a0>], [<ast.Constant object at 0x7da18f00f160>, <ast.Constant object at 0x7da18f00d6c0>, <ast.Constant object at 0x7da18f00f1f0>]]
for taget[name[point_type]] in starred[name[points]] begin[:]
for taget[name[point]] in starred[call[name[points]][name[point_type]]] begin[:]
variable[result] assign[=] call[name[in_SCAT_box], parameter[call[name[point]][constant[0]], call[name[point]][constant[1]], name[low_bound], name[high_bound], name[x_max], name[y_max]]]
if <ast.UnaryOp object at 0x7da18f00c640> begin[:]
variable[x] assign[=] binary_operation[constant[SCAT] + call[name[point_type]][<ast.Slice object at 0x7da18f00d240>]]
call[name[SCATs]][name[x]] assign[=] constant[Fail]
variable[SCAT] assign[=] constant[Fail]
return[tuple[[<ast.Name object at 0x7da18f00ef80>, <ast.Name object at 0x7da18f00c310>]]] | keyword[def] identifier[fancy_SCAT] ( identifier[points] , identifier[low_bound] , identifier[high_bound] , identifier[x_max] , identifier[y_max] ):
literal[string]
identifier[SCAT] = literal[string]
identifier[SCATs] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] }
keyword[for] identifier[point_type] keyword[in] identifier[points] :
keyword[for] identifier[point] keyword[in] identifier[points] [ identifier[point_type] ]:
identifier[result] = identifier[in_SCAT_box] ( identifier[point] [ literal[int] ], identifier[point] [ literal[int] ], identifier[low_bound] , identifier[high_bound] , identifier[x_max] , identifier[y_max] )
keyword[if] keyword[not] identifier[result] :
identifier[x] = literal[string] + identifier[point_type] [ literal[int] :]
identifier[SCATs] [ identifier[x] ]= literal[string]
identifier[SCAT] = literal[string]
keyword[return] identifier[SCAT] , identifier[SCATs] | def fancy_SCAT(points, low_bound, high_bound, x_max, y_max):
"""
runs SCAT test and returns 'Pass' or 'Fail'
"""
# iterate through all relevant points and see if any of them fall outside of your SCAT box
# {'points_arai': [(x,y),(x,y)], 'points_ptrm': [(x,y),(x,y)], ...}
SCAT = 'Pass'
SCATs = {'SCAT_arai': 'Pass', 'SCAT_ptrm': 'Pass', 'SCAT_tail': 'Pass'}
for point_type in points:
#print 'point_type', point_type
for point in points[point_type]:
#print 'point', point
result = in_SCAT_box(point[0], point[1], low_bound, high_bound, x_max, y_max)
if not result:
# print "SCAT TEST FAILED"
x = 'SCAT' + point_type[6:]
#print 'lib point type', point_type
#print 'xxxx', x
SCATs[x] = 'Fail'
SCAT = 'Fail' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['point']] # depends on [control=['for'], data=['point_type']]
return (SCAT, SCATs) |
def extract_adf (archive, compression, cmd, verbosity, interactive, outdir):
    """Build the command list used to extract an ADF archive.

    ``compression``, ``verbosity`` and ``interactive`` are accepted for
    interface compatibility with the other extractors but are not used.
    """
    extraction_cmd = [cmd, archive]
    extraction_cmd.extend(['-d', outdir])
    return extraction_cmd
constant[Extract an ADF archive.]
return[list[[<ast.Name object at 0x7da1b0715480>, <ast.Name object at 0x7da1b0716920>, <ast.Constant object at 0x7da1b07163b0>, <ast.Name object at 0x7da1b0717dc0>]]] | keyword[def] identifier[extract_adf] ( identifier[archive] , identifier[compression] , identifier[cmd] , identifier[verbosity] , identifier[interactive] , identifier[outdir] ):
literal[string]
keyword[return] [ identifier[cmd] , identifier[archive] , literal[string] , identifier[outdir] ] | def extract_adf(archive, compression, cmd, verbosity, interactive, outdir):
"""Extract an ADF archive."""
return [cmd, archive, '-d', outdir] |
def generate_poisson_noise(image, exposure_time_map, seed=-1):
    """
    Generate a two-dimensional Poisson noise map from an image.

    The image is converted to counts via the exposure time map, a Poisson
    realization is drawn from those counts, and the deviation of that
    realization from the input image (converted back to electrons per
    second) is returned.

    Parameters
    ----------
    image : ndarray
        The 2D image whose values, in counts, parameterize the Poisson
        draws.
    exposure_time_map : Union(ndarray, int)
        2D array of per-pixel exposure times used to convert to / from
        counts and electrons per second.
    seed : int
        Seed of the random number generator used for the noise map.

    Returns
    -------
    poisson_noise_map: ndarray
        An array describing the simulated Poisson noise.
    """
    setup_random_seed(seed)
    counts = np.multiply(image, exposure_time_map)
    realization = np.random.poisson(counts, image.shape)
    return image - np.divide(realization, exposure_time_map)
constant[
Generate a two-dimensional poisson noise_maps-mappers from an image.
Values are computed from a Poisson distribution using the image's input values in units of counts.
Parameters
----------
image : ndarray
The 2D image, whose values in counts are used to draw Poisson noise_maps values.
exposure_time_map : Union(ndarray, int)
2D array of the exposure time in each pixel used to convert to / from counts and electrons per second.
seed : int
The seed of the random number generator, used for the random noise_maps maps.
Returns
-------
poisson_noise_map: ndarray
An array describing simulated poisson noise_maps
]
call[name[setup_random_seed], parameter[name[seed]]]
variable[image_counts] assign[=] call[name[np].multiply, parameter[name[image], name[exposure_time_map]]]
return[binary_operation[name[image] - call[name[np].divide, parameter[call[name[np].random.poisson, parameter[name[image_counts], name[image].shape]], name[exposure_time_map]]]]] | keyword[def] identifier[generate_poisson_noise] ( identifier[image] , identifier[exposure_time_map] , identifier[seed] =- literal[int] ):
literal[string]
identifier[setup_random_seed] ( identifier[seed] )
identifier[image_counts] = identifier[np] . identifier[multiply] ( identifier[image] , identifier[exposure_time_map] )
keyword[return] identifier[image] - identifier[np] . identifier[divide] ( identifier[np] . identifier[random] . identifier[poisson] ( identifier[image_counts] , identifier[image] . identifier[shape] ), identifier[exposure_time_map] ) | def generate_poisson_noise(image, exposure_time_map, seed=-1):
"""
Generate a two-dimensional poisson noise_maps-mappers from an image.
Values are computed from a Poisson distribution using the image's input values in units of counts.
Parameters
----------
image : ndarray
The 2D image, whose values in counts are used to draw Poisson noise_maps values.
exposure_time_map : Union(ndarray, int)
2D array of the exposure time in each pixel used to convert to / from counts and electrons per second.
seed : int
The seed of the random number generator, used for the random noise_maps maps.
Returns
-------
poisson_noise_map: ndarray
An array describing simulated poisson noise_maps
"""
setup_random_seed(seed)
image_counts = np.multiply(image, exposure_time_map)
return image - np.divide(np.random.poisson(image_counts, image.shape), exposure_time_map) |
def fetch(self, customer_id, token_id, data={}, **kwargs):
    """
    Fetch the Token with the given id belonging to the given customer.

    Args:
        customer_id : Customer Id whose token is to be fetched
        token_id : Id of the Token object to fetch
        data : Optional request parameters passed through to the API call
    Returns:
        Token dict for the given token Id
    """
    # NOTE(review): `data={}` is a mutable default; harmless here since it
    # is only passed through unmodified, but worth confirming get_url never
    # mutates it.
    url = "{}/{}/tokens/{}".format(self.base_url, customer_id, token_id)
    return self.get_url(url, data, **kwargs)
constant["
Fetch Token for given Id and given customer Id
Args:
customer_id : Customer Id for which tokens have to be fetched
token_id : Id for which TOken object has to be fetched
Returns:
Token dict for given token Id
]
variable[url] assign[=] call[constant[{}/{}/tokens/{}].format, parameter[name[self].base_url, name[customer_id], name[token_id]]]
return[call[name[self].get_url, parameter[name[url], name[data]]]] | keyword[def] identifier[fetch] ( identifier[self] , identifier[customer_id] , identifier[token_id] , identifier[data] ={},** identifier[kwargs] ):
literal[string]
identifier[url] = literal[string] . identifier[format] ( identifier[self] . identifier[base_url] , identifier[customer_id] , identifier[token_id] )
keyword[return] identifier[self] . identifier[get_url] ( identifier[url] , identifier[data] ,** identifier[kwargs] ) | def fetch(self, customer_id, token_id, data={}, **kwargs):
""""
Fetch Token for given Id and given customer Id
Args:
customer_id : Customer Id for which tokens have to be fetched
token_id : Id for which TOken object has to be fetched
Returns:
Token dict for given token Id
"""
url = '{}/{}/tokens/{}'.format(self.base_url, customer_id, token_id)
return self.get_url(url, data, **kwargs) |
def _content(note, content):
    """
    Set the main body of the note.

    :param note: note object to populate
    :param content: content string that becomes the note body
    :return: the note object with its content set
    """
    header = EvernoteMgr.set_header()
    note.content = header + sanitize(content)
    return note
constant[
content of the note
:param note: note object
:param content: content string to make the main body of the note
:return:
]
name[note].content assign[=] call[name[EvernoteMgr].set_header, parameter[]]
<ast.AugAssign object at 0x7da1b26ac5e0>
return[name[note]] | keyword[def] identifier[_content] ( identifier[note] , identifier[content] ):
literal[string]
identifier[note] . identifier[content] = identifier[EvernoteMgr] . identifier[set_header] ()
identifier[note] . identifier[content] += identifier[sanitize] ( identifier[content] )
keyword[return] identifier[note] | def _content(note, content):
"""
content of the note
:param note: note object
:param content: content string to make the main body of the note
:return:
"""
note.content = EvernoteMgr.set_header()
note.content += sanitize(content)
return note |
def firstSacDist(fm):
    """
    Computes the distribution of angle and length
    combinations that were made as first saccades
    Parameters:
        fm : ocupy.fixmat
            The fixation data to be analysed
    Returns:
        The histogram produced by makeHist over the first-saccade
        angle/length pairs.
    """
    # Per-saccade angles and lengths (absolute values requested).
    ang, leng, ad, ld = anglendiff(fm, return_abs=True)
    # np.roll(..., 1) shifts the first-fixation mask forward by one entry --
    # presumably selecting the saccade made *from* each trial's first
    # fixation; TODO confirm against fixmat ordering.
    y_arg = leng[0][np.roll(fm.fix == min(fm.fix), 1)]/fm.pixels_per_degree
    x_arg = reshift(ang[0][np.roll(fm.fix == min(fm.fix), 1)])
    # Bins: unit-width length bins up to the max observed length, and
    # 1-degree angle bins spanning [-180, 180].
    bins = [list(range(int(ceil(np.nanmax(y_arg)))+1)), np.linspace(-180, 180, 361)]
    return makeHist(x_arg, y_arg, fit=None, bins = bins)
constant[
Computes the distribution of angle and length
combinations that were made as first saccades
Parameters:
fm : ocupy.fixmat
The fixation data to be analysed
]
<ast.Tuple object at 0x7da1b26ae830> assign[=] call[name[anglendiff], parameter[name[fm]]]
variable[y_arg] assign[=] binary_operation[call[call[name[leng]][constant[0]]][call[name[np].roll, parameter[compare[name[fm].fix equal[==] call[name[min], parameter[name[fm].fix]]], constant[1]]]] / name[fm].pixels_per_degree]
variable[x_arg] assign[=] call[name[reshift], parameter[call[call[name[ang]][constant[0]]][call[name[np].roll, parameter[compare[name[fm].fix equal[==] call[name[min], parameter[name[fm].fix]]], constant[1]]]]]]
variable[bins] assign[=] list[[<ast.Call object at 0x7da2054a6950>, <ast.Call object at 0x7da2054a4c10>]]
return[call[name[makeHist], parameter[name[x_arg], name[y_arg]]]] | keyword[def] identifier[firstSacDist] ( identifier[fm] ):
literal[string]
identifier[ang] , identifier[leng] , identifier[ad] , identifier[ld] = identifier[anglendiff] ( identifier[fm] , identifier[return_abs] = keyword[True] )
identifier[y_arg] = identifier[leng] [ literal[int] ][ identifier[np] . identifier[roll] ( identifier[fm] . identifier[fix] == identifier[min] ( identifier[fm] . identifier[fix] ), literal[int] )]/ identifier[fm] . identifier[pixels_per_degree]
identifier[x_arg] = identifier[reshift] ( identifier[ang] [ literal[int] ][ identifier[np] . identifier[roll] ( identifier[fm] . identifier[fix] == identifier[min] ( identifier[fm] . identifier[fix] ), literal[int] )])
identifier[bins] =[ identifier[list] ( identifier[range] ( identifier[int] ( identifier[ceil] ( identifier[np] . identifier[nanmax] ( identifier[y_arg] )))+ literal[int] )), identifier[np] . identifier[linspace] (- literal[int] , literal[int] , literal[int] )]
keyword[return] identifier[makeHist] ( identifier[x_arg] , identifier[y_arg] , identifier[fit] = keyword[None] , identifier[bins] = identifier[bins] ) | def firstSacDist(fm):
"""
Computes the distribution of angle and length
combinations that were made as first saccades
Parameters:
fm : ocupy.fixmat
The fixation data to be analysed
"""
(ang, leng, ad, ld) = anglendiff(fm, return_abs=True)
y_arg = leng[0][np.roll(fm.fix == min(fm.fix), 1)] / fm.pixels_per_degree
x_arg = reshift(ang[0][np.roll(fm.fix == min(fm.fix), 1)])
bins = [list(range(int(ceil(np.nanmax(y_arg))) + 1)), np.linspace(-180, 180, 361)]
return makeHist(x_arg, y_arg, fit=None, bins=bins) |
def send_message(self, id: str, message: str) -> Dict[str, Any]:
    """Post a message to a channel.

    For formatting options, see the documentation:
    https://discordapp.com/developers/docs/resources/channel#create-message

    Args:
        id: channel snowflake id
        message: your message (string)

    Returns:
        Dictionary object of the new message

    Raises:
        ValueError: if the websocket is not connected.
    """
    if self.connected:
        endpoint = f'channels/{id}/messages'
        return self._query(endpoint, 'POST', {'content': message})
    raise ValueError('Websocket not connected')
constant[Send a message to a channel
For formatting options, see the documentation:
https://discordapp.com/developers/docs/resources/channel#create-message
Args:
id: channel snowflake id
message: your message (string)
Returns:
Dictionary object of the new message
]
if <ast.UnaryOp object at 0x7da207f03610> begin[:]
<ast.Raise object at 0x7da207f001f0>
return[call[name[self]._query, parameter[<ast.JoinedStr object at 0x7da207f022c0>, constant[POST], dictionary[[<ast.Constant object at 0x7da207f01c00>], [<ast.Name object at 0x7da207f03d90>]]]]] | keyword[def] identifier[send_message] ( identifier[self] , identifier[id] : identifier[str] , identifier[message] : identifier[str] )-> identifier[Dict] [ identifier[str] , identifier[Any] ]:
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[connected] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[self] . identifier[_query] ( literal[string] , literal[string] ,{ literal[string] : identifier[message] }) | def send_message(self, id: str, message: str) -> Dict[str, Any]:
"""Send a message to a channel
For formatting options, see the documentation:
https://discordapp.com/developers/docs/resources/channel#create-message
Args:
id: channel snowflake id
message: your message (string)
Returns:
Dictionary object of the new message
"""
if not self.connected:
raise ValueError('Websocket not connected') # depends on [control=['if'], data=[]]
return self._query(f'channels/{id}/messages', 'POST', {'content': message}) |
def decode(self, file_name):
    """
    Parse *file_name*, creating a FileTag from it.

    The new filename convention is tried first, with a fallback to the
    old one; if the filename conforms to neither, an empty FileTag is
    returned.

    :param file_name: filename to parse
    :return: a FileTag instance
    """
    try:
        file_tag = self._filename_decoder_new.decode(file_name)
    except Exception:
        # A bare 'except:' would also swallow SystemExit and
        # KeyboardInterrupt; only decoding failures should trigger the
        # fallback chain.
        try:
            file_tag = self._filename_decoder_old.decode(file_name)
        except Exception:
            file_tag = FileTag(0, 0, '', '', '')
    return file_tag
constant[
Parses the filename, creating a FileTag from it.
It will try both the old and the new conventions, if the filename does
not conform any of them, then an empty FileTag will be returned.
:param file_name: filename to parse
:return: a FileTag instance
]
<ast.Try object at 0x7da1b19b74f0>
return[name[file_tag]] | keyword[def] identifier[decode] ( identifier[self] , identifier[file_name] ):
literal[string]
keyword[try] :
identifier[file_tag] = identifier[self] . identifier[_filename_decoder_new] . identifier[decode] ( identifier[file_name] )
keyword[except] :
keyword[try] :
identifier[file_tag] = identifier[self] . identifier[_filename_decoder_old] . identifier[decode] ( identifier[file_name] )
keyword[except] :
identifier[file_tag] = identifier[FileTag] ( literal[int] , literal[int] , literal[string] , literal[string] , literal[string] )
keyword[return] identifier[file_tag] | def decode(self, file_name):
"""
Parses the filename, creating a FileTag from it.
It will try both the old and the new conventions, if the filename does
not conform any of them, then an empty FileTag will be returned.
:param file_name: filename to parse
:return: a FileTag instance
"""
try:
file_tag = self._filename_decoder_new.decode(file_name) # depends on [control=['try'], data=[]]
except:
try:
file_tag = self._filename_decoder_old.decode(file_name) # depends on [control=['try'], data=[]]
except:
file_tag = FileTag(0, 0, '', '', '') # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
return file_tag |
def Deserialize(self, reader):
    """
    Deserialize full object.

    Reads the block header via the parent class, then the transaction
    list, and verifies the transactions against the header's merkle root.

    Args:
        reader (neo.IO.BinaryReader): stream positioned at the start of
            the serialized block.

    Raises:
        Exception: if the transaction count is < 1, or if the merkle
            root computed from the deserialized transactions does not
            match the one read in the header.
    """
    # Parent class reads the block header fields (including MerkleRoot).
    super(Block, self).Deserialize(reader)
    self.Transactions = []
    # Variable-length integer giving the number of transactions that follow.
    byt = reader.ReadVarInt()
    transaction_length = byt
    if transaction_length < 1:
        raise Exception('Invalid format')
    # Transactions are stored back-to-back; read them in order.
    for i in range(0, transaction_length):
        tx = Transaction.DeserializeFrom(reader)
        self.Transactions.append(tx)
    # Integrity check: the merkle root recomputed over the transaction
    # hashes must equal the root recorded in the header.
    if MerkleTree.ComputeRoot([tx.Hash for tx in self.Transactions]) != self.MerkleRoot:
        raise Exception("Merkle Root Mismatch")
constant[
Deserialize full object.
Args:
reader (neo.IO.BinaryReader):
]
call[call[name[super], parameter[name[Block], name[self]]].Deserialize, parameter[name[reader]]]
name[self].Transactions assign[=] list[[]]
variable[byt] assign[=] call[name[reader].ReadVarInt, parameter[]]
variable[transaction_length] assign[=] name[byt]
if compare[name[transaction_length] less[<] constant[1]] begin[:]
<ast.Raise object at 0x7da1b1dd0a30>
for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[transaction_length]]]] begin[:]
variable[tx] assign[=] call[name[Transaction].DeserializeFrom, parameter[name[reader]]]
call[name[self].Transactions.append, parameter[name[tx]]]
if compare[call[name[MerkleTree].ComputeRoot, parameter[<ast.ListComp object at 0x7da1b1dd2020>]] not_equal[!=] name[self].MerkleRoot] begin[:]
<ast.Raise object at 0x7da1b1dd2950> | keyword[def] identifier[Deserialize] ( identifier[self] , identifier[reader] ):
literal[string]
identifier[super] ( identifier[Block] , identifier[self] ). identifier[Deserialize] ( identifier[reader] )
identifier[self] . identifier[Transactions] =[]
identifier[byt] = identifier[reader] . identifier[ReadVarInt] ()
identifier[transaction_length] = identifier[byt]
keyword[if] identifier[transaction_length] < literal[int] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[transaction_length] ):
identifier[tx] = identifier[Transaction] . identifier[DeserializeFrom] ( identifier[reader] )
identifier[self] . identifier[Transactions] . identifier[append] ( identifier[tx] )
keyword[if] identifier[MerkleTree] . identifier[ComputeRoot] ([ identifier[tx] . identifier[Hash] keyword[for] identifier[tx] keyword[in] identifier[self] . identifier[Transactions] ])!= identifier[self] . identifier[MerkleRoot] :
keyword[raise] identifier[Exception] ( literal[string] ) | def Deserialize(self, reader):
"""
Deserialize full object.
Args:
reader (neo.IO.BinaryReader):
"""
super(Block, self).Deserialize(reader)
self.Transactions = []
byt = reader.ReadVarInt()
transaction_length = byt
if transaction_length < 1:
raise Exception('Invalid format') # depends on [control=['if'], data=[]]
for i in range(0, transaction_length):
tx = Transaction.DeserializeFrom(reader)
self.Transactions.append(tx) # depends on [control=['for'], data=[]]
if MerkleTree.ComputeRoot([tx.Hash for tx in self.Transactions]) != self.MerkleRoot:
raise Exception('Merkle Root Mismatch') # depends on [control=['if'], data=[]] |
def stop_volume(name, force=False):
    '''
    Stop a gluster volume
    name
        Volume name
    force
        Force stop the volume
        .. versionadded:: 2015.8.4
    CLI Example:
    .. code-block:: bash
        salt '*' glusterfs.stop_volume mycluster
    '''
    # A volume that does not exist cannot be stopped; fail loudly.
    volumes = info()
    if name not in volumes:
        log.error('Cannot stop non-existing volume %s', name)
        return False
    # Status 1 means "started"; anything else means the volume is already
    # stopped, which we treat as success (idempotent behaviour).
    if int(volumes[name]['status']) != 1:
        log.warning('Attempt to stop already stopped volume %s', name)
        return True
    command = 'volume stop {0}'.format(name)
    if force:
        command = command + ' force'
    return _gluster(command)
constant[
Stop a gluster volume
name
Volume name
force
Force stop the volume
.. versionadded:: 2015.8.4
CLI Example:
.. code-block:: bash
salt '*' glusterfs.stop_volume mycluster
]
variable[volinfo] assign[=] call[name[info], parameter[]]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[volinfo]] begin[:]
call[name[log].error, parameter[constant[Cannot stop non-existing volume %s], name[name]]]
return[constant[False]]
if compare[call[name[int], parameter[call[call[name[volinfo]][name[name]]][constant[status]]]] not_equal[!=] constant[1]] begin[:]
call[name[log].warning, parameter[constant[Attempt to stop already stopped volume %s], name[name]]]
return[constant[True]]
variable[cmd] assign[=] call[constant[volume stop {0}].format, parameter[name[name]]]
if name[force] begin[:]
<ast.AugAssign object at 0x7da1b200b370>
return[call[name[_gluster], parameter[name[cmd]]]] | keyword[def] identifier[stop_volume] ( identifier[name] , identifier[force] = keyword[False] ):
literal[string]
identifier[volinfo] = identifier[info] ()
keyword[if] identifier[name] keyword[not] keyword[in] identifier[volinfo] :
identifier[log] . identifier[error] ( literal[string] , identifier[name] )
keyword[return] keyword[False]
keyword[if] identifier[int] ( identifier[volinfo] [ identifier[name] ][ literal[string] ])!= literal[int] :
identifier[log] . identifier[warning] ( literal[string] , identifier[name] )
keyword[return] keyword[True]
identifier[cmd] = literal[string] . identifier[format] ( identifier[name] )
keyword[if] identifier[force] :
identifier[cmd] += literal[string]
keyword[return] identifier[_gluster] ( identifier[cmd] ) | def stop_volume(name, force=False):
"""
Stop a gluster volume
name
Volume name
force
Force stop the volume
.. versionadded:: 2015.8.4
CLI Example:
.. code-block:: bash
salt '*' glusterfs.stop_volume mycluster
"""
volinfo = info()
if name not in volinfo:
log.error('Cannot stop non-existing volume %s', name)
return False # depends on [control=['if'], data=['name']]
if int(volinfo[name]['status']) != 1:
log.warning('Attempt to stop already stopped volume %s', name)
return True # depends on [control=['if'], data=[]]
cmd = 'volume stop {0}'.format(name)
if force:
cmd += ' force' # depends on [control=['if'], data=[]]
return _gluster(cmd) |
def sign(self, entry, signer=None):
    """Adds and sign an entry"""
    # Entry already registered: nothing to do.
    if self.get(entry) is not None:
        return
    # Attach an RRSIG only when the entry is unsigned and we hold a key.
    needs_signature = entry.rrsig is None and self.private is not None
    if needs_signature:
        entry.rrsig = DNSSignatureS(entry.name, _TYPE_RRSIG, _CLASS_IN,
                                    entry, self.private, signer)
    self.add(entry)
    # Register the signature record alongside the entry when signing is
    # enabled.
    if self.private is not None:
        self.add(entry.rrsig)
constant[Adds and sign an entry]
if compare[call[name[self].get, parameter[name[entry]]] is_not constant[None]] begin[:]
return[None]
if <ast.BoolOp object at 0x7da18fe93070> begin[:]
name[entry].rrsig assign[=] call[name[DNSSignatureS], parameter[name[entry].name, name[_TYPE_RRSIG], name[_CLASS_IN], name[entry], name[self].private, name[signer]]]
call[name[self].add, parameter[name[entry]]]
if compare[name[self].private is_not constant[None]] begin[:]
call[name[self].add, parameter[name[entry].rrsig]] | keyword[def] identifier[sign] ( identifier[self] , identifier[entry] , identifier[signer] = keyword[None] ):
literal[string]
keyword[if] ( identifier[self] . identifier[get] ( identifier[entry] ) keyword[is] keyword[not] keyword[None] ):
keyword[return]
keyword[if] ( identifier[entry] . identifier[rrsig] keyword[is] keyword[None] ) keyword[and] ( identifier[self] . identifier[private] keyword[is] keyword[not] keyword[None] ):
identifier[entry] . identifier[rrsig] = identifier[DNSSignatureS] ( identifier[entry] . identifier[name] ,
identifier[_TYPE_RRSIG] , identifier[_CLASS_IN] , identifier[entry] , identifier[self] . identifier[private] , identifier[signer] )
identifier[self] . identifier[add] ( identifier[entry] )
keyword[if] ( identifier[self] . identifier[private] keyword[is] keyword[not] keyword[None] ):
identifier[self] . identifier[add] ( identifier[entry] . identifier[rrsig] ) | def sign(self, entry, signer=None):
"""Adds and sign an entry"""
if self.get(entry) is not None:
return # depends on [control=['if'], data=[]]
if entry.rrsig is None and self.private is not None:
entry.rrsig = DNSSignatureS(entry.name, _TYPE_RRSIG, _CLASS_IN, entry, self.private, signer) # depends on [control=['if'], data=[]]
self.add(entry)
if self.private is not None:
self.add(entry.rrsig) # depends on [control=['if'], data=[]] |
def insert_table(self, label=None, name=None, **kwargs):
    """
    Insert a table in the Survey object
    """
    # Accept both 'data_frame' and the legacy 'dataframe' keyword.
    data_frame = kwargs.pop('data_frame', None)
    if data_frame is None:
        data_frame = kwargs.pop('dataframe', None)
    to_hdf_kwargs = kwargs.pop('to_hdf_kwargs', dict())
    if data_frame is not None:
        assert isinstance(data_frame, pandas.DataFrame)
        # Fall back to the table name when no explicit label was given.
        table_label = label if label is not None else name
        table = Table(label=table_label, name=name, survey=self)
        assert table.survey.hdf5_file_path is not None
        log.debug("Saving table {} in {}".format(name, table.survey.hdf5_file_path))
        table.save_data_frame(data_frame, **to_hdf_kwargs)
    if name not in self.tables:
        self.tables[name] = dict()
    # Any remaining keyword arguments become table metadata.
    for meta_key, meta_val in kwargs.items():
        self.tables[name][meta_key] = meta_val
constant[
Insert a table in the Survey object
]
variable[data_frame] assign[=] call[name[kwargs].pop, parameter[constant[data_frame], constant[None]]]
if compare[name[data_frame] is constant[None]] begin[:]
variable[data_frame] assign[=] call[name[kwargs].pop, parameter[constant[dataframe], constant[None]]]
variable[to_hdf_kwargs] assign[=] call[name[kwargs].pop, parameter[constant[to_hdf_kwargs], call[name[dict], parameter[]]]]
if compare[name[data_frame] is_not constant[None]] begin[:]
assert[call[name[isinstance], parameter[name[data_frame], name[pandas].DataFrame]]]
if compare[name[data_frame] is_not constant[None]] begin[:]
if compare[name[label] is constant[None]] begin[:]
variable[label] assign[=] name[name]
variable[table] assign[=] call[name[Table], parameter[]]
assert[compare[name[table].survey.hdf5_file_path is_not constant[None]]]
call[name[log].debug, parameter[call[constant[Saving table {} in {}].format, parameter[name[name], name[table].survey.hdf5_file_path]]]]
call[name[table].save_data_frame, parameter[name[data_frame]]]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[self].tables] begin[:]
call[name[self].tables][name[name]] assign[=] call[name[dict], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18ede65f0>, <ast.Name object at 0x7da18ede56f0>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:]
call[call[name[self].tables][name[name]]][name[key]] assign[=] name[val] | keyword[def] identifier[insert_table] ( identifier[self] , identifier[label] = keyword[None] , identifier[name] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[data_frame] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[data_frame] keyword[is] keyword[None] :
identifier[data_frame] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
identifier[to_hdf_kwargs] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[dict] ())
keyword[if] identifier[data_frame] keyword[is] keyword[not] keyword[None] :
keyword[assert] identifier[isinstance] ( identifier[data_frame] , identifier[pandas] . identifier[DataFrame] )
keyword[if] identifier[data_frame] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[label] keyword[is] keyword[None] :
identifier[label] = identifier[name]
identifier[table] = identifier[Table] ( identifier[label] = identifier[label] , identifier[name] = identifier[name] , identifier[survey] = identifier[self] )
keyword[assert] identifier[table] . identifier[survey] . identifier[hdf5_file_path] keyword[is] keyword[not] keyword[None]
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[name] , identifier[table] . identifier[survey] . identifier[hdf5_file_path] ))
identifier[table] . identifier[save_data_frame] ( identifier[data_frame] ,** identifier[to_hdf_kwargs] )
keyword[if] identifier[name] keyword[not] keyword[in] identifier[self] . identifier[tables] :
identifier[self] . identifier[tables] [ identifier[name] ]= identifier[dict] ()
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[kwargs] . identifier[items] ():
identifier[self] . identifier[tables] [ identifier[name] ][ identifier[key] ]= identifier[val] | def insert_table(self, label=None, name=None, **kwargs):
"""
Insert a table in the Survey object
"""
data_frame = kwargs.pop('data_frame', None)
if data_frame is None:
data_frame = kwargs.pop('dataframe', None) # depends on [control=['if'], data=['data_frame']]
to_hdf_kwargs = kwargs.pop('to_hdf_kwargs', dict())
if data_frame is not None:
assert isinstance(data_frame, pandas.DataFrame) # depends on [control=['if'], data=['data_frame']]
if data_frame is not None:
if label is None:
label = name # depends on [control=['if'], data=['label']]
table = Table(label=label, name=name, survey=self)
assert table.survey.hdf5_file_path is not None
log.debug('Saving table {} in {}'.format(name, table.survey.hdf5_file_path))
table.save_data_frame(data_frame, **to_hdf_kwargs) # depends on [control=['if'], data=['data_frame']]
if name not in self.tables:
self.tables[name] = dict() # depends on [control=['if'], data=['name']]
for (key, val) in kwargs.items():
self.tables[name][key] = val # depends on [control=['for'], data=[]] |
def configure_logging(conf):
    """Initialize and configure logging."""
    root = logging.getLogger()
    root.setLevel(getattr(logging, conf.loglevel.upper()))
    # Attach a handler for each destination the configuration enables.
    for enabled, stream in ((conf.logtostderr, sys.stderr),
                            (conf.logtostdout, sys.stdout)):
        if enabled:
            add_stream_handler(root, stream)
constant[Initialize and configure logging.]
variable[root_logger] assign[=] call[name[logging].getLogger, parameter[]]
call[name[root_logger].setLevel, parameter[call[name[getattr], parameter[name[logging], call[name[conf].loglevel.upper, parameter[]]]]]]
if name[conf].logtostderr begin[:]
call[name[add_stream_handler], parameter[name[root_logger], name[sys].stderr]]
if name[conf].logtostdout begin[:]
call[name[add_stream_handler], parameter[name[root_logger], name[sys].stdout]] | keyword[def] identifier[configure_logging] ( identifier[conf] ):
literal[string]
identifier[root_logger] = identifier[logging] . identifier[getLogger] ()
identifier[root_logger] . identifier[setLevel] ( identifier[getattr] ( identifier[logging] , identifier[conf] . identifier[loglevel] . identifier[upper] ()))
keyword[if] identifier[conf] . identifier[logtostderr] :
identifier[add_stream_handler] ( identifier[root_logger] , identifier[sys] . identifier[stderr] )
keyword[if] identifier[conf] . identifier[logtostdout] :
identifier[add_stream_handler] ( identifier[root_logger] , identifier[sys] . identifier[stdout] ) | def configure_logging(conf):
"""Initialize and configure logging."""
root_logger = logging.getLogger()
root_logger.setLevel(getattr(logging, conf.loglevel.upper()))
if conf.logtostderr:
add_stream_handler(root_logger, sys.stderr) # depends on [control=['if'], data=[]]
if conf.logtostdout:
add_stream_handler(root_logger, sys.stdout) # depends on [control=['if'], data=[]] |
def validate_mutations(self, mutations):
    '''This function has been refactored to use the SimpleMutation class.
    The parameter is a list of Mutation objects. The function has no return value but raises a PDBValidationException
    if the wildtype in the Mutation m does not match the residue type corresponding to residue m.ResidueID in the PDB file.
    '''
    # Chain, ResidueID, WildTypeAA, MutantAA
    resID2AA = self.get_residue_id_to_type_map()
    # Collect every mutation whose declared wildtype disagrees with the
    # residue type actually found at that chain/residue in the PDB.
    mismatched = [
        m for m in mutations
        if m.WildTypeAA != resID2AA.get(
            PDB.ChainResidueID2String(m.Chain, m.ResidueID), "")
    ]
    if mismatched:
        raise PDBValidationException(
            "The mutation(s) %s could not be matched against the PDB %s." % (
                ", ".join(map(str, mismatched)), self.pdb_id))
constant[This function has been refactored to use the SimpleMutation class.
The parameter is a list of Mutation objects. The function has no return value but raises a PDBValidationException
if the wildtype in the Mutation m does not match the residue type corresponding to residue m.ResidueID in the PDB file.
]
variable[resID2AA] assign[=] call[name[self].get_residue_id_to_type_map, parameter[]]
variable[badmutations] assign[=] list[[]]
for taget[name[m]] in starred[name[mutations]] begin[:]
variable[wildtype] assign[=] call[name[resID2AA].get, parameter[call[name[PDB].ChainResidueID2String, parameter[name[m].Chain, name[m].ResidueID]], constant[]]]
if compare[name[m].WildTypeAA not_equal[!=] name[wildtype]] begin[:]
call[name[badmutations].append, parameter[name[m]]]
if name[badmutations] begin[:]
<ast.Raise object at 0x7da1b242db10> | keyword[def] identifier[validate_mutations] ( identifier[self] , identifier[mutations] ):
literal[string]
identifier[resID2AA] = identifier[self] . identifier[get_residue_id_to_type_map] ()
identifier[badmutations] =[]
keyword[for] identifier[m] keyword[in] identifier[mutations] :
identifier[wildtype] = identifier[resID2AA] . identifier[get] ( identifier[PDB] . identifier[ChainResidueID2String] ( identifier[m] . identifier[Chain] , identifier[m] . identifier[ResidueID] ), literal[string] )
keyword[if] identifier[m] . identifier[WildTypeAA] != identifier[wildtype] :
identifier[badmutations] . identifier[append] ( identifier[m] )
keyword[if] identifier[badmutations] :
keyword[raise] identifier[PDBValidationException] ( literal[string] %( literal[string] . identifier[join] ( identifier[map] ( identifier[str] , identifier[badmutations] )), identifier[self] . identifier[pdb_id] )) | def validate_mutations(self, mutations):
"""This function has been refactored to use the SimpleMutation class.
The parameter is a list of Mutation objects. The function has no return value but raises a PDBValidationException
if the wildtype in the Mutation m does not match the residue type corresponding to residue m.ResidueID in the PDB file.
"""
# Chain, ResidueID, WildTypeAA, MutantAA
resID2AA = self.get_residue_id_to_type_map()
badmutations = []
for m in mutations:
wildtype = resID2AA.get(PDB.ChainResidueID2String(m.Chain, m.ResidueID), '')
if m.WildTypeAA != wildtype:
badmutations.append(m) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['m']]
if badmutations:
raise PDBValidationException('The mutation(s) %s could not be matched against the PDB %s.' % (', '.join(map(str, badmutations)), self.pdb_id)) # depends on [control=['if'], data=[]] |
def get_function_cfg(func, show_inst=True):
    """Return a string of the control-flow graph of the function in DOT
    format. If the input `func` is not a materialized function, the module
    containing the function is parsed to create an actual LLVM module.
    The `show_inst` flag controls whether the instructions of each block
    are printed.
    """
    assert func is not None
    if isinstance(func, ir.Function):
        # Round-trip the IR module through the assembler to obtain a
        # materialized function the C API can operate on.
        materialized_mod = parse_assembly(str(func.module))
        func = materialized_mod.get_function(func.name)
    # Assume func is a materialized function
    with ffi.OutputString() as dotstr:
        ffi.lib.LLVMPY_WriteCFG(func, dotstr, show_inst)
        # Convert inside the context manager, before the buffer is released.
        return str(dotstr)
constant[Return a string of the control-flow graph of the function in DOT
format. If the input `func` is not a materialized function, the module
containing the function is parsed to create an actual LLVM module.
The `show_inst` flag controls whether the instructions of each block
are printed.
]
assert[compare[name[func] is_not constant[None]]]
if call[name[isinstance], parameter[name[func], name[ir].Function]] begin[:]
variable[mod] assign[=] call[name[parse_assembly], parameter[call[name[str], parameter[name[func].module]]]]
variable[func] assign[=] call[name[mod].get_function, parameter[name[func].name]]
with call[name[ffi].OutputString, parameter[]] begin[:]
call[name[ffi].lib.LLVMPY_WriteCFG, parameter[name[func], name[dotstr], name[show_inst]]]
return[call[name[str], parameter[name[dotstr]]]] | keyword[def] identifier[get_function_cfg] ( identifier[func] , identifier[show_inst] = keyword[True] ):
literal[string]
keyword[assert] identifier[func] keyword[is] keyword[not] keyword[None]
keyword[if] identifier[isinstance] ( identifier[func] , identifier[ir] . identifier[Function] ):
identifier[mod] = identifier[parse_assembly] ( identifier[str] ( identifier[func] . identifier[module] ))
identifier[func] = identifier[mod] . identifier[get_function] ( identifier[func] . identifier[name] )
keyword[with] identifier[ffi] . identifier[OutputString] () keyword[as] identifier[dotstr] :
identifier[ffi] . identifier[lib] . identifier[LLVMPY_WriteCFG] ( identifier[func] , identifier[dotstr] , identifier[show_inst] )
keyword[return] identifier[str] ( identifier[dotstr] ) | def get_function_cfg(func, show_inst=True):
"""Return a string of the control-flow graph of the function in DOT
format. If the input `func` is not a materialized function, the module
containing the function is parsed to create an actual LLVM module.
The `show_inst` flag controls whether the instructions of each block
are printed.
"""
assert func is not None
if isinstance(func, ir.Function):
mod = parse_assembly(str(func.module))
func = mod.get_function(func.name) # depends on [control=['if'], data=[]]
# Assume func is a materialized function
with ffi.OutputString() as dotstr:
ffi.lib.LLVMPY_WriteCFG(func, dotstr, show_inst)
return str(dotstr) # depends on [control=['with'], data=['dotstr']] |
def source_range_slices(start, end, nr_var_dict):
    """
    Given a range of source numbers, as well as a dictionary
    containing the numbers of each source, returns a dictionary
    containing slices for each source variable type.
    """
    ranges = source_range_tuple(start, end, nr_var_dict)
    # Turn each (start, end) pair into a unit-step slice, preserving order.
    return OrderedDict(
        (var_name, slice(lo, hi, 1))
        for var_name, (lo, hi) in ranges.iteritems())
constant[
Given a range of source numbers, as well as a dictionary
containing the numbers of each source, returns a dictionary
containing slices for each source variable type.
]
return[call[name[OrderedDict], parameter[<ast.GeneratorExp object at 0x7da1b0fefc10>]]] | keyword[def] identifier[source_range_slices] ( identifier[start] , identifier[end] , identifier[nr_var_dict] ):
literal[string]
keyword[return] identifier[OrderedDict] (( identifier[k] , identifier[slice] ( identifier[s] , identifier[e] , literal[int] ))
keyword[for] identifier[k] ,( identifier[s] , identifier[e] )
keyword[in] identifier[source_range_tuple] ( identifier[start] , identifier[end] , identifier[nr_var_dict] ). identifier[iteritems] ()) | def source_range_slices(start, end, nr_var_dict):
"""
Given a range of source numbers, as well as a dictionary
containing the numbers of each source, returns a dictionary
containing slices for each source variable type.
"""
return OrderedDict(((k, slice(s, e, 1)) for (k, (s, e)) in source_range_tuple(start, end, nr_var_dict).iteritems())) |
def enable_result_transforms(func):
    """Decorator that tries to use the object provided using a kwarg called
    'electrode_transformator' to transform the return values of an import
    function. It is intended to be used to transform electrode numbers and
    locations, i.e. for use in roll-along-measurement schemes.
    The transformator object must have a function .transform, which takes three
    parameters: data, electrode, topography and returns three correspondingly
    transformed objects.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # Remove the transformator before delegating so the wrapped import
        # function never sees the extra keyword.
        transformator = kwargs.pop('electrode_transformator', None)
        result = func(*args, **kwargs)
        if transformator is None:
            return result
        data, electrodes, topography = result
        return transformator.transform(data, electrodes, topography)
    return wrapper
constant[Decorator that tries to use the object provided using a kwarg called
'electrode_transformator' to transform the return values of an import
function. It is intended to be used to transform electrode numbers and
locations, i.e. for use in roll-along-measurement schemes.
The transformator object must have a function .transform, which takes three
parameters: data, electrode, topography and returns three correspondingly
transformed objects.
]
def function[wrapper, parameter[]]:
variable[func_transformator] assign[=] call[name[kwargs].pop, parameter[constant[electrode_transformator], constant[None]]]
<ast.Tuple object at 0x7da204963f70> assign[=] call[name[func], parameter[<ast.Starred object at 0x7da204961f30>]]
if compare[name[func_transformator] is_not constant[None]] begin[:]
<ast.Tuple object at 0x7da20eb2a140> assign[=] call[name[func_transformator].transform, parameter[name[data], name[electrodes], name[topography]]]
return[tuple[[<ast.Name object at 0x7da18f8115a0>, <ast.Name object at 0x7da18f8103a0>, <ast.Name object at 0x7da18f811900>]]]
return[name[wrapper]] | keyword[def] identifier[enable_result_transforms] ( identifier[func] ):
literal[string]
@ identifier[functools] . identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
identifier[func_transformator] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
identifier[data] , identifier[electrodes] , identifier[topography] = identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[if] identifier[func_transformator] keyword[is] keyword[not] keyword[None] :
identifier[data_transformed] , identifier[electrodes_transformed] , identifier[topography_transformed] = identifier[func_transformator] . identifier[transform] (
identifier[data] , identifier[electrodes] , identifier[topography]
)
keyword[return] identifier[data_transformed] , identifier[electrodes_transformed] , identifier[topography_transformed]
keyword[else] :
keyword[return] identifier[data] , identifier[electrodes] , identifier[topography]
keyword[return] identifier[wrapper] | def enable_result_transforms(func):
"""Decorator that tries to use the object provided using a kwarg called
'electrode_transformator' to transform the return values of an import
function. It is intended to be used to transform electrode numbers and
locations, i.e. for use in roll-along-measurement schemes.
The transformator object must have a function .transform, which takes three
parameters: data, electrode, topography and returns three correspondingly
transformed objects.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
func_transformator = kwargs.pop('electrode_transformator', None)
(data, electrodes, topography) = func(*args, **kwargs)
if func_transformator is not None:
(data_transformed, electrodes_transformed, topography_transformed) = func_transformator.transform(data, electrodes, topography)
return (data_transformed, electrodes_transformed, topography_transformed) # depends on [control=['if'], data=['func_transformator']]
else:
return (data, electrodes, topography)
return wrapper |
def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'entities') and self.entities is not None:
_dict['entities'] = [x._to_dict() for x in self.entities]
if hasattr(self, 'pagination') and self.pagination is not None:
_dict['pagination'] = self.pagination._to_dict()
return _dict | def function[_to_dict, parameter[self]]:
constant[Return a json dictionary representing this model.]
variable[_dict] assign[=] dictionary[[], []]
if <ast.BoolOp object at 0x7da204962b00> begin[:]
call[name[_dict]][constant[entities]] assign[=] <ast.ListComp object at 0x7da204962ef0>
if <ast.BoolOp object at 0x7da204961120> begin[:]
call[name[_dict]][constant[pagination]] assign[=] call[name[self].pagination._to_dict, parameter[]]
return[name[_dict]] | keyword[def] identifier[_to_dict] ( identifier[self] ):
literal[string]
identifier[_dict] ={}
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[entities] keyword[is] keyword[not] keyword[None] :
identifier[_dict] [ literal[string] ]=[ identifier[x] . identifier[_to_dict] () keyword[for] identifier[x] keyword[in] identifier[self] . identifier[entities] ]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[pagination] keyword[is] keyword[not] keyword[None] :
identifier[_dict] [ literal[string] ]= identifier[self] . identifier[pagination] . identifier[_to_dict] ()
keyword[return] identifier[_dict] | def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'entities') and self.entities is not None:
_dict['entities'] = [x._to_dict() for x in self.entities] # depends on [control=['if'], data=[]]
if hasattr(self, 'pagination') and self.pagination is not None:
_dict['pagination'] = self.pagination._to_dict() # depends on [control=['if'], data=[]]
return _dict |
def get_runs():
    """Get all runs, sort it and return a response."""
    data = current_app.config["data"]
    draw = parse_int_arg("draw", 1)
    start = parse_int_arg("start", 0)
    length = parse_int_arg("length", -1)
    # A negative length means "no limit" for the DAO layer.
    if length < 0:
        length = None
    order_column = request.args.get("order[0][column]")
    order_dir = request.args.get("order[0][dir]")
    query = parse_query_filter()
    if order_column is not None:
        # DataTables sends a column index; resolve it to the column name.
        order_column = request.args.get(
            "columns[%d][name]" % int(order_column))
        if order_column == "hostname":
            order_column = "host.hostname"
    runs = data.get_run_dao().get_runs(
        start=start, limit=length,
        sort_by=order_column, sort_direction=order_dir, query=query)
    # records_total should be the total size of the records in the database,
    # not what was returned
    records_total = runs.count()
    records_filtered = runs.count()
    return Response(
        render_template(
            "api/runs.js", runs=runs, draw=draw,
            recordsTotal=records_total, recordsFiltered=records_filtered),
        mimetype="application/json")
constant[Get all runs, sort it and return a response.]
variable[data] assign[=] call[name[current_app].config][constant[data]]
variable[draw] assign[=] call[name[parse_int_arg], parameter[constant[draw], constant[1]]]
variable[start] assign[=] call[name[parse_int_arg], parameter[constant[start], constant[0]]]
variable[length] assign[=] call[name[parse_int_arg], parameter[constant[length], <ast.UnaryOp object at 0x7da204567700>]]
variable[length] assign[=] <ast.IfExp object at 0x7da204567e50>
variable[order_column] assign[=] call[name[request].args.get, parameter[constant[order[0][column]]]]
variable[order_dir] assign[=] call[name[request].args.get, parameter[constant[order[0][dir]]]]
variable[query] assign[=] call[name[parse_query_filter], parameter[]]
if compare[name[order_column] is_not constant[None]] begin[:]
variable[order_column] assign[=] call[name[request].args.get, parameter[binary_operation[constant[columns[%d][name]] <ast.Mod object at 0x7da2590d6920> call[name[int], parameter[name[order_column]]]]]]
if compare[name[order_column] equal[==] constant[hostname]] begin[:]
variable[order_column] assign[=] constant[host.hostname]
variable[runs] assign[=] call[call[name[data].get_run_dao, parameter[]].get_runs, parameter[]]
variable[records_total] assign[=] call[name[runs].count, parameter[]]
variable[records_filtered] assign[=] call[name[runs].count, parameter[]]
return[call[name[Response], parameter[call[name[render_template], parameter[constant[api/runs.js]]]]]] | keyword[def] identifier[get_runs] ():
literal[string]
identifier[data] = identifier[current_app] . identifier[config] [ literal[string] ]
identifier[draw] = identifier[parse_int_arg] ( literal[string] , literal[int] )
identifier[start] = identifier[parse_int_arg] ( literal[string] , literal[int] )
identifier[length] = identifier[parse_int_arg] ( literal[string] ,- literal[int] )
identifier[length] = identifier[length] keyword[if] identifier[length] >= literal[int] keyword[else] keyword[None]
identifier[order_column] = identifier[request] . identifier[args] . identifier[get] ( literal[string] )
identifier[order_dir] = identifier[request] . identifier[args] . identifier[get] ( literal[string] )
identifier[query] = identifier[parse_query_filter] ()
keyword[if] identifier[order_column] keyword[is] keyword[not] keyword[None] :
identifier[order_column] = identifier[request] . identifier[args] . identifier[get] ( literal[string] % identifier[int] ( identifier[order_column] ))
keyword[if] identifier[order_column] == literal[string] :
identifier[order_column] = literal[string]
identifier[runs] = identifier[data] . identifier[get_run_dao] (). identifier[get_runs] (
identifier[start] = identifier[start] , identifier[limit] = identifier[length] ,
identifier[sort_by] = identifier[order_column] , identifier[sort_direction] = identifier[order_dir] , identifier[query] = identifier[query] )
identifier[records_total] = identifier[runs] . identifier[count] ()
identifier[records_filtered] = identifier[runs] . identifier[count] ()
keyword[return] identifier[Response] ( identifier[render_template] (
literal[string] , identifier[runs] = identifier[runs] ,
identifier[draw] = identifier[draw] , identifier[recordsTotal] = identifier[records_total] ,
identifier[recordsFiltered] = identifier[records_filtered] ),
identifier[mimetype] = literal[string] ) | def get_runs():
"""Get all runs, sort it and return a response."""
data = current_app.config['data']
draw = parse_int_arg('draw', 1)
start = parse_int_arg('start', 0)
length = parse_int_arg('length', -1)
length = length if length >= 0 else None
order_column = request.args.get('order[0][column]')
order_dir = request.args.get('order[0][dir]')
query = parse_query_filter()
if order_column is not None:
order_column = request.args.get('columns[%d][name]' % int(order_column))
if order_column == 'hostname':
order_column = 'host.hostname' # depends on [control=['if'], data=['order_column']] # depends on [control=['if'], data=['order_column']]
runs = data.get_run_dao().get_runs(start=start, limit=length, sort_by=order_column, sort_direction=order_dir, query=query)
# records_total should be the total size of the records in the database,
# not what was returned
records_total = runs.count()
records_filtered = runs.count()
return Response(render_template('api/runs.js', runs=runs, draw=draw, recordsTotal=records_total, recordsFiltered=records_filtered), mimetype='application/json') |
def get_opt_attr(obj_pyxb, attr_str, default_val=None):
"""Get an optional attribute value from a PyXB element.
The attributes for elements that are optional according to the schema and
not set in the PyXB object are present and set to None.
PyXB validation will fail if required elements are missing.
Args:
obj_pyxb: PyXB object
attr_str: str
Name of an attribute that the PyXB object may contain.
default_val: any object
Value to return if the attribute is not present.
Returns:
str : Value of the attribute if present, else ``default_val``.
"""
v = getattr(obj_pyxb, attr_str, default_val)
return v if v is not None else default_val | def function[get_opt_attr, parameter[obj_pyxb, attr_str, default_val]]:
constant[Get an optional attribute value from a PyXB element.
The attributes for elements that are optional according to the schema and
not set in the PyXB object are present and set to None.
PyXB validation will fail if required elements are missing.
Args:
obj_pyxb: PyXB object
attr_str: str
Name of an attribute that the PyXB object may contain.
default_val: any object
Value to return if the attribute is not present.
Returns:
str : Value of the attribute if present, else ``default_val``.
]
variable[v] assign[=] call[name[getattr], parameter[name[obj_pyxb], name[attr_str], name[default_val]]]
return[<ast.IfExp object at 0x7da1b190cdc0>] | keyword[def] identifier[get_opt_attr] ( identifier[obj_pyxb] , identifier[attr_str] , identifier[default_val] = keyword[None] ):
literal[string]
identifier[v] = identifier[getattr] ( identifier[obj_pyxb] , identifier[attr_str] , identifier[default_val] )
keyword[return] identifier[v] keyword[if] identifier[v] keyword[is] keyword[not] keyword[None] keyword[else] identifier[default_val] | def get_opt_attr(obj_pyxb, attr_str, default_val=None):
"""Get an optional attribute value from a PyXB element.
The attributes for elements that are optional according to the schema and
not set in the PyXB object are present and set to None.
PyXB validation will fail if required elements are missing.
Args:
obj_pyxb: PyXB object
attr_str: str
Name of an attribute that the PyXB object may contain.
default_val: any object
Value to return if the attribute is not present.
Returns:
str : Value of the attribute if present, else ``default_val``.
"""
v = getattr(obj_pyxb, attr_str, default_val)
return v if v is not None else default_val |
def where(self, field_path, op_string, value):
"""Create a "where" query with this collection as parent.
See
:meth:`~.firestore_v1beta1.query.Query.where` for
more information on this method.
Args:
field_path (str): A field path (``.``-delimited list of
field names) for the field to filter on.
op_string (str): A comparison operation in the form of a string.
Acceptable values are ``<``, ``<=``, ``==``, ``>=``
and ``>``.
value (Any): The value to compare the field against in the filter.
If ``value`` is :data:`None` or a NaN, then ``==`` is the only
allowed operation.
Returns:
~.firestore_v1beta1.query.Query: A filtered query.
"""
query = query_mod.Query(self)
return query.where(field_path, op_string, value) | def function[where, parameter[self, field_path, op_string, value]]:
constant[Create a "where" query with this collection as parent.
See
:meth:`~.firestore_v1beta1.query.Query.where` for
more information on this method.
Args:
field_path (str): A field path (``.``-delimited list of
field names) for the field to filter on.
op_string (str): A comparison operation in the form of a string.
Acceptable values are ``<``, ``<=``, ``==``, ``>=``
and ``>``.
value (Any): The value to compare the field against in the filter.
If ``value`` is :data:`None` or a NaN, then ``==`` is the only
allowed operation.
Returns:
~.firestore_v1beta1.query.Query: A filtered query.
]
variable[query] assign[=] call[name[query_mod].Query, parameter[name[self]]]
return[call[name[query].where, parameter[name[field_path], name[op_string], name[value]]]] | keyword[def] identifier[where] ( identifier[self] , identifier[field_path] , identifier[op_string] , identifier[value] ):
literal[string]
identifier[query] = identifier[query_mod] . identifier[Query] ( identifier[self] )
keyword[return] identifier[query] . identifier[where] ( identifier[field_path] , identifier[op_string] , identifier[value] ) | def where(self, field_path, op_string, value):
"""Create a "where" query with this collection as parent.
See
:meth:`~.firestore_v1beta1.query.Query.where` for
more information on this method.
Args:
field_path (str): A field path (``.``-delimited list of
field names) for the field to filter on.
op_string (str): A comparison operation in the form of a string.
Acceptable values are ``<``, ``<=``, ``==``, ``>=``
and ``>``.
value (Any): The value to compare the field against in the filter.
If ``value`` is :data:`None` or a NaN, then ``==`` is the only
allowed operation.
Returns:
~.firestore_v1beta1.query.Query: A filtered query.
"""
query = query_mod.Query(self)
return query.where(field_path, op_string, value) |
def main():
"""Main function for pyssim."""
description = '\n'.join([
'Compares an image with a list of images using the SSIM metric.',
' Example:',
' pyssim test-images/test1-1.png "test-images/*"'
])
parser = argparse.ArgumentParser(
prog='pyssim', formatter_class=argparse.RawTextHelpFormatter,
description=description)
parser.add_argument('--cw', help='compute the complex wavelet SSIM',
action='store_true')
parser.add_argument(
'base_image', metavar='image1.png', type=argparse.FileType('r'))
parser.add_argument(
'comparison_images', metavar='image path with* or image2.png')
parser.add_argument('--width', type=int, default=None,
help='scales the image before computing SSIM')
parser.add_argument('--height', type=int, default=None,
help='scales the image before computing SSIM')
args = parser.parse_args()
if args.width and args.height:
size = (args.width, args.height)
else:
size = None
if not args.cw:
gaussian_kernel_sigma = 1.5
gaussian_kernel_width = 11
gaussian_kernel_1d = get_gaussian_kernel(
gaussian_kernel_width, gaussian_kernel_sigma)
comparison_images = glob.glob(args.comparison_images)
is_a_single_image = len(comparison_images) == 1
for comparison_image in comparison_images:
if args.cw:
ssim = SSIM(args.base_image.name, size=size)
ssim_value = ssim.cw_ssim_value(comparison_image)
else:
ssim = SSIM(args.base_image.name, gaussian_kernel_1d, size=size)
ssim_value = ssim.ssim_value(comparison_image)
if is_a_single_image:
sys.stdout.write('%.7g' % ssim_value)
else:
sys.stdout.write('%s - %s: %.7g' % (
args.base_image.name, comparison_image, ssim_value))
sys.stdout.write('\n') | def function[main, parameter[]]:
constant[Main function for pyssim.]
variable[description] assign[=] call[constant[
].join, parameter[list[[<ast.Constant object at 0x7da20c794a00>, <ast.Constant object at 0x7da20c794730>, <ast.Constant object at 0x7da20c7963e0>]]]]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].add_argument, parameter[constant[--cw]]]
call[name[parser].add_argument, parameter[constant[base_image]]]
call[name[parser].add_argument, parameter[constant[comparison_images]]]
call[name[parser].add_argument, parameter[constant[--width]]]
call[name[parser].add_argument, parameter[constant[--height]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[]]
if <ast.BoolOp object at 0x7da20c7959c0> begin[:]
variable[size] assign[=] tuple[[<ast.Attribute object at 0x7da20c795ea0>, <ast.Attribute object at 0x7da20c7952d0>]]
if <ast.UnaryOp object at 0x7da20c795c90> begin[:]
variable[gaussian_kernel_sigma] assign[=] constant[1.5]
variable[gaussian_kernel_width] assign[=] constant[11]
variable[gaussian_kernel_1d] assign[=] call[name[get_gaussian_kernel], parameter[name[gaussian_kernel_width], name[gaussian_kernel_sigma]]]
variable[comparison_images] assign[=] call[name[glob].glob, parameter[name[args].comparison_images]]
variable[is_a_single_image] assign[=] compare[call[name[len], parameter[name[comparison_images]]] equal[==] constant[1]]
for taget[name[comparison_image]] in starred[name[comparison_images]] begin[:]
if name[args].cw begin[:]
variable[ssim] assign[=] call[name[SSIM], parameter[name[args].base_image.name]]
variable[ssim_value] assign[=] call[name[ssim].cw_ssim_value, parameter[name[comparison_image]]]
if name[is_a_single_image] begin[:]
call[name[sys].stdout.write, parameter[binary_operation[constant[%.7g] <ast.Mod object at 0x7da2590d6920> name[ssim_value]]]]
call[name[sys].stdout.write, parameter[constant[
]]] | keyword[def] identifier[main] ():
literal[string]
identifier[description] = literal[string] . identifier[join] ([
literal[string] ,
literal[string] ,
literal[string]
])
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] (
identifier[prog] = literal[string] , identifier[formatter_class] = identifier[argparse] . identifier[RawTextHelpFormatter] ,
identifier[description] = identifier[description] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] ,
identifier[action] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , identifier[metavar] = literal[string] , identifier[type] = identifier[argparse] . identifier[FileType] ( literal[string] ))
identifier[parser] . identifier[add_argument] (
literal[string] , identifier[metavar] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[int] , identifier[default] = keyword[None] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[int] , identifier[default] = keyword[None] ,
identifier[help] = literal[string] )
identifier[args] = identifier[parser] . identifier[parse_args] ()
keyword[if] identifier[args] . identifier[width] keyword[and] identifier[args] . identifier[height] :
identifier[size] =( identifier[args] . identifier[width] , identifier[args] . identifier[height] )
keyword[else] :
identifier[size] = keyword[None]
keyword[if] keyword[not] identifier[args] . identifier[cw] :
identifier[gaussian_kernel_sigma] = literal[int]
identifier[gaussian_kernel_width] = literal[int]
identifier[gaussian_kernel_1d] = identifier[get_gaussian_kernel] (
identifier[gaussian_kernel_width] , identifier[gaussian_kernel_sigma] )
identifier[comparison_images] = identifier[glob] . identifier[glob] ( identifier[args] . identifier[comparison_images] )
identifier[is_a_single_image] = identifier[len] ( identifier[comparison_images] )== literal[int]
keyword[for] identifier[comparison_image] keyword[in] identifier[comparison_images] :
keyword[if] identifier[args] . identifier[cw] :
identifier[ssim] = identifier[SSIM] ( identifier[args] . identifier[base_image] . identifier[name] , identifier[size] = identifier[size] )
identifier[ssim_value] = identifier[ssim] . identifier[cw_ssim_value] ( identifier[comparison_image] )
keyword[else] :
identifier[ssim] = identifier[SSIM] ( identifier[args] . identifier[base_image] . identifier[name] , identifier[gaussian_kernel_1d] , identifier[size] = identifier[size] )
identifier[ssim_value] = identifier[ssim] . identifier[ssim_value] ( identifier[comparison_image] )
keyword[if] identifier[is_a_single_image] :
identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] % identifier[ssim_value] )
keyword[else] :
identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] %(
identifier[args] . identifier[base_image] . identifier[name] , identifier[comparison_image] , identifier[ssim_value] ))
identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] ) | def main():
"""Main function for pyssim."""
description = '\n'.join(['Compares an image with a list of images using the SSIM metric.', ' Example:', ' pyssim test-images/test1-1.png "test-images/*"'])
parser = argparse.ArgumentParser(prog='pyssim', formatter_class=argparse.RawTextHelpFormatter, description=description)
parser.add_argument('--cw', help='compute the complex wavelet SSIM', action='store_true')
parser.add_argument('base_image', metavar='image1.png', type=argparse.FileType('r'))
parser.add_argument('comparison_images', metavar='image path with* or image2.png')
parser.add_argument('--width', type=int, default=None, help='scales the image before computing SSIM')
parser.add_argument('--height', type=int, default=None, help='scales the image before computing SSIM')
args = parser.parse_args()
if args.width and args.height:
size = (args.width, args.height) # depends on [control=['if'], data=[]]
else:
size = None
if not args.cw:
gaussian_kernel_sigma = 1.5
gaussian_kernel_width = 11
gaussian_kernel_1d = get_gaussian_kernel(gaussian_kernel_width, gaussian_kernel_sigma) # depends on [control=['if'], data=[]]
comparison_images = glob.glob(args.comparison_images)
is_a_single_image = len(comparison_images) == 1
for comparison_image in comparison_images:
if args.cw:
ssim = SSIM(args.base_image.name, size=size)
ssim_value = ssim.cw_ssim_value(comparison_image) # depends on [control=['if'], data=[]]
else:
ssim = SSIM(args.base_image.name, gaussian_kernel_1d, size=size)
ssim_value = ssim.ssim_value(comparison_image)
if is_a_single_image:
sys.stdout.write('%.7g' % ssim_value) # depends on [control=['if'], data=[]]
else:
sys.stdout.write('%s - %s: %.7g' % (args.base_image.name, comparison_image, ssim_value))
sys.stdout.write('\n') # depends on [control=['for'], data=['comparison_image']] |
def fit(self, X, y=None, groups=None):
"""Run fit with all sets of parameters.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Training vector, where n_samples is the number of samples and
n_features is the number of features.
y : array-like, shape = [n_samples] or [n_samples, n_output], optional
Target relative to X for classification or regression;
None for unsupervised learning.
groups : array-like, with shape (n_samples,), optional
Group labels for the samples used while splitting the dataset into
train/test set.
"""
return self._fit(X, y, groups, ParameterGrid(self.param_grid)) | def function[fit, parameter[self, X, y, groups]]:
constant[Run fit with all sets of parameters.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Training vector, where n_samples is the number of samples and
n_features is the number of features.
y : array-like, shape = [n_samples] or [n_samples, n_output], optional
Target relative to X for classification or regression;
None for unsupervised learning.
groups : array-like, with shape (n_samples,), optional
Group labels for the samples used while splitting the dataset into
train/test set.
]
return[call[name[self]._fit, parameter[name[X], name[y], name[groups], call[name[ParameterGrid], parameter[name[self].param_grid]]]]] | keyword[def] identifier[fit] ( identifier[self] , identifier[X] , identifier[y] = keyword[None] , identifier[groups] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[_fit] ( identifier[X] , identifier[y] , identifier[groups] , identifier[ParameterGrid] ( identifier[self] . identifier[param_grid] )) | def fit(self, X, y=None, groups=None):
"""Run fit with all sets of parameters.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Training vector, where n_samples is the number of samples and
n_features is the number of features.
y : array-like, shape = [n_samples] or [n_samples, n_output], optional
Target relative to X for classification or regression;
None for unsupervised learning.
groups : array-like, with shape (n_samples,), optional
Group labels for the samples used while splitting the dataset into
train/test set.
"""
return self._fit(X, y, groups, ParameterGrid(self.param_grid)) |
def windows(iterable, length=2, overlap=0, padding=True):
""" Code snippet from Python Cookbook, 2nd Edition by David Ascher,
Alex Martelli and Anna Ravenscroft; O'Reilly 2005
Problem: You have an iterable s and need to make another iterable whose
items are sublists (i.e., sliding windows), each of the same given length,
over s' items, with successive windows overlapping by a specified amount.
"""
it = iter(iterable)
results = list(itertools.islice(it, length))
while len(results) == length:
yield results
results = results[length-overlap:]
results.extend(itertools.islice(it, length-overlap))
if padding and results:
results.extend(itertools.repeat(None, length-len(results)))
yield results | def function[windows, parameter[iterable, length, overlap, padding]]:
constant[ Code snippet from Python Cookbook, 2nd Edition by David Ascher,
Alex Martelli and Anna Ravenscroft; O'Reilly 2005
Problem: You have an iterable s and need to make another iterable whose
items are sublists (i.e., sliding windows), each of the same given length,
over s' items, with successive windows overlapping by a specified amount.
]
variable[it] assign[=] call[name[iter], parameter[name[iterable]]]
variable[results] assign[=] call[name[list], parameter[call[name[itertools].islice, parameter[name[it], name[length]]]]]
while compare[call[name[len], parameter[name[results]]] equal[==] name[length]] begin[:]
<ast.Yield object at 0x7da20c6a8340>
variable[results] assign[=] call[name[results]][<ast.Slice object at 0x7da20c6ab730>]
call[name[results].extend, parameter[call[name[itertools].islice, parameter[name[it], binary_operation[name[length] - name[overlap]]]]]]
if <ast.BoolOp object at 0x7da20c6ab880> begin[:]
call[name[results].extend, parameter[call[name[itertools].repeat, parameter[constant[None], binary_operation[name[length] - call[name[len], parameter[name[results]]]]]]]]
<ast.Yield object at 0x7da20c6a8be0> | keyword[def] identifier[windows] ( identifier[iterable] , identifier[length] = literal[int] , identifier[overlap] = literal[int] , identifier[padding] = keyword[True] ):
literal[string]
identifier[it] = identifier[iter] ( identifier[iterable] )
identifier[results] = identifier[list] ( identifier[itertools] . identifier[islice] ( identifier[it] , identifier[length] ))
keyword[while] identifier[len] ( identifier[results] )== identifier[length] :
keyword[yield] identifier[results]
identifier[results] = identifier[results] [ identifier[length] - identifier[overlap] :]
identifier[results] . identifier[extend] ( identifier[itertools] . identifier[islice] ( identifier[it] , identifier[length] - identifier[overlap] ))
keyword[if] identifier[padding] keyword[and] identifier[results] :
identifier[results] . identifier[extend] ( identifier[itertools] . identifier[repeat] ( keyword[None] , identifier[length] - identifier[len] ( identifier[results] )))
keyword[yield] identifier[results] | def windows(iterable, length=2, overlap=0, padding=True):
""" Code snippet from Python Cookbook, 2nd Edition by David Ascher,
Alex Martelli and Anna Ravenscroft; O'Reilly 2005
Problem: You have an iterable s and need to make another iterable whose
items are sublists (i.e., sliding windows), each of the same given length,
over s' items, with successive windows overlapping by a specified amount.
"""
it = iter(iterable)
results = list(itertools.islice(it, length))
while len(results) == length:
yield results
results = results[length - overlap:]
results.extend(itertools.islice(it, length - overlap)) # depends on [control=['while'], data=['length']]
if padding and results:
results.extend(itertools.repeat(None, length - len(results)))
yield results # depends on [control=['if'], data=[]] |
def pretty_date(time=False):
"""
Get a datetime object or a int() Epoch timestamp and return a
pretty string like 'an hour ago', 'Yesterday', '3 months ago',
'just now', etc
"""
from datetime import datetime
from django.utils import timezone
now = timezone.now()
if isinstance(time, int):
diff = now - datetime.fromtimestamp(time)
elif isinstance(time, datetime):
diff = now - time
elif not time:
diff = now - now
second_diff = diff.seconds
day_diff = diff.days
if day_diff < 0:
return ''
if day_diff == 0:
if second_diff < 10:
return "just now"
if second_diff < 60:
return str(second_diff) + " seconds ago"
if second_diff < 120:
return "a minute ago"
if second_diff < 3600:
return str(second_diff // 60) + " minutes ago"
if second_diff < 7200:
return "an hour ago"
if second_diff < 86400:
return str(second_diff // 3600) + " hours ago"
if day_diff == 1:
return "Yesterday"
if day_diff < 7:
return str(day_diff) + " days ago"
if day_diff < 31:
return str(day_diff // 7) + " weeks ago"
if day_diff < 365:
return str(day_diff // 30) + " months ago"
return str(day_diff // 365) + " years ago" | def function[pretty_date, parameter[time]]:
constant[
Get a datetime object or a int() Epoch timestamp and return a
pretty string like 'an hour ago', 'Yesterday', '3 months ago',
'just now', etc
]
from relative_module[datetime] import module[datetime]
from relative_module[django.utils] import module[timezone]
variable[now] assign[=] call[name[timezone].now, parameter[]]
if call[name[isinstance], parameter[name[time], name[int]]] begin[:]
variable[diff] assign[=] binary_operation[name[now] - call[name[datetime].fromtimestamp, parameter[name[time]]]]
variable[second_diff] assign[=] name[diff].seconds
variable[day_diff] assign[=] name[diff].days
if compare[name[day_diff] less[<] constant[0]] begin[:]
return[constant[]]
if compare[name[day_diff] equal[==] constant[0]] begin[:]
if compare[name[second_diff] less[<] constant[10]] begin[:]
return[constant[just now]]
if compare[name[second_diff] less[<] constant[60]] begin[:]
return[binary_operation[call[name[str], parameter[name[second_diff]]] + constant[ seconds ago]]]
if compare[name[second_diff] less[<] constant[120]] begin[:]
return[constant[a minute ago]]
if compare[name[second_diff] less[<] constant[3600]] begin[:]
return[binary_operation[call[name[str], parameter[binary_operation[name[second_diff] <ast.FloorDiv object at 0x7da2590d6bc0> constant[60]]]] + constant[ minutes ago]]]
if compare[name[second_diff] less[<] constant[7200]] begin[:]
return[constant[an hour ago]]
if compare[name[second_diff] less[<] constant[86400]] begin[:]
return[binary_operation[call[name[str], parameter[binary_operation[name[second_diff] <ast.FloorDiv object at 0x7da2590d6bc0> constant[3600]]]] + constant[ hours ago]]]
if compare[name[day_diff] equal[==] constant[1]] begin[:]
return[constant[Yesterday]]
if compare[name[day_diff] less[<] constant[7]] begin[:]
return[binary_operation[call[name[str], parameter[name[day_diff]]] + constant[ days ago]]]
if compare[name[day_diff] less[<] constant[31]] begin[:]
return[binary_operation[call[name[str], parameter[binary_operation[name[day_diff] <ast.FloorDiv object at 0x7da2590d6bc0> constant[7]]]] + constant[ weeks ago]]]
if compare[name[day_diff] less[<] constant[365]] begin[:]
return[binary_operation[call[name[str], parameter[binary_operation[name[day_diff] <ast.FloorDiv object at 0x7da2590d6bc0> constant[30]]]] + constant[ months ago]]]
return[binary_operation[call[name[str], parameter[binary_operation[name[day_diff] <ast.FloorDiv object at 0x7da2590d6bc0> constant[365]]]] + constant[ years ago]]] | keyword[def] identifier[pretty_date] ( identifier[time] = keyword[False] ):
literal[string]
keyword[from] identifier[datetime] keyword[import] identifier[datetime]
keyword[from] identifier[django] . identifier[utils] keyword[import] identifier[timezone]
identifier[now] = identifier[timezone] . identifier[now] ()
keyword[if] identifier[isinstance] ( identifier[time] , identifier[int] ):
identifier[diff] = identifier[now] - identifier[datetime] . identifier[fromtimestamp] ( identifier[time] )
keyword[elif] identifier[isinstance] ( identifier[time] , identifier[datetime] ):
identifier[diff] = identifier[now] - identifier[time]
keyword[elif] keyword[not] identifier[time] :
identifier[diff] = identifier[now] - identifier[now]
identifier[second_diff] = identifier[diff] . identifier[seconds]
identifier[day_diff] = identifier[diff] . identifier[days]
keyword[if] identifier[day_diff] < literal[int] :
keyword[return] literal[string]
keyword[if] identifier[day_diff] == literal[int] :
keyword[if] identifier[second_diff] < literal[int] :
keyword[return] literal[string]
keyword[if] identifier[second_diff] < literal[int] :
keyword[return] identifier[str] ( identifier[second_diff] )+ literal[string]
keyword[if] identifier[second_diff] < literal[int] :
keyword[return] literal[string]
keyword[if] identifier[second_diff] < literal[int] :
keyword[return] identifier[str] ( identifier[second_diff] // literal[int] )+ literal[string]
keyword[if] identifier[second_diff] < literal[int] :
keyword[return] literal[string]
keyword[if] identifier[second_diff] < literal[int] :
keyword[return] identifier[str] ( identifier[second_diff] // literal[int] )+ literal[string]
keyword[if] identifier[day_diff] == literal[int] :
keyword[return] literal[string]
keyword[if] identifier[day_diff] < literal[int] :
keyword[return] identifier[str] ( identifier[day_diff] )+ literal[string]
keyword[if] identifier[day_diff] < literal[int] :
keyword[return] identifier[str] ( identifier[day_diff] // literal[int] )+ literal[string]
keyword[if] identifier[day_diff] < literal[int] :
keyword[return] identifier[str] ( identifier[day_diff] // literal[int] )+ literal[string]
keyword[return] identifier[str] ( identifier[day_diff] // literal[int] )+ literal[string] | def pretty_date(time=False):
"""
Get a datetime object or a int() Epoch timestamp and return a
pretty string like 'an hour ago', 'Yesterday', '3 months ago',
'just now', etc
"""
from datetime import datetime
from django.utils import timezone
now = timezone.now()
if isinstance(time, int):
diff = now - datetime.fromtimestamp(time) # depends on [control=['if'], data=[]]
elif isinstance(time, datetime):
diff = now - time # depends on [control=['if'], data=[]]
elif not time:
diff = now - now # depends on [control=['if'], data=[]]
second_diff = diff.seconds
day_diff = diff.days
if day_diff < 0:
return '' # depends on [control=['if'], data=[]]
if day_diff == 0:
if second_diff < 10:
return 'just now' # depends on [control=['if'], data=[]]
if second_diff < 60:
return str(second_diff) + ' seconds ago' # depends on [control=['if'], data=['second_diff']]
if second_diff < 120:
return 'a minute ago' # depends on [control=['if'], data=[]]
if second_diff < 3600:
return str(second_diff // 60) + ' minutes ago' # depends on [control=['if'], data=['second_diff']]
if second_diff < 7200:
return 'an hour ago' # depends on [control=['if'], data=[]]
if second_diff < 86400:
return str(second_diff // 3600) + ' hours ago' # depends on [control=['if'], data=['second_diff']] # depends on [control=['if'], data=[]]
if day_diff == 1:
return 'Yesterday' # depends on [control=['if'], data=[]]
if day_diff < 7:
return str(day_diff) + ' days ago' # depends on [control=['if'], data=['day_diff']]
if day_diff < 31:
return str(day_diff // 7) + ' weeks ago' # depends on [control=['if'], data=['day_diff']]
if day_diff < 365:
return str(day_diff // 30) + ' months ago' # depends on [control=['if'], data=['day_diff']]
return str(day_diff // 365) + ' years ago' |
def convert_batchnorm(params, w_name, scope_name, inputs, layers, weights, names):
"""
Convert batch normalization layer.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for keras layers
"""
print('Converting batchnorm ...')
if names == 'short':
tf_name = 'BN' + random_string(6)
elif names == 'keep':
tf_name = w_name
else:
tf_name = w_name + str(random.random())
bias_name = '{0}.bias'.format(w_name)
weights_name = '{0}.weight'.format(w_name)
mean_name = '{0}.running_mean'.format(w_name)
var_name = '{0}.running_var'.format(w_name)
if bias_name in weights:
beta = weights[bias_name].numpy()
if weights_name in weights:
gamma = weights[weights_name].numpy()
mean = weights[mean_name].numpy()
variance = weights[var_name].numpy()
eps = params['epsilon']
momentum = params['momentum']
if weights_name not in weights:
bn = keras.layers.BatchNormalization(
axis=1, momentum=momentum, epsilon=eps,
center=False, scale=False,
weights=[mean, variance],
name=tf_name
)
else:
bn = keras.layers.BatchNormalization(
axis=1, momentum=momentum, epsilon=eps,
weights=[gamma, beta, mean, variance],
name=tf_name
)
layers[scope_name] = bn(layers[inputs[0]]) | def function[convert_batchnorm, parameter[params, w_name, scope_name, inputs, layers, weights, names]]:
constant[
Convert batch normalization layer.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for keras layers
]
call[name[print], parameter[constant[Converting batchnorm ...]]]
if compare[name[names] equal[==] constant[short]] begin[:]
variable[tf_name] assign[=] binary_operation[constant[BN] + call[name[random_string], parameter[constant[6]]]]
variable[bias_name] assign[=] call[constant[{0}.bias].format, parameter[name[w_name]]]
variable[weights_name] assign[=] call[constant[{0}.weight].format, parameter[name[w_name]]]
variable[mean_name] assign[=] call[constant[{0}.running_mean].format, parameter[name[w_name]]]
variable[var_name] assign[=] call[constant[{0}.running_var].format, parameter[name[w_name]]]
if compare[name[bias_name] in name[weights]] begin[:]
variable[beta] assign[=] call[call[name[weights]][name[bias_name]].numpy, parameter[]]
if compare[name[weights_name] in name[weights]] begin[:]
variable[gamma] assign[=] call[call[name[weights]][name[weights_name]].numpy, parameter[]]
variable[mean] assign[=] call[call[name[weights]][name[mean_name]].numpy, parameter[]]
variable[variance] assign[=] call[call[name[weights]][name[var_name]].numpy, parameter[]]
variable[eps] assign[=] call[name[params]][constant[epsilon]]
variable[momentum] assign[=] call[name[params]][constant[momentum]]
if compare[name[weights_name] <ast.NotIn object at 0x7da2590d7190> name[weights]] begin[:]
variable[bn] assign[=] call[name[keras].layers.BatchNormalization, parameter[]]
call[name[layers]][name[scope_name]] assign[=] call[name[bn], parameter[call[name[layers]][call[name[inputs]][constant[0]]]]] | keyword[def] identifier[convert_batchnorm] ( identifier[params] , identifier[w_name] , identifier[scope_name] , identifier[inputs] , identifier[layers] , identifier[weights] , identifier[names] ):
literal[string]
identifier[print] ( literal[string] )
keyword[if] identifier[names] == literal[string] :
identifier[tf_name] = literal[string] + identifier[random_string] ( literal[int] )
keyword[elif] identifier[names] == literal[string] :
identifier[tf_name] = identifier[w_name]
keyword[else] :
identifier[tf_name] = identifier[w_name] + identifier[str] ( identifier[random] . identifier[random] ())
identifier[bias_name] = literal[string] . identifier[format] ( identifier[w_name] )
identifier[weights_name] = literal[string] . identifier[format] ( identifier[w_name] )
identifier[mean_name] = literal[string] . identifier[format] ( identifier[w_name] )
identifier[var_name] = literal[string] . identifier[format] ( identifier[w_name] )
keyword[if] identifier[bias_name] keyword[in] identifier[weights] :
identifier[beta] = identifier[weights] [ identifier[bias_name] ]. identifier[numpy] ()
keyword[if] identifier[weights_name] keyword[in] identifier[weights] :
identifier[gamma] = identifier[weights] [ identifier[weights_name] ]. identifier[numpy] ()
identifier[mean] = identifier[weights] [ identifier[mean_name] ]. identifier[numpy] ()
identifier[variance] = identifier[weights] [ identifier[var_name] ]. identifier[numpy] ()
identifier[eps] = identifier[params] [ literal[string] ]
identifier[momentum] = identifier[params] [ literal[string] ]
keyword[if] identifier[weights_name] keyword[not] keyword[in] identifier[weights] :
identifier[bn] = identifier[keras] . identifier[layers] . identifier[BatchNormalization] (
identifier[axis] = literal[int] , identifier[momentum] = identifier[momentum] , identifier[epsilon] = identifier[eps] ,
identifier[center] = keyword[False] , identifier[scale] = keyword[False] ,
identifier[weights] =[ identifier[mean] , identifier[variance] ],
identifier[name] = identifier[tf_name]
)
keyword[else] :
identifier[bn] = identifier[keras] . identifier[layers] . identifier[BatchNormalization] (
identifier[axis] = literal[int] , identifier[momentum] = identifier[momentum] , identifier[epsilon] = identifier[eps] ,
identifier[weights] =[ identifier[gamma] , identifier[beta] , identifier[mean] , identifier[variance] ],
identifier[name] = identifier[tf_name]
)
identifier[layers] [ identifier[scope_name] ]= identifier[bn] ( identifier[layers] [ identifier[inputs] [ literal[int] ]]) | def convert_batchnorm(params, w_name, scope_name, inputs, layers, weights, names):
"""
Convert batch normalization layer.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for keras layers
"""
print('Converting batchnorm ...')
if names == 'short':
tf_name = 'BN' + random_string(6) # depends on [control=['if'], data=[]]
elif names == 'keep':
tf_name = w_name # depends on [control=['if'], data=[]]
else:
tf_name = w_name + str(random.random())
bias_name = '{0}.bias'.format(w_name)
weights_name = '{0}.weight'.format(w_name)
mean_name = '{0}.running_mean'.format(w_name)
var_name = '{0}.running_var'.format(w_name)
if bias_name in weights:
beta = weights[bias_name].numpy() # depends on [control=['if'], data=['bias_name', 'weights']]
if weights_name in weights:
gamma = weights[weights_name].numpy() # depends on [control=['if'], data=['weights_name', 'weights']]
mean = weights[mean_name].numpy()
variance = weights[var_name].numpy()
eps = params['epsilon']
momentum = params['momentum']
if weights_name not in weights:
bn = keras.layers.BatchNormalization(axis=1, momentum=momentum, epsilon=eps, center=False, scale=False, weights=[mean, variance], name=tf_name) # depends on [control=['if'], data=[]]
else:
bn = keras.layers.BatchNormalization(axis=1, momentum=momentum, epsilon=eps, weights=[gamma, beta, mean, variance], name=tf_name)
layers[scope_name] = bn(layers[inputs[0]]) |
def command_err(self, code=1, errmsg='MockupDB command failure',
*args, **kwargs):
"""Error reply to a command.
Returns True so it is suitable as an `~MockupDB.autoresponds` handler.
"""
kwargs.setdefault('ok', 0)
kwargs['code'] = code
kwargs['errmsg'] = errmsg
self.replies(*args, **kwargs)
return True | def function[command_err, parameter[self, code, errmsg]]:
constant[Error reply to a command.
Returns True so it is suitable as an `~MockupDB.autoresponds` handler.
]
call[name[kwargs].setdefault, parameter[constant[ok], constant[0]]]
call[name[kwargs]][constant[code]] assign[=] name[code]
call[name[kwargs]][constant[errmsg]] assign[=] name[errmsg]
call[name[self].replies, parameter[<ast.Starred object at 0x7da20c7c8fd0>]]
return[constant[True]] | keyword[def] identifier[command_err] ( identifier[self] , identifier[code] = literal[int] , identifier[errmsg] = literal[string] ,
* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] . identifier[setdefault] ( literal[string] , literal[int] )
identifier[kwargs] [ literal[string] ]= identifier[code]
identifier[kwargs] [ literal[string] ]= identifier[errmsg]
identifier[self] . identifier[replies] (* identifier[args] ,** identifier[kwargs] )
keyword[return] keyword[True] | def command_err(self, code=1, errmsg='MockupDB command failure', *args, **kwargs):
"""Error reply to a command.
Returns True so it is suitable as an `~MockupDB.autoresponds` handler.
"""
kwargs.setdefault('ok', 0)
kwargs['code'] = code
kwargs['errmsg'] = errmsg
self.replies(*args, **kwargs)
return True |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.