code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def main():
    """Run the Echo protocol server on TCP port 8000 under Twisted.

    Initializes Rollbar error reporting (with the Twisted handler) before
    starting the reactor, then serves the ``Echo`` protocol forever.
    """
    # FIX: the original placed the docstring *after* the first statement,
    # making it a no-op string expression rather than a real docstring.
    rollbar.init('ACCESS_TOKEN', environment='test', handler='twisted')
    factory = protocol.ServerFactory()
    factory.protocol = Echo
    reactor.listenTCP(8000, factory)
    reactor.run()
call[name[rollbar].init, parameter[constant[ACCESS_TOKEN]]]
constant[This runs the protocol on port 8000]
variable[factory] assign[=] call[name[protocol].ServerFactory, parameter[]]
name[factory].protocol assign[=] name[Echo]
call[name[reactor].listenTCP, parameter[constant[8000], name[factory]]]
call[name[reactor].run, parameter[]] | keyword[def] identifier[main] ():
identifier[rollbar] . identifier[init] ( literal[string] , identifier[environment] = literal[string] , identifier[handler] = literal[string] )
literal[string]
identifier[factory] = identifier[protocol] . identifier[ServerFactory] ()
identifier[factory] . identifier[protocol] = identifier[Echo]
identifier[reactor] . identifier[listenTCP] ( literal[int] , identifier[factory] )
identifier[reactor] . identifier[run] () | def main():
rollbar.init('ACCESS_TOKEN', environment='test', handler='twisted')
'This runs the protocol on port 8000'
factory = protocol.ServerFactory()
factory.protocol = Echo
reactor.listenTCP(8000, factory)
reactor.run() |
def createissue(self, project_id, title, **kwargs):
        """
        Create a new issue in the given project.
        :param project_id: project id
        :param title: title of the issue
        :param kwargs: any extra fields accepted by the issues API
        :return: dict with the issue created (HTTP 201), False otherwise
        """
        # BUG FIX: the payload previously used {'id': id}, i.e. the *builtin*
        # ``id`` function, so the project id was never sent correctly.
        data = {'id': project_id, 'title': title}
        if kwargs:
            data.update(kwargs)
        response = requests.post(
            '{0}/{1}/issues'.format(self.projects_url, project_id),
            headers=self.headers, data=data, verify=self.verify_ssl,
            auth=self.auth, timeout=self.timeout)
        if response.status_code == 201:
            return response.json()
        else:
            return False
constant[
Create a new issue
:param project_id: project id
:param title: title of the issue
:return: dict with the issue created
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b28b41c0>, <ast.Constant object at 0x7da1b28b5930>], [<ast.Name object at 0x7da1b28b7cd0>, <ast.Name object at 0x7da1b28b7d30>]]
if name[kwargs] begin[:]
call[name[data].update, parameter[name[kwargs]]]
variable[request] assign[=] call[name[requests].post, parameter[call[constant[{0}/{1}/issues].format, parameter[name[self].projects_url, name[project_id]]]]]
if compare[name[request].status_code equal[==] constant[201]] begin[:]
return[call[name[request].json, parameter[]]] | keyword[def] identifier[createissue] ( identifier[self] , identifier[project_id] , identifier[title] ,** identifier[kwargs] ):
literal[string]
identifier[data] ={ literal[string] : identifier[id] , literal[string] : identifier[title] }
keyword[if] identifier[kwargs] :
identifier[data] . identifier[update] ( identifier[kwargs] )
identifier[request] = identifier[requests] . identifier[post] (
literal[string] . identifier[format] ( identifier[self] . identifier[projects_url] , identifier[project_id] ),
identifier[headers] = identifier[self] . identifier[headers] , identifier[data] = identifier[data] , identifier[verify] = identifier[self] . identifier[verify_ssl] , identifier[auth] = identifier[self] . identifier[auth] , identifier[timeout] = identifier[self] . identifier[timeout] )
keyword[if] identifier[request] . identifier[status_code] == literal[int] :
keyword[return] identifier[request] . identifier[json] ()
keyword[else] :
keyword[return] keyword[False] | def createissue(self, project_id, title, **kwargs):
"""
Create a new issue
:param project_id: project id
:param title: title of the issue
:return: dict with the issue created
"""
data = {'id': id, 'title': title}
if kwargs:
data.update(kwargs) # depends on [control=['if'], data=[]]
request = requests.post('{0}/{1}/issues'.format(self.projects_url, project_id), headers=self.headers, data=data, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.status_code == 201:
return request.json() # depends on [control=['if'], data=[]]
else:
return False |
def recompute_data(self, ui):
        """Compute the data associated with this processor.
        This method is thread safe and may take a long time to return. It should not be called from
        the UI thread. Upon return, the results will be calculated with the latest data available
        and the cache will not be marked dirty.
        """
        self.__initialize_cache()
        with self.__recompute_lock:
            if self.__cached_value_dirty:
                # Cache is stale: recompute from the latest data.
                try:
                    calculated_data = self.get_calculated_data(ui)
                except Exception as e:
                    # Print both the exception and the call stack before
                    # re-raising, since this typically runs on a worker thread.
                    import traceback
                    traceback.print_exc()
                    traceback.print_stack()
                    raise
                # Persist the fresh value and mark the cache clean.
                self.__cache.set_cached_value(self.__display_item, self.__cache_property_name, calculated_data)
                self.__cached_value = calculated_data
                self.__cached_value_dirty = False
                self.__cached_value_time = time.time()
            else:
                calculated_data = None
            if calculated_data is None:
                # Nothing was recomputed (cache was clean): fall back to the
                # default value.
                calculated_data = self.get_default_data()
                if calculated_data is not None:
                    # if the default is not None, treat it as valid cached data
                    self.__cache.set_cached_value(self.__display_item, self.__cache_property_name, calculated_data)
                    self.__cached_value = calculated_data
                    self.__cached_value_dirty = False
                    self.__cached_value_time = time.time()
                else:
                    # otherwise remove everything from the cache
                    self.__cache.remove_cached_value(self.__display_item, self.__cache_property_name)
                    self.__cached_value = None
                    # NOTE(review): dirty is set to None here (not False) —
                    # presumably a tri-state marker; confirm callers expect it.
                    self.__cached_value_dirty = None
                    self.__cached_value_time = 0
            # NOTE(review): the lock is released around the callback (and
            # re-acquired after it), apparently to avoid deadlock if the
            # callback re-enters this processor — confirm the callback is safe
            # to run without the lock held.
            self.__recompute_lock.release()
            if callable(self.on_thumbnail_updated):
                self.on_thumbnail_updated()
self.__recompute_lock.acquire() | def function[recompute_data, parameter[self, ui]]:
constant[Compute the data associated with this processor.
This method is thread safe and may take a long time to return. It should not be called from
the UI thread. Upon return, the results will be calculated with the latest data available
and the cache will not be marked dirty.
]
call[name[self].__initialize_cache, parameter[]]
with name[self].__recompute_lock begin[:]
if name[self].__cached_value_dirty begin[:]
<ast.Try object at 0x7da18eb56650>
call[name[self].__cache.set_cached_value, parameter[name[self].__display_item, name[self].__cache_property_name, name[calculated_data]]]
name[self].__cached_value assign[=] name[calculated_data]
name[self].__cached_value_dirty assign[=] constant[False]
name[self].__cached_value_time assign[=] call[name[time].time, parameter[]]
if compare[name[calculated_data] is constant[None]] begin[:]
variable[calculated_data] assign[=] call[name[self].get_default_data, parameter[]]
if compare[name[calculated_data] is_not constant[None]] begin[:]
call[name[self].__cache.set_cached_value, parameter[name[self].__display_item, name[self].__cache_property_name, name[calculated_data]]]
name[self].__cached_value assign[=] name[calculated_data]
name[self].__cached_value_dirty assign[=] constant[False]
name[self].__cached_value_time assign[=] call[name[time].time, parameter[]]
call[name[self].__recompute_lock.release, parameter[]]
if call[name[callable], parameter[name[self].on_thumbnail_updated]] begin[:]
call[name[self].on_thumbnail_updated, parameter[]]
call[name[self].__recompute_lock.acquire, parameter[]] | keyword[def] identifier[recompute_data] ( identifier[self] , identifier[ui] ):
literal[string]
identifier[self] . identifier[__initialize_cache] ()
keyword[with] identifier[self] . identifier[__recompute_lock] :
keyword[if] identifier[self] . identifier[__cached_value_dirty] :
keyword[try] :
identifier[calculated_data] = identifier[self] . identifier[get_calculated_data] ( identifier[ui] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[import] identifier[traceback]
identifier[traceback] . identifier[print_exc] ()
identifier[traceback] . identifier[print_stack] ()
keyword[raise]
identifier[self] . identifier[__cache] . identifier[set_cached_value] ( identifier[self] . identifier[__display_item] , identifier[self] . identifier[__cache_property_name] , identifier[calculated_data] )
identifier[self] . identifier[__cached_value] = identifier[calculated_data]
identifier[self] . identifier[__cached_value_dirty] = keyword[False]
identifier[self] . identifier[__cached_value_time] = identifier[time] . identifier[time] ()
keyword[else] :
identifier[calculated_data] = keyword[None]
keyword[if] identifier[calculated_data] keyword[is] keyword[None] :
identifier[calculated_data] = identifier[self] . identifier[get_default_data] ()
keyword[if] identifier[calculated_data] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[__cache] . identifier[set_cached_value] ( identifier[self] . identifier[__display_item] , identifier[self] . identifier[__cache_property_name] , identifier[calculated_data] )
identifier[self] . identifier[__cached_value] = identifier[calculated_data]
identifier[self] . identifier[__cached_value_dirty] = keyword[False]
identifier[self] . identifier[__cached_value_time] = identifier[time] . identifier[time] ()
keyword[else] :
identifier[self] . identifier[__cache] . identifier[remove_cached_value] ( identifier[self] . identifier[__display_item] , identifier[self] . identifier[__cache_property_name] )
identifier[self] . identifier[__cached_value] = keyword[None]
identifier[self] . identifier[__cached_value_dirty] = keyword[None]
identifier[self] . identifier[__cached_value_time] = literal[int]
identifier[self] . identifier[__recompute_lock] . identifier[release] ()
keyword[if] identifier[callable] ( identifier[self] . identifier[on_thumbnail_updated] ):
identifier[self] . identifier[on_thumbnail_updated] ()
identifier[self] . identifier[__recompute_lock] . identifier[acquire] () | def recompute_data(self, ui):
"""Compute the data associated with this processor.
This method is thread safe and may take a long time to return. It should not be called from
the UI thread. Upon return, the results will be calculated with the latest data available
and the cache will not be marked dirty.
"""
self.__initialize_cache()
with self.__recompute_lock:
if self.__cached_value_dirty:
try:
calculated_data = self.get_calculated_data(ui) # depends on [control=['try'], data=[]]
except Exception as e:
import traceback
traceback.print_exc()
traceback.print_stack()
raise # depends on [control=['except'], data=[]]
self.__cache.set_cached_value(self.__display_item, self.__cache_property_name, calculated_data)
self.__cached_value = calculated_data
self.__cached_value_dirty = False
self.__cached_value_time = time.time() # depends on [control=['if'], data=[]]
else:
calculated_data = None
if calculated_data is None:
calculated_data = self.get_default_data()
if calculated_data is not None:
# if the default is not None, treat is as valid cached data
self.__cache.set_cached_value(self.__display_item, self.__cache_property_name, calculated_data)
self.__cached_value = calculated_data
self.__cached_value_dirty = False
self.__cached_value_time = time.time() # depends on [control=['if'], data=['calculated_data']]
else:
# otherwise remove everything from the cache
self.__cache.remove_cached_value(self.__display_item, self.__cache_property_name)
self.__cached_value = None
self.__cached_value_dirty = None
self.__cached_value_time = 0 # depends on [control=['if'], data=['calculated_data']]
self.__recompute_lock.release()
if callable(self.on_thumbnail_updated):
self.on_thumbnail_updated() # depends on [control=['if'], data=[]]
self.__recompute_lock.acquire() # depends on [control=['with'], data=[]] |
def _do_report(self, report, in_port, msg):
        """the process when the querier received a REPORT message.

        Adds *in_port* to the multicast membership for ``report.address`` and,
        on a new membership, reinstalls the flow entries so traffic for that
        group is forwarded to every member port (and reports from members keep
        reaching the controller).
        """
        datapath = msg.datapath
        ofproto = datapath.ofproto
        parser = datapath.ofproto_parser
        if ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
            # OpenFlow 1.0 has no OFPCML_MAX; use the max 16-bit length so the
            # full packet is sent to the controller.
            size = 65535
        else:
            size = ofproto.OFPCML_MAX
        update = False
        # Per-group dict of member ports, keyed by multicast address.
        self._mcast.setdefault(report.address, {})
        if in_port not in self._mcast[report.address]:
            # First report seen from this port for this group.
            update = True
            self._mcast[report.address][in_port] = True
        if update:
            # Membership changed: rebuild the output action list covering all
            # member ports and refresh both flow entries.
            actions = []
            for port in self._mcast[report.address]:
                actions.append(parser.OFPActionOutput(port))
            # Group traffic arriving from the server port fans out to members.
            self._set_flow_entry(
                datapath, actions, self.server_port, report.address)
            # Reports from the new member port are punted to the controller.
            self._set_flow_entry(
                datapath,
                [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER, size)],
in_port, report.address) | def function[_do_report, parameter[self, report, in_port, msg]]:
constant[the process when the querier received a REPORT message.]
variable[datapath] assign[=] name[msg].datapath
variable[ofproto] assign[=] name[datapath].ofproto
variable[parser] assign[=] name[datapath].ofproto_parser
if compare[name[ofproto].OFP_VERSION equal[==] name[ofproto_v1_0].OFP_VERSION] begin[:]
variable[size] assign[=] constant[65535]
variable[update] assign[=] constant[False]
call[name[self]._mcast.setdefault, parameter[name[report].address, dictionary[[], []]]]
if compare[name[in_port] <ast.NotIn object at 0x7da2590d7190> call[name[self]._mcast][name[report].address]] begin[:]
variable[update] assign[=] constant[True]
call[call[name[self]._mcast][name[report].address]][name[in_port]] assign[=] constant[True]
if name[update] begin[:]
variable[actions] assign[=] list[[]]
for taget[name[port]] in starred[call[name[self]._mcast][name[report].address]] begin[:]
call[name[actions].append, parameter[call[name[parser].OFPActionOutput, parameter[name[port]]]]]
call[name[self]._set_flow_entry, parameter[name[datapath], name[actions], name[self].server_port, name[report].address]]
call[name[self]._set_flow_entry, parameter[name[datapath], list[[<ast.Call object at 0x7da1b1b3efe0>]], name[in_port], name[report].address]] | keyword[def] identifier[_do_report] ( identifier[self] , identifier[report] , identifier[in_port] , identifier[msg] ):
literal[string]
identifier[datapath] = identifier[msg] . identifier[datapath]
identifier[ofproto] = identifier[datapath] . identifier[ofproto]
identifier[parser] = identifier[datapath] . identifier[ofproto_parser]
keyword[if] identifier[ofproto] . identifier[OFP_VERSION] == identifier[ofproto_v1_0] . identifier[OFP_VERSION] :
identifier[size] = literal[int]
keyword[else] :
identifier[size] = identifier[ofproto] . identifier[OFPCML_MAX]
identifier[update] = keyword[False]
identifier[self] . identifier[_mcast] . identifier[setdefault] ( identifier[report] . identifier[address] ,{})
keyword[if] identifier[in_port] keyword[not] keyword[in] identifier[self] . identifier[_mcast] [ identifier[report] . identifier[address] ]:
identifier[update] = keyword[True]
identifier[self] . identifier[_mcast] [ identifier[report] . identifier[address] ][ identifier[in_port] ]= keyword[True]
keyword[if] identifier[update] :
identifier[actions] =[]
keyword[for] identifier[port] keyword[in] identifier[self] . identifier[_mcast] [ identifier[report] . identifier[address] ]:
identifier[actions] . identifier[append] ( identifier[parser] . identifier[OFPActionOutput] ( identifier[port] ))
identifier[self] . identifier[_set_flow_entry] (
identifier[datapath] , identifier[actions] , identifier[self] . identifier[server_port] , identifier[report] . identifier[address] )
identifier[self] . identifier[_set_flow_entry] (
identifier[datapath] ,
[ identifier[parser] . identifier[OFPActionOutput] ( identifier[ofproto] . identifier[OFPP_CONTROLLER] , identifier[size] )],
identifier[in_port] , identifier[report] . identifier[address] ) | def _do_report(self, report, in_port, msg):
"""the process when the querier received a REPORT message."""
datapath = msg.datapath
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
if ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
size = 65535 # depends on [control=['if'], data=[]]
else:
size = ofproto.OFPCML_MAX
update = False
self._mcast.setdefault(report.address, {})
if in_port not in self._mcast[report.address]:
update = True # depends on [control=['if'], data=[]]
self._mcast[report.address][in_port] = True
if update:
actions = []
for port in self._mcast[report.address]:
actions.append(parser.OFPActionOutput(port)) # depends on [control=['for'], data=['port']]
self._set_flow_entry(datapath, actions, self.server_port, report.address)
self._set_flow_entry(datapath, [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER, size)], in_port, report.address) # depends on [control=['if'], data=[]] |
def visit_DictComp(self, node: AST, dfltChaining: bool = True) -> str:
        """Render *node* as dict-comprehension source text: ``{key: value <generators>}``."""
        key_src = self.visit(node.key)
        value_src = self.visit(node.value)
        gen_src = " ".join(self.visit(gen) for gen in node.generators)
        return "{" + f"{key_src}: {value_src} {gen_src}" + "}"
constant[Return `node`s representation as dict comprehension.]
return[<ast.JoinedStr object at 0x7da1b2873940>] | keyword[def] identifier[visit_DictComp] ( identifier[self] , identifier[node] : identifier[AST] , identifier[dfltChaining] : identifier[bool] = keyword[True] )-> identifier[str] :
literal[string]
keyword[return] literal[string] literal[string] | def visit_DictComp(self, node: AST, dfltChaining: bool=True) -> str:
"""Return `node`s representation as dict comprehension."""
return f"{{{self.visit(node.key)}: {self.visit(node.value)} {' '.join((self.visit(gen) for gen in node.generators))}}}" |
def _spoken_representation_L1(lst_lst_char):
    """Spell out words character by character via ``_char_to_string_morse``.

    Characters within a word are separated by a single space and words by
    nine spaces, so an empty leading word yields a nine-space prefix.
    """
    letter_sep = ' '
    word_sep = letter_sep * 9
    spoken_words = (
        letter_sep.join(_char_to_string_morse(ch) for ch in word)
        for word in lst_lst_char
    )
    return word_sep.join(spoken_words)
constant[
>>> lst = [['M', 'O', 'R', 'S', 'E'], ['C', 'O', 'D', 'E']]
>>> _spoken_representation_L1(lst)
'M O R S E C O D E'
>>> lst = [[], ['M', 'O', 'R', 'S', 'E'], ['C', 'O', 'D', 'E']]
>>> _spoken_representation_L1(lst)
' M O R S E C O D E'
]
variable[s] assign[=] constant[]
variable[inter_char] assign[=] constant[ ]
variable[inter_word] assign[=] binary_operation[name[inter_char] * constant[9]]
for taget[tuple[[<ast.Name object at 0x7da20e954760>, <ast.Name object at 0x7da20e957b80>]]] in starred[call[name[enumerate], parameter[name[lst_lst_char]]]] begin[:]
if compare[name[i] greater_or_equal[>=] constant[1]] begin[:]
<ast.AugAssign object at 0x7da20e955a50>
for taget[tuple[[<ast.Name object at 0x7da1b068bb20>, <ast.Name object at 0x7da1b0688d90>]]] in starred[call[name[enumerate], parameter[name[word]]]] begin[:]
if compare[name[j] not_equal[!=] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b06885e0>
<ast.AugAssign object at 0x7da1b068ab30>
return[name[s]] | keyword[def] identifier[_spoken_representation_L1] ( identifier[lst_lst_char] ):
literal[string]
identifier[s] = literal[string]
identifier[inter_char] = literal[string]
identifier[inter_word] = identifier[inter_char] * literal[int]
keyword[for] identifier[i] , identifier[word] keyword[in] identifier[enumerate] ( identifier[lst_lst_char] ):
keyword[if] identifier[i] >= literal[int] :
identifier[s] += identifier[inter_word]
keyword[for] identifier[j] , identifier[c] keyword[in] identifier[enumerate] ( identifier[word] ):
keyword[if] identifier[j] != literal[int] :
identifier[s] += identifier[inter_char]
identifier[s] += identifier[_char_to_string_morse] ( identifier[c] )
keyword[return] identifier[s] | def _spoken_representation_L1(lst_lst_char):
"""
>>> lst = [['M', 'O', 'R', 'S', 'E'], ['C', 'O', 'D', 'E']]
>>> _spoken_representation_L1(lst)
'M O R S E C O D E'
>>> lst = [[], ['M', 'O', 'R', 'S', 'E'], ['C', 'O', 'D', 'E']]
>>> _spoken_representation_L1(lst)
' M O R S E C O D E'
"""
s = ''
inter_char = ' '
inter_word = inter_char * 9
for (i, word) in enumerate(lst_lst_char):
if i >= 1:
s += inter_word # depends on [control=['if'], data=[]]
for (j, c) in enumerate(word):
if j != 0:
s += inter_char # depends on [control=['if'], data=[]]
s += _char_to_string_morse(c) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
return s |
def _set_show_mpls_rsvp_statistics(self, v, load=False):
  """
  Setter method for show_mpls_rsvp_statistics, mapped from YANG variable /brocade_mpls_rpc/show_mpls_rsvp_statistics (rpc)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_show_mpls_rsvp_statistics is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_show_mpls_rsvp_statistics() directly.
  """
  # NOTE: pyangbind-style generated setter — coerce v to the YANG-typed
  # wrapper, validating against the modeled type along the way.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    t = YANGDynClass(v,base=show_mpls_rsvp_statistics.show_mpls_rsvp_statistics, is_leaf=True, yang_name="show-mpls-rsvp-statistics", rest_name="show-mpls-rsvp-statistics", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'showMplsRsvpStatistics'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True)
  except (TypeError, ValueError):
    # Coercion failed: surface a structured error describing the expected type.
    raise ValueError({
      'error-string': """show_mpls_rsvp_statistics must be of a type compatible with rpc""",
      'defined-type': "rpc",
      'generated-type': """YANGDynClass(base=show_mpls_rsvp_statistics.show_mpls_rsvp_statistics, is_leaf=True, yang_name="show-mpls-rsvp-statistics", rest_name="show-mpls-rsvp-statistics", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'showMplsRsvpStatistics'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True)""",
    })
  # Store the validated wrapper and notify the parent (if it supports _set).
  self.__show_mpls_rsvp_statistics = t
  if hasattr(self, '_set'):
self._set() | def function[_set_show_mpls_rsvp_statistics, parameter[self, v, load]]:
constant[
Setter method for show_mpls_rsvp_statistics, mapped from YANG variable /brocade_mpls_rpc/show_mpls_rsvp_statistics (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_show_mpls_rsvp_statistics is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_show_mpls_rsvp_statistics() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da1b26af370>
name[self].__show_mpls_rsvp_statistics assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_show_mpls_rsvp_statistics] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[show_mpls_rsvp_statistics] . identifier[show_mpls_rsvp_statistics] , identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[False] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__show_mpls_rsvp_statistics] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_show_mpls_rsvp_statistics(self, v, load=False):
"""
Setter method for show_mpls_rsvp_statistics, mapped from YANG variable /brocade_mpls_rpc/show_mpls_rsvp_statistics (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_show_mpls_rsvp_statistics is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_show_mpls_rsvp_statistics() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=show_mpls_rsvp_statistics.show_mpls_rsvp_statistics, is_leaf=True, yang_name='show-mpls-rsvp-statistics', rest_name='show-mpls-rsvp-statistics', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'showMplsRsvpStatistics'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'show_mpls_rsvp_statistics must be of a type compatible with rpc', 'defined-type': 'rpc', 'generated-type': 'YANGDynClass(base=show_mpls_rsvp_statistics.show_mpls_rsvp_statistics, is_leaf=True, yang_name="show-mpls-rsvp-statistics", rest_name="show-mpls-rsvp-statistics", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u\'tailf-common\': {u\'hidden\': u\'rpccmd\', u\'actionpoint\': u\'showMplsRsvpStatistics\'}}, namespace=\'urn:brocade.com:mgmt:brocade-mpls\', defining_module=\'brocade-mpls\', yang_type=\'rpc\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__show_mpls_rsvp_statistics = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def _enter_namespace(self, namespace_name):
"""
A namespace is usually an absolute file name of the grammar.
A special namespace '__base__' is used for BASETYPE namespace.
"""
if namespace_name not in self.namespaces:
self.namespaces[namespace_name] = {}
# BASETYPE namespace is imported in each namespace
# as the first namespace to be searched.
self._imported_namespaces[namespace_name] = \
[self.namespaces['__base__']]
self._namespace_stack.append(namespace_name) | def function[_enter_namespace, parameter[self, namespace_name]]:
constant[
A namespace is usually an absolute file name of the grammar.
A special namespace '__base__' is used for BASETYPE namespace.
]
if compare[name[namespace_name] <ast.NotIn object at 0x7da2590d7190> name[self].namespaces] begin[:]
call[name[self].namespaces][name[namespace_name]] assign[=] dictionary[[], []]
call[name[self]._imported_namespaces][name[namespace_name]] assign[=] list[[<ast.Subscript object at 0x7da18eb54760>]]
call[name[self]._namespace_stack.append, parameter[name[namespace_name]]] | keyword[def] identifier[_enter_namespace] ( identifier[self] , identifier[namespace_name] ):
literal[string]
keyword[if] identifier[namespace_name] keyword[not] keyword[in] identifier[self] . identifier[namespaces] :
identifier[self] . identifier[namespaces] [ identifier[namespace_name] ]={}
identifier[self] . identifier[_imported_namespaces] [ identifier[namespace_name] ]=[ identifier[self] . identifier[namespaces] [ literal[string] ]]
identifier[self] . identifier[_namespace_stack] . identifier[append] ( identifier[namespace_name] ) | def _enter_namespace(self, namespace_name):
"""
A namespace is usually an absolute file name of the grammar.
A special namespace '__base__' is used for BASETYPE namespace.
"""
if namespace_name not in self.namespaces:
self.namespaces[namespace_name] = {}
# BASETYPE namespace is imported in each namespace
# as the first namespace to be searched.
self._imported_namespaces[namespace_name] = [self.namespaces['__base__']] # depends on [control=['if'], data=['namespace_name']]
self._namespace_stack.append(namespace_name) |
def execute_wait(self, cmd, walltime=None, envs=None):
        ''' Synchronously execute a commandline string on the shell.
        Args:
            - cmd (string) : Commandline string to execute
            - walltime (int) : timeout in seconds passed to ``Popen.wait``
        Kwargs:
            - envs (dict) : Dictionary of env variables. This will be used
              to override the envs set at channel initialization.
        Returns:
            - retcode : Return code from the execution, -1 on fail
            - stdout : stdout string
            - stderr : stderr string
        Raises:
            None.
        '''
        retcode = -1
        stdout = None
        stderr = None
        # Start from a deep copy of the channel's base environment so the
        # per-call overrides never mutate self._envs.
        current_env = copy.deepcopy(self._envs)
        # FIX: 'envs={}' was a mutable default argument; use None as the
        # sentinel instead (behavior is unchanged — envs was only read).
        if envs:
            current_env.update(envs)
        try:
            proc = subprocess.Popen(
                cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                cwd=self.userhome,
                env=current_env,
                shell=True
            )
            proc.wait(timeout=walltime)
            stdout = proc.stdout.read()
            stderr = proc.stderr.read()
            retcode = proc.returncode
        except Exception as e:
            print("Caught exception: {0}".format(e))
            logger.warn("Execution of command [%s] failed due to \n %s ", cmd, e)
            # Ensure a non-zero retcode so the provider can detect the failure.
            if retcode == 0:
                retcode = -1
            return (retcode, None, None)
        return (retcode, stdout.decode("utf-8"), stderr.decode("utf-8"))
constant[ Synchronously execute a commandline string on the shell.
Args:
- cmd (string) : Commandline string to execute
- walltime (int) : walltime in seconds, this is not really used now.
Kwargs:
- envs (dict) : Dictionary of env variables. This will be used
to override the envs set at channel initialization.
Returns:
- retcode : Return code from the execution, -1 on fail
- stdout : stdout string
- stderr : stderr string
Raises:
None.
]
variable[retcode] assign[=] <ast.UnaryOp object at 0x7da1b014d240>
variable[stdout] assign[=] constant[None]
variable[stderr] assign[=] constant[None]
variable[current_env] assign[=] call[name[copy].deepcopy, parameter[name[self]._envs]]
call[name[current_env].update, parameter[name[envs]]]
<ast.Try object at 0x7da1b01dbbb0>
return[tuple[[<ast.Name object at 0x7da1b01d9e10>, <ast.Call object at 0x7da1b01da440>, <ast.Call object at 0x7da1b01d8dc0>]]] | keyword[def] identifier[execute_wait] ( identifier[self] , identifier[cmd] , identifier[walltime] = keyword[None] , identifier[envs] ={}):
literal[string]
identifier[retcode] =- literal[int]
identifier[stdout] = keyword[None]
identifier[stderr] = keyword[None]
identifier[current_env] = identifier[copy] . identifier[deepcopy] ( identifier[self] . identifier[_envs] )
identifier[current_env] . identifier[update] ( identifier[envs] )
keyword[try] :
identifier[proc] = identifier[subprocess] . identifier[Popen] (
identifier[cmd] ,
identifier[stdout] = identifier[subprocess] . identifier[PIPE] ,
identifier[stderr] = identifier[subprocess] . identifier[PIPE] ,
identifier[cwd] = identifier[self] . identifier[userhome] ,
identifier[env] = identifier[current_env] ,
identifier[shell] = keyword[True]
)
identifier[proc] . identifier[wait] ( identifier[timeout] = identifier[walltime] )
identifier[stdout] = identifier[proc] . identifier[stdout] . identifier[read] ()
identifier[stderr] = identifier[proc] . identifier[stderr] . identifier[read] ()
identifier[retcode] = identifier[proc] . identifier[returncode]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[print] ( literal[string] . identifier[format] ( identifier[e] ))
identifier[logger] . identifier[warn] ( literal[string] , identifier[cmd] , identifier[e] )
keyword[if] identifier[retcode] == literal[int] :
identifier[retcode] =- literal[int]
keyword[return] ( identifier[retcode] , keyword[None] , keyword[None] )
keyword[return] ( identifier[retcode] , identifier[stdout] . identifier[decode] ( literal[string] ), identifier[stderr] . identifier[decode] ( literal[string] )) | def execute_wait(self, cmd, walltime=None, envs={}):
""" Synchronously execute a commandline string on the shell.
Args:
- cmd (string) : Commandline string to execute
- walltime (int) : walltime in seconds, this is not really used now.
Kwargs:
- envs (dict) : Dictionary of env variables. This will be used
to override the envs set at channel initialization.
Returns:
- retcode : Return code from the execution, -1 on fail
- stdout : stdout string
- stderr : stderr string
Raises:
None.
"""
retcode = -1
stdout = None
stderr = None
current_env = copy.deepcopy(self._envs)
current_env.update(envs)
try:
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.userhome, env=current_env, shell=True)
proc.wait(timeout=walltime)
stdout = proc.stdout.read()
stderr = proc.stderr.read()
retcode = proc.returncode # depends on [control=['try'], data=[]]
except Exception as e:
print('Caught exception: {0}'.format(e))
logger.warn('Execution of command [%s] failed due to \n %s ', cmd, e)
# Set retcode to non-zero so that this can be handled in the provider.
if retcode == 0:
retcode = -1 # depends on [control=['if'], data=['retcode']]
return (retcode, None, None) # depends on [control=['except'], data=['e']]
return (retcode, stdout.decode('utf-8'), stderr.decode('utf-8')) |
def get_trap_definitions():
    """Fetch SNMP trap definitions from the HP IMC RESTful API.

    Relies on the module-level ``auth``, ``url`` and ``headers`` globals;
    prompts for credentials via ``set_imc_creds()`` when they are unset.
    The raw response is stored in the module-level ``r`` (kept for
    post-mortem debugging, as elsewhere in this module).

    :param None
    :return: list of trap-definition dicts on success, ``None`` on failure
    """
    # checks to see if the imc credentials are already available
    if auth is None or url is None:
        set_imc_creds()
    global r
    get_trap_def_url = "/imcrs/fault/trapDefine/sync/query?enterpriseId=1.3.6.1.4.1.11&size=10000"
    f_url = url + get_trap_def_url
    r = requests.get(f_url, auth=auth, headers=headers)
    if r.status_code == 200:
        trap_def_list = json.loads(r.text)
        return trap_def_list['trapDefine']
    else:
        # Fixed: the message previously named the wrong function
        # ("get_dev_asset_details"), which made failures hard to trace.
        print("get_trap_definitions: An Error has occured")
constant[Takes in no param as input to fetch SNMP TRAP definitions from HP IMC RESTFUL API
:param None
:return: object of type list containing the device asset details
]
if <ast.BoolOp object at 0x7da20c6a9c60> begin[:]
call[name[set_imc_creds], parameter[]]
<ast.Global object at 0x7da20c6ab4f0>
variable[get_trap_def_url] assign[=] constant[/imcrs/fault/trapDefine/sync/query?enterpriseId=1.3.6.1.4.1.11&size=10000]
variable[f_url] assign[=] binary_operation[name[url] + name[get_trap_def_url]]
variable[payload] assign[=] constant[None]
variable[r] assign[=] call[name[requests].get, parameter[name[f_url]]]
if compare[name[r].status_code equal[==] constant[200]] begin[:]
variable[trap_def_list] assign[=] call[name[json].loads, parameter[name[r].text]]
return[call[name[trap_def_list]][constant[trapDefine]]] | keyword[def] identifier[get_trap_definitions] ():
literal[string]
keyword[if] identifier[auth] keyword[is] keyword[None] keyword[or] identifier[url] keyword[is] keyword[None] :
identifier[set_imc_creds] ()
keyword[global] identifier[r]
identifier[get_trap_def_url] = literal[string]
identifier[f_url] = identifier[url] + identifier[get_trap_def_url]
identifier[payload] = keyword[None]
identifier[r] = identifier[requests] . identifier[get] ( identifier[f_url] , identifier[auth] = identifier[auth] , identifier[headers] = identifier[headers] )
keyword[if] identifier[r] . identifier[status_code] == literal[int] :
identifier[trap_def_list] =( identifier[json] . identifier[loads] ( identifier[r] . identifier[text] ))
keyword[return] identifier[trap_def_list] [ literal[string] ]
keyword[else] :
identifier[print] ( literal[string] ) | def get_trap_definitions():
"""Takes in no param as input to fetch SNMP TRAP definitions from HP IMC RESTFUL API
:param None
:return: object of type list containing the device asset details
"""
# checks to see if the imc credentials are already available
if auth is None or url is None:
set_imc_creds() # depends on [control=['if'], data=[]]
global r
get_trap_def_url = '/imcrs/fault/trapDefine/sync/query?enterpriseId=1.3.6.1.4.1.11&size=10000'
f_url = url + get_trap_def_url
payload = None
# creates the URL using the payload variable as the contents
r = requests.get(f_url, auth=auth, headers=headers)
# r.status_code
if r.status_code == 200:
trap_def_list = json.loads(r.text)
return trap_def_list['trapDefine'] # depends on [control=['if'], data=[]]
else:
print('get_dev_asset_details: An Error has occured') |
def send(messages=None, conf=None, parse_mode=None, disable_web_page_preview=False, files=None, images=None,
         captions=None, locations=None, timeout=30):
    """Send data over Telegram. All arguments are optional.
    Always use this function with explicit keyword arguments. So
    `send(messages=["Hello!"])` instead of `send(["Hello!"])` as the latter
    will *break* when I change the order of the arguments.
    The `file` type is the [file object][] returned by the `open()` function.
    To send an image/file you open it in binary mode:
    ``` python
    import telegram_send
    with open("image.jpg", "rb") as f:
        telegram_send.send(images=[f])
    ```
    [file object]: https://docs.python.org/3/glossary.html#term-file-object
    # Arguments
    conf (str): Path of configuration file to use. Will use the default config if not specified.
                `~` expands to user's home directory.
    messages (List[str]): The messages to send.
    parse_mode (str): Specifies formatting of messages, one of `["text", "markdown", "html"]`.
    disable_web_page_preview (bool): Disables web page previews for all links in the messages.
    files (List[file]): The files to send.
    images (List[file]): The images to send.
    captions (List[str]): The captions to send with the images.
    locations (List[str]): The locations to send. Locations are strings containing the latitude and longitude
                           separated by whitespace or a comma.
    timeout (int|float): The read timeout for network connections in seconds.
    """
    # Resolve the config path; fall back to the package default location.
    conf = expanduser(conf) if conf else get_config_path()
    config = configparser.ConfigParser()
    if not config.read(conf) or not config.has_section("telegram"):
        raise ConfigError("Config not found")
    # Both the bot token and the target chat id are mandatory options.
    missing_options = set(["token", "chat_id"]) - set(config.options("telegram"))
    if len(missing_options) > 0:
        raise ConfigError("Missing options in config: {}".format(", ".join(missing_options)))
    token = config.get("telegram", "token")
    # chat_id may be a numeric id (converted to int) or a non-numeric
    # identifier string, which is passed through as-is.
    chat_id = int(config.get("telegram", "chat_id")) if config.get("telegram", "chat_id").isdigit() else config.get("telegram", "chat_id")
    request = telegram.utils.request.Request(read_timeout=timeout)
    bot = telegram.Bot(token, request=request)
    # We let the user specify "text" as a parse mode to be more explicit about
    # the lack of formatting applied to the message, but "text" isn't a supported
    # parse_mode in python-telegram-bot. Instead, set the parse_mode to None
    # in this case.
    if parse_mode == "text":
        parse_mode = None
    if messages:
        def send_message(message):
            return bot.send_message(chat_id=chat_id, text=message, parse_mode=parse_mode, disable_web_page_preview=disable_web_page_preview)
        for m in messages:
            if len(m) > MAX_MESSAGE_LENGTH:
                # Oversized messages are split and each piece sent separately.
                warn(markup("Message longer than MAX_MESSAGE_LENGTH=%d, splitting into smaller messages." % MAX_MESSAGE_LENGTH, "red"))
                ms = split_message(m, MAX_MESSAGE_LENGTH)
                for m in ms:
                    send_message(m)
            elif len(m) == 0:
                # Empty messages are skipped silently.
                continue
            else:
                send_message(m)
    if files:
        for f in files:
            bot.send_document(chat_id=chat_id, document=f)
    if images:
        if captions:
            # make captions equal length when not all images have captions
            captions += [None] * (len(images) - len(captions))
            for i, c in zip(images, captions):
                bot.send_photo(chat_id=chat_id, photo=i, caption=c)
        else:
            for i in images:
                bot.send_photo(chat_id=chat_id, photo=i)
    if locations:
        # Each location is either "lat,lon" in one string, or a bare
        # latitude whose longitude is taken from the next list element.
        it = iter(locations)
        for loc in it:
            if "," in loc:
                lat, lon = loc.split(",")
            else:
                lat = loc
                lon = next(it)
            bot.send_location(chat_id=chat_id, latitude=float(lat), longitude=float(lon))
constant[Send data over Telegram. All arguments are optional.
Always use this function with explicit keyword arguments. So
`send(messages=["Hello!"])` instead of `send(["Hello!"])` as the latter
will *break* when I change the order of the arguments.
The `file` type is the [file object][] returned by the `open()` function.
To send an image/file you open it in binary mode:
``` python
import telegram_send
with open("image.jpg", "rb") as f:
telegram_send.send(images=[f])
```
[file object]: https://docs.python.org/3/glossary.html#term-file-object
# Arguments
conf (str): Path of configuration file to use. Will use the default config if not specified.
`~` expands to user's home directory.
messages (List[str]): The messages to send.
parse_mode (str): Specifies formatting of messages, one of `["text", "markdown", "html"]`.
disable_web_page_preview (bool): Disables web page previews for all links in the messages.
files (List[file]): The files to send.
images (List[file]): The images to send.
captions (List[str]): The captions to send with the images.
locations (List[str]): The locations to send. Locations are strings containing the latitude and longitude
separated by whitespace or a comma.
timeout (int|float): The read timeout for network connections in seconds.
]
variable[conf] assign[=] <ast.IfExp object at 0x7da1b07a9690>
variable[config] assign[=] call[name[configparser].ConfigParser, parameter[]]
if <ast.BoolOp object at 0x7da1b07ab730> begin[:]
<ast.Raise object at 0x7da1b07aa020>
variable[missing_options] assign[=] binary_operation[call[name[set], parameter[list[[<ast.Constant object at 0x7da1b07a9b70>, <ast.Constant object at 0x7da1b07aacb0>]]]] - call[name[set], parameter[call[name[config].options, parameter[constant[telegram]]]]]]
if compare[call[name[len], parameter[name[missing_options]]] greater[>] constant[0]] begin[:]
<ast.Raise object at 0x7da1b07aa2f0>
variable[token] assign[=] call[name[config].get, parameter[constant[telegram], constant[token]]]
variable[chat_id] assign[=] <ast.IfExp object at 0x7da1b07a8fd0>
variable[request] assign[=] call[name[telegram].utils.request.Request, parameter[]]
variable[bot] assign[=] call[name[telegram].Bot, parameter[name[token]]]
if compare[name[parse_mode] equal[==] constant[text]] begin[:]
variable[parse_mode] assign[=] constant[None]
if name[messages] begin[:]
def function[send_message, parameter[message]]:
return[call[name[bot].send_message, parameter[]]]
for taget[name[m]] in starred[name[messages]] begin[:]
if compare[call[name[len], parameter[name[m]]] greater[>] name[MAX_MESSAGE_LENGTH]] begin[:]
call[name[warn], parameter[call[name[markup], parameter[binary_operation[constant[Message longer than MAX_MESSAGE_LENGTH=%d, splitting into smaller messages.] <ast.Mod object at 0x7da2590d6920> name[MAX_MESSAGE_LENGTH]], constant[red]]]]]
variable[ms] assign[=] call[name[split_message], parameter[name[m], name[MAX_MESSAGE_LENGTH]]]
for taget[name[m]] in starred[name[ms]] begin[:]
call[name[send_message], parameter[name[m]]]
if name[files] begin[:]
for taget[name[f]] in starred[name[files]] begin[:]
call[name[bot].send_document, parameter[]]
if name[images] begin[:]
if name[captions] begin[:]
<ast.AugAssign object at 0x7da1b07a9780>
for taget[tuple[[<ast.Name object at 0x7da1b07a8fa0>, <ast.Name object at 0x7da1b07a8f10>]]] in starred[call[name[zip], parameter[name[images], name[captions]]]] begin[:]
call[name[bot].send_photo, parameter[]]
if name[locations] begin[:]
variable[it] assign[=] call[name[iter], parameter[name[locations]]]
for taget[name[loc]] in starred[name[it]] begin[:]
if compare[constant[,] in name[loc]] begin[:]
<ast.Tuple object at 0x7da1b07abac0> assign[=] call[name[loc].split, parameter[constant[,]]]
call[name[bot].send_location, parameter[]] | keyword[def] identifier[send] ( identifier[messages] = keyword[None] , identifier[conf] = keyword[None] , identifier[parse_mode] = keyword[None] , identifier[disable_web_page_preview] = keyword[False] , identifier[files] = keyword[None] , identifier[images] = keyword[None] ,
identifier[captions] = keyword[None] , identifier[locations] = keyword[None] , identifier[timeout] = literal[int] ):
literal[string]
identifier[conf] = identifier[expanduser] ( identifier[conf] ) keyword[if] identifier[conf] keyword[else] identifier[get_config_path] ()
identifier[config] = identifier[configparser] . identifier[ConfigParser] ()
keyword[if] keyword[not] identifier[config] . identifier[read] ( identifier[conf] ) keyword[or] keyword[not] identifier[config] . identifier[has_section] ( literal[string] ):
keyword[raise] identifier[ConfigError] ( literal[string] )
identifier[missing_options] = identifier[set] ([ literal[string] , literal[string] ])- identifier[set] ( identifier[config] . identifier[options] ( literal[string] ))
keyword[if] identifier[len] ( identifier[missing_options] )> literal[int] :
keyword[raise] identifier[ConfigError] ( literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[missing_options] )))
identifier[token] = identifier[config] . identifier[get] ( literal[string] , literal[string] )
identifier[chat_id] = identifier[int] ( identifier[config] . identifier[get] ( literal[string] , literal[string] )) keyword[if] identifier[config] . identifier[get] ( literal[string] , literal[string] ). identifier[isdigit] () keyword[else] identifier[config] . identifier[get] ( literal[string] , literal[string] )
identifier[request] = identifier[telegram] . identifier[utils] . identifier[request] . identifier[Request] ( identifier[read_timeout] = identifier[timeout] )
identifier[bot] = identifier[telegram] . identifier[Bot] ( identifier[token] , identifier[request] = identifier[request] )
keyword[if] identifier[parse_mode] == literal[string] :
identifier[parse_mode] = keyword[None]
keyword[if] identifier[messages] :
keyword[def] identifier[send_message] ( identifier[message] ):
keyword[return] identifier[bot] . identifier[send_message] ( identifier[chat_id] = identifier[chat_id] , identifier[text] = identifier[message] , identifier[parse_mode] = identifier[parse_mode] , identifier[disable_web_page_preview] = identifier[disable_web_page_preview] )
keyword[for] identifier[m] keyword[in] identifier[messages] :
keyword[if] identifier[len] ( identifier[m] )> identifier[MAX_MESSAGE_LENGTH] :
identifier[warn] ( identifier[markup] ( literal[string] % identifier[MAX_MESSAGE_LENGTH] , literal[string] ))
identifier[ms] = identifier[split_message] ( identifier[m] , identifier[MAX_MESSAGE_LENGTH] )
keyword[for] identifier[m] keyword[in] identifier[ms] :
identifier[send_message] ( identifier[m] )
keyword[elif] identifier[len] ( identifier[m] )== literal[int] :
keyword[continue]
keyword[else] :
identifier[send_message] ( identifier[m] )
keyword[if] identifier[files] :
keyword[for] identifier[f] keyword[in] identifier[files] :
identifier[bot] . identifier[send_document] ( identifier[chat_id] = identifier[chat_id] , identifier[document] = identifier[f] )
keyword[if] identifier[images] :
keyword[if] identifier[captions] :
identifier[captions] +=[ keyword[None] ]*( identifier[len] ( identifier[images] )- identifier[len] ( identifier[captions] ))
keyword[for] ( identifier[i] , identifier[c] ) keyword[in] identifier[zip] ( identifier[images] , identifier[captions] ):
identifier[bot] . identifier[send_photo] ( identifier[chat_id] = identifier[chat_id] , identifier[photo] = identifier[i] , identifier[caption] = identifier[c] )
keyword[else] :
keyword[for] identifier[i] keyword[in] identifier[images] :
identifier[bot] . identifier[send_photo] ( identifier[chat_id] = identifier[chat_id] , identifier[photo] = identifier[i] )
keyword[if] identifier[locations] :
identifier[it] = identifier[iter] ( identifier[locations] )
keyword[for] identifier[loc] keyword[in] identifier[it] :
keyword[if] literal[string] keyword[in] identifier[loc] :
identifier[lat] , identifier[lon] = identifier[loc] . identifier[split] ( literal[string] )
keyword[else] :
identifier[lat] = identifier[loc]
identifier[lon] = identifier[next] ( identifier[it] )
identifier[bot] . identifier[send_location] ( identifier[chat_id] = identifier[chat_id] , identifier[latitude] = identifier[float] ( identifier[lat] ), identifier[longitude] = identifier[float] ( identifier[lon] )) | def send(messages=None, conf=None, parse_mode=None, disable_web_page_preview=False, files=None, images=None, captions=None, locations=None, timeout=30):
"""Send data over Telegram. All arguments are optional.
Always use this function with explicit keyword arguments. So
`send(messages=["Hello!"])` instead of `send(["Hello!"])` as the latter
will *break* when I change the order of the arguments.
The `file` type is the [file object][] returned by the `open()` function.
To send an image/file you open it in binary mode:
``` python
import telegram_send
with open("image.jpg", "rb") as f:
telegram_send.send(images=[f])
```
[file object]: https://docs.python.org/3/glossary.html#term-file-object
# Arguments
conf (str): Path of configuration file to use. Will use the default config if not specified.
`~` expands to user's home directory.
messages (List[str]): The messages to send.
parse_mode (str): Specifies formatting of messages, one of `["text", "markdown", "html"]`.
disable_web_page_preview (bool): Disables web page previews for all links in the messages.
files (List[file]): The files to send.
images (List[file]): The images to send.
captions (List[str]): The captions to send with the images.
locations (List[str]): The locations to send. Locations are strings containing the latitude and longitude
separated by whitespace or a comma.
timeout (int|float): The read timeout for network connections in seconds.
"""
conf = expanduser(conf) if conf else get_config_path()
config = configparser.ConfigParser()
if not config.read(conf) or not config.has_section('telegram'):
raise ConfigError('Config not found') # depends on [control=['if'], data=[]]
missing_options = set(['token', 'chat_id']) - set(config.options('telegram'))
if len(missing_options) > 0:
raise ConfigError('Missing options in config: {}'.format(', '.join(missing_options))) # depends on [control=['if'], data=[]]
token = config.get('telegram', 'token')
chat_id = int(config.get('telegram', 'chat_id')) if config.get('telegram', 'chat_id').isdigit() else config.get('telegram', 'chat_id')
request = telegram.utils.request.Request(read_timeout=timeout)
bot = telegram.Bot(token, request=request)
# We let the user specify "text" as a parse mode to be more explicit about
# the lack of formatting applied to the message, but "text" isn't a supported
# parse_mode in python-telegram-bot. Instead, set the parse_mode to None
# in this case.
if parse_mode == 'text':
parse_mode = None # depends on [control=['if'], data=['parse_mode']]
if messages:
def send_message(message):
return bot.send_message(chat_id=chat_id, text=message, parse_mode=parse_mode, disable_web_page_preview=disable_web_page_preview)
for m in messages:
if len(m) > MAX_MESSAGE_LENGTH:
warn(markup('Message longer than MAX_MESSAGE_LENGTH=%d, splitting into smaller messages.' % MAX_MESSAGE_LENGTH, 'red'))
ms = split_message(m, MAX_MESSAGE_LENGTH)
for m in ms:
send_message(m) # depends on [control=['for'], data=['m']] # depends on [control=['if'], data=['MAX_MESSAGE_LENGTH']]
elif len(m) == 0:
continue # depends on [control=['if'], data=[]]
else:
send_message(m) # depends on [control=['for'], data=['m']] # depends on [control=['if'], data=[]]
if files:
for f in files:
bot.send_document(chat_id=chat_id, document=f) # depends on [control=['for'], data=['f']] # depends on [control=['if'], data=[]]
if images:
if captions:
# make captions equal length when not all images have captions
captions += [None] * (len(images) - len(captions))
for (i, c) in zip(images, captions):
bot.send_photo(chat_id=chat_id, photo=i, caption=c) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
for i in images:
bot.send_photo(chat_id=chat_id, photo=i) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
if locations:
it = iter(locations)
for loc in it:
if ',' in loc:
(lat, lon) = loc.split(',') # depends on [control=['if'], data=['loc']]
else:
lat = loc
lon = next(it)
bot.send_location(chat_id=chat_id, latitude=float(lat), longitude=float(lon)) # depends on [control=['for'], data=['loc']] # depends on [control=['if'], data=[]] |
def centres_from_shape_pixel_scales_and_origin(shape, pixel_scales, origin):
    """Compute the (y,x) arc-second central coordinates of a 2D array from its
    shape, pixel-scales and origin.

    The coordinate system has the positive y axis pointing up and the positive
    x axis pointing right, so the origin shift is added on y and subtracted
    on x.

    Parameters
    ----------
    shape : (int, int)
        The (y,x) shape of the 2D array the arc-second centre is computed for.
    pixel_scales : (float, float)
        The (y,x) arc-second to pixel scales of the 2D array.
    origin : (float, float)
        The (y,x) origin of the 2D array, which the centre is shifted to.

    Returns
    -------
    tuple (float, float)
        The (y,x) arc-second central coordinates of the input array.

    Examples
    --------
    centres_arcsec = centres_from_shape_pixel_scales_and_origin(shape=(5,5), pixel_scales=(0.5, 0.5), origin=(0.0, 0.0))
    """
    # Geometric half-extent of each axis, in pixel units.
    half_y = float(shape[0] - 1) / 2
    half_x = float(shape[1] - 1) / 2
    # Origin offset converted from arc-seconds to pixels.
    shift_y = origin[0] / pixel_scales[0]
    shift_x = origin[1] / pixel_scales[1]
    return (half_y + shift_y, half_x - shift_x)
constant[Determine the (y,x) arc-second central coordinates of an array from its shape, pixel-scales and origin.
The coordinate system is defined such that the positive y axis is up and positive x axis is right.
Parameters
----------
shape : (int, int)
The (y,x) shape of the 2D array the arc-second centre is computed for.
pixel_scales : (float, float)
The (y,x) arc-second to pixel scales of the 2D array.
origin : (float, flloat)
The (y,x) origin of the 2D array, which the centre is shifted to.
Returns
--------
tuple (float, float)
The (y,x) arc-second central coordinates of the input array.
Examples
--------
centres_arcsec = centres_from_shape_pixel_scales_and_origin(shape=(5,5), pixel_scales=(0.5, 0.5), origin=(0.0, 0.0))
]
variable[y_centre_arcsec] assign[=] binary_operation[binary_operation[call[name[float], parameter[binary_operation[call[name[shape]][constant[0]] - constant[1]]]] / constant[2]] + binary_operation[call[name[origin]][constant[0]] / call[name[pixel_scales]][constant[0]]]]
variable[x_centre_arcsec] assign[=] binary_operation[binary_operation[call[name[float], parameter[binary_operation[call[name[shape]][constant[1]] - constant[1]]]] / constant[2]] - binary_operation[call[name[origin]][constant[1]] / call[name[pixel_scales]][constant[1]]]]
return[tuple[[<ast.Name object at 0x7da18f813d30>, <ast.Name object at 0x7da18f813be0>]]] | keyword[def] identifier[centres_from_shape_pixel_scales_and_origin] ( identifier[shape] , identifier[pixel_scales] , identifier[origin] ):
literal[string]
identifier[y_centre_arcsec] = identifier[float] ( identifier[shape] [ literal[int] ]- literal[int] )/ literal[int] +( identifier[origin] [ literal[int] ]/ identifier[pixel_scales] [ literal[int] ])
identifier[x_centre_arcsec] = identifier[float] ( identifier[shape] [ literal[int] ]- literal[int] )/ literal[int] -( identifier[origin] [ literal[int] ]/ identifier[pixel_scales] [ literal[int] ])
keyword[return] ( identifier[y_centre_arcsec] , identifier[x_centre_arcsec] ) | def centres_from_shape_pixel_scales_and_origin(shape, pixel_scales, origin):
"""Determine the (y,x) arc-second central coordinates of an array from its shape, pixel-scales and origin.
The coordinate system is defined such that the positive y axis is up and positive x axis is right.
Parameters
----------
shape : (int, int)
The (y,x) shape of the 2D array the arc-second centre is computed for.
pixel_scales : (float, float)
The (y,x) arc-second to pixel scales of the 2D array.
origin : (float, flloat)
The (y,x) origin of the 2D array, which the centre is shifted to.
Returns
--------
tuple (float, float)
The (y,x) arc-second central coordinates of the input array.
Examples
--------
centres_arcsec = centres_from_shape_pixel_scales_and_origin(shape=(5,5), pixel_scales=(0.5, 0.5), origin=(0.0, 0.0))
"""
y_centre_arcsec = float(shape[0] - 1) / 2 + origin[0] / pixel_scales[0]
x_centre_arcsec = float(shape[1] - 1) / 2 - origin[1] / pixel_scales[1]
return (y_centre_arcsec, x_centre_arcsec) |
def managed(name, value, **kwargs):
    '''
    Ensure a sysrc variable is set to a specific value.
    name
        The variable name to set
    value
        Value to set the variable to
    file
        (optional) The rc file to add the variable to.
    jail
        (option) the name or JID of the jail to set the value in.
    Example:
    .. code-block:: yaml
        syslogd:
          sysrc.managed:
            - name: syslogd_flags
            - value: -ss
    '''
    ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
    # Inspect the value currently stored in the rc file(s).
    current_state = __salt__['sysrc.get'](name=name, **kwargs)
    if current_state is not None:
        # sysrc.get returns a mapping keyed by rc name; if any entry already
        # holds the desired value there is nothing to do.
        for rcdict in six.itervalues(current_state):
            if rcdict[name] == value:
                ret['result'] = True
                ret['comment'] = '{0} is already set to the desired value.'.format(name)
                return ret
    if __opts__['test'] is True:
        # Dry run: report the pending change without applying it.
        ret['comment'] = 'The value of "{0}" will be changed!'.format(name)
        ret['changes'] = {'old': current_state,
                          'new': name + ' = ' + value + ' will be set.'}
        # When test=true return none
        ret['result'] = None
        return ret
    # Apply the change and report the before/after state.
    new_state = __salt__['sysrc.set'](name=name, value=value, **kwargs)
    ret['comment'] = 'The value of "{0}" was changed!'.format(name)
    ret['changes'] = {'old': current_state,
                      'new': new_state}
    ret['result'] = True
    return ret
constant[
Ensure a sysrc variable is set to a specific value.
name
The variable name to set
value
Value to set the variable to
file
(optional) The rc file to add the variable to.
jail
(option) the name or JID of the jail to set the value in.
Example:
.. code-block:: yaml
syslogd:
sysrc.managed:
- name: syslogd_flags
- value: -ss
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b215e020>, <ast.Constant object at 0x7da1b215d630>, <ast.Constant object at 0x7da1b215d300>, <ast.Constant object at 0x7da1b215dba0>], [<ast.Name object at 0x7da1b215d210>, <ast.Dict object at 0x7da1b215cb20>, <ast.Constant object at 0x7da1b215c0d0>, <ast.Constant object at 0x7da1b215e200>]]
variable[current_state] assign[=] call[call[name[__salt__]][constant[sysrc.get]], parameter[]]
if compare[name[current_state] is_not constant[None]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b215c220>, <ast.Name object at 0x7da1b215ef80>]]] in starred[call[name[six].iteritems, parameter[name[current_state]]]] begin[:]
if compare[call[name[rcdict]][name[name]] equal[==] name[value]] begin[:]
call[name[ret]][constant[result]] assign[=] constant[True]
call[name[ret]][constant[comment]] assign[=] call[constant[{0} is already set to the desired value.].format, parameter[name[name]]]
return[name[ret]]
if compare[call[name[__opts__]][constant[test]] is constant[True]] begin[:]
call[name[ret]][constant[comment]] assign[=] call[constant[The value of "{0}" will be changed!].format, parameter[name[name]]]
call[name[ret]][constant[changes]] assign[=] dictionary[[<ast.Constant object at 0x7da1b215c0a0>, <ast.Constant object at 0x7da1b215c430>], [<ast.Name object at 0x7da1b215d690>, <ast.BinOp object at 0x7da1b215c250>]]
call[name[ret]][constant[result]] assign[=] constant[None]
return[name[ret]]
variable[new_state] assign[=] call[call[name[__salt__]][constant[sysrc.set]], parameter[]]
call[name[ret]][constant[comment]] assign[=] call[constant[The value of "{0}" was changed!].format, parameter[name[name]]]
call[name[ret]][constant[changes]] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c84e50>, <ast.Constant object at 0x7da1b1c850c0>], [<ast.Name object at 0x7da1b1c84370>, <ast.Name object at 0x7da1b1c844f0>]]
call[name[ret]][constant[result]] assign[=] constant[True]
return[name[ret]] | keyword[def] identifier[managed] ( identifier[name] , identifier[value] ,** identifier[kwargs] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] , literal[string] :{}, literal[string] : keyword[False] , literal[string] : literal[string] }
identifier[current_state] = identifier[__salt__] [ literal[string] ]( identifier[name] = identifier[name] ,** identifier[kwargs] )
keyword[if] identifier[current_state] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[rcname] , identifier[rcdict] keyword[in] identifier[six] . identifier[iteritems] ( identifier[current_state] ):
keyword[if] identifier[rcdict] [ identifier[name] ]== identifier[value] :
identifier[ret] [ literal[string] ]= keyword[True]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[return] identifier[ret]
keyword[if] identifier[__opts__] [ literal[string] ] keyword[is] keyword[True] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
identifier[ret] [ literal[string] ]={
literal[string] : identifier[current_state] ,
literal[string] : identifier[name] + literal[string] + identifier[value] + literal[string]
}
identifier[ret] [ literal[string] ]= keyword[None]
keyword[return] identifier[ret]
identifier[new_state] = identifier[__salt__] [ literal[string] ]( identifier[name] = identifier[name] , identifier[value] = identifier[value] ,** identifier[kwargs] )
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
identifier[ret] [ literal[string] ]={
literal[string] : identifier[current_state] ,
literal[string] : identifier[new_state]
}
identifier[ret] [ literal[string] ]= keyword[True]
keyword[return] identifier[ret] | def managed(name, value, **kwargs):
"""
Ensure a sysrc variable is set to a specific value.
name
The variable name to set
value
Value to set the variable to
file
(optional) The rc file to add the variable to.
jail
(option) the name or JID of the jail to set the value in.
Example:
.. code-block:: yaml
syslogd:
sysrc.managed:
- name: syslogd_flags
- value: -ss
"""
ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
# Check the current state
current_state = __salt__['sysrc.get'](name=name, **kwargs)
if current_state is not None:
for (rcname, rcdict) in six.iteritems(current_state):
if rcdict[name] == value:
ret['result'] = True
ret['comment'] = '{0} is already set to the desired value.'.format(name)
return ret # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['current_state']]
if __opts__['test'] is True:
ret['comment'] = 'The value of "{0}" will be changed!'.format(name)
ret['changes'] = {'old': current_state, 'new': name + ' = ' + value + ' will be set.'}
# When test=true return none
ret['result'] = None
return ret # depends on [control=['if'], data=[]]
new_state = __salt__['sysrc.set'](name=name, value=value, **kwargs)
ret['comment'] = 'The value of "{0}" was changed!'.format(name)
ret['changes'] = {'old': current_state, 'new': new_state}
ret['result'] = True
return ret |
def m2i(self, pkt, m):
    """
    The client_kx_msg may be either None, EncryptedPreMasterSecret
    (for RSA encryption key exchange), ClientDiffieHellmanPublic,
    or ClientECDiffieHellmanPublic. When either one of them gets
    dissected, the session context is updated accordingly.
    """
    # Split the raw bytes into the key-exchange payload and the remainder.
    kx_len = self.length_from(pkt)
    payload = m[:kx_len]
    trailer = m[kx_len:]
    session = pkt.tls_session
    # The concrete message class is dictated by the negotiated key exchange,
    # when one is available on the pending read connection state.
    kx_cls = None
    if session.prcs and session.prcs.key_exchange:
        kx_cls = session.prcs.key_exchange.client_kx_msg_cls
    if kx_cls is None:
        return Raw(payload) / Padding(trailer)
    return kx_cls(payload, tls_session=session) / Padding(trailer)
constant[
The client_kx_msg may be either None, EncryptedPreMasterSecret
(for RSA encryption key exchange), ClientDiffieHellmanPublic,
or ClientECDiffieHellmanPublic. When either one of them gets
dissected, the session context is updated accordingly.
]
variable[tmp_len] assign[=] call[name[self].length_from, parameter[name[pkt]]]
<ast.Tuple object at 0x7da1b2126c20> assign[=] tuple[[<ast.Subscript object at 0x7da1b2124e20>, <ast.Subscript object at 0x7da1b2125ed0>]]
variable[s] assign[=] name[pkt].tls_session
variable[cls] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b21276a0> begin[:]
variable[cls] assign[=] name[s].prcs.key_exchange.client_kx_msg_cls
if compare[name[cls] is constant[None]] begin[:]
return[binary_operation[call[name[Raw], parameter[name[tbd]]] / call[name[Padding], parameter[name[rem]]]]]
return[binary_operation[call[name[cls], parameter[name[tbd]]] / call[name[Padding], parameter[name[rem]]]]] | keyword[def] identifier[m2i] ( identifier[self] , identifier[pkt] , identifier[m] ):
literal[string]
identifier[tmp_len] = identifier[self] . identifier[length_from] ( identifier[pkt] )
identifier[tbd] , identifier[rem] = identifier[m] [: identifier[tmp_len] ], identifier[m] [ identifier[tmp_len] :]
identifier[s] = identifier[pkt] . identifier[tls_session]
identifier[cls] = keyword[None]
keyword[if] identifier[s] . identifier[prcs] keyword[and] identifier[s] . identifier[prcs] . identifier[key_exchange] :
identifier[cls] = identifier[s] . identifier[prcs] . identifier[key_exchange] . identifier[client_kx_msg_cls]
keyword[if] identifier[cls] keyword[is] keyword[None] :
keyword[return] identifier[Raw] ( identifier[tbd] )/ identifier[Padding] ( identifier[rem] )
keyword[return] identifier[cls] ( identifier[tbd] , identifier[tls_session] = identifier[s] )/ identifier[Padding] ( identifier[rem] ) | def m2i(self, pkt, m):
"""
The client_kx_msg may be either None, EncryptedPreMasterSecret
(for RSA encryption key exchange), ClientDiffieHellmanPublic,
or ClientECDiffieHellmanPublic. When either one of them gets
dissected, the session context is updated accordingly.
"""
tmp_len = self.length_from(pkt)
(tbd, rem) = (m[:tmp_len], m[tmp_len:])
s = pkt.tls_session
cls = None
if s.prcs and s.prcs.key_exchange:
cls = s.prcs.key_exchange.client_kx_msg_cls # depends on [control=['if'], data=[]]
if cls is None:
return Raw(tbd) / Padding(rem) # depends on [control=['if'], data=[]]
return cls(tbd, tls_session=s) / Padding(rem) |
def run(items, run_parallel):
    """Top level entry point for calculating heterogeneity.

    Organizes samples into batches, runs heterogeneity estimation in
    parallel for batches that are ready, and forwards everything else
    through unchanged.
    """
    ready_batches = []
    passthrough = []
    for batch_name, batch_items in _group_by_batches(items).items():
        if _ready_for_het_analysis(batch_items):
            ready_batches.append((batch_name, batch_items))
        else:
            # Not analyzable as a batch; forward each sample on its own.
            passthrough.extend([[d] for d in batch_items])
    job_args = ([xs, name, xs[0]["config"]] for name, xs in ready_batches)
    processed = run_parallel("heterogeneity_estimate", job_args)
    return _group_by_sample_and_batch(passthrough + processed)
constant[Top level entry point for calculating heterogeneity, handles organization and job distribution.
]
variable[to_process] assign[=] list[[]]
variable[extras] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b18ab730>, <ast.Name object at 0x7da1b18a9930>]]] in starred[call[call[name[_group_by_batches], parameter[name[items]]].items, parameter[]]] begin[:]
if call[name[_ready_for_het_analysis], parameter[name[cur_items]]] begin[:]
call[name[to_process].append, parameter[tuple[[<ast.Name object at 0x7da1b18ab9d0>, <ast.Name object at 0x7da1b18aa530>]]]]
variable[processed] assign[=] call[name[run_parallel], parameter[constant[heterogeneity_estimate], <ast.GeneratorExp object at 0x7da1b18a8940>]]
return[call[name[_group_by_sample_and_batch], parameter[binary_operation[name[extras] + name[processed]]]]] | keyword[def] identifier[run] ( identifier[items] , identifier[run_parallel] ):
literal[string]
identifier[to_process] =[]
identifier[extras] =[]
keyword[for] identifier[batch] , identifier[cur_items] keyword[in] identifier[_group_by_batches] ( identifier[items] ). identifier[items] ():
keyword[if] identifier[_ready_for_het_analysis] ( identifier[cur_items] ):
identifier[to_process] . identifier[append] (( identifier[batch] , identifier[cur_items] ))
keyword[else] :
keyword[for] identifier[data] keyword[in] identifier[cur_items] :
identifier[extras] . identifier[append] ([ identifier[data] ])
identifier[processed] = identifier[run_parallel] ( literal[string] ,([ identifier[xs] , identifier[b] , identifier[xs] [ literal[int] ][ literal[string] ]] keyword[for] identifier[b] , identifier[xs] keyword[in] identifier[to_process] ))
keyword[return] identifier[_group_by_sample_and_batch] ( identifier[extras] + identifier[processed] ) | def run(items, run_parallel):
"""Top level entry point for calculating heterogeneity, handles organization and job distribution.
"""
to_process = []
extras = []
for (batch, cur_items) in _group_by_batches(items).items():
if _ready_for_het_analysis(cur_items):
to_process.append((batch, cur_items)) # depends on [control=['if'], data=[]]
else:
for data in cur_items:
extras.append([data]) # depends on [control=['for'], data=['data']] # depends on [control=['for'], data=[]]
processed = run_parallel('heterogeneity_estimate', ([xs, b, xs[0]['config']] for (b, xs) in to_process))
return _group_by_sample_and_batch(extras + processed) |
def read_tsv(cls, path, encoding='utf-8'):
    """Read a gene set database from a tab-delimited text file.

    Parameters
    ----------
    path: str
        The path name of the file.
    encoding: str
        The encoding of the text file.

    Returns
    -------
    An instance of this class wrapping one ``GeneSet`` per row of the
    file.
    """
    gene_sets = []
    n = 0
    # NOTE(review): the file is opened in binary mode and ``encoding`` is
    # passed to ``csv.reader`` -- the stdlib csv module supports neither;
    # this presumably relies on the ``unicodecsv`` package being imported
    # as ``csv``. TODO confirm against the module's imports.
    with open(path, 'rb') as fh:
        reader = csv.reader(fh, dialect='excel-tab', encoding=encoding)
        for l in reader:
            n += 1
            # Each row is a serialized gene set; delegate parsing.
            gs = GeneSet.from_list(l)
            gene_sets.append(gs)
    logger.debug('Read %d gene sets.', n)
    logger.debug('Size of gene set list: %d', len(gene_sets))
    return cls(gene_sets)
constant[Read a gene set database from a tab-delimited text file.
Parameters
----------
path: str
The path name of the the file.
encoding: str
The encoding of the text file.
Returns
-------
None
]
variable[gene_sets] assign[=] list[[]]
variable[n] assign[=] constant[0]
with call[name[open], parameter[name[path], constant[rb]]] begin[:]
variable[reader] assign[=] call[name[csv].reader, parameter[name[fh]]]
for taget[name[l]] in starred[name[reader]] begin[:]
<ast.AugAssign object at 0x7da1b0ca5390>
variable[gs] assign[=] call[name[GeneSet].from_list, parameter[name[l]]]
call[name[gene_sets].append, parameter[name[gs]]]
call[name[logger].debug, parameter[constant[Read %d gene sets.], name[n]]]
call[name[logger].debug, parameter[constant[Size of gene set list: %d], call[name[len], parameter[name[gene_sets]]]]]
return[call[name[cls], parameter[name[gene_sets]]]] | keyword[def] identifier[read_tsv] ( identifier[cls] , identifier[path] , identifier[encoding] = literal[string] ):
literal[string]
identifier[gene_sets] =[]
identifier[n] = literal[int]
keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[fh] :
identifier[reader] = identifier[csv] . identifier[reader] ( identifier[fh] , identifier[dialect] = literal[string] , identifier[encoding] = identifier[encoding] )
keyword[for] identifier[l] keyword[in] identifier[reader] :
identifier[n] += literal[int]
identifier[gs] = identifier[GeneSet] . identifier[from_list] ( identifier[l] )
identifier[gene_sets] . identifier[append] ( identifier[gs] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[n] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[len] ( identifier[gene_sets] ))
keyword[return] identifier[cls] ( identifier[gene_sets] ) | def read_tsv(cls, path, encoding='utf-8'):
"""Read a gene set database from a tab-delimited text file.
Parameters
----------
path: str
The path name of the the file.
encoding: str
The encoding of the text file.
Returns
-------
None
"""
gene_sets = []
n = 0
with open(path, 'rb') as fh:
reader = csv.reader(fh, dialect='excel-tab', encoding=encoding)
for l in reader:
n += 1
gs = GeneSet.from_list(l)
gene_sets.append(gs) # depends on [control=['for'], data=['l']] # depends on [control=['with'], data=['fh']]
logger.debug('Read %d gene sets.', n)
logger.debug('Size of gene set list: %d', len(gene_sets))
return cls(gene_sets) |
def _check_can_be_instantiated(cls, cache_location):
    """Validate the pre-conditions for building this cache.

    The cache location must look like the URL of a Fuseki server, and
    the SPARQLWrapper library (a transitive dependency of RDFlib's
    sparqlstore) must be importable.
    """
    is_fuseki_url = any(
        cache_location.startswith(prefix)
        for prefix in cls._CACHE_URL_PREFIXES)
    if not is_fuseki_url:
        raise InvalidCacheException('cache location is not a Fuseki url')
    try:
        # Probe the import only; the imported name itself is not needed.
        from rdflib.plugins.stores.sparqlstore import SPARQLUpdateStore
    except ImportError:
        raise InvalidCacheException('unable to import sparql store')
    del SPARQLUpdateStore
constant[Pre-conditions: the cache location is the URL to a Fuseki server
and the SPARQLWrapper library exists (transitive dependency of
RDFlib's sparqlstore).
]
if <ast.UnaryOp object at 0x7da1b12bba00> begin[:]
<ast.Raise object at 0x7da1b12b8310>
<ast.Try object at 0x7da1b12bb850>
<ast.Delete object at 0x7da1b12bb5b0> | keyword[def] identifier[_check_can_be_instantiated] ( identifier[cls] , identifier[cache_location] ):
literal[string]
keyword[if] keyword[not] identifier[any] ( identifier[cache_location] . identifier[startswith] ( identifier[prefix] )
keyword[for] identifier[prefix] keyword[in] identifier[cls] . identifier[_CACHE_URL_PREFIXES] ):
keyword[raise] identifier[InvalidCacheException] ( literal[string] )
keyword[try] :
keyword[from] identifier[rdflib] . identifier[plugins] . identifier[stores] . identifier[sparqlstore] keyword[import] identifier[SPARQLUpdateStore]
keyword[except] identifier[ImportError] :
keyword[raise] identifier[InvalidCacheException] ( literal[string] )
keyword[del] identifier[SPARQLUpdateStore] | def _check_can_be_instantiated(cls, cache_location):
"""Pre-conditions: the cache location is the URL to a Fuseki server
and the SPARQLWrapper library exists (transitive dependency of
RDFlib's sparqlstore).
"""
if not any((cache_location.startswith(prefix) for prefix in cls._CACHE_URL_PREFIXES)):
raise InvalidCacheException('cache location is not a Fuseki url') # depends on [control=['if'], data=[]]
try:
from rdflib.plugins.stores.sparqlstore import SPARQLUpdateStore # depends on [control=['try'], data=[]]
except ImportError:
raise InvalidCacheException('unable to import sparql store') # depends on [control=['except'], data=[]]
del SPARQLUpdateStore |
def is_invertible(self, X):
    """Return True when ``X`` is a square 2-D array with full rank."""
    if len(X.shape) != 2:
        return False
    n_rows, n_cols = X.shape
    # A matrix is invertible iff it is square and its rank equals its size.
    return n_rows == n_cols and np.linalg.matrix_rank(X) == n_rows
constant[checks if Z is invertible]
if compare[call[name[len], parameter[name[X].shape]] equal[==] constant[2]] begin[:]
return[<ast.BoolOp object at 0x7da1b25866b0>] | keyword[def] identifier[is_invertible] ( identifier[self] , identifier[X] ):
literal[string]
keyword[if] identifier[len] ( identifier[X] . identifier[shape] )== literal[int] :
keyword[return] identifier[X] . identifier[shape] [ literal[int] ]== identifier[X] . identifier[shape] [ literal[int] ] keyword[and] identifier[np] . identifier[linalg] . identifier[matrix_rank] ( identifier[X] )== identifier[X] . identifier[shape] [ literal[int] ]
keyword[else] :
keyword[return] keyword[False] | def is_invertible(self, X):
"""checks if Z is invertible"""
if len(X.shape) == 2:
return X.shape[0] == X.shape[1] and np.linalg.matrix_rank(X) == X.shape[0] # depends on [control=['if'], data=[]]
else:
return False |
def set_stats_params(
        self, address=None, enable_http=None,
        minify=None, no_cores=None, no_metrics=None, push_interval=None):
    """Enable the stats server on the specified address.

    * http://uwsgi.readthedocs.io/en/latest/StatsServer.html

    :param str|unicode address: Address/socket to make stats available on,
        e.g. ``127.0.0.1:1717``, ``/tmp/statsock`` or ``:5050``.

    :param bool enable_http: Serve stats over HTTP; prefixes the stats
        server's JSON output with HTTP headers.

    :param bool minify: Minify the statistics JSON output.

    :param bool no_cores: Disable generation of cores-related stats.

    :param bool no_metrics: Do not include metrics in stats output.

    :param int push_interval: Default frequency of stats pushers in seconds.
    """
    set_option = self._set
    set_option('stats-server', address)
    # The four toggles below are all boolean-cast options.
    for option_name, flag in (
            ('stats-http', enable_http),
            ('stats-minified', minify),
            ('stats-no-cores', no_cores),
            ('stats-no-metrics', no_metrics)):
        set_option(option_name, flag, cast=bool)
    set_option('stats-pusher-default-freq', push_interval)
    return self._section
constant[Enables stats server on the specified address.
* http://uwsgi.readthedocs.io/en/latest/StatsServer.html
:param str|unicode address: Address/socket to make stats available on.
Examples:
* 127.0.0.1:1717
* /tmp/statsock
* :5050
:param bool enable_http: Server stats over HTTP.
Prefixes stats server json output with http headers.
:param bool minify: Minify statistics json output.
:param bool no_cores: Disable generation of cores-related stats.
:param bool no_metrics: Do not include metrics in stats output.
:param int push_interval: Set the default frequency of stats pushers in seconds/
]
call[name[self]._set, parameter[constant[stats-server], name[address]]]
call[name[self]._set, parameter[constant[stats-http], name[enable_http]]]
call[name[self]._set, parameter[constant[stats-minified], name[minify]]]
call[name[self]._set, parameter[constant[stats-no-cores], name[no_cores]]]
call[name[self]._set, parameter[constant[stats-no-metrics], name[no_metrics]]]
call[name[self]._set, parameter[constant[stats-pusher-default-freq], name[push_interval]]]
return[name[self]._section] | keyword[def] identifier[set_stats_params] (
identifier[self] , identifier[address] = keyword[None] , identifier[enable_http] = keyword[None] ,
identifier[minify] = keyword[None] , identifier[no_cores] = keyword[None] , identifier[no_metrics] = keyword[None] , identifier[push_interval] = keyword[None] ):
literal[string]
identifier[self] . identifier[_set] ( literal[string] , identifier[address] )
identifier[self] . identifier[_set] ( literal[string] , identifier[enable_http] , identifier[cast] = identifier[bool] )
identifier[self] . identifier[_set] ( literal[string] , identifier[minify] , identifier[cast] = identifier[bool] )
identifier[self] . identifier[_set] ( literal[string] , identifier[no_cores] , identifier[cast] = identifier[bool] )
identifier[self] . identifier[_set] ( literal[string] , identifier[no_metrics] , identifier[cast] = identifier[bool] )
identifier[self] . identifier[_set] ( literal[string] , identifier[push_interval] )
keyword[return] identifier[self] . identifier[_section] | def set_stats_params(self, address=None, enable_http=None, minify=None, no_cores=None, no_metrics=None, push_interval=None):
"""Enables stats server on the specified address.
* http://uwsgi.readthedocs.io/en/latest/StatsServer.html
:param str|unicode address: Address/socket to make stats available on.
Examples:
* 127.0.0.1:1717
* /tmp/statsock
* :5050
:param bool enable_http: Server stats over HTTP.
Prefixes stats server json output with http headers.
:param bool minify: Minify statistics json output.
:param bool no_cores: Disable generation of cores-related stats.
:param bool no_metrics: Do not include metrics in stats output.
:param int push_interval: Set the default frequency of stats pushers in seconds/
"""
self._set('stats-server', address)
self._set('stats-http', enable_http, cast=bool)
self._set('stats-minified', minify, cast=bool)
self._set('stats-no-cores', no_cores, cast=bool)
self._set('stats-no-metrics', no_metrics, cast=bool)
self._set('stats-pusher-default-freq', push_interval)
return self._section |
def build_model_from_txt(self, fname):
    """
    Construct the model and perform regressions based on data in a txt file.

    Parameters
    ----------
    fname : str
        The name of the file to load.
    """
    # Parse the two data columns, then fit the model on them.
    x_column, y_column = read_column_data_from_txt(fname)
    self.build_model_from_xy(x_column, y_column)
constant[
Construct the model and perform regressions based on data in a txt file.
Parameters
----------
fname : str
The name of the file to load.
]
<ast.Tuple object at 0x7da1b1999750> assign[=] call[name[read_column_data_from_txt], parameter[name[fname]]]
call[name[self].build_model_from_xy, parameter[name[x_values], name[y_values]]] | keyword[def] identifier[build_model_from_txt] ( identifier[self] , identifier[fname] ):
literal[string]
identifier[x_values] , identifier[y_values] = identifier[read_column_data_from_txt] ( identifier[fname] )
identifier[self] . identifier[build_model_from_xy] ( identifier[x_values] , identifier[y_values] ) | def build_model_from_txt(self, fname):
"""
Construct the model and perform regressions based on data in a txt file.
Parameters
----------
fname : str
The name of the file to load.
"""
(x_values, y_values) = read_column_data_from_txt(fname)
self.build_model_from_xy(x_values, y_values) |
def readinto(self, b):
    """Read up to len(b) bytes into the writable buffer *b* and return
    the number of bytes read. If the socket is non-blocking and no bytes
    are available, None is returned.

    If *b* is non-empty, a 0 return value indicates that the connection
    was shutdown at the other end.

    NOTE(review): this mirrors CPython's ``socket.SocketIO.readinto``;
    presumably a backport -- keep in sync with upstream if so.
    """
    self._checkClosed()
    self._checkReadable()
    if self._timeout_occurred:
        # A previous read timed out, so the stream position is undefined;
        # refuse any further reads on this object.
        raise IOError("cannot read from timed out object")
    while True:
        try:
            return self._sock.recv_into(b)
        except timeout:
            # Remember the timeout so subsequent reads fail fast (above).
            self._timeout_occurred = True
            raise
        # except InterruptedError:
        #     continue
        except error as e:
            # On a non-blocking socket, EAGAIN/EWOULDBLOCK means "no data
            # available yet": report that as None rather than raising.
            if e.args[0] in _blocking_errnos:
                return None
            raise
constant[Read up to len(b) bytes into the writable buffer *b* and return
the number of bytes read. If the socket is non-blocking and no bytes
are available, None is returned.
If *b* is non-empty, a 0 return value indicates that the connection
was shutdown at the other end.
]
call[name[self]._checkClosed, parameter[]]
call[name[self]._checkReadable, parameter[]]
if name[self]._timeout_occurred begin[:]
<ast.Raise object at 0x7da18f58e620>
while constant[True] begin[:]
<ast.Try object at 0x7da18f58d780> | keyword[def] identifier[readinto] ( identifier[self] , identifier[b] ):
literal[string]
identifier[self] . identifier[_checkClosed] ()
identifier[self] . identifier[_checkReadable] ()
keyword[if] identifier[self] . identifier[_timeout_occurred] :
keyword[raise] identifier[IOError] ( literal[string] )
keyword[while] keyword[True] :
keyword[try] :
keyword[return] identifier[self] . identifier[_sock] . identifier[recv_into] ( identifier[b] )
keyword[except] identifier[timeout] :
identifier[self] . identifier[_timeout_occurred] = keyword[True]
keyword[raise]
keyword[except] identifier[error] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[args] [ literal[int] ] keyword[in] identifier[_blocking_errnos] :
keyword[return] keyword[None]
keyword[raise] | def readinto(self, b):
"""Read up to len(b) bytes into the writable buffer *b* and return
the number of bytes read. If the socket is non-blocking and no bytes
are available, None is returned.
If *b* is non-empty, a 0 return value indicates that the connection
was shutdown at the other end.
"""
self._checkClosed()
self._checkReadable()
if self._timeout_occurred:
raise IOError('cannot read from timed out object') # depends on [control=['if'], data=[]]
while True:
try:
return self._sock.recv_into(b) # depends on [control=['try'], data=[]]
except timeout:
self._timeout_occurred = True
raise # depends on [control=['except'], data=[]]
# except InterruptedError:
# continue
except error as e:
if e.args[0] in _blocking_errnos:
return None # depends on [control=['if'], data=[]]
raise # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=[]] |
def load_cache(dpath, fname, cfgstr, ext='.cPkl', verbose=None, enabled=True):
    """
    Loads data using util_io, but smartly constructs a filename

    The cache file name is derived from ``dpath``, ``fname``, ``cfgstr``
    and ``ext`` via ``_args2_fpath``, so distinct configuration strings
    map to distinct cache entries.

    Raises:
        IOError: errno 3 when caching is disabled (globally via
            ``USE_CACHE`` or per-call via ``enabled``), errno 2 when the
            cache file does not exist, or a generic IOError when the file
            exists but cannot be loaded (treated as corrupted).
    """
    if verbose is None:
        verbose = VERBOSE_CACHE
    if not USE_CACHE or not enabled:
        if verbose > 1:
            print('[util_cache] ... cache disabled: dpath=%s cfgstr=%r' %
                  (basename(dpath), cfgstr,))
        raise IOError(3, 'Cache Loading Is Disabled')
    fpath = _args2_fpath(dpath, fname, cfgstr, ext)
    if not exists(fpath):
        if verbose > 0:
            print('[util_cache] ... cache does not exist: dpath=%r fname=%r cfgstr=%r' % (
                basename(dpath), fname, cfgstr,))
        raise IOError(2, 'No such file or directory: %r' % (fpath,))
    else:
        if verbose > 2:
            print('[util_cache] ... cache exists: dpath=%r fname=%r cfgstr=%r' % (
                basename(dpath), fname, cfgstr,))
        # Imported lazily to avoid a hard module-level dependency cycle.
        import utool as ut
        nbytes = ut.get_file_nBytes(fpath)
        # NOTE(review): '(A and verbose > 2) or verbose > 2' reduces to just
        # 'verbose > 2' -- the file-size term is dead. Possibly a different
        # verbosity threshold was intended for large files; confirm intent.
        big_verbose = (nbytes > 1E6 and verbose > 2) or verbose > 2
        if big_verbose:
            print('[util_cache] About to read file of size %s' % (ut.byte_str2(nbytes),))
        try:
            with ut.Timer(fpath, verbose=big_verbose and verbose > 3):
                data = util_io.load_data(fpath, verbose=verbose > 2)
        except (EOFError, IOError, ImportError) as ex:
            # Unreadable cache entries are reported as IOError so callers
            # can fall back to recomputing the value.
            print('CORRUPTED? fpath = %s' % (fpath,))
            if verbose > 1:
                print('[util_cache] ... cache miss dpath=%s cfgstr=%r' % (
                    basename(dpath), cfgstr,))
            raise IOError(str(ex))
        except Exception:
            print('CORRUPTED? fpath = %s' % (fpath,))
            raise
        else:
            if verbose > 2:
                print('[util_cache] ... cache hit')
            return data
constant[
Loads data using util_io, but smartly constructs a filename
]
if compare[name[verbose] is constant[None]] begin[:]
variable[verbose] assign[=] name[VERBOSE_CACHE]
if <ast.BoolOp object at 0x7da1b2505210> begin[:]
if compare[name[verbose] greater[>] constant[1]] begin[:]
call[name[print], parameter[binary_operation[constant[[util_cache] ... cache disabled: dpath=%s cfgstr=%r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b2506920>, <ast.Name object at 0x7da1b25053c0>]]]]]
<ast.Raise object at 0x7da1b2506740>
variable[fpath] assign[=] call[name[_args2_fpath], parameter[name[dpath], name[fname], name[cfgstr], name[ext]]]
if <ast.UnaryOp object at 0x7da1b2507df0> begin[:]
if compare[name[verbose] greater[>] constant[0]] begin[:]
call[name[print], parameter[binary_operation[constant[[util_cache] ... cache does not exist: dpath=%r fname=%r cfgstr=%r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b25058a0>, <ast.Name object at 0x7da1b25062c0>, <ast.Name object at 0x7da1b2505ff0>]]]]]
<ast.Raise object at 0x7da1b25ebd30>
<ast.Try object at 0x7da1b253b7c0>
return[name[data]] | keyword[def] identifier[load_cache] ( identifier[dpath] , identifier[fname] , identifier[cfgstr] , identifier[ext] = literal[string] , identifier[verbose] = keyword[None] , identifier[enabled] = keyword[True] ):
literal[string]
keyword[if] identifier[verbose] keyword[is] keyword[None] :
identifier[verbose] = identifier[VERBOSE_CACHE]
keyword[if] keyword[not] identifier[USE_CACHE] keyword[or] keyword[not] identifier[enabled] :
keyword[if] identifier[verbose] > literal[int] :
identifier[print] ( literal[string] %
( identifier[basename] ( identifier[dpath] ), identifier[cfgstr] ,))
keyword[raise] identifier[IOError] ( literal[int] , literal[string] )
identifier[fpath] = identifier[_args2_fpath] ( identifier[dpath] , identifier[fname] , identifier[cfgstr] , identifier[ext] )
keyword[if] keyword[not] identifier[exists] ( identifier[fpath] ):
keyword[if] identifier[verbose] > literal[int] :
identifier[print] ( literal[string] %(
identifier[basename] ( identifier[dpath] ), identifier[fname] , identifier[cfgstr] ,))
keyword[raise] identifier[IOError] ( literal[int] , literal[string] %( identifier[fpath] ,))
keyword[else] :
keyword[if] identifier[verbose] > literal[int] :
identifier[print] ( literal[string] %(
identifier[basename] ( identifier[dpath] ), identifier[fname] , identifier[cfgstr] ,))
keyword[import] identifier[utool] keyword[as] identifier[ut]
identifier[nbytes] = identifier[ut] . identifier[get_file_nBytes] ( identifier[fpath] )
identifier[big_verbose] =( identifier[nbytes] > literal[int] keyword[and] identifier[verbose] > literal[int] ) keyword[or] identifier[verbose] > literal[int]
keyword[if] identifier[big_verbose] :
identifier[print] ( literal[string] %( identifier[ut] . identifier[byte_str2] ( identifier[nbytes] ),))
keyword[try] :
keyword[with] identifier[ut] . identifier[Timer] ( identifier[fpath] , identifier[verbose] = identifier[big_verbose] keyword[and] identifier[verbose] > literal[int] ):
identifier[data] = identifier[util_io] . identifier[load_data] ( identifier[fpath] , identifier[verbose] = identifier[verbose] > literal[int] )
keyword[except] ( identifier[EOFError] , identifier[IOError] , identifier[ImportError] ) keyword[as] identifier[ex] :
identifier[print] ( literal[string] %( identifier[fpath] ,))
keyword[if] identifier[verbose] > literal[int] :
identifier[print] ( literal[string] %(
identifier[basename] ( identifier[dpath] ), identifier[cfgstr] ,))
keyword[raise] identifier[IOError] ( identifier[str] ( identifier[ex] ))
keyword[except] identifier[Exception] :
identifier[print] ( literal[string] %( identifier[fpath] ,))
keyword[raise]
keyword[else] :
keyword[if] identifier[verbose] > literal[int] :
identifier[print] ( literal[string] )
keyword[return] identifier[data] | def load_cache(dpath, fname, cfgstr, ext='.cPkl', verbose=None, enabled=True):
"""
Loads data using util_io, but smartly constructs a filename
"""
if verbose is None:
verbose = VERBOSE_CACHE # depends on [control=['if'], data=['verbose']]
if not USE_CACHE or not enabled:
if verbose > 1:
print('[util_cache] ... cache disabled: dpath=%s cfgstr=%r' % (basename(dpath), cfgstr)) # depends on [control=['if'], data=[]]
raise IOError(3, 'Cache Loading Is Disabled') # depends on [control=['if'], data=[]]
fpath = _args2_fpath(dpath, fname, cfgstr, ext)
if not exists(fpath):
if verbose > 0:
print('[util_cache] ... cache does not exist: dpath=%r fname=%r cfgstr=%r' % (basename(dpath), fname, cfgstr)) # depends on [control=['if'], data=[]]
raise IOError(2, 'No such file or directory: %r' % (fpath,)) # depends on [control=['if'], data=[]]
else:
if verbose > 2:
print('[util_cache] ... cache exists: dpath=%r fname=%r cfgstr=%r' % (basename(dpath), fname, cfgstr)) # depends on [control=['if'], data=[]]
import utool as ut
nbytes = ut.get_file_nBytes(fpath)
big_verbose = nbytes > 1000000.0 and verbose > 2 or verbose > 2
if big_verbose:
print('[util_cache] About to read file of size %s' % (ut.byte_str2(nbytes),)) # depends on [control=['if'], data=[]]
try:
with ut.Timer(fpath, verbose=big_verbose and verbose > 3):
data = util_io.load_data(fpath, verbose=verbose > 2) # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]]
except (EOFError, IOError, ImportError) as ex:
print('CORRUPTED? fpath = %s' % (fpath,))
if verbose > 1:
print('[util_cache] ... cache miss dpath=%s cfgstr=%r' % (basename(dpath), cfgstr)) # depends on [control=['if'], data=[]]
raise IOError(str(ex)) # depends on [control=['except'], data=['ex']]
except Exception:
print('CORRUPTED? fpath = %s' % (fpath,))
raise # depends on [control=['except'], data=[]]
else:
if verbose > 2:
print('[util_cache] ... cache hit') # depends on [control=['if'], data=[]]
return data |
def to_match(self):
    """Return a unicode object with the MATCH representation of this BetweenClause."""
    field_match = self.field.to_match()
    lower_match = self.lower_bound.to_match()
    upper_match = self.upper_bound.to_match()
    return u'(%s BETWEEN %s AND %s)' % (field_match, lower_match, upper_match)
constant[Return a unicode object with the MATCH representation of this BetweenClause.]
variable[template] assign[=] constant[({field_name} BETWEEN {lower_bound} AND {upper_bound})]
return[call[name[template].format, parameter[]]] | keyword[def] identifier[to_match] ( identifier[self] ):
literal[string]
identifier[template] = literal[string]
keyword[return] identifier[template] . identifier[format] (
identifier[field_name] = identifier[self] . identifier[field] . identifier[to_match] (),
identifier[lower_bound] = identifier[self] . identifier[lower_bound] . identifier[to_match] (),
identifier[upper_bound] = identifier[self] . identifier[upper_bound] . identifier[to_match] ()) | def to_match(self):
"""Return a unicode object with the MATCH representation of this BetweenClause."""
template = u'({field_name} BETWEEN {lower_bound} AND {upper_bound})'
return template.format(field_name=self.field.to_match(), lower_bound=self.lower_bound.to_match(), upper_bound=self.upper_bound.to_match()) |
def multipart_delete(self, multipart):
    """Abort a multipart upload.

    :param multipart: A :class:`invenio_files_rest.models.MultipartObject`
        instance.
    :returns: A Flask response.
    """
    # Drop the multipart record and persist the deletion.
    multipart.delete()
    db.session.commit()
    # Remove any staged file data asynchronously.
    file_id = multipart.file_id
    if file_id:
        remove_file_data.delay(str(file_id))
    return self.make_response('', 204)
constant[Abort a multipart upload.
:param multipart: A :class:`invenio_files_rest.models.MultipartObject`
instance.
:returns: A Flask response.
]
call[name[multipart].delete, parameter[]]
call[name[db].session.commit, parameter[]]
if name[multipart].file_id begin[:]
call[name[remove_file_data].delay, parameter[call[name[str], parameter[name[multipart].file_id]]]]
return[call[name[self].make_response, parameter[constant[], constant[204]]]] | keyword[def] identifier[multipart_delete] ( identifier[self] , identifier[multipart] ):
literal[string]
identifier[multipart] . identifier[delete] ()
identifier[db] . identifier[session] . identifier[commit] ()
keyword[if] identifier[multipart] . identifier[file_id] :
identifier[remove_file_data] . identifier[delay] ( identifier[str] ( identifier[multipart] . identifier[file_id] ))
keyword[return] identifier[self] . identifier[make_response] ( literal[string] , literal[int] ) | def multipart_delete(self, multipart):
"""Abort a multipart upload.
:param multipart: A :class:`invenio_files_rest.models.MultipartObject`
instance.
:returns: A Flask response.
"""
multipart.delete()
db.session.commit()
if multipart.file_id:
remove_file_data.delay(str(multipart.file_id)) # depends on [control=['if'], data=[]]
return self.make_response('', 204) |
def get_uri(image):
    '''get the uri for an image, if within acceptable

    Parameters
    ==========
    image: the image uri, in the format <uri>://<registry>/<namespace>:<tag>
    '''
    # Guard against None, then look for a "<scheme>://" prefix.
    match = re.match('^.+://', image or '')
    if match is None:
        return None
    uri = match.group().lower().replace('_', '-').replace('://', '')
    # Allow for Singularity compatability
    if 'shub' in uri:
        uri = 'hub'
    allowed = {'aws',
               'docker',
               'http', 'https',  # Must be allowed for pull
               'dropbox',
               'gitlab',
               'globus',
               'google-build',
               'google-storage',
               'google-drive',
               'hub',
               'nvidia',
               'registry',
               's3',
               'swift'}
    if uri not in allowed:
        bot.warning('%s is not a recognized uri.' % uri)
        return None
    return uri
constant[get the uri for an image, if within acceptable
Parameters
==========
image: the image uri, in the format <uri>://<registry>/<namespace>:<tag>
]
variable[image] assign[=] <ast.BoolOp object at 0x7da1b02da380>
variable[regexp] assign[=] call[name[re].compile, parameter[constant[^.+://]]]
variable[uri] assign[=] call[name[regexp].match, parameter[name[image]]]
if compare[name[uri] is_not constant[None]] begin[:]
variable[uri] assign[=] call[call[call[call[name[uri].group, parameter[]].lower, parameter[]].replace, parameter[constant[_], constant[-]]].replace, parameter[constant[://], constant[]]]
variable[accepted_uris] assign[=] list[[<ast.Constant object at 0x7da1b02da8f0>, <ast.Constant object at 0x7da1b02da8c0>, <ast.Constant object at 0x7da1b02da890>, <ast.Constant object at 0x7da1b02da860>, <ast.Constant object at 0x7da1b02dbdc0>, <ast.Constant object at 0x7da1b02dbdf0>, <ast.Constant object at 0x7da1b02dbe20>, <ast.Constant object at 0x7da1b02dbe50>, <ast.Constant object at 0x7da1b02dbe80>, <ast.Constant object at 0x7da1b02dbeb0>, <ast.Constant object at 0x7da1b02dbee0>, <ast.Constant object at 0x7da1b02dbf10>, <ast.Constant object at 0x7da1b02dbf40>, <ast.Constant object at 0x7da1b02dbc40>, <ast.Constant object at 0x7da1b02dbc70>]]
if compare[constant[shub] in name[uri]] begin[:]
variable[uri] assign[=] constant[hub]
if compare[name[uri] <ast.NotIn object at 0x7da2590d7190> name[accepted_uris]] begin[:]
call[name[bot].warning, parameter[binary_operation[constant[%s is not a recognized uri.] <ast.Mod object at 0x7da2590d6920> name[uri]]]]
variable[uri] assign[=] constant[None]
return[name[uri]] | keyword[def] identifier[get_uri] ( identifier[image] ):
literal[string]
identifier[image] = identifier[image] keyword[or] literal[string]
identifier[regexp] = identifier[re] . identifier[compile] ( literal[string] )
identifier[uri] = identifier[regexp] . identifier[match] ( identifier[image] )
keyword[if] identifier[uri] keyword[is] keyword[not] keyword[None] :
identifier[uri] =( identifier[uri] . identifier[group] (). identifier[lower] ()
. identifier[replace] ( literal[string] , literal[string] )
. identifier[replace] ( literal[string] , literal[string] ))
identifier[accepted_uris] =[ literal[string] ,
literal[string] ,
literal[string] , literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ]
keyword[if] literal[string] keyword[in] identifier[uri] : identifier[uri] = literal[string]
keyword[if] identifier[uri] keyword[not] keyword[in] identifier[accepted_uris] :
identifier[bot] . identifier[warning] ( literal[string] % identifier[uri] )
identifier[uri] = keyword[None]
keyword[return] identifier[uri] | def get_uri(image):
"""get the uri for an image, if within acceptable
Parameters
==========
image: the image uri, in the format <uri>://<registry>/<namespace>:<tag>
"""
# Ensure we have a string
image = image or ''
# Find uri prefix, including ://
regexp = re.compile('^.+://')
uri = regexp.match(image)
if uri is not None:
uri = uri.group().lower().replace('_', '-').replace('://', '') # Must be allowed for pull
accepted_uris = ['aws', 'docker', 'http', 'https', 'dropbox', 'gitlab', 'globus', 'google-build', 'google-storage', 'google-drive', 'hub', 'nvidia', 'registry', 's3', 'swift']
# Allow for Singularity compatability
if 'shub' in uri:
uri = 'hub' # depends on [control=['if'], data=['uri']]
if uri not in accepted_uris:
bot.warning('%s is not a recognized uri.' % uri)
uri = None # depends on [control=['if'], data=['uri']] # depends on [control=['if'], data=['uri']]
return uri |
def serialiseString(self, s):
"""
Similar to L{writeString} but does not encode a type byte.
"""
if type(s) is unicode:
s = self.context.getBytesForString(s)
l = len(s)
if l > 0xffff:
self.stream.write_ulong(l)
else:
self.stream.write_ushort(l)
self.stream.write(s) | def function[serialiseString, parameter[self, s]]:
constant[
Similar to L{writeString} but does not encode a type byte.
]
if compare[call[name[type], parameter[name[s]]] is name[unicode]] begin[:]
variable[s] assign[=] call[name[self].context.getBytesForString, parameter[name[s]]]
variable[l] assign[=] call[name[len], parameter[name[s]]]
if compare[name[l] greater[>] constant[65535]] begin[:]
call[name[self].stream.write_ulong, parameter[name[l]]]
call[name[self].stream.write, parameter[name[s]]] | keyword[def] identifier[serialiseString] ( identifier[self] , identifier[s] ):
literal[string]
keyword[if] identifier[type] ( identifier[s] ) keyword[is] identifier[unicode] :
identifier[s] = identifier[self] . identifier[context] . identifier[getBytesForString] ( identifier[s] )
identifier[l] = identifier[len] ( identifier[s] )
keyword[if] identifier[l] > literal[int] :
identifier[self] . identifier[stream] . identifier[write_ulong] ( identifier[l] )
keyword[else] :
identifier[self] . identifier[stream] . identifier[write_ushort] ( identifier[l] )
identifier[self] . identifier[stream] . identifier[write] ( identifier[s] ) | def serialiseString(self, s):
"""
Similar to L{writeString} but does not encode a type byte.
"""
if type(s) is unicode:
s = self.context.getBytesForString(s) # depends on [control=['if'], data=[]]
l = len(s)
if l > 65535:
self.stream.write_ulong(l) # depends on [control=['if'], data=['l']]
else:
self.stream.write_ushort(l)
self.stream.write(s) |
def new(cls, password, rounds):
"""Creates a PasswordHash from the given password."""
if isinstance(password, str):
password = password.encode('utf8')
value = bcrypt.hashpw(password, bcrypt.gensalt(rounds))
return cls(value) | def function[new, parameter[cls, password, rounds]]:
constant[Creates a PasswordHash from the given password.]
if call[name[isinstance], parameter[name[password], name[str]]] begin[:]
variable[password] assign[=] call[name[password].encode, parameter[constant[utf8]]]
variable[value] assign[=] call[name[bcrypt].hashpw, parameter[name[password], call[name[bcrypt].gensalt, parameter[name[rounds]]]]]
return[call[name[cls], parameter[name[value]]]] | keyword[def] identifier[new] ( identifier[cls] , identifier[password] , identifier[rounds] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[password] , identifier[str] ):
identifier[password] = identifier[password] . identifier[encode] ( literal[string] )
identifier[value] = identifier[bcrypt] . identifier[hashpw] ( identifier[password] , identifier[bcrypt] . identifier[gensalt] ( identifier[rounds] ))
keyword[return] identifier[cls] ( identifier[value] ) | def new(cls, password, rounds):
"""Creates a PasswordHash from the given password."""
if isinstance(password, str):
password = password.encode('utf8') # depends on [control=['if'], data=[]]
value = bcrypt.hashpw(password, bcrypt.gensalt(rounds))
return cls(value) |
def get_mac_address_table_output_mac_address_table_vlanid(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_mac_address_table = ET.Element("get_mac_address_table")
config = get_mac_address_table
output = ET.SubElement(get_mac_address_table, "output")
mac_address_table = ET.SubElement(output, "mac-address-table")
mac_address_key = ET.SubElement(mac_address_table, "mac-address")
mac_address_key.text = kwargs.pop('mac_address')
vlanid = ET.SubElement(mac_address_table, "vlanid")
vlanid.text = kwargs.pop('vlanid')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[get_mac_address_table_output_mac_address_table_vlanid, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[get_mac_address_table] assign[=] call[name[ET].Element, parameter[constant[get_mac_address_table]]]
variable[config] assign[=] name[get_mac_address_table]
variable[output] assign[=] call[name[ET].SubElement, parameter[name[get_mac_address_table], constant[output]]]
variable[mac_address_table] assign[=] call[name[ET].SubElement, parameter[name[output], constant[mac-address-table]]]
variable[mac_address_key] assign[=] call[name[ET].SubElement, parameter[name[mac_address_table], constant[mac-address]]]
name[mac_address_key].text assign[=] call[name[kwargs].pop, parameter[constant[mac_address]]]
variable[vlanid] assign[=] call[name[ET].SubElement, parameter[name[mac_address_table], constant[vlanid]]]
name[vlanid].text assign[=] call[name[kwargs].pop, parameter[constant[vlanid]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[get_mac_address_table_output_mac_address_table_vlanid] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[get_mac_address_table] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[get_mac_address_table]
identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[get_mac_address_table] , literal[string] )
identifier[mac_address_table] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] )
identifier[mac_address_key] = identifier[ET] . identifier[SubElement] ( identifier[mac_address_table] , literal[string] )
identifier[mac_address_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[vlanid] = identifier[ET] . identifier[SubElement] ( identifier[mac_address_table] , literal[string] )
identifier[vlanid] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def get_mac_address_table_output_mac_address_table_vlanid(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
get_mac_address_table = ET.Element('get_mac_address_table')
config = get_mac_address_table
output = ET.SubElement(get_mac_address_table, 'output')
mac_address_table = ET.SubElement(output, 'mac-address-table')
mac_address_key = ET.SubElement(mac_address_table, 'mac-address')
mac_address_key.text = kwargs.pop('mac_address')
vlanid = ET.SubElement(mac_address_table, 'vlanid')
vlanid.text = kwargs.pop('vlanid')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def coerce_date_dict(date_dict):
"""
given a dictionary (presumed to be from request.GET) it returns a tuple
that represents a date. It will return from year down to seconds until one
is not found. ie if year, month, and seconds are in the dictionary, only
year and month will be returned, the rest will be returned as min. If none
of the parts are found return an empty tuple.
"""
keys = ['year', 'month', 'day', 'hour', 'minute', 'second']
ret_val = {
'year': 1,
'month': 1,
'day': 1,
'hour': 0,
'minute': 0,
'second': 0}
modified = False
for key in keys:
try:
ret_val[key] = int(date_dict[key])
modified = True
except KeyError:
break
return modified and ret_val or {} | def function[coerce_date_dict, parameter[date_dict]]:
constant[
given a dictionary (presumed to be from request.GET) it returns a tuple
that represents a date. It will return from year down to seconds until one
is not found. ie if year, month, and seconds are in the dictionary, only
year and month will be returned, the rest will be returned as min. If none
of the parts are found return an empty tuple.
]
variable[keys] assign[=] list[[<ast.Constant object at 0x7da1b206bd00>, <ast.Constant object at 0x7da1b206ba00>, <ast.Constant object at 0x7da1b206ab30>, <ast.Constant object at 0x7da1b2069270>, <ast.Constant object at 0x7da1b2068b20>, <ast.Constant object at 0x7da1b206baf0>]]
variable[ret_val] assign[=] dictionary[[<ast.Constant object at 0x7da1b206a6b0>, <ast.Constant object at 0x7da1b206b190>, <ast.Constant object at 0x7da1b206a050>, <ast.Constant object at 0x7da1b206b790>, <ast.Constant object at 0x7da1b206a1a0>, <ast.Constant object at 0x7da1b206a590>], [<ast.Constant object at 0x7da1b206bd60>, <ast.Constant object at 0x7da1b2068490>, <ast.Constant object at 0x7da1b2068520>, <ast.Constant object at 0x7da1b2068610>, <ast.Constant object at 0x7da1b206ae30>, <ast.Constant object at 0x7da1b206a2f0>]]
variable[modified] assign[=] constant[False]
for taget[name[key]] in starred[name[keys]] begin[:]
<ast.Try object at 0x7da1b206b9a0>
return[<ast.BoolOp object at 0x7da1b206a3e0>] | keyword[def] identifier[coerce_date_dict] ( identifier[date_dict] ):
literal[string]
identifier[keys] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
identifier[ret_val] ={
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] }
identifier[modified] = keyword[False]
keyword[for] identifier[key] keyword[in] identifier[keys] :
keyword[try] :
identifier[ret_val] [ identifier[key] ]= identifier[int] ( identifier[date_dict] [ identifier[key] ])
identifier[modified] = keyword[True]
keyword[except] identifier[KeyError] :
keyword[break]
keyword[return] identifier[modified] keyword[and] identifier[ret_val] keyword[or] {} | def coerce_date_dict(date_dict):
"""
given a dictionary (presumed to be from request.GET) it returns a tuple
that represents a date. It will return from year down to seconds until one
is not found. ie if year, month, and seconds are in the dictionary, only
year and month will be returned, the rest will be returned as min. If none
of the parts are found return an empty tuple.
"""
keys = ['year', 'month', 'day', 'hour', 'minute', 'second']
ret_val = {'year': 1, 'month': 1, 'day': 1, 'hour': 0, 'minute': 0, 'second': 0}
modified = False
for key in keys:
try:
ret_val[key] = int(date_dict[key])
modified = True # depends on [control=['try'], data=[]]
except KeyError:
break # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['key']]
return modified and ret_val or {} |
def restore_component(self, component_name, save_path):
"""
Restores a component's parameters from a save location.
Args:
component_name: The component to restore.
save_path: The save location.
"""
component = self.get_component(component_name=component_name)
self._validate_savable(component=component, component_name=component_name)
component.restore(sess=self.session, save_path=save_path) | def function[restore_component, parameter[self, component_name, save_path]]:
constant[
Restores a component's parameters from a save location.
Args:
component_name: The component to restore.
save_path: The save location.
]
variable[component] assign[=] call[name[self].get_component, parameter[]]
call[name[self]._validate_savable, parameter[]]
call[name[component].restore, parameter[]] | keyword[def] identifier[restore_component] ( identifier[self] , identifier[component_name] , identifier[save_path] ):
literal[string]
identifier[component] = identifier[self] . identifier[get_component] ( identifier[component_name] = identifier[component_name] )
identifier[self] . identifier[_validate_savable] ( identifier[component] = identifier[component] , identifier[component_name] = identifier[component_name] )
identifier[component] . identifier[restore] ( identifier[sess] = identifier[self] . identifier[session] , identifier[save_path] = identifier[save_path] ) | def restore_component(self, component_name, save_path):
"""
Restores a component's parameters from a save location.
Args:
component_name: The component to restore.
save_path: The save location.
"""
component = self.get_component(component_name=component_name)
self._validate_savable(component=component, component_name=component_name)
component.restore(sess=self.session, save_path=save_path) |
def loads(cls, pickle_string):
"""
Equivalent to pickle.loads except that the HoloViews trees is
restored appropriately.
"""
cls.load_counter_offset = StoreOptions.id_offset()
val = pickle.loads(pickle_string)
cls.load_counter_offset = None
return val | def function[loads, parameter[cls, pickle_string]]:
constant[
Equivalent to pickle.loads except that the HoloViews trees is
restored appropriately.
]
name[cls].load_counter_offset assign[=] call[name[StoreOptions].id_offset, parameter[]]
variable[val] assign[=] call[name[pickle].loads, parameter[name[pickle_string]]]
name[cls].load_counter_offset assign[=] constant[None]
return[name[val]] | keyword[def] identifier[loads] ( identifier[cls] , identifier[pickle_string] ):
literal[string]
identifier[cls] . identifier[load_counter_offset] = identifier[StoreOptions] . identifier[id_offset] ()
identifier[val] = identifier[pickle] . identifier[loads] ( identifier[pickle_string] )
identifier[cls] . identifier[load_counter_offset] = keyword[None]
keyword[return] identifier[val] | def loads(cls, pickle_string):
"""
Equivalent to pickle.loads except that the HoloViews trees is
restored appropriately.
"""
cls.load_counter_offset = StoreOptions.id_offset()
val = pickle.loads(pickle_string)
cls.load_counter_offset = None
return val |
def digest(self, alg='sha256', b64=True, strip=True):
"""return a url-safe hash of the string, optionally (and by default) base64-encoded
alg='sha256' = the hash algorithm, must be in hashlib
b64=True = whether to base64-encode the output
strip=True = whether to strip trailing '=' from the base64 output
Using the default arguments returns a url-safe base64-encoded SHA-256 hash of the string.
Length of the digest with different algorithms, using b64=True and strip=True:
* SHA224 = 38
* SHA256 = 43 (DEFAULT)
* SHA384 = 64
* SHA512 = 86
"""
import base64, hashlib
h = hashlib.new(alg)
h.update(str(self).encode('utf-8'))
if b64 == True:
# this returns a string with a predictable amount of = padding at the end
b = base64.urlsafe_b64encode(h.digest()).decode('ascii')
if strip == True:
b = b.rstrip('=')
return b
else:
return h.hexdigest() | def function[digest, parameter[self, alg, b64, strip]]:
constant[return a url-safe hash of the string, optionally (and by default) base64-encoded
alg='sha256' = the hash algorithm, must be in hashlib
b64=True = whether to base64-encode the output
strip=True = whether to strip trailing '=' from the base64 output
Using the default arguments returns a url-safe base64-encoded SHA-256 hash of the string.
Length of the digest with different algorithms, using b64=True and strip=True:
* SHA224 = 38
* SHA256 = 43 (DEFAULT)
* SHA384 = 64
* SHA512 = 86
]
import module[base64], module[hashlib]
variable[h] assign[=] call[name[hashlib].new, parameter[name[alg]]]
call[name[h].update, parameter[call[call[name[str], parameter[name[self]]].encode, parameter[constant[utf-8]]]]]
if compare[name[b64] equal[==] constant[True]] begin[:]
variable[b] assign[=] call[call[name[base64].urlsafe_b64encode, parameter[call[name[h].digest, parameter[]]]].decode, parameter[constant[ascii]]]
if compare[name[strip] equal[==] constant[True]] begin[:]
variable[b] assign[=] call[name[b].rstrip, parameter[constant[=]]]
return[name[b]] | keyword[def] identifier[digest] ( identifier[self] , identifier[alg] = literal[string] , identifier[b64] = keyword[True] , identifier[strip] = keyword[True] ):
literal[string]
keyword[import] identifier[base64] , identifier[hashlib]
identifier[h] = identifier[hashlib] . identifier[new] ( identifier[alg] )
identifier[h] . identifier[update] ( identifier[str] ( identifier[self] ). identifier[encode] ( literal[string] ))
keyword[if] identifier[b64] == keyword[True] :
identifier[b] = identifier[base64] . identifier[urlsafe_b64encode] ( identifier[h] . identifier[digest] ()). identifier[decode] ( literal[string] )
keyword[if] identifier[strip] == keyword[True] :
identifier[b] = identifier[b] . identifier[rstrip] ( literal[string] )
keyword[return] identifier[b]
keyword[else] :
keyword[return] identifier[h] . identifier[hexdigest] () | def digest(self, alg='sha256', b64=True, strip=True):
"""return a url-safe hash of the string, optionally (and by default) base64-encoded
alg='sha256' = the hash algorithm, must be in hashlib
b64=True = whether to base64-encode the output
strip=True = whether to strip trailing '=' from the base64 output
Using the default arguments returns a url-safe base64-encoded SHA-256 hash of the string.
Length of the digest with different algorithms, using b64=True and strip=True:
* SHA224 = 38
* SHA256 = 43 (DEFAULT)
* SHA384 = 64
* SHA512 = 86
"""
import base64, hashlib
h = hashlib.new(alg)
h.update(str(self).encode('utf-8'))
if b64 == True: # this returns a string with a predictable amount of = padding at the end
b = base64.urlsafe_b64encode(h.digest()).decode('ascii')
if strip == True:
b = b.rstrip('=') # depends on [control=['if'], data=[]]
return b # depends on [control=['if'], data=[]]
else:
return h.hexdigest() |
def refresh(self):
"""
Generate an access token with a client/access token pair. Used
access token is invalidated.
Returns:
dict: Response or error dict
"""
endpoint = '/refresh'
payload = {
'accessToken': self.access_token,
'clientToken': self.client_token,
}
rep = self._ygg_req(endpoint, payload)
if not rep or 'error' in rep:
return False
self.access_token = rep['accessToken']
self.client_token = rep['clientToken']
self.selected_profile = rep['selectedProfile']
return True | def function[refresh, parameter[self]]:
constant[
Generate an access token with a client/access token pair. Used
access token is invalidated.
Returns:
dict: Response or error dict
]
variable[endpoint] assign[=] constant[/refresh]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b2852620>, <ast.Constant object at 0x7da1b2853b20>], [<ast.Attribute object at 0x7da1b2853490>, <ast.Attribute object at 0x7da1b28522c0>]]
variable[rep] assign[=] call[name[self]._ygg_req, parameter[name[endpoint], name[payload]]]
if <ast.BoolOp object at 0x7da1b2850100> begin[:]
return[constant[False]]
name[self].access_token assign[=] call[name[rep]][constant[accessToken]]
name[self].client_token assign[=] call[name[rep]][constant[clientToken]]
name[self].selected_profile assign[=] call[name[rep]][constant[selectedProfile]]
return[constant[True]] | keyword[def] identifier[refresh] ( identifier[self] ):
literal[string]
identifier[endpoint] = literal[string]
identifier[payload] ={
literal[string] : identifier[self] . identifier[access_token] ,
literal[string] : identifier[self] . identifier[client_token] ,
}
identifier[rep] = identifier[self] . identifier[_ygg_req] ( identifier[endpoint] , identifier[payload] )
keyword[if] keyword[not] identifier[rep] keyword[or] literal[string] keyword[in] identifier[rep] :
keyword[return] keyword[False]
identifier[self] . identifier[access_token] = identifier[rep] [ literal[string] ]
identifier[self] . identifier[client_token] = identifier[rep] [ literal[string] ]
identifier[self] . identifier[selected_profile] = identifier[rep] [ literal[string] ]
keyword[return] keyword[True] | def refresh(self):
"""
Generate an access token with a client/access token pair. Used
access token is invalidated.
Returns:
dict: Response or error dict
"""
endpoint = '/refresh'
payload = {'accessToken': self.access_token, 'clientToken': self.client_token}
rep = self._ygg_req(endpoint, payload)
if not rep or 'error' in rep:
return False # depends on [control=['if'], data=[]]
self.access_token = rep['accessToken']
self.client_token = rep['clientToken']
self.selected_profile = rep['selectedProfile']
return True |
def closing(image, radius=None, mask=None, footprint = None):
'''Do a morphological closing
image - pixel image to operate on
radius - use a structuring element with the given radius. If no structuring
element, use an 8-connected structuring element.
mask - if present, only use unmasked pixels for operations
'''
dilated_image = grey_dilation(image, radius, mask, footprint)
return grey_erosion(dilated_image, radius, mask, footprint) | def function[closing, parameter[image, radius, mask, footprint]]:
constant[Do a morphological closing
image - pixel image to operate on
radius - use a structuring element with the given radius. If no structuring
element, use an 8-connected structuring element.
mask - if present, only use unmasked pixels for operations
]
variable[dilated_image] assign[=] call[name[grey_dilation], parameter[name[image], name[radius], name[mask], name[footprint]]]
return[call[name[grey_erosion], parameter[name[dilated_image], name[radius], name[mask], name[footprint]]]] | keyword[def] identifier[closing] ( identifier[image] , identifier[radius] = keyword[None] , identifier[mask] = keyword[None] , identifier[footprint] = keyword[None] ):
literal[string]
identifier[dilated_image] = identifier[grey_dilation] ( identifier[image] , identifier[radius] , identifier[mask] , identifier[footprint] )
keyword[return] identifier[grey_erosion] ( identifier[dilated_image] , identifier[radius] , identifier[mask] , identifier[footprint] ) | def closing(image, radius=None, mask=None, footprint=None):
"""Do a morphological closing
image - pixel image to operate on
radius - use a structuring element with the given radius. If no structuring
element, use an 8-connected structuring element.
mask - if present, only use unmasked pixels for operations
"""
dilated_image = grey_dilation(image, radius, mask, footprint)
return grey_erosion(dilated_image, radius, mask, footprint) |
def add_edge(self, source: Node,
target: Node,
weight: float = 1,
save_to_cache: bool = True) -> None:
"""
Adds an edge between the specified nodes of the graph.
Arguments:
source (Node): The source node of the edge to add.
target (Node): The target node of the edge to add.
weight (float): The weight of the edge.
save_to_cache (bool): Whether the edge should be saved to the local database. This
argument is necessary (and `False`) when we load edges from
the local cache.
"""
if self._edges.get_edge(source, target) is not None:
return
self._edges.add_edge(
source=source,
target=target,
weight=weight,
save_to_cache=save_to_cache
) | def function[add_edge, parameter[self, source, target, weight, save_to_cache]]:
constant[
Adds an edge between the specified nodes of the graph.
Arguments:
source (Node): The source node of the edge to add.
target (Node): The target node of the edge to add.
weight (float): The weight of the edge.
save_to_cache (bool): Whether the edge should be saved to the local database. This
argument is necessary (and `False`) when we load edges from
the local cache.
]
if compare[call[name[self]._edges.get_edge, parameter[name[source], name[target]]] is_not constant[None]] begin[:]
return[None]
call[name[self]._edges.add_edge, parameter[]] | keyword[def] identifier[add_edge] ( identifier[self] , identifier[source] : identifier[Node] ,
identifier[target] : identifier[Node] ,
identifier[weight] : identifier[float] = literal[int] ,
identifier[save_to_cache] : identifier[bool] = keyword[True] )-> keyword[None] :
literal[string]
keyword[if] identifier[self] . identifier[_edges] . identifier[get_edge] ( identifier[source] , identifier[target] ) keyword[is] keyword[not] keyword[None] :
keyword[return]
identifier[self] . identifier[_edges] . identifier[add_edge] (
identifier[source] = identifier[source] ,
identifier[target] = identifier[target] ,
identifier[weight] = identifier[weight] ,
identifier[save_to_cache] = identifier[save_to_cache]
) | def add_edge(self, source: Node, target: Node, weight: float=1, save_to_cache: bool=True) -> None:
"""
Adds an edge between the specified nodes of the graph.
Arguments:
source (Node): The source node of the edge to add.
target (Node): The target node of the edge to add.
weight (float): The weight of the edge.
save_to_cache (bool): Whether the edge should be saved to the local database. This
argument is necessary (and `False`) when we load edges from
the local cache.
"""
if self._edges.get_edge(source, target) is not None:
return # depends on [control=['if'], data=[]]
self._edges.add_edge(source=source, target=target, weight=weight, save_to_cache=save_to_cache) |
def check_auth(args, role=None):
"""Check the user authentication."""
users = boto3.resource("dynamodb").Table(os.environ['people'])
if not (args.get('email', None) and args.get('api_key', None)):
mesg = "Invalid request: `email` and `api_key` are required"
return {'success': False, 'message': mesg}
user = users.get_item(Key={'email': args.get('email')})
if 'Item' not in user:
return {'success': False, 'message': 'User does not exist.'}
user = user['Item']
if user['api_key'] != args['api_key']:
return {'success': False, 'message': 'API key was invalid.'}
if role:
if user['role'] not in role:
mesg = 'User is not authorized to make this change.'
return {'success': False, 'message': mesg}
return {'success': True, 'message': None, 'user': user} | def function[check_auth, parameter[args, role]]:
constant[Check the user authentication.]
variable[users] assign[=] call[call[name[boto3].resource, parameter[constant[dynamodb]]].Table, parameter[call[name[os].environ][constant[people]]]]
if <ast.UnaryOp object at 0x7da1b287b970> begin[:]
variable[mesg] assign[=] constant[Invalid request: `email` and `api_key` are required]
return[dictionary[[<ast.Constant object at 0x7da1b287ad10>, <ast.Constant object at 0x7da1b287aec0>], [<ast.Constant object at 0x7da1b28798a0>, <ast.Name object at 0x7da1b28797e0>]]]
variable[user] assign[=] call[name[users].get_item, parameter[]]
if compare[constant[Item] <ast.NotIn object at 0x7da2590d7190> name[user]] begin[:]
return[dictionary[[<ast.Constant object at 0x7da1b287b8e0>, <ast.Constant object at 0x7da1b28784c0>], [<ast.Constant object at 0x7da1b287afe0>, <ast.Constant object at 0x7da1b287a170>]]]
variable[user] assign[=] call[name[user]][constant[Item]]
if compare[call[name[user]][constant[api_key]] not_equal[!=] call[name[args]][constant[api_key]]] begin[:]
return[dictionary[[<ast.Constant object at 0x7da1b287bac0>, <ast.Constant object at 0x7da1b287b9a0>], [<ast.Constant object at 0x7da1b287b160>, <ast.Constant object at 0x7da1b2878ca0>]]]
if name[role] begin[:]
if compare[call[name[user]][constant[role]] <ast.NotIn object at 0x7da2590d7190> name[role]] begin[:]
variable[mesg] assign[=] constant[User is not authorized to make this change.]
return[dictionary[[<ast.Constant object at 0x7da1b287a020>, <ast.Constant object at 0x7da1b28786a0>], [<ast.Constant object at 0x7da1b2879c30>, <ast.Name object at 0x7da1b2879e70>]]]
return[dictionary[[<ast.Constant object at 0x7da1b287a560>, <ast.Constant object at 0x7da1b2878e80>, <ast.Constant object at 0x7da1b287a0e0>], [<ast.Constant object at 0x7da1b287a740>, <ast.Constant object at 0x7da1b2878f40>, <ast.Name object at 0x7da1b287b850>]]] | keyword[def] identifier[check_auth] ( identifier[args] , identifier[role] = keyword[None] ):
literal[string]
identifier[users] = identifier[boto3] . identifier[resource] ( literal[string] ). identifier[Table] ( identifier[os] . identifier[environ] [ literal[string] ])
keyword[if] keyword[not] ( identifier[args] . identifier[get] ( literal[string] , keyword[None] ) keyword[and] identifier[args] . identifier[get] ( literal[string] , keyword[None] )):
identifier[mesg] = literal[string]
keyword[return] { literal[string] : keyword[False] , literal[string] : identifier[mesg] }
identifier[user] = identifier[users] . identifier[get_item] ( identifier[Key] ={ literal[string] : identifier[args] . identifier[get] ( literal[string] )})
keyword[if] literal[string] keyword[not] keyword[in] identifier[user] :
keyword[return] { literal[string] : keyword[False] , literal[string] : literal[string] }
identifier[user] = identifier[user] [ literal[string] ]
keyword[if] identifier[user] [ literal[string] ]!= identifier[args] [ literal[string] ]:
keyword[return] { literal[string] : keyword[False] , literal[string] : literal[string] }
keyword[if] identifier[role] :
keyword[if] identifier[user] [ literal[string] ] keyword[not] keyword[in] identifier[role] :
identifier[mesg] = literal[string]
keyword[return] { literal[string] : keyword[False] , literal[string] : identifier[mesg] }
keyword[return] { literal[string] : keyword[True] , literal[string] : keyword[None] , literal[string] : identifier[user] } | def check_auth(args, role=None):
"""Check the user authentication."""
users = boto3.resource('dynamodb').Table(os.environ['people'])
if not (args.get('email', None) and args.get('api_key', None)):
mesg = 'Invalid request: `email` and `api_key` are required'
return {'success': False, 'message': mesg} # depends on [control=['if'], data=[]]
user = users.get_item(Key={'email': args.get('email')})
if 'Item' not in user:
return {'success': False, 'message': 'User does not exist.'} # depends on [control=['if'], data=[]]
user = user['Item']
if user['api_key'] != args['api_key']:
return {'success': False, 'message': 'API key was invalid.'} # depends on [control=['if'], data=[]]
if role:
if user['role'] not in role:
mesg = 'User is not authorized to make this change.'
return {'success': False, 'message': mesg} # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return {'success': True, 'message': None, 'user': user} |
def _parse_columns(self):
''' a helper method for parsing the column properties from the record schema '''
# construct column list
column_map = {}
for key, value in self.model.keyMap.items():
record_key = key[1:]
if record_key:
if self.item_key.findall(record_key):
pass
else:
if value['value_datatype'] == 'map':
continue
datatype = value['value_datatype']
if value['value_datatype'] == 'number':
datatype = 'float'
if 'integer_data' in value.keys():
if value['integer_data']:
datatype = 'integer'
replace_key = ''
if 'field_metadata' in value.keys():
if 'replace_key' in value['field_metadata'].keys():
if isinstance(value['field_metadata']['replace_key'], str):
replace_key = value['field_metadata']['replace_key']
max_length = None
if 'max_length' in value.keys():
max_length = value['max_length']
column_map[record_key] = (record_key, datatype, replace_key, max_length)
return column_map | def function[_parse_columns, parameter[self]]:
constant[ a helper method for parsing the column properties from the record schema ]
variable[column_map] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da20cabf190>, <ast.Name object at 0x7da20cabfb80>]]] in starred[call[name[self].model.keyMap.items, parameter[]]] begin[:]
variable[record_key] assign[=] call[name[key]][<ast.Slice object at 0x7da20cabc7f0>]
if name[record_key] begin[:]
if call[name[self].item_key.findall, parameter[name[record_key]]] begin[:]
pass
return[name[column_map]] | keyword[def] identifier[_parse_columns] ( identifier[self] ):
literal[string]
identifier[column_map] ={}
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[self] . identifier[model] . identifier[keyMap] . identifier[items] ():
identifier[record_key] = identifier[key] [ literal[int] :]
keyword[if] identifier[record_key] :
keyword[if] identifier[self] . identifier[item_key] . identifier[findall] ( identifier[record_key] ):
keyword[pass]
keyword[else] :
keyword[if] identifier[value] [ literal[string] ]== literal[string] :
keyword[continue]
identifier[datatype] = identifier[value] [ literal[string] ]
keyword[if] identifier[value] [ literal[string] ]== literal[string] :
identifier[datatype] = literal[string]
keyword[if] literal[string] keyword[in] identifier[value] . identifier[keys] ():
keyword[if] identifier[value] [ literal[string] ]:
identifier[datatype] = literal[string]
identifier[replace_key] = literal[string]
keyword[if] literal[string] keyword[in] identifier[value] . identifier[keys] ():
keyword[if] literal[string] keyword[in] identifier[value] [ literal[string] ]. identifier[keys] ():
keyword[if] identifier[isinstance] ( identifier[value] [ literal[string] ][ literal[string] ], identifier[str] ):
identifier[replace_key] = identifier[value] [ literal[string] ][ literal[string] ]
identifier[max_length] = keyword[None]
keyword[if] literal[string] keyword[in] identifier[value] . identifier[keys] ():
identifier[max_length] = identifier[value] [ literal[string] ]
identifier[column_map] [ identifier[record_key] ]=( identifier[record_key] , identifier[datatype] , identifier[replace_key] , identifier[max_length] )
keyword[return] identifier[column_map] | def _parse_columns(self):
""" a helper method for parsing the column properties from the record schema """
# construct column list
column_map = {}
for (key, value) in self.model.keyMap.items():
record_key = key[1:]
if record_key:
if self.item_key.findall(record_key):
pass # depends on [control=['if'], data=[]]
else:
if value['value_datatype'] == 'map':
continue # depends on [control=['if'], data=[]]
datatype = value['value_datatype']
if value['value_datatype'] == 'number':
datatype = 'float'
if 'integer_data' in value.keys():
if value['integer_data']:
datatype = 'integer' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
replace_key = ''
if 'field_metadata' in value.keys():
if 'replace_key' in value['field_metadata'].keys():
if isinstance(value['field_metadata']['replace_key'], str):
replace_key = value['field_metadata']['replace_key'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
max_length = None
if 'max_length' in value.keys():
max_length = value['max_length'] # depends on [control=['if'], data=[]]
column_map[record_key] = (record_key, datatype, replace_key, max_length) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return column_map |
def camel_to_snake_case(name):
    """Convert a camelCase (or PascalCase) string to snake_case.

    Runs of uppercase letters are treated as a single acronym word,
    e.g. ``'getHTTPResponse' -> 'get_http_response'``.

    Args:
        name: The camelCased identifier to convert.

    Returns:
        The snake_case form of ``name`` (empty string for empty input).
    """
    # A word is either an acronym (uppercase run not followed by lowercase)
    # or an optionally-capitalised lowercase run.  The previous pattern
    # (r'[A-Z][a-z]+|[A-Z]+(?![a-z])') silently dropped lowercase runs, so
    # 'camelCase' became 'case' and 'snake' became ''.
    pattern = r'[A-Z]+(?![a-z])|[A-Z]?[a-z]+'
    return '_'.join(map(str.lower, re.findall(pattern, name)))
constant[Takes a camelCased string and converts to snake_case.]
variable[pattern] assign[=] constant[[A-Z][a-z]+|[A-Z]+(?![a-z])]
return[call[constant[_].join, parameter[call[name[map], parameter[name[str].lower, call[name[re].findall, parameter[name[pattern], name[name]]]]]]]] | keyword[def] identifier[camel_to_snake_case] ( identifier[name] ):
literal[string]
identifier[pattern] = literal[string]
keyword[return] literal[string] . identifier[join] ( identifier[map] ( identifier[str] . identifier[lower] , identifier[re] . identifier[findall] ( identifier[pattern] , identifier[name] ))) | def camel_to_snake_case(name):
"""Takes a camelCased string and converts to snake_case."""
pattern = '[A-Z][a-z]+|[A-Z]+(?![a-z])'
return '_'.join(map(str.lower, re.findall(pattern, name))) |
def pprint(d, lvlindent=2, initindent=0, delim=':',
           max_width=80, depth=3, no_values=False,
           align_vals=True, print_func=None,
           keycolor=None, compress_lists=None,
           round_floats=None, _dlist=False):
    """ print a nested dict in readable format
    (- denotes an element in a list of dictionaries)
    Parameters
    ----------
    d : object
    lvlindent : int
        additional indentation spaces for each level
    initindent : int
        initial indentation spaces
    delim : str
        delimiter between key and value nodes
    max_width : int
        max character width of each line
    depth : int or None
        maximum levels to display
    no_values : bool
        whether to print values
    align_vals : bool
        whether to align values for each level
    print_func : callable or None
        function to print strings (print if None)
    keycolor : None or str
        if str, color keys by this color,
        allowed: red, green, yellow, blue, magenta, cyan, white
    compress_lists : int
        compress lists/tuples longer than this,
        e.g. [1,1,1,1,1,1] -> [1, 1,..., 1]
    round_floats : int
        significant figures for floats
    _dlist : bool
        internal flag: this dict is an element of a list of dicts,
        so its first key gets a '- ' marker
    Examples
    --------
    >>> d = {'a':{'b':{'c':'Å','de':[4,5,[7,'x'],9]}}}
    >>> pprint(d,depth=None)
    a:
      b:
        c: Å
        de: [4, 5, [7, x], 9]
    >>> pprint(d,max_width=17,depth=None)
    a:
      b:
        c: Å
        de: [4, 5,
            [7, x],
            9]
    >>> pprint(d,no_values=True,depth=None)
    a:
      b:
        c:
        de:
    >>> pprint(d,depth=2)
    a:
      b: {...}
    >>> pprint({'a':[1,1,1,1,1,1,1,1]},
    ... compress_lists=3)
    a: [1, 1, 1, ...(x5)]
    """
    if print_func is None:
        print_func = _default_print_func
    # non-dict input is wrapped so it prints as a single anonymous value
    if not is_dict_like(d):
        d = {'': d}
        # print_func('{}'.format(d))
        # return
    # children of a '- ' list element get extra indent to clear the marker
    extra = lvlindent if _dlist else 0
    def decode_to_str(obj):
        """Render a value as a str, compressing long lists/tuples and
        rounding floats according to the enclosing pprint options."""
        val_string = obj
        if isinstance(obj, list):
            if compress_lists is not None:
                if len(obj) > compress_lists:
                    # replace the tail with a '...(xN)' count marker
                    diff = str(len(obj) - compress_lists)
                    obj = obj[:compress_lists] + ['...(x{})'.format(diff)]
            val_string = '[' + ', '.join([decode_to_str(o) for o in obj]) + ']'
        elif isinstance(obj, tuple):
            if compress_lists is not None:
                if len(obj) > compress_lists:
                    diff = str(len(obj) - compress_lists)
                    obj = list(
                        obj[:compress_lists]) + ['...(x{})'.format(diff)]
            val_string = '(' + ', '.join([decode_to_str(o) for o in obj]) + ')'
        elif isinstance(obj, float) and round_floats is not None:
            # round via scientific notation to keep the requested number
            # of significant figures, then re-parse to drop the exponent
            round_str = '{0:.' + str(round_floats - 1) + 'E}'
            val_string = str(float(round_str.format(obj)))
        else:
            try:
                val_string = encode(obj, outtype='str')
            except (TypeError, UnicodeError):
                pass
        # convert unicode to str (so no u'' prefix in python 2)
        try:
            return str(val_string)
        except Exception:
            return unicode(val_string)
    if align_vals:
        # first pass over this level's leaf keys: find the widest one so
        # all values can start in the same column
        key_width = 0
        for key, val in d.items():
            if not is_dict_like(val):
                key_str = decode_to_str(key)
                key_width = max(key_width, len(key_str))
    # remember the caller's depth; the local `depth` is reset per key below
    max_depth = depth
    for i, key in enumerate(natural_sort(d.keys())):
        value = d[key]
        if _dlist and i == 0:
            # first key of a list-of-dicts element carries the '- ' marker
            key_str = '- ' + decode_to_str(key)
        elif _dlist:
            key_str = '  ' + decode_to_str(key)
        else:
            key_str = decode_to_str(key)
        if keycolor is not None:
            key_str = colortxt(key_str, keycolor)
        if align_vals:
            key_str = '{0: <{1}} '.format(
                key_str + delim, key_width + len(delim))
        else:
            key_str = '{0}{1} '.format(key_str, delim)
        # depth=None means unlimited: use 2 locally so the '<= 1' collapse
        # branches below never fire
        depth = max_depth if max_depth is not None else 2
        if keycolor is not None:
            # measure printable width without the colour escape codes
            key_length = len(_strip_ansi(key_str))
        else:
            key_length = len(key_str)
        key_line = ' ' * initindent + key_str
        new_line = ' ' * initindent + ' ' * key_length
        if depth <= 0:
            continue
        if is_dict_like(value):
            if depth <= 1:
                # depth budget exhausted: collapse the sub-dict
                print_func(' ' * initindent + key_str + '{...}')
            else:
                print_func(' ' * initindent + key_str)
                # recurse one level deeper with reduced depth budget
                pprint(value, lvlindent, initindent + lvlindent + extra, delim,
                       max_width,
                       depth=max_depth - 1 if max_depth is not None else None,
                       no_values=no_values, align_vals=align_vals,
                       print_func=print_func, keycolor=keycolor,
                       compress_lists=compress_lists,
                       round_floats=round_floats)
            continue
        if isinstance(value, list):
            # a non-empty list made entirely of dicts prints as repeated
            # '- ' blocks rather than as an inline list literal
            if all([is_dict_like(o) for o in value]) and value:
                if depth <= 1:
                    print_func(key_line + '[...]')
                    continue
                print_func(key_line)
                for obj in value:
                    pprint(
                        obj, lvlindent, initindent + lvlindent + extra, delim,
                        max_width,
                        depth=max_depth - 1 if max_depth is not None else None,
                        no_values=no_values, align_vals=align_vals,
                        print_func=print_func, keycolor=keycolor,
                        compress_lists=compress_lists,
                        round_floats=round_floats, _dlist=True)
                continue
        # leaf value: render it (or nothing if no_values) and wrap long lines
        val_string_all = decode_to_str(value) if not no_values else ''
        for i, val_string in enumerate(val_string_all.split('\n')):
            if max_width is not None:
                if len(key_line) + 1 > max_width:
                    raise Exception(
                        'cannot fit keys and data within set max_width')
                # divide into chuncks and join by same indentation
                val_indent = ' ' * (initindent + key_length)
                n = max_width - len(val_indent)
                val_string = val_indent.join(
                    [s + '\n' for s in textwrap.wrap(val_string, n)])[:-1]
            if i == 0:
                print_func(key_line + val_string)
            else:
                # continuation lines are indented to the value column
                print_func(new_line + val_string)
constant[ print a nested dict in readable format
(- denotes an element in a list of dictionaries)
Parameters
----------
d : object
lvlindent : int
additional indentation spaces for each level
initindent : int
initial indentation spaces
delim : str
delimiter between key and value nodes
max_width : int
max character width of each line
depth : int or None
maximum levels to display
no_values : bool
whether to print values
align_vals : bool
whether to align values for each level
print_func : callable or None
function to print strings (print if None)
keycolor : None or str
if str, color keys by this color,
allowed: red, green, yellow, blue, magenta, cyan, white
compress_lists : int
compress lists/tuples longer than this,
e.g. [1,1,1,1,1,1] -> [1, 1,..., 1]
round_floats : int
significant figures for floats
Examples
--------
>>> d = {'a':{'b':{'c':'Å','de':[4,5,[7,'x'],9]}}}
>>> pprint(d,depth=None)
a:
b:
c: Å
de: [4, 5, [7, x], 9]
>>> pprint(d,max_width=17,depth=None)
a:
b:
c: Å
de: [4, 5,
[7, x],
9]
>>> pprint(d,no_values=True,depth=None)
a:
b:
c:
de:
>>> pprint(d,depth=2)
a:
b: {...}
>>> pprint({'a':[1,1,1,1,1,1,1,1]},
... compress_lists=3)
a: [1, 1, 1, ...(x5)]
]
if compare[name[print_func] is constant[None]] begin[:]
variable[print_func] assign[=] name[_default_print_func]
if <ast.UnaryOp object at 0x7da20e9b1000> begin[:]
variable[d] assign[=] dictionary[[<ast.Constant object at 0x7da20e9b3c10>], [<ast.Name object at 0x7da20e9b24d0>]]
variable[extra] assign[=] <ast.IfExp object at 0x7da20e9b1a50>
def function[decode_to_str, parameter[obj]]:
variable[val_string] assign[=] name[obj]
if call[name[isinstance], parameter[name[obj], name[list]]] begin[:]
if compare[name[compress_lists] is_not constant[None]] begin[:]
if compare[call[name[len], parameter[name[obj]]] greater[>] name[compress_lists]] begin[:]
variable[diff] assign[=] call[name[str], parameter[binary_operation[call[name[len], parameter[name[obj]]] - name[compress_lists]]]]
variable[obj] assign[=] binary_operation[call[name[obj]][<ast.Slice object at 0x7da20e9b3af0>] + list[[<ast.Call object at 0x7da20e9b3220>]]]
variable[val_string] assign[=] binary_operation[binary_operation[constant[[] + call[constant[, ].join, parameter[<ast.ListComp object at 0x7da20e9b17e0>]]] + constant[]]]
<ast.Try object at 0x7da20e9b0040>
if name[align_vals] begin[:]
variable[key_width] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da18dc99a80>, <ast.Name object at 0x7da18dc98a60>]]] in starred[call[name[d].items, parameter[]]] begin[:]
if <ast.UnaryOp object at 0x7da18dc9b6a0> begin[:]
variable[key_str] assign[=] call[name[decode_to_str], parameter[name[key]]]
variable[key_width] assign[=] call[name[max], parameter[name[key_width], call[name[len], parameter[name[key_str]]]]]
variable[max_depth] assign[=] name[depth]
for taget[tuple[[<ast.Name object at 0x7da18dc99c90>, <ast.Name object at 0x7da18dc9a1a0>]]] in starred[call[name[enumerate], parameter[call[name[natural_sort], parameter[call[name[d].keys, parameter[]]]]]]] begin[:]
variable[value] assign[=] call[name[d]][name[key]]
if <ast.BoolOp object at 0x7da18dc988e0> begin[:]
variable[key_str] assign[=] binary_operation[constant[- ] + call[name[decode_to_str], parameter[name[key]]]]
if compare[name[keycolor] is_not constant[None]] begin[:]
variable[key_str] assign[=] call[name[colortxt], parameter[name[key_str], name[keycolor]]]
if name[align_vals] begin[:]
variable[key_str] assign[=] call[constant[{0: <{1}} ].format, parameter[binary_operation[name[key_str] + name[delim]], binary_operation[name[key_width] + call[name[len], parameter[name[delim]]]]]]
variable[depth] assign[=] <ast.IfExp object at 0x7da18dc9b640>
if compare[name[keycolor] is_not constant[None]] begin[:]
variable[key_length] assign[=] call[name[len], parameter[call[name[_strip_ansi], parameter[name[key_str]]]]]
variable[key_line] assign[=] binary_operation[binary_operation[constant[ ] * name[initindent]] + name[key_str]]
variable[new_line] assign[=] binary_operation[binary_operation[constant[ ] * name[initindent]] + binary_operation[constant[ ] * name[key_length]]]
if compare[name[depth] less_or_equal[<=] constant[0]] begin[:]
continue
if call[name[is_dict_like], parameter[name[value]]] begin[:]
if compare[name[depth] less_or_equal[<=] constant[1]] begin[:]
call[name[print_func], parameter[binary_operation[binary_operation[binary_operation[constant[ ] * name[initindent]] + name[key_str]] + constant[{...}]]]]
continue
if call[name[isinstance], parameter[name[value], name[list]]] begin[:]
if <ast.BoolOp object at 0x7da18dc98400> begin[:]
if compare[name[depth] less_or_equal[<=] constant[1]] begin[:]
call[name[print_func], parameter[binary_operation[name[key_line] + constant[[...]]]]]
continue
call[name[print_func], parameter[name[key_line]]]
for taget[name[obj]] in starred[name[value]] begin[:]
call[name[pprint], parameter[name[obj], name[lvlindent], binary_operation[binary_operation[name[initindent] + name[lvlindent]] + name[extra]], name[delim], name[max_width]]]
continue
variable[val_string_all] assign[=] <ast.IfExp object at 0x7da20c990ee0>
for taget[tuple[[<ast.Name object at 0x7da20c9902b0>, <ast.Name object at 0x7da20c991840>]]] in starred[call[name[enumerate], parameter[call[name[val_string_all].split, parameter[constant[
]]]]]] begin[:]
if compare[name[max_width] is_not constant[None]] begin[:]
if compare[binary_operation[call[name[len], parameter[name[key_line]]] + constant[1]] greater[>] name[max_width]] begin[:]
<ast.Raise object at 0x7da20c992ec0>
variable[val_indent] assign[=] binary_operation[constant[ ] * binary_operation[name[initindent] + name[key_length]]]
variable[n] assign[=] binary_operation[name[max_width] - call[name[len], parameter[name[val_indent]]]]
variable[val_string] assign[=] call[call[name[val_indent].join, parameter[<ast.ListComp object at 0x7da20c992ef0>]]][<ast.Slice object at 0x7da20c993280>]
if compare[name[i] equal[==] constant[0]] begin[:]
call[name[print_func], parameter[binary_operation[name[key_line] + name[val_string]]]] | keyword[def] identifier[pprint] ( identifier[d] , identifier[lvlindent] = literal[int] , identifier[initindent] = literal[int] , identifier[delim] = literal[string] ,
identifier[max_width] = literal[int] , identifier[depth] = literal[int] , identifier[no_values] = keyword[False] ,
identifier[align_vals] = keyword[True] , identifier[print_func] = keyword[None] ,
identifier[keycolor] = keyword[None] , identifier[compress_lists] = keyword[None] ,
identifier[round_floats] = keyword[None] , identifier[_dlist] = keyword[False] ):
literal[string]
keyword[if] identifier[print_func] keyword[is] keyword[None] :
identifier[print_func] = identifier[_default_print_func]
keyword[if] keyword[not] identifier[is_dict_like] ( identifier[d] ):
identifier[d] ={ literal[string] : identifier[d] }
identifier[extra] = identifier[lvlindent] keyword[if] identifier[_dlist] keyword[else] literal[int]
keyword[def] identifier[decode_to_str] ( identifier[obj] ):
identifier[val_string] = identifier[obj]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[list] ):
keyword[if] identifier[compress_lists] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[len] ( identifier[obj] )> identifier[compress_lists] :
identifier[diff] = identifier[str] ( identifier[len] ( identifier[obj] )- identifier[compress_lists] )
identifier[obj] = identifier[obj] [: identifier[compress_lists] ]+[ literal[string] . identifier[format] ( identifier[diff] )]
identifier[val_string] = literal[string] + literal[string] . identifier[join] ([ identifier[decode_to_str] ( identifier[o] ) keyword[for] identifier[o] keyword[in] identifier[obj] ])+ literal[string]
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[tuple] ):
keyword[if] identifier[compress_lists] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[len] ( identifier[obj] )> identifier[compress_lists] :
identifier[diff] = identifier[str] ( identifier[len] ( identifier[obj] )- identifier[compress_lists] )
identifier[obj] = identifier[list] (
identifier[obj] [: identifier[compress_lists] ])+[ literal[string] . identifier[format] ( identifier[diff] )]
identifier[val_string] = literal[string] + literal[string] . identifier[join] ([ identifier[decode_to_str] ( identifier[o] ) keyword[for] identifier[o] keyword[in] identifier[obj] ])+ literal[string]
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[float] ) keyword[and] identifier[round_floats] keyword[is] keyword[not] keyword[None] :
identifier[round_str] = literal[string] + identifier[str] ( identifier[round_floats] - literal[int] )+ literal[string]
identifier[val_string] = identifier[str] ( identifier[float] ( identifier[round_str] . identifier[format] ( identifier[obj] )))
keyword[else] :
keyword[try] :
identifier[val_string] = identifier[encode] ( identifier[obj] , identifier[outtype] = literal[string] )
keyword[except] ( identifier[TypeError] , identifier[UnicodeError] ):
keyword[pass]
keyword[try] :
keyword[return] identifier[str] ( identifier[val_string] )
keyword[except] identifier[Exception] :
keyword[return] identifier[unicode] ( identifier[val_string] )
keyword[if] identifier[align_vals] :
identifier[key_width] = literal[int]
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[d] . identifier[items] ():
keyword[if] keyword[not] identifier[is_dict_like] ( identifier[val] ):
identifier[key_str] = identifier[decode_to_str] ( identifier[key] )
identifier[key_width] = identifier[max] ( identifier[key_width] , identifier[len] ( identifier[key_str] ))
identifier[max_depth] = identifier[depth]
keyword[for] identifier[i] , identifier[key] keyword[in] identifier[enumerate] ( identifier[natural_sort] ( identifier[d] . identifier[keys] ())):
identifier[value] = identifier[d] [ identifier[key] ]
keyword[if] identifier[_dlist] keyword[and] identifier[i] == literal[int] :
identifier[key_str] = literal[string] + identifier[decode_to_str] ( identifier[key] )
keyword[elif] identifier[_dlist] :
identifier[key_str] = literal[string] + identifier[decode_to_str] ( identifier[key] )
keyword[else] :
identifier[key_str] = identifier[decode_to_str] ( identifier[key] )
keyword[if] identifier[keycolor] keyword[is] keyword[not] keyword[None] :
identifier[key_str] = identifier[colortxt] ( identifier[key_str] , identifier[keycolor] )
keyword[if] identifier[align_vals] :
identifier[key_str] = literal[string] . identifier[format] (
identifier[key_str] + identifier[delim] , identifier[key_width] + identifier[len] ( identifier[delim] ))
keyword[else] :
identifier[key_str] = literal[string] . identifier[format] ( identifier[key_str] , identifier[delim] )
identifier[depth] = identifier[max_depth] keyword[if] identifier[max_depth] keyword[is] keyword[not] keyword[None] keyword[else] literal[int]
keyword[if] identifier[keycolor] keyword[is] keyword[not] keyword[None] :
identifier[key_length] = identifier[len] ( identifier[_strip_ansi] ( identifier[key_str] ))
keyword[else] :
identifier[key_length] = identifier[len] ( identifier[key_str] )
identifier[key_line] = literal[string] * identifier[initindent] + identifier[key_str]
identifier[new_line] = literal[string] * identifier[initindent] + literal[string] * identifier[key_length]
keyword[if] identifier[depth] <= literal[int] :
keyword[continue]
keyword[if] identifier[is_dict_like] ( identifier[value] ):
keyword[if] identifier[depth] <= literal[int] :
identifier[print_func] ( literal[string] * identifier[initindent] + identifier[key_str] + literal[string] )
keyword[else] :
identifier[print_func] ( literal[string] * identifier[initindent] + identifier[key_str] )
identifier[pprint] ( identifier[value] , identifier[lvlindent] , identifier[initindent] + identifier[lvlindent] + identifier[extra] , identifier[delim] ,
identifier[max_width] ,
identifier[depth] = identifier[max_depth] - literal[int] keyword[if] identifier[max_depth] keyword[is] keyword[not] keyword[None] keyword[else] keyword[None] ,
identifier[no_values] = identifier[no_values] , identifier[align_vals] = identifier[align_vals] ,
identifier[print_func] = identifier[print_func] , identifier[keycolor] = identifier[keycolor] ,
identifier[compress_lists] = identifier[compress_lists] ,
identifier[round_floats] = identifier[round_floats] )
keyword[continue]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[list] ):
keyword[if] identifier[all] ([ identifier[is_dict_like] ( identifier[o] ) keyword[for] identifier[o] keyword[in] identifier[value] ]) keyword[and] identifier[value] :
keyword[if] identifier[depth] <= literal[int] :
identifier[print_func] ( identifier[key_line] + literal[string] )
keyword[continue]
identifier[print_func] ( identifier[key_line] )
keyword[for] identifier[obj] keyword[in] identifier[value] :
identifier[pprint] (
identifier[obj] , identifier[lvlindent] , identifier[initindent] + identifier[lvlindent] + identifier[extra] , identifier[delim] ,
identifier[max_width] ,
identifier[depth] = identifier[max_depth] - literal[int] keyword[if] identifier[max_depth] keyword[is] keyword[not] keyword[None] keyword[else] keyword[None] ,
identifier[no_values] = identifier[no_values] , identifier[align_vals] = identifier[align_vals] ,
identifier[print_func] = identifier[print_func] , identifier[keycolor] = identifier[keycolor] ,
identifier[compress_lists] = identifier[compress_lists] ,
identifier[round_floats] = identifier[round_floats] , identifier[_dlist] = keyword[True] )
keyword[continue]
identifier[val_string_all] = identifier[decode_to_str] ( identifier[value] ) keyword[if] keyword[not] identifier[no_values] keyword[else] literal[string]
keyword[for] identifier[i] , identifier[val_string] keyword[in] identifier[enumerate] ( identifier[val_string_all] . identifier[split] ( literal[string] )):
keyword[if] identifier[max_width] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[len] ( identifier[key_line] )+ literal[int] > identifier[max_width] :
keyword[raise] identifier[Exception] (
literal[string] )
identifier[val_indent] = literal[string] *( identifier[initindent] + identifier[key_length] )
identifier[n] = identifier[max_width] - identifier[len] ( identifier[val_indent] )
identifier[val_string] = identifier[val_indent] . identifier[join] (
[ identifier[s] + literal[string] keyword[for] identifier[s] keyword[in] identifier[textwrap] . identifier[wrap] ( identifier[val_string] , identifier[n] )])[:- literal[int] ]
keyword[if] identifier[i] == literal[int] :
identifier[print_func] ( identifier[key_line] + identifier[val_string] )
keyword[else] :
identifier[print_func] ( identifier[new_line] + identifier[val_string] ) | def pprint(d, lvlindent=2, initindent=0, delim=':', max_width=80, depth=3, no_values=False, align_vals=True, print_func=None, keycolor=None, compress_lists=None, round_floats=None, _dlist=False):
""" print a nested dict in readable format
(- denotes an element in a list of dictionaries)
Parameters
----------
d : object
lvlindent : int
additional indentation spaces for each level
initindent : int
initial indentation spaces
delim : str
delimiter between key and value nodes
max_width : int
max character width of each line
depth : int or None
maximum levels to display
no_values : bool
whether to print values
align_vals : bool
whether to align values for each level
print_func : callable or None
function to print strings (print if None)
keycolor : None or str
if str, color keys by this color,
allowed: red, green, yellow, blue, magenta, cyan, white
compress_lists : int
compress lists/tuples longer than this,
e.g. [1,1,1,1,1,1] -> [1, 1,..., 1]
round_floats : int
significant figures for floats
Examples
--------
>>> d = {'a':{'b':{'c':'Å','de':[4,5,[7,'x'],9]}}}
>>> pprint(d,depth=None)
a:
b:
c: Å
de: [4, 5, [7, x], 9]
>>> pprint(d,max_width=17,depth=None)
a:
b:
c: Å
de: [4, 5,
[7, x],
9]
>>> pprint(d,no_values=True,depth=None)
a:
b:
c:
de:
>>> pprint(d,depth=2)
a:
b: {...}
>>> pprint({'a':[1,1,1,1,1,1,1,1]},
... compress_lists=3)
a: [1, 1, 1, ...(x5)]
"""
if print_func is None:
print_func = _default_print_func # depends on [control=['if'], data=['print_func']]
if not is_dict_like(d):
d = {'': d} # depends on [control=['if'], data=[]]
# print_func('{}'.format(d))
# return
extra = lvlindent if _dlist else 0
def decode_to_str(obj):
val_string = obj
if isinstance(obj, list):
if compress_lists is not None:
if len(obj) > compress_lists:
diff = str(len(obj) - compress_lists)
obj = obj[:compress_lists] + ['...(x{})'.format(diff)] # depends on [control=['if'], data=['compress_lists']] # depends on [control=['if'], data=['compress_lists']]
val_string = '[' + ', '.join([decode_to_str(o) for o in obj]) + ']' # depends on [control=['if'], data=[]]
elif isinstance(obj, tuple):
if compress_lists is not None:
if len(obj) > compress_lists:
diff = str(len(obj) - compress_lists)
obj = list(obj[:compress_lists]) + ['...(x{})'.format(diff)] # depends on [control=['if'], data=['compress_lists']] # depends on [control=['if'], data=['compress_lists']]
val_string = '(' + ', '.join([decode_to_str(o) for o in obj]) + ')' # depends on [control=['if'], data=[]]
elif isinstance(obj, float) and round_floats is not None:
round_str = '{0:.' + str(round_floats - 1) + 'E}'
val_string = str(float(round_str.format(obj))) # depends on [control=['if'], data=[]]
else:
try:
val_string = encode(obj, outtype='str') # depends on [control=['try'], data=[]]
except (TypeError, UnicodeError):
pass # depends on [control=['except'], data=[]]
# convert unicode to str (so no u'' prefix in python 2)
try:
return str(val_string) # depends on [control=['try'], data=[]]
except Exception:
return unicode(val_string) # depends on [control=['except'], data=[]]
if align_vals:
key_width = 0
for (key, val) in d.items():
if not is_dict_like(val):
key_str = decode_to_str(key)
key_width = max(key_width, len(key_str)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
max_depth = depth
for (i, key) in enumerate(natural_sort(d.keys())):
value = d[key]
if _dlist and i == 0:
key_str = '- ' + decode_to_str(key) # depends on [control=['if'], data=[]]
elif _dlist:
key_str = ' ' + decode_to_str(key) # depends on [control=['if'], data=[]]
else:
key_str = decode_to_str(key)
if keycolor is not None:
key_str = colortxt(key_str, keycolor) # depends on [control=['if'], data=['keycolor']]
if align_vals:
key_str = '{0: <{1}} '.format(key_str + delim, key_width + len(delim)) # depends on [control=['if'], data=[]]
else:
key_str = '{0}{1} '.format(key_str, delim)
depth = max_depth if max_depth is not None else 2
if keycolor is not None:
key_length = len(_strip_ansi(key_str)) # depends on [control=['if'], data=[]]
else:
key_length = len(key_str)
key_line = ' ' * initindent + key_str
new_line = ' ' * initindent + ' ' * key_length
if depth <= 0:
continue # depends on [control=['if'], data=[]]
if is_dict_like(value):
if depth <= 1:
print_func(' ' * initindent + key_str + '{...}') # depends on [control=['if'], data=[]]
else:
print_func(' ' * initindent + key_str)
pprint(value, lvlindent, initindent + lvlindent + extra, delim, max_width, depth=max_depth - 1 if max_depth is not None else None, no_values=no_values, align_vals=align_vals, print_func=print_func, keycolor=keycolor, compress_lists=compress_lists, round_floats=round_floats)
continue # depends on [control=['if'], data=[]]
if isinstance(value, list):
if all([is_dict_like(o) for o in value]) and value:
if depth <= 1:
print_func(key_line + '[...]')
continue # depends on [control=['if'], data=[]]
print_func(key_line)
for obj in value:
pprint(obj, lvlindent, initindent + lvlindent + extra, delim, max_width, depth=max_depth - 1 if max_depth is not None else None, no_values=no_values, align_vals=align_vals, print_func=print_func, keycolor=keycolor, compress_lists=compress_lists, round_floats=round_floats, _dlist=True) # depends on [control=['for'], data=['obj']]
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
val_string_all = decode_to_str(value) if not no_values else ''
for (i, val_string) in enumerate(val_string_all.split('\n')):
if max_width is not None:
if len(key_line) + 1 > max_width:
raise Exception('cannot fit keys and data within set max_width') # depends on [control=['if'], data=[]]
# divide into chuncks and join by same indentation
val_indent = ' ' * (initindent + key_length)
n = max_width - len(val_indent)
val_string = val_indent.join([s + '\n' for s in textwrap.wrap(val_string, n)])[:-1] # depends on [control=['if'], data=['max_width']]
if i == 0:
print_func(key_line + val_string) # depends on [control=['if'], data=[]]
else:
print_func(new_line + val_string) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] |
def DeserializeMessage(self, response_type, data):
    """Deserialize the given data as method_config.response_type.

    Args:
        response_type: The message type to decode the payload into.
        data: The raw response payload to decode.

    Returns:
        The decoded message instance.

    Raises:
        exceptions.InvalidDataFromServerError: if the payload cannot be
            decoded as the requested type.
    """
    try:
        # Delegate the actual conversion to the encoding layer; any decode
        # failure below is normalised into InvalidDataFromServerError.
        return encoding.JsonToMessage(response_type, data)
    except (exceptions.InvalidDataFromServerError,
            messages.ValidationError, ValueError) as err:
        raise exceptions.InvalidDataFromServerError(
            'Error decoding response "%s" as type %s: %s' % (
                data, response_type.__name__, err))
constant[Deserialize the given data as method_config.response_type.]
<ast.Try object at 0x7da1b0846bc0>
return[name[message]] | keyword[def] identifier[DeserializeMessage] ( identifier[self] , identifier[response_type] , identifier[data] ):
literal[string]
keyword[try] :
identifier[message] = identifier[encoding] . identifier[JsonToMessage] ( identifier[response_type] , identifier[data] )
keyword[except] ( identifier[exceptions] . identifier[InvalidDataFromServerError] ,
identifier[messages] . identifier[ValidationError] , identifier[ValueError] ) keyword[as] identifier[e] :
keyword[raise] identifier[exceptions] . identifier[InvalidDataFromServerError] (
literal[string] %(
identifier[data] , identifier[response_type] . identifier[__name__] , identifier[e] ))
keyword[return] identifier[message] | def DeserializeMessage(self, response_type, data):
"""Deserialize the given data as method_config.response_type."""
try:
message = encoding.JsonToMessage(response_type, data) # depends on [control=['try'], data=[]]
except (exceptions.InvalidDataFromServerError, messages.ValidationError, ValueError) as e:
raise exceptions.InvalidDataFromServerError('Error decoding response "%s" as type %s: %s' % (data, response_type.__name__, e)) # depends on [control=['except'], data=['e']]
return message |
def run(
        self,
        *,  # Force keyword args.
        program: Union[circuits.Circuit, Schedule],
        job_config: Optional[JobConfig] = None,
        param_resolver: ParamResolver = ParamResolver({}),
        repetitions: int = 1,
        priority: int = 50,
        processor_ids: Sequence[str] = ('xmonsim',)) -> TrialResult:
    """Runs the supplied Circuit or Schedule via Quantum Engine.

    Thin wrapper around ``run_sweep``: submits a single-element parameter
    sweep and returns its only result.

    Args:
        program: The Circuit or Schedule to execute. If a circuit is
            provided, a moment by moment schedule will be used.
        job_config: Configures the names of programs and jobs.
        param_resolver: Parameters to run with the program.
        repetitions: The number of repetitions to simulate.
        priority: The priority to run at, 0-100.
        processor_ids: The engine processors to run against.

    Returns:
        A single TrialResult for this run.
    """
    trial_results = self.run_sweep(program=program,
                                   job_config=job_config,
                                   params=[param_resolver],
                                   repetitions=repetitions,
                                   priority=priority,
                                   processor_ids=processor_ids)
    # Exactly one resolver was submitted, so take the single result.
    return list(trial_results)[0]
constant[Runs the supplied Circuit or Schedule via Quantum Engine.
Args:
program: The Circuit or Schedule to execute. If a circuit is
provided, a moment by moment schedule will be used.
job_config: Configures the names of programs and jobs.
param_resolver: Parameters to run with the program.
repetitions: The number of repetitions to simulate.
priority: The priority to run at, 0-100.
processor_ids: The engine processors to run against.
Returns:
A single TrialResult for this run.
]
return[call[call[name[list], parameter[call[name[self].run_sweep, parameter[]]]]][constant[0]]] | keyword[def] identifier[run] (
identifier[self] ,
*,
identifier[program] : identifier[Union] [ identifier[circuits] . identifier[Circuit] , identifier[Schedule] ],
identifier[job_config] : identifier[Optional] [ identifier[JobConfig] ]= keyword[None] ,
identifier[param_resolver] : identifier[ParamResolver] = identifier[ParamResolver] ({}),
identifier[repetitions] : identifier[int] = literal[int] ,
identifier[priority] : identifier[int] = literal[int] ,
identifier[processor_ids] : identifier[Sequence] [ identifier[str] ]=( literal[string] ,))-> identifier[TrialResult] :
literal[string]
keyword[return] identifier[list] (
identifier[self] . identifier[run_sweep] ( identifier[program] = identifier[program] ,
identifier[job_config] = identifier[job_config] ,
identifier[params] =[ identifier[param_resolver] ],
identifier[repetitions] = identifier[repetitions] ,
identifier[priority] = identifier[priority] ,
identifier[processor_ids] = identifier[processor_ids] ))[ literal[int] ] | def run(self, *, program: Union[circuits.Circuit, Schedule], job_config: Optional[JobConfig]=None, param_resolver: ParamResolver=ParamResolver({}), repetitions: int=1, priority: int=50, processor_ids: Sequence[str]=('xmonsim',)) -> TrialResult: # Force keyword args.
'Runs the supplied Circuit or Schedule via Quantum Engine.\n\n Args:\n program: The Circuit or Schedule to execute. If a circuit is\n provided, a moment by moment schedule will be used.\n job_config: Configures the names of programs and jobs.\n param_resolver: Parameters to run with the program.\n repetitions: The number of repetitions to simulate.\n priority: The priority to run at, 0-100.\n processor_ids: The engine processors to run against.\n\n Returns:\n A single TrialResult for this run.\n '
return list(self.run_sweep(program=program, job_config=job_config, params=[param_resolver], repetitions=repetitions, priority=priority, processor_ids=processor_ids))[0] |
def age_to_create_time(age, from_time=None):
    """Compute the create time (UTC) for the list filter.

    If the age has no unit suffix, it is treated as seconds since the Unix
    epoch (UTC). Otherwise the value must be of the form "<integer><unit>"
    where supported units are s, m, h, d, w (seconds, minutes, hours, days,
    weeks), subtracted from ``from_time``.

    Args:
      age: A "<integer><unit>" string or integer value.
      from_time: Timezone-aware datetime that relative ages are subtracted
        from; defaults to the current local time.

    Returns:
      A timezone-aware datetime or None if age parameter is empty.

    Raises:
      ValueError: if the age cannot be parsed.
    """
    if not age:
        return None
    if not from_time:
        from_time = dsub_util.replace_timezone(datetime.datetime.now(), tzlocal())
    # NOTE(review): a non-string age (e.g. a plain int) raises TypeError at
    # age[-1], which is not caught below — confirm callers always pass strings.
    try:
        last_char = age[-1]
        if last_char == 's':
            return from_time - datetime.timedelta(seconds=int(age[:-1]))
        elif last_char == 'm':
            return from_time - datetime.timedelta(minutes=int(age[:-1]))
        elif last_char == 'h':
            return from_time - datetime.timedelta(hours=int(age[:-1]))
        elif last_char == 'd':
            return from_time - datetime.timedelta(days=int(age[:-1]))
        elif last_char == 'w':
            return from_time - datetime.timedelta(weeks=int(age[:-1]))
        else:
            # If no unit is given treat the age as seconds from epoch, otherwise apply
            # the correct time unit.
            return dsub_util.replace_timezone(
                datetime.datetime.utcfromtimestamp(int(age)), pytz.utc)
    except (ValueError, OverflowError) as e:
        raise ValueError('Unable to parse age string %s: %s' % (age, e)) | def function[age_to_create_time, parameter[age, from_time]]:
constant[Compute the create time (UTC) for the list filter.
If the age is an integer value it is treated as a UTC date.
Otherwise the value must be of the form "<integer><unit>" where supported
units are s, m, h, d, w (seconds, minutes, hours, days, weeks).
Args:
age: A "<integer><unit>" string or integer value.
from_time:
Returns:
A timezone-aware datetime or None if age parameter is empty.
]
if <ast.UnaryOp object at 0x7da1b013ceb0> begin[:]
return[constant[None]]
if <ast.UnaryOp object at 0x7da1b013c430> begin[:]
variable[from_time] assign[=] call[name[dsub_util].replace_timezone, parameter[call[name[datetime].datetime.now, parameter[]], call[name[tzlocal], parameter[]]]]
<ast.Try object at 0x7da1b013d990> | keyword[def] identifier[age_to_create_time] ( identifier[age] , identifier[from_time] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[age] :
keyword[return] keyword[None]
keyword[if] keyword[not] identifier[from_time] :
identifier[from_time] = identifier[dsub_util] . identifier[replace_timezone] ( identifier[datetime] . identifier[datetime] . identifier[now] (), identifier[tzlocal] ())
keyword[try] :
identifier[last_char] = identifier[age] [- literal[int] ]
keyword[if] identifier[last_char] == literal[string] :
keyword[return] identifier[from_time] - identifier[datetime] . identifier[timedelta] ( identifier[seconds] = identifier[int] ( identifier[age] [:- literal[int] ]))
keyword[elif] identifier[last_char] == literal[string] :
keyword[return] identifier[from_time] - identifier[datetime] . identifier[timedelta] ( identifier[minutes] = identifier[int] ( identifier[age] [:- literal[int] ]))
keyword[elif] identifier[last_char] == literal[string] :
keyword[return] identifier[from_time] - identifier[datetime] . identifier[timedelta] ( identifier[hours] = identifier[int] ( identifier[age] [:- literal[int] ]))
keyword[elif] identifier[last_char] == literal[string] :
keyword[return] identifier[from_time] - identifier[datetime] . identifier[timedelta] ( identifier[days] = identifier[int] ( identifier[age] [:- literal[int] ]))
keyword[elif] identifier[last_char] == literal[string] :
keyword[return] identifier[from_time] - identifier[datetime] . identifier[timedelta] ( identifier[weeks] = identifier[int] ( identifier[age] [:- literal[int] ]))
keyword[else] :
keyword[return] identifier[dsub_util] . identifier[replace_timezone] (
identifier[datetime] . identifier[datetime] . identifier[utcfromtimestamp] ( identifier[int] ( identifier[age] )), identifier[pytz] . identifier[utc] )
keyword[except] ( identifier[ValueError] , identifier[OverflowError] ) keyword[as] identifier[e] :
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[age] , identifier[e] )) | def age_to_create_time(age, from_time=None):
"""Compute the create time (UTC) for the list filter.
If the age is an integer value it is treated as a UTC date.
Otherwise the value must be of the form "<integer><unit>" where supported
units are s, m, h, d, w (seconds, minutes, hours, days, weeks).
Args:
age: A "<integer><unit>" string or integer value.
from_time:
Returns:
A timezone-aware datetime or None if age parameter is empty.
"""
if not age:
return None # depends on [control=['if'], data=[]]
if not from_time:
from_time = dsub_util.replace_timezone(datetime.datetime.now(), tzlocal()) # depends on [control=['if'], data=[]]
try:
last_char = age[-1]
if last_char == 's':
return from_time - datetime.timedelta(seconds=int(age[:-1])) # depends on [control=['if'], data=[]]
elif last_char == 'm':
return from_time - datetime.timedelta(minutes=int(age[:-1])) # depends on [control=['if'], data=[]]
elif last_char == 'h':
return from_time - datetime.timedelta(hours=int(age[:-1])) # depends on [control=['if'], data=[]]
elif last_char == 'd':
return from_time - datetime.timedelta(days=int(age[:-1])) # depends on [control=['if'], data=[]]
elif last_char == 'w':
return from_time - datetime.timedelta(weeks=int(age[:-1])) # depends on [control=['if'], data=[]]
else:
# If no unit is given treat the age as seconds from epoch, otherwise apply
# the correct time unit.
return dsub_util.replace_timezone(datetime.datetime.utcfromtimestamp(int(age)), pytz.utc) # depends on [control=['try'], data=[]]
except (ValueError, OverflowError) as e:
raise ValueError('Unable to parse age string %s: %s' % (age, e)) # depends on [control=['except'], data=['e']] |
def _run_node_distribution_command(self, command, workunit):
    """Runs a NodeDistribution.Command for _execute_command and returns its return code.

    Passes any additional kwargs to command.run (which passes them, modified, to subprocess.Popen).

    Override this in a Task subclass to do something more complicated than just calling
    command.run() and returning the result of wait().

    :param NodeDistribution.Command command: The command to run.
    :param WorkUnit workunit: The WorkUnit the command is running under.
    :returns: returncode
    :rtype: int
    """
    # NOTE(review): the docstring mentions forwarding extra kwargs, but this
    # signature accepts none — confirm against the overridden base method.
    # Wire the subprocess's stdout/stderr into the workunit's output streams
    # so the command's output is captured under this workunit.
    process = command.run(stdout=workunit.output('stdout'),
                          stderr=workunit.output('stderr'))
    # Block until the command exits and surface its exit code to the caller.
    return process.wait() | def function[_run_node_distribution_command, parameter[self, command, workunit]]:
constant[Runs a NodeDistribution.Command for _execute_command and returns its return code.
Passes any additional kwargs to command.run (which passes them, modified, to subprocess.Popen).
Override this in a Task subclass to do something more complicated than just calling
command.run() and returning the result of wait().
:param NodeDistribution.Command command: The command to run.
:param WorkUnit workunit: The WorkUnit the command is running under.
:returns: returncode
:rtype: int
]
variable[process] assign[=] call[name[command].run, parameter[]]
return[call[name[process].wait, parameter[]]] | keyword[def] identifier[_run_node_distribution_command] ( identifier[self] , identifier[command] , identifier[workunit] ):
literal[string]
identifier[process] = identifier[command] . identifier[run] ( identifier[stdout] = identifier[workunit] . identifier[output] ( literal[string] ),
identifier[stderr] = identifier[workunit] . identifier[output] ( literal[string] ))
keyword[return] identifier[process] . identifier[wait] () | def _run_node_distribution_command(self, command, workunit):
"""Runs a NodeDistribution.Command for _execute_command and returns its return code.
Passes any additional kwargs to command.run (which passes them, modified, to subprocess.Popen).
Override this in a Task subclass to do something more complicated than just calling
command.run() and returning the result of wait().
:param NodeDistribution.Command command: The command to run.
:param WorkUnit workunit: The WorkUnit the command is running under.
:returns: returncode
:rtype: int
"""
process = command.run(stdout=workunit.output('stdout'), stderr=workunit.output('stderr'))
return process.wait() |
def get_review_history_statuses(instance, reverse=False):
    """Returns a list with the statuses of the instance from the review_history

    :param instance: object whose workflow review history is inspected
    :param reverse: when True, traverse the history most-recent first
    """
    review_history = getReviewHistory(instance, reverse=reverse)
    # Keep only the workflow state id of each history event.
    # NOTE(review): under Python 3 map() returns an iterator, not the list
    # the docstring promises — this code appears to target Python 2; verify.
    return map(lambda event: event["review_state"], review_history) | def function[get_review_history_statuses, parameter[instance, reverse]]:
constant[Returns a list with the statuses of the instance from the review_history
]
variable[review_history] assign[=] call[name[getReviewHistory], parameter[name[instance]]]
return[call[name[map], parameter[<ast.Lambda object at 0x7da1b1d48880>, name[review_history]]]] | keyword[def] identifier[get_review_history_statuses] ( identifier[instance] , identifier[reverse] = keyword[False] ):
literal[string]
identifier[review_history] = identifier[getReviewHistory] ( identifier[instance] , identifier[reverse] = identifier[reverse] )
keyword[return] identifier[map] ( keyword[lambda] identifier[event] : identifier[event] [ literal[string] ], identifier[review_history] ) | def get_review_history_statuses(instance, reverse=False):
"""Returns a list with the statuses of the instance from the review_history
"""
review_history = getReviewHistory(instance, reverse=reverse)
return map(lambda event: event['review_state'], review_history) |
def google_trends(query_terms=['big data', 'machine learning', 'data science'], data_set='google_trends', refresh_data=False):
    """
    Data downloaded from Google trends for given query terms. Warning,
    if you use this function multiple times in a row you get blocked
    due to terms of service violations.

    The function will cache the result of any query in an attempt to
    avoid this. If you wish to refresh an old query set refresh_data
    to True. The function is inspired by this notebook:
    http://nbviewer.ipython.org/github/sahuguet/notebooks/blob/master/GoogleTrends%20meet%20Notebook.ipynb

    :param query_terms: list of search terms to compare (sorted in place).
    :param data_set: name under which the data set details are registered.
    :param refresh_data: when True, bypass the on-disk cache and re-query.
    """
    # NOTE(review): mutable default argument — this sort() mutates the shared
    # default list across calls; confirm that is acceptable here.
    query_terms.sort()
    import pandas as pd

    # Create directory name for data
    dir_path = os.path.join(data_path,'google_trends')
    if not os.path.isdir(dir_path):
        os.makedirs(dir_path)
    # Cache directory is keyed on the (sorted) query terms so each distinct
    # combination of terms gets its own data.csv.
    dir_name = '-'.join(query_terms)
    dir_name = dir_name.replace(' ', '_')
    dir_path = os.path.join(dir_path,dir_name)
    file = 'data.csv'
    file_name = os.path.join(dir_path,file)
    if not os.path.exists(file_name) or refresh_data:
        print("Accessing Google trends to acquire the data. Note that repeated accesses will result in a block due to a google terms of service violation. Failure at this point may be due to such blocks.")
        # quote the query terms.
        quoted_terms = []
        for term in query_terms:
            quoted_terms.append(quote(term))
        print("Query terms: ", ', '.join(query_terms))
        print("Fetching query:")
        query = 'http://www.google.com/trends/fetchComponent?q=%s&cid=TIMESERIES_GRAPH_0&export=3' % ",".join(quoted_terms)
        data = urlopen(query).read().decode('utf8')
        print("Done.")
        # In the notebook they did some data cleaning: remove Javascript header+footer, and translate new Date(....,..,..) into YYYY-MM-DD.
        header = """// Data table response\ngoogle.visualization.Query.setResponse("""
        data = data[len(header):-2]
        # Google's JS months are 0-based, hence the 1+ when building the date.
        data = re.sub('new Date\((\d+),(\d+),(\d+)\)', (lambda m: '"%s-%02d-%02d"' % (m.group(1).strip(), 1+int(m.group(2)), int(m.group(3)))), data)
        timeseries = json.loads(data)
        # Flatten the visualization-API table into column labels and row values.
        columns = [k['label'] for k in timeseries['table']['cols']]
        rows = list(map(lambda x: [k['v'] for k in x['c']], timeseries['table']['rows']))
        df = pd.DataFrame(rows, columns=columns)
        if not os.path.isdir(dir_path):
            os.makedirs(dir_path)
        df.to_csv(file_name)
    else:
        print("Reading cached data for google trends. To refresh the cache set 'refresh_data=True' when calling this function.")
        print("Query terms: ", ', '.join(query_terms))
        df = pd.read_csv(file_name, parse_dates=[0])
    columns = df.columns
    terms = len(query_terms)
    import datetime
    from matplotlib.dates import date2num
    # Build a long-format design matrix: one (date-number, term-index) input
    # row per (term, date) pair, with the normalized interest as the target.
    # NOTE(review): DataFrame.ix is removed in modern pandas — this requires
    # an old pandas release; confirm the pinned dependency.
    X = np.asarray([(date2num(datetime.datetime.strptime(df.ix[row]['Date'], '%Y-%m-%d')), i) for i in range(terms) for row in df.index])
    Y = np.asarray([[df.ix[row][query_terms[i]]] for i in range(terms) for row in df.index ])
    output_info = columns[1:]
    # Map each query term to its integer category index for the metadata.
    cats = {}
    for i in range(terms):
        cats[query_terms[i]] = i
    return data_details_return({'data frame' : df, 'X': X, 'Y': Y, 'query_terms': query_terms, 'info': "Data downloaded from google trends with query terms: " + ', '.join(query_terms) + '.', 'covariates' : [datenum('date'), discrete(cats, 'query_terms')], 'response' : ['normalized interest']}, data_set) | def function[google_trends, parameter[query_terms, data_set, refresh_data]]:
constant[
Data downloaded from Google trends for given query terms. Warning,
if you use this function multiple times in a row you get blocked
due to terms of service violations.
The function will cache the result of any query in an attempt to
avoid this. If you wish to refresh an old query set refresh_data
to True. The function is inspired by this notebook:
http://nbviewer.ipython.org/github/sahuguet/notebooks/blob/master/GoogleTrends%20meet%20Notebook.ipynb
]
call[name[query_terms].sort, parameter[]]
import module[pandas] as alias[pd]
variable[dir_path] assign[=] call[name[os].path.join, parameter[name[data_path], constant[google_trends]]]
if <ast.UnaryOp object at 0x7da1b0f90610> begin[:]
call[name[os].makedirs, parameter[name[dir_path]]]
variable[dir_name] assign[=] call[constant[-].join, parameter[name[query_terms]]]
variable[dir_name] assign[=] call[name[dir_name].replace, parameter[constant[ ], constant[_]]]
variable[dir_path] assign[=] call[name[os].path.join, parameter[name[dir_path], name[dir_name]]]
variable[file] assign[=] constant[data.csv]
variable[file_name] assign[=] call[name[os].path.join, parameter[name[dir_path], name[file]]]
if <ast.BoolOp object at 0x7da1b0f90dc0> begin[:]
call[name[print], parameter[constant[Accessing Google trends to acquire the data. Note that repeated accesses will result in a block due to a google terms of service violation. Failure at this point may be due to such blocks.]]]
variable[quoted_terms] assign[=] list[[]]
for taget[name[term]] in starred[name[query_terms]] begin[:]
call[name[quoted_terms].append, parameter[call[name[quote], parameter[name[term]]]]]
call[name[print], parameter[constant[Query terms: ], call[constant[, ].join, parameter[name[query_terms]]]]]
call[name[print], parameter[constant[Fetching query:]]]
variable[query] assign[=] binary_operation[constant[http://www.google.com/trends/fetchComponent?q=%s&cid=TIMESERIES_GRAPH_0&export=3] <ast.Mod object at 0x7da2590d6920> call[constant[,].join, parameter[name[quoted_terms]]]]
variable[data] assign[=] call[call[call[name[urlopen], parameter[name[query]]].read, parameter[]].decode, parameter[constant[utf8]]]
call[name[print], parameter[constant[Done.]]]
variable[header] assign[=] constant[// Data table response
google.visualization.Query.setResponse(]
variable[data] assign[=] call[name[data]][<ast.Slice object at 0x7da1b0f939d0>]
variable[data] assign[=] call[name[re].sub, parameter[constant[new Date\((\d+),(\d+),(\d+)\)], <ast.Lambda object at 0x7da1b0f93730>, name[data]]]
variable[timeseries] assign[=] call[name[json].loads, parameter[name[data]]]
variable[columns] assign[=] <ast.ListComp object at 0x7da1b0f92350>
variable[rows] assign[=] call[name[list], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1b0f90970>, call[call[name[timeseries]][constant[table]]][constant[rows]]]]]]
variable[df] assign[=] call[name[pd].DataFrame, parameter[name[rows]]]
if <ast.UnaryOp object at 0x7da1b0f90a90> begin[:]
call[name[os].makedirs, parameter[name[dir_path]]]
call[name[df].to_csv, parameter[name[file_name]]]
variable[columns] assign[=] name[df].columns
variable[terms] assign[=] call[name[len], parameter[name[query_terms]]]
import module[datetime]
from relative_module[matplotlib.dates] import module[date2num]
variable[X] assign[=] call[name[np].asarray, parameter[<ast.ListComp object at 0x7da1b0f93310>]]
variable[Y] assign[=] call[name[np].asarray, parameter[<ast.ListComp object at 0x7da1b0fcd900>]]
variable[output_info] assign[=] call[name[columns]][<ast.Slice object at 0x7da1b0fce290>]
variable[cats] assign[=] dictionary[[], []]
for taget[name[i]] in starred[call[name[range], parameter[name[terms]]]] begin[:]
call[name[cats]][call[name[query_terms]][name[i]]] assign[=] name[i]
return[call[name[data_details_return], parameter[dictionary[[<ast.Constant object at 0x7da1b0fcc250>, <ast.Constant object at 0x7da1b0fce050>, <ast.Constant object at 0x7da1b0fcdf90>, <ast.Constant object at 0x7da1b0fcc220>, <ast.Constant object at 0x7da1b0fcdd80>, <ast.Constant object at 0x7da1b0fcda50>, <ast.Constant object at 0x7da1b0fce410>], [<ast.Name object at 0x7da1b0fcdf00>, <ast.Name object at 0x7da1b0fce170>, <ast.Name object at 0x7da1b0fcdde0>, <ast.Name object at 0x7da1b0fce7a0>, <ast.BinOp object at 0x7da1b0fce020>, <ast.List object at 0x7da1b0fcdab0>, <ast.List object at 0x7da1b0fb3520>]], name[data_set]]]] | keyword[def] identifier[google_trends] ( identifier[query_terms] =[ literal[string] , literal[string] , literal[string] ], identifier[data_set] = literal[string] , identifier[refresh_data] = keyword[False] ):
literal[string]
identifier[query_terms] . identifier[sort] ()
keyword[import] identifier[pandas] keyword[as] identifier[pd]
identifier[dir_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[data_path] , literal[string] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[dir_path] ):
identifier[os] . identifier[makedirs] ( identifier[dir_path] )
identifier[dir_name] = literal[string] . identifier[join] ( identifier[query_terms] )
identifier[dir_name] = identifier[dir_name] . identifier[replace] ( literal[string] , literal[string] )
identifier[dir_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , identifier[dir_name] )
identifier[file] = literal[string]
identifier[file_name] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , identifier[file] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[file_name] ) keyword[or] identifier[refresh_data] :
identifier[print] ( literal[string] )
identifier[quoted_terms] =[]
keyword[for] identifier[term] keyword[in] identifier[query_terms] :
identifier[quoted_terms] . identifier[append] ( identifier[quote] ( identifier[term] ))
identifier[print] ( literal[string] , literal[string] . identifier[join] ( identifier[query_terms] ))
identifier[print] ( literal[string] )
identifier[query] = literal[string] % literal[string] . identifier[join] ( identifier[quoted_terms] )
identifier[data] = identifier[urlopen] ( identifier[query] ). identifier[read] (). identifier[decode] ( literal[string] )
identifier[print] ( literal[string] )
identifier[header] = literal[string]
identifier[data] = identifier[data] [ identifier[len] ( identifier[header] ):- literal[int] ]
identifier[data] = identifier[re] . identifier[sub] ( literal[string] ,( keyword[lambda] identifier[m] : literal[string] %( identifier[m] . identifier[group] ( literal[int] ). identifier[strip] (), literal[int] + identifier[int] ( identifier[m] . identifier[group] ( literal[int] )), identifier[int] ( identifier[m] . identifier[group] ( literal[int] )))), identifier[data] )
identifier[timeseries] = identifier[json] . identifier[loads] ( identifier[data] )
identifier[columns] =[ identifier[k] [ literal[string] ] keyword[for] identifier[k] keyword[in] identifier[timeseries] [ literal[string] ][ literal[string] ]]
identifier[rows] = identifier[list] ( identifier[map] ( keyword[lambda] identifier[x] :[ identifier[k] [ literal[string] ] keyword[for] identifier[k] keyword[in] identifier[x] [ literal[string] ]], identifier[timeseries] [ literal[string] ][ literal[string] ]))
identifier[df] = identifier[pd] . identifier[DataFrame] ( identifier[rows] , identifier[columns] = identifier[columns] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[dir_path] ):
identifier[os] . identifier[makedirs] ( identifier[dir_path] )
identifier[df] . identifier[to_csv] ( identifier[file_name] )
keyword[else] :
identifier[print] ( literal[string] )
identifier[print] ( literal[string] , literal[string] . identifier[join] ( identifier[query_terms] ))
identifier[df] = identifier[pd] . identifier[read_csv] ( identifier[file_name] , identifier[parse_dates] =[ literal[int] ])
identifier[columns] = identifier[df] . identifier[columns]
identifier[terms] = identifier[len] ( identifier[query_terms] )
keyword[import] identifier[datetime]
keyword[from] identifier[matplotlib] . identifier[dates] keyword[import] identifier[date2num]
identifier[X] = identifier[np] . identifier[asarray] ([( identifier[date2num] ( identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[df] . identifier[ix] [ identifier[row] ][ literal[string] ], literal[string] )), identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[terms] ) keyword[for] identifier[row] keyword[in] identifier[df] . identifier[index] ])
identifier[Y] = identifier[np] . identifier[asarray] ([[ identifier[df] . identifier[ix] [ identifier[row] ][ identifier[query_terms] [ identifier[i] ]]] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[terms] ) keyword[for] identifier[row] keyword[in] identifier[df] . identifier[index] ])
identifier[output_info] = identifier[columns] [ literal[int] :]
identifier[cats] ={}
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[terms] ):
identifier[cats] [ identifier[query_terms] [ identifier[i] ]]= identifier[i]
keyword[return] identifier[data_details_return] ({ literal[string] : identifier[df] , literal[string] : identifier[X] , literal[string] : identifier[Y] , literal[string] : identifier[query_terms] , literal[string] : literal[string] + literal[string] . identifier[join] ( identifier[query_terms] )+ literal[string] , literal[string] :[ identifier[datenum] ( literal[string] ), identifier[discrete] ( identifier[cats] , literal[string] )], literal[string] :[ literal[string] ]}, identifier[data_set] ) | def google_trends(query_terms=['big data', 'machine learning', 'data science'], data_set='google_trends', refresh_data=False):
"""
Data downloaded from Google trends for given query terms. Warning,
if you use this function multiple times in a row you get blocked
due to terms of service violations.
The function will cache the result of any query in an attempt to
avoid this. If you wish to refresh an old query set refresh_data
to True. The function is inspired by this notebook:
http://nbviewer.ipython.org/github/sahuguet/notebooks/blob/master/GoogleTrends%20meet%20Notebook.ipynb
"""
query_terms.sort()
import pandas as pd
# Create directory name for data
dir_path = os.path.join(data_path, 'google_trends')
if not os.path.isdir(dir_path):
os.makedirs(dir_path) # depends on [control=['if'], data=[]]
dir_name = '-'.join(query_terms)
dir_name = dir_name.replace(' ', '_')
dir_path = os.path.join(dir_path, dir_name)
file = 'data.csv'
file_name = os.path.join(dir_path, file)
if not os.path.exists(file_name) or refresh_data:
print('Accessing Google trends to acquire the data. Note that repeated accesses will result in a block due to a google terms of service violation. Failure at this point may be due to such blocks.')
# quote the query terms.
quoted_terms = []
for term in query_terms:
quoted_terms.append(quote(term)) # depends on [control=['for'], data=['term']]
print('Query terms: ', ', '.join(query_terms))
print('Fetching query:')
query = 'http://www.google.com/trends/fetchComponent?q=%s&cid=TIMESERIES_GRAPH_0&export=3' % ','.join(quoted_terms)
data = urlopen(query).read().decode('utf8')
print('Done.')
# In the notebook they did some data cleaning: remove Javascript header+footer, and translate new Date(....,..,..) into YYYY-MM-DD.
header = '// Data table response\ngoogle.visualization.Query.setResponse('
data = data[len(header):-2]
data = re.sub('new Date\\((\\d+),(\\d+),(\\d+)\\)', lambda m: '"%s-%02d-%02d"' % (m.group(1).strip(), 1 + int(m.group(2)), int(m.group(3))), data)
timeseries = json.loads(data)
columns = [k['label'] for k in timeseries['table']['cols']]
rows = list(map(lambda x: [k['v'] for k in x['c']], timeseries['table']['rows']))
df = pd.DataFrame(rows, columns=columns)
if not os.path.isdir(dir_path):
os.makedirs(dir_path) # depends on [control=['if'], data=[]]
df.to_csv(file_name) # depends on [control=['if'], data=[]]
else:
print("Reading cached data for google trends. To refresh the cache set 'refresh_data=True' when calling this function.")
print('Query terms: ', ', '.join(query_terms))
df = pd.read_csv(file_name, parse_dates=[0])
columns = df.columns
terms = len(query_terms)
import datetime
from matplotlib.dates import date2num
X = np.asarray([(date2num(datetime.datetime.strptime(df.ix[row]['Date'], '%Y-%m-%d')), i) for i in range(terms) for row in df.index])
Y = np.asarray([[df.ix[row][query_terms[i]]] for i in range(terms) for row in df.index])
output_info = columns[1:]
cats = {}
for i in range(terms):
cats[query_terms[i]] = i # depends on [control=['for'], data=['i']]
return data_details_return({'data frame': df, 'X': X, 'Y': Y, 'query_terms': query_terms, 'info': 'Data downloaded from google trends with query terms: ' + ', '.join(query_terms) + '.', 'covariates': [datenum('date'), discrete(cats, 'query_terms')], 'response': ['normalized interest']}, data_set) |
def can_access(self, user):
    """Return whether or not `user` can access a project.

    The project's is_ready field must be set for a user to access.

    :param user: the user requesting access.
    :returns: truthy when the user is an admin of the project's class, or
        when the project is ready and the user is enrolled in that class.
    """
    # Class admins bypass the readiness check entirely; note `or` binds
    # looser than `and`, so this reads: is_admin OR (is_ready AND enrolled).
    return self.class_.is_admin(user) or \
        self.is_ready and self.class_ in user.classes | def function[can_access, parameter[self, user]]:
constant[Return whether or not `user` can access a project.
The project's is_ready field must be set for a user to access.
]
return[<ast.BoolOp object at 0x7da1b0a4c610>] | keyword[def] identifier[can_access] ( identifier[self] , identifier[user] ):
literal[string]
keyword[return] identifier[self] . identifier[class_] . identifier[is_admin] ( identifier[user] ) keyword[or] identifier[self] . identifier[is_ready] keyword[and] identifier[self] . identifier[class_] keyword[in] identifier[user] . identifier[classes] | def can_access(self, user):
"""Return whether or not `user` can access a project.
The project's is_ready field must be set for a user to access.
"""
return self.class_.is_admin(user) or (self.is_ready and self.class_ in user.classes) |
def from_dict(cls, data):
    """Transforms a Python dictionary to an Output object.

    Note:
        To pass a serialization cycle multiple times, a
        Cryptoconditions Fulfillment needs to be present in the
        passed-in dictionary, as Condition URIs are not serializable
        anymore.

    Args:
        data (dict): The dict to be transformed.

    Returns:
        :class:`~bigchaindb.common.transaction.Output`

    Raises:
        AmountError: If ``data['amount']`` cannot be parsed as an integer.
    """
    try:
        fulfillment = _fulfillment_from_details(data['condition']['details'])
    except KeyError:
        # NOTE: Hashlock condition case
        fulfillment = data['condition']['uri']
    try:
        amount = int(data['amount'])
    except ValueError:
        # NOTE(review): a non-numeric type (e.g. None) raises TypeError here,
        # which is not converted to AmountError — confirm upstream validation
        # guarantees a str/int amount.
        raise AmountError('Invalid amount: %s' % data['amount'])
    return cls(fulfillment, data['public_keys'], amount) | def function[from_dict, parameter[cls, data]]:
constant[Transforms a Python dictionary to an Output object.
Note:
To pass a serialization cycle multiple times, a
Cryptoconditions Fulfillment needs to be present in the
passed-in dictionary, as Condition URIs are not serializable
anymore.
Args:
data (dict): The dict to be transformed.
Returns:
:class:`~bigchaindb.common.transaction.Output`
]
<ast.Try object at 0x7da1b1bfa6e0>
<ast.Try object at 0x7da1b1bee650>
return[call[name[cls], parameter[name[fulfillment], call[name[data]][constant[public_keys]], name[amount]]]] | keyword[def] identifier[from_dict] ( identifier[cls] , identifier[data] ):
literal[string]
keyword[try] :
identifier[fulfillment] = identifier[_fulfillment_from_details] ( identifier[data] [ literal[string] ][ literal[string] ])
keyword[except] identifier[KeyError] :
identifier[fulfillment] = identifier[data] [ literal[string] ][ literal[string] ]
keyword[try] :
identifier[amount] = identifier[int] ( identifier[data] [ literal[string] ])
keyword[except] identifier[ValueError] :
keyword[raise] identifier[AmountError] ( literal[string] % identifier[data] [ literal[string] ])
keyword[return] identifier[cls] ( identifier[fulfillment] , identifier[data] [ literal[string] ], identifier[amount] ) | def from_dict(cls, data):
"""Transforms a Python dictionary to an Output object.
Note:
To pass a serialization cycle multiple times, a
Cryptoconditions Fulfillment needs to be present in the
passed-in dictionary, as Condition URIs are not serializable
anymore.
Args:
data (dict): The dict to be transformed.
Returns:
:class:`~bigchaindb.common.transaction.Output`
"""
try:
fulfillment = _fulfillment_from_details(data['condition']['details']) # depends on [control=['try'], data=[]]
except KeyError:
# NOTE: Hashlock condition case
fulfillment = data['condition']['uri'] # depends on [control=['except'], data=[]]
try:
amount = int(data['amount']) # depends on [control=['try'], data=[]]
except ValueError:
raise AmountError('Invalid amount: %s' % data['amount']) # depends on [control=['except'], data=[]]
return cls(fulfillment, data['public_keys'], amount) |
def delete_password(self, service, username):
    """Delete the stored password (only the first one)

    :param service: service name the secret was stored under.
    :param username: account name the secret was stored under.
    :raises PasswordDeleteError: if no matching secret exists.
    """
    collection = self.get_preferred_collection()
    items = collection.search_items(
        {"username": username, "service": service})
    # Delete at most one secret: return on the first match found.
    for item in items:
        return item.delete()
    # The loop body never ran, so no matching secret exists.
    raise PasswordDeleteError("No such password!") | def function[delete_password, parameter[self, service, username]]:
constant[Delete the stored password (only the first one)
]
variable[collection] assign[=] call[name[self].get_preferred_collection, parameter[]]
variable[items] assign[=] call[name[collection].search_items, parameter[dictionary[[<ast.Constant object at 0x7da1b016d5d0>, <ast.Constant object at 0x7da1b016d4e0>], [<ast.Name object at 0x7da1b016ee60>, <ast.Name object at 0x7da1b016cd30>]]]]
for taget[name[item]] in starred[name[items]] begin[:]
return[call[name[item].delete, parameter[]]]
<ast.Raise object at 0x7da1b016ddb0> | keyword[def] identifier[delete_password] ( identifier[self] , identifier[service] , identifier[username] ):
literal[string]
identifier[collection] = identifier[self] . identifier[get_preferred_collection] ()
identifier[items] = identifier[collection] . identifier[search_items] (
{ literal[string] : identifier[username] , literal[string] : identifier[service] })
keyword[for] identifier[item] keyword[in] identifier[items] :
keyword[return] identifier[item] . identifier[delete] ()
keyword[raise] identifier[PasswordDeleteError] ( literal[string] ) | def delete_password(self, service, username):
"""Delete the stored password (only the first one)
"""
collection = self.get_preferred_collection()
items = collection.search_items({'username': username, 'service': service})
for item in items:
return item.delete() # depends on [control=['for'], data=['item']]
raise PasswordDeleteError('No such password!') |
def _is_name_used_as_variadic(name, variadics):
    """Check if the given name is used as a variadic argument."""
    # True when some variadic's value is the name node itself, or is an
    # ancestor of it (i.e. the name appears inside the variadic expression).
    return any(
        variadic.value == name or variadic.value.parent_of(name)
        for variadic in variadics
    ) | def function[_is_name_used_as_variadic, parameter[name, variadics]]:
constant[Check if the given name is used as a variadic argument.]
return[call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b020cfa0>]]] | keyword[def] identifier[_is_name_used_as_variadic] ( identifier[name] , identifier[variadics] ):
literal[string]
keyword[return] identifier[any] (
identifier[variadic] . identifier[value] == identifier[name] keyword[or] identifier[variadic] . identifier[value] . identifier[parent_of] ( identifier[name] )
keyword[for] identifier[variadic] keyword[in] identifier[variadics]
) | def _is_name_used_as_variadic(name, variadics):
"""Check if the given name is used as a variadic argument."""
return any((variadic.value == name or variadic.value.parent_of(name) for variadic in variadics)) |
def start(self):
    """
    Start the session and return it.

    Starting the session authenticates against the backend, which is what
    actually retrieves the API key of the current user. Starting while a
    ``with``-statement session is active is rejected.
    """
    if NURESTSession.session_stack:
        message = "Starting a session inside a with statement is not supported."
        bambou_logger.critical(message)
        raise Exception(message)
    # Make this session globally visible, then fetch credentials.
    NURESTSession.current_session = self
    self._authenticate()
    return self
constant[
Starts the session.
Starting the session will actually get the API key of the current user
]
if name[NURESTSession].session_stack begin[:]
call[name[bambou_logger].critical, parameter[constant[Starting a session inside a with statement is not supported.]]]
<ast.Raise object at 0x7da1b0f47580>
name[NURESTSession].current_session assign[=] name[self]
call[name[self]._authenticate, parameter[]]
return[name[self]] | keyword[def] identifier[start] ( identifier[self] ):
literal[string]
keyword[if] identifier[NURESTSession] . identifier[session_stack] :
identifier[bambou_logger] . identifier[critical] ( literal[string] )
keyword[raise] identifier[Exception] ( literal[string] )
identifier[NURESTSession] . identifier[current_session] = identifier[self]
identifier[self] . identifier[_authenticate] ()
keyword[return] identifier[self] | def start(self):
"""
Starts the session.
Starting the session will actually get the API key of the current user
"""
if NURESTSession.session_stack:
bambou_logger.critical('Starting a session inside a with statement is not supported.')
raise Exception('Starting a session inside a with statement is not supported.') # depends on [control=['if'], data=[]]
NURESTSession.current_session = self
self._authenticate()
return self |
def move(self):
    """Advance to the next token and return the token just consumed."""
    consumed = self.current
    raw = self._next_from_generator()
    if raw is None:
        # Generator exhausted: no current token any more.
        self.current = None
    else:
        self.current = Token(*raw)
    # Keep the last known line number when the stream has ended.
    self.line = self.current.start[0] if self.current else self.line
    self.got_logical_newline = consumed.kind in self.LOGICAL_NEWLINES
    return consumed
constant[Move.]
variable[previous] assign[=] name[self].current
variable[current] assign[=] call[name[self]._next_from_generator, parameter[]]
name[self].current assign[=] <ast.IfExp object at 0x7da18dc042e0>
name[self].line assign[=] <ast.IfExp object at 0x7da18dc06110>
name[self].got_logical_newline assign[=] compare[name[previous].kind in name[self].LOGICAL_NEWLINES]
return[name[previous]] | keyword[def] identifier[move] ( identifier[self] ):
literal[string]
identifier[previous] = identifier[self] . identifier[current]
identifier[current] = identifier[self] . identifier[_next_from_generator] ()
identifier[self] . identifier[current] = keyword[None] keyword[if] identifier[current] keyword[is] keyword[None] keyword[else] identifier[Token] (* identifier[current] )
identifier[self] . identifier[line] = identifier[self] . identifier[current] . identifier[start] [ literal[int] ] keyword[if] identifier[self] . identifier[current] keyword[else] identifier[self] . identifier[line]
identifier[self] . identifier[got_logical_newline] = identifier[previous] . identifier[kind] keyword[in] identifier[self] . identifier[LOGICAL_NEWLINES]
keyword[return] identifier[previous] | def move(self):
"""Move."""
previous = self.current
current = self._next_from_generator()
self.current = None if current is None else Token(*current)
self.line = self.current.start[0] if self.current else self.line
self.got_logical_newline = previous.kind in self.LOGICAL_NEWLINES
return previous |
def expr(args):
    """
    %prog expr block exp layout napus.bed
    Plot a composite figure showing synteny and the expression level between
    homeologs in two tissues - total 4 lists of values. block file contains the
    gene pairs between AN and CN.
    """
    from jcvi.graphics.base import red_purple as default_cm
    p = OptionParser(expr.__doc__)
    opts, args, iopts = p.set_image_options(args, figsize="8x5")
    if len(args) != 4:
        sys.exit(not p.print_help())
    block, exp, layout, napusbed = args
    # Full-figure axes act as the canvas for labels and connector lines.
    fig = plt.figure(1, (iopts.w, iopts.h))
    root = fig.add_axes([0, 0, 1, 1])
    s = Synteny(fig, root, block, napusbed, layout)
    # Import the expression values
    # Columns are: leaf-A, leaf-C, root-A, root-C
    # NOTE(review): each row is parsed as (gene id, leaf, root) -- the column
    # description above looks stale; confirm against the exp file format.
    fp = open(exp)  # NOTE(review): fp is never closed; a with-block would be safer.
    data = {}
    for row in fp:
        gid, lf, rt = row.split()
        lf, rt = float(lf), float(rt)
        data[gid] = (lf, rt)
    rA, rB = s.rr
    gA = [x.accn for x in rA.genes]
    gC = [x.accn for x in rB.genes]
    # Genes with no expression record default to (0, 0).
    A = [data.get(x, (0, 0)) for x in gA]
    C = [data.get(x, (0, 0)) for x in gC]
    A = np.array(A)
    C = np.array(C)
    # Transpose so rows are the two measurements and columns are genes,
    # matching the pcolormesh orientation below.
    A = np.transpose(A)
    C = np.transpose(C)
    d, h = .01, .1  # padding and heatmap height, in figure coordinates
    lsg = "lightslategrey"
    coords = s.gg  # Coordinates of the genes
    axes = []
    # One heatmap panel per genome track: (panel y-position, gene list).
    for j, (y, gg) in enumerate(((.79, gA), (.24, gC))):
        r = s.rr[j]
        x = r.xstart
        w = r.xend - r.xstart
        ax = fig.add_axes([x, y, w, h])
        axes.append(ax)
        # Frame around the heatmap plus its row labels.
        root.add_patch(Rectangle((x - h, y - d), w + h + d, h + 2 * d, fill=False,
                                 ec=lsg, lw=1))
        root.text(x - d, y + 3 * h / 4, "root", ha="right", va="center")
        root.text(x - d, y + h / 4, "leaf", ha="right", va="center")
        # Connector target y: above the panel for the top track, below for the bottom.
        ty = y - 2 * d if y > .5 else y + h + 2 * d
        nrows = len(gg)
        for i, g in enumerate(gg):
            start, end = coords[(j, g)]
            sx, sy = start
            ex, ey = end
            # Both endpoints of a gene glyph are expected on the same baseline.
            assert sy == ey
            sy = sy + 2 * d if sy > .5 else sy - 2 * d
            # Dotted leader from the gene glyph to its heatmap column.
            root.plot(((sx + ex) / 2, x + w * (i + .5) / nrows), (sy, ty),
                      lw=1, ls=":", color="k", alpha=.2)
    axA, axC = axes
    p = axA.pcolormesh(A, cmap=default_cm)
    p = axC.pcolormesh(C, cmap=default_cm)
    axA.set_xlim(0, len(gA))
    axC.set_xlim(0, len(gC))
    # Horizontal colorbar shared by both heatmaps (reuses x/y/w/h names).
    x, y, w, h = .35, .1, .3, .05
    ax_colorbar = fig.add_axes([x, y, w, h])
    fig.colorbar(p, cax=ax_colorbar, orientation='horizontal')
    root.text(x - d, y + h / 2, "RPKM", ha="right", va="center")
    root.set_xlim(0, 1)
    root.set_ylim(0, 1)
    for x in (axA, axC, root):
        x.set_axis_off()
    image_name = "napusf4b." + iopts.format
    savefig(image_name, dpi=iopts.dpi, iopts=iopts)
constant[
%prog expr block exp layout napus.bed
Plot a composite figure showing synteny and the expression level between
homeologs in two tissues - total 4 lists of values. block file contains the
gene pairs between AN and CN.
]
from relative_module[jcvi.graphics.base] import module[red_purple]
variable[p] assign[=] call[name[OptionParser], parameter[name[expr].__doc__]]
<ast.Tuple object at 0x7da1b0842aa0> assign[=] call[name[p].set_image_options, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[4]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da1b0842cb0>]]
<ast.Tuple object at 0x7da1b0842e30> assign[=] name[args]
variable[fig] assign[=] call[name[plt].figure, parameter[constant[1], tuple[[<ast.Attribute object at 0x7da1b0843fa0>, <ast.Attribute object at 0x7da1b0843a00>]]]]
variable[root] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Constant object at 0x7da1b0843a60>, <ast.Constant object at 0x7da1b0843a30>, <ast.Constant object at 0x7da1b0843c70>, <ast.Constant object at 0x7da1b0843c40>]]]]
variable[s] assign[=] call[name[Synteny], parameter[name[fig], name[root], name[block], name[napusbed], name[layout]]]
variable[fp] assign[=] call[name[open], parameter[name[exp]]]
variable[data] assign[=] dictionary[[], []]
for taget[name[row]] in starred[name[fp]] begin[:]
<ast.Tuple object at 0x7da1b08433a0> assign[=] call[name[row].split, parameter[]]
<ast.Tuple object at 0x7da1b0843400> assign[=] tuple[[<ast.Call object at 0x7da1b0843580>, <ast.Call object at 0x7da1b0843670>]]
call[name[data]][name[gid]] assign[=] tuple[[<ast.Name object at 0x7da1b0843130>, <ast.Name object at 0x7da1b0843100>]]
<ast.Tuple object at 0x7da1b08432b0> assign[=] name[s].rr
variable[gA] assign[=] <ast.ListComp object at 0x7da1b0843070>
variable[gC] assign[=] <ast.ListComp object at 0x7da1b0842fe0>
variable[A] assign[=] <ast.ListComp object at 0x7da1b0842650>
variable[C] assign[=] <ast.ListComp object at 0x7da20c76df00>
variable[A] assign[=] call[name[np].array, parameter[name[A]]]
variable[C] assign[=] call[name[np].array, parameter[name[C]]]
variable[A] assign[=] call[name[np].transpose, parameter[name[A]]]
variable[C] assign[=] call[name[np].transpose, parameter[name[C]]]
<ast.Tuple object at 0x7da20c76eaa0> assign[=] tuple[[<ast.Constant object at 0x7da20c76d720>, <ast.Constant object at 0x7da20c76c2b0>]]
variable[lsg] assign[=] constant[lightslategrey]
variable[coords] assign[=] name[s].gg
variable[axes] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c76d420>, <ast.Tuple object at 0x7da20c76c910>]]] in starred[call[name[enumerate], parameter[tuple[[<ast.Tuple object at 0x7da20c76fe20>, <ast.Tuple object at 0x7da20c76e620>]]]]] begin[:]
variable[r] assign[=] call[name[s].rr][name[j]]
variable[x] assign[=] name[r].xstart
variable[w] assign[=] binary_operation[name[r].xend - name[r].xstart]
variable[ax] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Name object at 0x7da20c76c8b0>, <ast.Name object at 0x7da20c76d7b0>, <ast.Name object at 0x7da20c76e110>, <ast.Name object at 0x7da20c76e3e0>]]]]
call[name[axes].append, parameter[name[ax]]]
call[name[root].add_patch, parameter[call[name[Rectangle], parameter[tuple[[<ast.BinOp object at 0x7da20c76e440>, <ast.BinOp object at 0x7da20c76d2a0>]], binary_operation[binary_operation[name[w] + name[h]] + name[d]], binary_operation[name[h] + binary_operation[constant[2] * name[d]]]]]]]
call[name[root].text, parameter[binary_operation[name[x] - name[d]], binary_operation[name[y] + binary_operation[binary_operation[constant[3] * name[h]] / constant[4]]], constant[root]]]
call[name[root].text, parameter[binary_operation[name[x] - name[d]], binary_operation[name[y] + binary_operation[name[h] / constant[4]]], constant[leaf]]]
variable[ty] assign[=] <ast.IfExp object at 0x7da18ede6710>
variable[nrows] assign[=] call[name[len], parameter[name[gg]]]
for taget[tuple[[<ast.Name object at 0x7da18ede7b20>, <ast.Name object at 0x7da18ede6f80>]]] in starred[call[name[enumerate], parameter[name[gg]]]] begin[:]
<ast.Tuple object at 0x7da18ede4340> assign[=] call[name[coords]][tuple[[<ast.Name object at 0x7da18ede6350>, <ast.Name object at 0x7da18ede4280>]]]
<ast.Tuple object at 0x7da18ede5060> assign[=] name[start]
<ast.Tuple object at 0x7da18ede61a0> assign[=] name[end]
assert[compare[name[sy] equal[==] name[ey]]]
variable[sy] assign[=] <ast.IfExp object at 0x7da18ede6140>
call[name[root].plot, parameter[tuple[[<ast.BinOp object at 0x7da18ede6e00>, <ast.BinOp object at 0x7da18ede6dd0>]], tuple[[<ast.Name object at 0x7da18f00c5e0>, <ast.Name object at 0x7da18f00f490>]]]]
<ast.Tuple object at 0x7da18f00f6d0> assign[=] name[axes]
variable[p] assign[=] call[name[axA].pcolormesh, parameter[name[A]]]
variable[p] assign[=] call[name[axC].pcolormesh, parameter[name[C]]]
call[name[axA].set_xlim, parameter[constant[0], call[name[len], parameter[name[gA]]]]]
call[name[axC].set_xlim, parameter[constant[0], call[name[len], parameter[name[gC]]]]]
<ast.Tuple object at 0x7da18f00df60> assign[=] tuple[[<ast.Constant object at 0x7da18f00c9a0>, <ast.Constant object at 0x7da18f00d390>, <ast.Constant object at 0x7da18f00cd60>, <ast.Constant object at 0x7da18f00d9c0>]]
variable[ax_colorbar] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Name object at 0x7da18f00fc70>, <ast.Name object at 0x7da18f00eb60>, <ast.Name object at 0x7da18f00ccd0>, <ast.Name object at 0x7da18f00f370>]]]]
call[name[fig].colorbar, parameter[name[p]]]
call[name[root].text, parameter[binary_operation[name[x] - name[d]], binary_operation[name[y] + binary_operation[name[h] / constant[2]]], constant[RPKM]]]
call[name[root].set_xlim, parameter[constant[0], constant[1]]]
call[name[root].set_ylim, parameter[constant[0], constant[1]]]
for taget[name[x]] in starred[tuple[[<ast.Name object at 0x7da18f00cf40>, <ast.Name object at 0x7da18f00c9d0>, <ast.Name object at 0x7da18f00c0d0>]]] begin[:]
call[name[x].set_axis_off, parameter[]]
variable[image_name] assign[=] binary_operation[constant[napusf4b.] + name[iopts].format]
call[name[savefig], parameter[name[image_name]]] | keyword[def] identifier[expr] ( identifier[args] ):
literal[string]
keyword[from] identifier[jcvi] . identifier[graphics] . identifier[base] keyword[import] identifier[red_purple] keyword[as] identifier[default_cm]
identifier[p] = identifier[OptionParser] ( identifier[expr] . identifier[__doc__] )
identifier[opts] , identifier[args] , identifier[iopts] = identifier[p] . identifier[set_image_options] ( identifier[args] , identifier[figsize] = literal[string] )
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[block] , identifier[exp] , identifier[layout] , identifier[napusbed] = identifier[args]
identifier[fig] = identifier[plt] . identifier[figure] ( literal[int] ,( identifier[iopts] . identifier[w] , identifier[iopts] . identifier[h] ))
identifier[root] = identifier[fig] . identifier[add_axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ])
identifier[s] = identifier[Synteny] ( identifier[fig] , identifier[root] , identifier[block] , identifier[napusbed] , identifier[layout] )
identifier[fp] = identifier[open] ( identifier[exp] )
identifier[data] ={}
keyword[for] identifier[row] keyword[in] identifier[fp] :
identifier[gid] , identifier[lf] , identifier[rt] = identifier[row] . identifier[split] ()
identifier[lf] , identifier[rt] = identifier[float] ( identifier[lf] ), identifier[float] ( identifier[rt] )
identifier[data] [ identifier[gid] ]=( identifier[lf] , identifier[rt] )
identifier[rA] , identifier[rB] = identifier[s] . identifier[rr]
identifier[gA] =[ identifier[x] . identifier[accn] keyword[for] identifier[x] keyword[in] identifier[rA] . identifier[genes] ]
identifier[gC] =[ identifier[x] . identifier[accn] keyword[for] identifier[x] keyword[in] identifier[rB] . identifier[genes] ]
identifier[A] =[ identifier[data] . identifier[get] ( identifier[x] ,( literal[int] , literal[int] )) keyword[for] identifier[x] keyword[in] identifier[gA] ]
identifier[C] =[ identifier[data] . identifier[get] ( identifier[x] ,( literal[int] , literal[int] )) keyword[for] identifier[x] keyword[in] identifier[gC] ]
identifier[A] = identifier[np] . identifier[array] ( identifier[A] )
identifier[C] = identifier[np] . identifier[array] ( identifier[C] )
identifier[A] = identifier[np] . identifier[transpose] ( identifier[A] )
identifier[C] = identifier[np] . identifier[transpose] ( identifier[C] )
identifier[d] , identifier[h] = literal[int] , literal[int]
identifier[lsg] = literal[string]
identifier[coords] = identifier[s] . identifier[gg]
identifier[axes] =[]
keyword[for] identifier[j] ,( identifier[y] , identifier[gg] ) keyword[in] identifier[enumerate] ((( literal[int] , identifier[gA] ),( literal[int] , identifier[gC] ))):
identifier[r] = identifier[s] . identifier[rr] [ identifier[j] ]
identifier[x] = identifier[r] . identifier[xstart]
identifier[w] = identifier[r] . identifier[xend] - identifier[r] . identifier[xstart]
identifier[ax] = identifier[fig] . identifier[add_axes] ([ identifier[x] , identifier[y] , identifier[w] , identifier[h] ])
identifier[axes] . identifier[append] ( identifier[ax] )
identifier[root] . identifier[add_patch] ( identifier[Rectangle] (( identifier[x] - identifier[h] , identifier[y] - identifier[d] ), identifier[w] + identifier[h] + identifier[d] , identifier[h] + literal[int] * identifier[d] , identifier[fill] = keyword[False] ,
identifier[ec] = identifier[lsg] , identifier[lw] = literal[int] ))
identifier[root] . identifier[text] ( identifier[x] - identifier[d] , identifier[y] + literal[int] * identifier[h] / literal[int] , literal[string] , identifier[ha] = literal[string] , identifier[va] = literal[string] )
identifier[root] . identifier[text] ( identifier[x] - identifier[d] , identifier[y] + identifier[h] / literal[int] , literal[string] , identifier[ha] = literal[string] , identifier[va] = literal[string] )
identifier[ty] = identifier[y] - literal[int] * identifier[d] keyword[if] identifier[y] > literal[int] keyword[else] identifier[y] + identifier[h] + literal[int] * identifier[d]
identifier[nrows] = identifier[len] ( identifier[gg] )
keyword[for] identifier[i] , identifier[g] keyword[in] identifier[enumerate] ( identifier[gg] ):
identifier[start] , identifier[end] = identifier[coords] [( identifier[j] , identifier[g] )]
identifier[sx] , identifier[sy] = identifier[start]
identifier[ex] , identifier[ey] = identifier[end]
keyword[assert] identifier[sy] == identifier[ey]
identifier[sy] = identifier[sy] + literal[int] * identifier[d] keyword[if] identifier[sy] > literal[int] keyword[else] identifier[sy] - literal[int] * identifier[d]
identifier[root] . identifier[plot] ((( identifier[sx] + identifier[ex] )/ literal[int] , identifier[x] + identifier[w] *( identifier[i] + literal[int] )/ identifier[nrows] ),( identifier[sy] , identifier[ty] ),
identifier[lw] = literal[int] , identifier[ls] = literal[string] , identifier[color] = literal[string] , identifier[alpha] = literal[int] )
identifier[axA] , identifier[axC] = identifier[axes]
identifier[p] = identifier[axA] . identifier[pcolormesh] ( identifier[A] , identifier[cmap] = identifier[default_cm] )
identifier[p] = identifier[axC] . identifier[pcolormesh] ( identifier[C] , identifier[cmap] = identifier[default_cm] )
identifier[axA] . identifier[set_xlim] ( literal[int] , identifier[len] ( identifier[gA] ))
identifier[axC] . identifier[set_xlim] ( literal[int] , identifier[len] ( identifier[gC] ))
identifier[x] , identifier[y] , identifier[w] , identifier[h] = literal[int] , literal[int] , literal[int] , literal[int]
identifier[ax_colorbar] = identifier[fig] . identifier[add_axes] ([ identifier[x] , identifier[y] , identifier[w] , identifier[h] ])
identifier[fig] . identifier[colorbar] ( identifier[p] , identifier[cax] = identifier[ax_colorbar] , identifier[orientation] = literal[string] )
identifier[root] . identifier[text] ( identifier[x] - identifier[d] , identifier[y] + identifier[h] / literal[int] , literal[string] , identifier[ha] = literal[string] , identifier[va] = literal[string] )
identifier[root] . identifier[set_xlim] ( literal[int] , literal[int] )
identifier[root] . identifier[set_ylim] ( literal[int] , literal[int] )
keyword[for] identifier[x] keyword[in] ( identifier[axA] , identifier[axC] , identifier[root] ):
identifier[x] . identifier[set_axis_off] ()
identifier[image_name] = literal[string] + identifier[iopts] . identifier[format]
identifier[savefig] ( identifier[image_name] , identifier[dpi] = identifier[iopts] . identifier[dpi] , identifier[iopts] = identifier[iopts] ) | def expr(args):
"""
%prog expr block exp layout napus.bed
Plot a composite figure showing synteny and the expression level between
homeologs in two tissues - total 4 lists of values. block file contains the
gene pairs between AN and CN.
"""
from jcvi.graphics.base import red_purple as default_cm
p = OptionParser(expr.__doc__)
(opts, args, iopts) = p.set_image_options(args, figsize='8x5')
if len(args) != 4:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(block, exp, layout, napusbed) = args
fig = plt.figure(1, (iopts.w, iopts.h))
root = fig.add_axes([0, 0, 1, 1])
s = Synteny(fig, root, block, napusbed, layout)
# Import the expression values
# Columns are: leaf-A, leaf-C, root-A, root-C
fp = open(exp)
data = {}
for row in fp:
(gid, lf, rt) = row.split()
(lf, rt) = (float(lf), float(rt))
data[gid] = (lf, rt) # depends on [control=['for'], data=['row']]
(rA, rB) = s.rr
gA = [x.accn for x in rA.genes]
gC = [x.accn for x in rB.genes]
A = [data.get(x, (0, 0)) for x in gA]
C = [data.get(x, (0, 0)) for x in gC]
A = np.array(A)
C = np.array(C)
A = np.transpose(A)
C = np.transpose(C)
(d, h) = (0.01, 0.1)
lsg = 'lightslategrey'
coords = s.gg # Coordinates of the genes
axes = []
for (j, (y, gg)) in enumerate(((0.79, gA), (0.24, gC))):
r = s.rr[j]
x = r.xstart
w = r.xend - r.xstart
ax = fig.add_axes([x, y, w, h])
axes.append(ax)
root.add_patch(Rectangle((x - h, y - d), w + h + d, h + 2 * d, fill=False, ec=lsg, lw=1))
root.text(x - d, y + 3 * h / 4, 'root', ha='right', va='center')
root.text(x - d, y + h / 4, 'leaf', ha='right', va='center')
ty = y - 2 * d if y > 0.5 else y + h + 2 * d
nrows = len(gg)
for (i, g) in enumerate(gg):
(start, end) = coords[j, g]
(sx, sy) = start
(ex, ey) = end
assert sy == ey
sy = sy + 2 * d if sy > 0.5 else sy - 2 * d
root.plot(((sx + ex) / 2, x + w * (i + 0.5) / nrows), (sy, ty), lw=1, ls=':', color='k', alpha=0.2) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
(axA, axC) = axes
p = axA.pcolormesh(A, cmap=default_cm)
p = axC.pcolormesh(C, cmap=default_cm)
axA.set_xlim(0, len(gA))
axC.set_xlim(0, len(gC))
(x, y, w, h) = (0.35, 0.1, 0.3, 0.05)
ax_colorbar = fig.add_axes([x, y, w, h])
fig.colorbar(p, cax=ax_colorbar, orientation='horizontal')
root.text(x - d, y + h / 2, 'RPKM', ha='right', va='center')
root.set_xlim(0, 1)
root.set_ylim(0, 1)
for x in (axA, axC, root):
x.set_axis_off() # depends on [control=['for'], data=['x']]
image_name = 'napusf4b.' + iopts.format
savefig(image_name, dpi=iopts.dpi, iopts=iopts) |
def histograms(self, analytes=None, bins=25, logy=False,
               filt=False, colourful=True):
    """
    Plot a histogram for each analyte.

    Parameters
    ----------
    analytes : optional, array_like or str
        Which analyte(s) to plot; all analytes when None.
    bins : int
        Number of histogram bins (default 25).
    logy : bool
        Use a logarithmic y axis when True.
    filt : str, dict or bool
        Filter specification forwarded to `grab_filt` via `get_focus`:
        a logical expression string, a per-analyte dict of expressions,
        or a boolean.
    colourful : bool
        Colour the histograms when True; monochrome otherwise.

    Returns
    -------
    (fig, axes)
    """
    if analytes is None:
        analytes = self.analytes
        # Ratioed/calibrated data exclude the internal standard itself.
        if self.focus_stage in ('ratio', 'calibrated'):
            analytes = [a for a in analytes
                        if self.internal_standard not in a]
    cmap = self.cmaps if colourful else None
    self.get_focus(filt=filt)
    fig, axes = plot.histograms(self.focus, keys=analytes,
                                bins=bins, logy=logy, cmap=cmap)
    return fig, axes
constant[
Plot histograms of analytes.
Parameters
----------
analytes : optional, array_like or str
The analyte(s) to plot. Defaults to all analytes.
bins : int
The number of bins in each histogram (default = 25)
logy : bool
If true, y axis is a log scale.
filt : str, dict or bool
Either logical filter expression contained in a str,
a dict of expressions specifying the filter string to
use for each analyte or a boolean. Passed to `grab_filt`.
colourful : bool
If True, histograms are colourful :)
Returns
-------
(fig, axes)
]
if compare[name[analytes] is constant[None]] begin[:]
variable[analytes] assign[=] name[self].analytes
if compare[name[self].focus_stage in list[[<ast.Constant object at 0x7da1b03b8190>, <ast.Constant object at 0x7da1b03baf50>]]] begin[:]
variable[analytes] assign[=] <ast.ListComp object at 0x7da1b03bad70>
if name[colourful] begin[:]
variable[cmap] assign[=] name[self].cmaps
call[name[self].get_focus, parameter[]]
<ast.Tuple object at 0x7da20c6a8dc0> assign[=] call[name[plot].histograms, parameter[name[self].focus]]
return[tuple[[<ast.Name object at 0x7da20c6aa530>, <ast.Name object at 0x7da20c6aaa70>]]] | keyword[def] identifier[histograms] ( identifier[self] , identifier[analytes] = keyword[None] , identifier[bins] = literal[int] , identifier[logy] = keyword[False] ,
identifier[filt] = keyword[False] , identifier[colourful] = keyword[True] ):
literal[string]
keyword[if] identifier[analytes] keyword[is] keyword[None] :
identifier[analytes] = identifier[self] . identifier[analytes]
keyword[if] identifier[self] . identifier[focus_stage] keyword[in] [ literal[string] , literal[string] ]:
identifier[analytes] =[ identifier[a] keyword[for] identifier[a] keyword[in] identifier[analytes] keyword[if] identifier[self] . identifier[internal_standard] keyword[not] keyword[in] identifier[a] ]
keyword[if] identifier[colourful] :
identifier[cmap] = identifier[self] . identifier[cmaps]
keyword[else] :
identifier[cmap] = keyword[None]
identifier[self] . identifier[get_focus] ( identifier[filt] = identifier[filt] )
identifier[fig] , identifier[axes] = identifier[plot] . identifier[histograms] ( identifier[self] . identifier[focus] , identifier[keys] = identifier[analytes] ,
identifier[bins] = identifier[bins] , identifier[logy] = identifier[logy] , identifier[cmap] = identifier[cmap] )
keyword[return] identifier[fig] , identifier[axes] | def histograms(self, analytes=None, bins=25, logy=False, filt=False, colourful=True):
"""
Plot histograms of analytes.
Parameters
----------
analytes : optional, array_like or str
The analyte(s) to plot. Defaults to all analytes.
bins : int
The number of bins in each histogram (default = 25)
logy : bool
If true, y axis is a log scale.
filt : str, dict or bool
Either logical filter expression contained in a str,
a dict of expressions specifying the filter string to
use for each analyte or a boolean. Passed to `grab_filt`.
colourful : bool
If True, histograms are colourful :)
Returns
-------
(fig, axes)
"""
if analytes is None:
analytes = self.analytes # depends on [control=['if'], data=['analytes']]
if self.focus_stage in ['ratio', 'calibrated']:
analytes = [a for a in analytes if self.internal_standard not in a] # depends on [control=['if'], data=[]]
if colourful:
cmap = self.cmaps # depends on [control=['if'], data=[]]
else:
cmap = None
self.get_focus(filt=filt)
(fig, axes) = plot.histograms(self.focus, keys=analytes, bins=bins, logy=logy, cmap=cmap)
return (fig, axes) |
def lstring_as_obj(true_or_false=None):
    """Get or set whether ``lstring`` fields map to Python objects.

    When FieldArrays is first loaded the default is True, meaning fields
    declared as ``lstring`` are stored as numpy objects; when toggled off
    they are stored as fixed-width byte strings of the default length.

    Parameters
    ----------
    true_or_false : {None|bool}
        Pass True to map lstrings to objects, False to map them to fixed
        strings. When None, only report the current state.

    Return
    ------
    current_stat : bool
        The current state of lstring_as_obj.
    """
    if true_or_false is not None:
        _default_types_status['lstring_as_obj'] = true_or_false
        # Refresh numpy's name->dtype mapping to reflect the new setting.
        if _default_types_status['lstring_as_obj']:
            mapped_type = numpy.object_
        else:
            mapped_type = 'S%i' % _default_types_status['default_strlen']
        numpy.typeDict[u'lstring'] = mapped_type
    return _default_types_status['lstring_as_obj']
constant[Toggles whether lstrings should be treated as strings or as objects.
When FieldArrays is first loaded, the default is True.
Parameters
----------
true_or_false : {None|bool}
Pass True to map lstrings to objects; False otherwise. If None
provided, just returns the current state.
Return
------
current_stat : bool
The current state of lstring_as_obj.
Examples
--------
>>> from pycbc.io import FieldArray
>>> FieldArray.lstring_as_obj()
True
>>> FieldArray.FieldArray.from_arrays([numpy.zeros(10)], dtype=[('foo', 'lstring')])
FieldArray([(0.0,), (0.0,), (0.0,), (0.0,), (0.0,), (0.0,), (0.0,), (0.0,),
(0.0,), (0.0,)],
dtype=[('foo', 'O')])
>>> FieldArray.lstring_as_obj(False)
False
>>> FieldArray.FieldArray.from_arrays([numpy.zeros(10)], dtype=[('foo', 'lstring')])
FieldArray([('0.0',), ('0.0',), ('0.0',), ('0.0',), ('0.0',), ('0.0',),
('0.0',), ('0.0',), ('0.0',), ('0.0',)],
dtype=[('foo', 'S50')])
]
if compare[name[true_or_false] is_not constant[None]] begin[:]
call[name[_default_types_status]][constant[lstring_as_obj]] assign[=] name[true_or_false]
call[name[numpy].typeDict][constant[lstring]] assign[=] <ast.IfExp object at 0x7da18bccbac0>
return[call[name[_default_types_status]][constant[lstring_as_obj]]] | keyword[def] identifier[lstring_as_obj] ( identifier[true_or_false] = keyword[None] ):
literal[string]
keyword[if] identifier[true_or_false] keyword[is] keyword[not] keyword[None] :
identifier[_default_types_status] [ literal[string] ]= identifier[true_or_false]
identifier[numpy] . identifier[typeDict] [ literal[string] ]= identifier[numpy] . identifier[object_] keyword[if] identifier[_default_types_status] [ literal[string] ] keyword[else] literal[string] % identifier[_default_types_status] [ literal[string] ]
keyword[return] identifier[_default_types_status] [ literal[string] ] | def lstring_as_obj(true_or_false=None):
"""Toggles whether lstrings should be treated as strings or as objects.
When FieldArrays is first loaded, the default is True.
Parameters
----------
true_or_false : {None|bool}
Pass True to map lstrings to objects; False otherwise. If None
provided, just returns the current state.
Return
------
current_stat : bool
The current state of lstring_as_obj.
Examples
--------
>>> from pycbc.io import FieldArray
>>> FieldArray.lstring_as_obj()
True
>>> FieldArray.FieldArray.from_arrays([numpy.zeros(10)], dtype=[('foo', 'lstring')])
FieldArray([(0.0,), (0.0,), (0.0,), (0.0,), (0.0,), (0.0,), (0.0,), (0.0,),
(0.0,), (0.0,)],
dtype=[('foo', 'O')])
>>> FieldArray.lstring_as_obj(False)
False
>>> FieldArray.FieldArray.from_arrays([numpy.zeros(10)], dtype=[('foo', 'lstring')])
FieldArray([('0.0',), ('0.0',), ('0.0',), ('0.0',), ('0.0',), ('0.0',),
('0.0',), ('0.0',), ('0.0',), ('0.0',)],
dtype=[('foo', 'S50')])
"""
if true_or_false is not None:
_default_types_status['lstring_as_obj'] = true_or_false
# update the typeDict
numpy.typeDict[u'lstring'] = numpy.object_ if _default_types_status['lstring_as_obj'] else 'S%i' % _default_types_status['default_strlen'] # depends on [control=['if'], data=['true_or_false']]
return _default_types_status['lstring_as_obj'] |
def websocket(self, uri, *args, **kwargs):
    """Create a websocket route from a decorated function

    :param uri: endpoint at which the socket endpoint will be accessible.
    :type uri: str
    :param args: captures all of the positional arguments passed in
    :type args: tuple(Any)
    :param kwargs: captures the keyword arguments passed in
    :type kwargs: dict(Any)
    :return: The exception function to use as the decorator
    :rtype: fn
    """
    # Guarantee the optional routing settings are always present.
    for option in ('host', 'strict_slashes', 'subprotocols', 'name'):
        kwargs.setdefault(option, None)

    def wrapper(handler_f):
        # Registration is deferred: the route is recorded and applied later.
        self._ws.append(FutureWebsocket(handler_f, uri, args, kwargs))
        return handler_f
    return wrapper
constant[Create a websocket route from a decorated function
:param uri: endpoint at which the socket endpoint will be accessible.
:type uri: str
:param args: captures all of the positional arguments passed in
:type args: tuple(Any)
:param kwargs: captures the keyword arguments passed in
:type kwargs: dict(Any)
:return: The exception function to use as the decorator
:rtype: fn
]
call[name[kwargs].setdefault, parameter[constant[host], constant[None]]]
call[name[kwargs].setdefault, parameter[constant[strict_slashes], constant[None]]]
call[name[kwargs].setdefault, parameter[constant[subprotocols], constant[None]]]
call[name[kwargs].setdefault, parameter[constant[name], constant[None]]]
def function[wrapper, parameter[handler_f]]:
call[name[self]._ws.append, parameter[call[name[FutureWebsocket], parameter[name[handler_f], name[uri], name[args], name[kwargs]]]]]
return[name[handler_f]]
return[name[wrapper]] | keyword[def] identifier[websocket] ( identifier[self] , identifier[uri] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] . identifier[setdefault] ( literal[string] , keyword[None] )
identifier[kwargs] . identifier[setdefault] ( literal[string] , keyword[None] )
identifier[kwargs] . identifier[setdefault] ( literal[string] , keyword[None] )
identifier[kwargs] . identifier[setdefault] ( literal[string] , keyword[None] )
keyword[def] identifier[wrapper] ( identifier[handler_f] ):
identifier[self] . identifier[_ws] . identifier[append] ( identifier[FutureWebsocket] ( identifier[handler_f] , identifier[uri] , identifier[args] , identifier[kwargs] ))
keyword[return] identifier[handler_f]
keyword[return] identifier[wrapper] | def websocket(self, uri, *args, **kwargs):
"""Create a websocket route from a decorated function
:param uri: endpoint at which the socket endpoint will be accessible.
:type uri: str
:param args: captures all of the positional arguments passed in
:type args: tuple(Any)
:param kwargs: captures the keyword arguments passed in
:type kwargs: dict(Any)
:return: The exception function to use as the decorator
:rtype: fn
"""
kwargs.setdefault('host', None)
kwargs.setdefault('strict_slashes', None)
kwargs.setdefault('subprotocols', None)
kwargs.setdefault('name', None)
def wrapper(handler_f):
self._ws.append(FutureWebsocket(handler_f, uri, args, kwargs))
return handler_f
return wrapper |
def update(self, observable, handlers):
    """ReaderObserver callback fired when smartcard readers are
    added to or removed from the system; keeps the toolbar list in sync."""
    added, removed = handlers
    # New readers become list items carrying the reader object as data.
    for reader in added:
        index = self.Append(str(reader))
        self.SetClientData(index, reader)
    # Removed readers are deleted from the list if still present.
    for reader in removed:
        index = self.FindString(str(reader))
        if index != wx.NOT_FOUND:
            self.Delete(index)
    selection = self.GetSelection()
constant[Toolbar ReaderObserver callback that is notified when
readers are added or removed.]
<ast.Tuple object at 0x7da1b23ef430> assign[=] name[handlers]
for taget[name[reader]] in starred[name[addedreaders]] begin[:]
variable[item] assign[=] call[name[self].Append, parameter[call[name[str], parameter[name[reader]]]]]
call[name[self].SetClientData, parameter[name[item], name[reader]]]
for taget[name[reader]] in starred[name[removedreaders]] begin[:]
variable[item] assign[=] call[name[self].FindString, parameter[call[name[str], parameter[name[reader]]]]]
if compare[name[wx].NOT_FOUND not_equal[!=] name[item]] begin[:]
call[name[self].Delete, parameter[name[item]]]
variable[selection] assign[=] call[name[self].GetSelection, parameter[]] | keyword[def] identifier[update] ( identifier[self] , identifier[observable] , identifier[handlers] ):
literal[string]
identifier[addedreaders] , identifier[removedreaders] = identifier[handlers]
keyword[for] identifier[reader] keyword[in] identifier[addedreaders] :
identifier[item] = identifier[self] . identifier[Append] ( identifier[str] ( identifier[reader] ))
identifier[self] . identifier[SetClientData] ( identifier[item] , identifier[reader] )
keyword[for] identifier[reader] keyword[in] identifier[removedreaders] :
identifier[item] = identifier[self] . identifier[FindString] ( identifier[str] ( identifier[reader] ))
keyword[if] identifier[wx] . identifier[NOT_FOUND] != identifier[item] :
identifier[self] . identifier[Delete] ( identifier[item] )
identifier[selection] = identifier[self] . identifier[GetSelection] () | def update(self, observable, handlers):
"""Toolbar ReaderObserver callback that is notified when
readers are added or removed."""
(addedreaders, removedreaders) = handlers
for reader in addedreaders:
item = self.Append(str(reader))
self.SetClientData(item, reader) # depends on [control=['for'], data=['reader']]
for reader in removedreaders:
item = self.FindString(str(reader))
if wx.NOT_FOUND != item:
self.Delete(item) # depends on [control=['if'], data=['item']] # depends on [control=['for'], data=['reader']]
selection = self.GetSelection() |
def shell_sort(arr):
    ''' Shell Sort
    Sorts *arr* in place using Shell's method and returns it.
    Complexity: O(n^2)
    '''
    # Start with a gap of half the list and shrink it each pass.
    gap = len(arr) // 2
    while gap:
        # Gapped insertion sort: every element from position `gap`
        # onward is slid left in steps of `gap` until it fits.
        for j in range(gap, len(arr)):
            current = arr[j]
            i = j
            while i >= gap and current < arr[i - gap]:
                arr[i] = arr[i - gap]
                i -= gap
            arr[i] = current
        gap //= 2
    return arr
constant[ Shell Sort
Complexity: O(n^2)
]
variable[n] assign[=] call[name[len], parameter[name[arr]]]
variable[gap] assign[=] binary_operation[name[n] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]]
while compare[name[gap] greater[>] constant[0]] begin[:]
variable[y_index] assign[=] name[gap]
while compare[name[y_index] less[<] call[name[len], parameter[name[arr]]]] begin[:]
variable[y] assign[=] call[name[arr]][name[y_index]]
variable[x_index] assign[=] binary_operation[name[y_index] - name[gap]]
while <ast.BoolOp object at 0x7da1b20756f0> begin[:]
call[name[arr]][binary_operation[name[x_index] + name[gap]]] assign[=] call[name[arr]][name[x_index]]
variable[x_index] assign[=] binary_operation[name[x_index] - name[gap]]
call[name[arr]][binary_operation[name[x_index] + name[gap]]] assign[=] name[y]
variable[y_index] assign[=] binary_operation[name[y_index] + constant[1]]
variable[gap] assign[=] binary_operation[name[gap] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]]
return[name[arr]] | keyword[def] identifier[shell_sort] ( identifier[arr] ):
literal[string]
identifier[n] = identifier[len] ( identifier[arr] )
identifier[gap] = identifier[n] // literal[int]
keyword[while] identifier[gap] > literal[int] :
identifier[y_index] = identifier[gap]
keyword[while] identifier[y_index] < identifier[len] ( identifier[arr] ):
identifier[y] = identifier[arr] [ identifier[y_index] ]
identifier[x_index] = identifier[y_index] - identifier[gap]
keyword[while] identifier[x_index] >= literal[int] keyword[and] identifier[y] < identifier[arr] [ identifier[x_index] ]:
identifier[arr] [ identifier[x_index] + identifier[gap] ]= identifier[arr] [ identifier[x_index] ]
identifier[x_index] = identifier[x_index] - identifier[gap]
identifier[arr] [ identifier[x_index] + identifier[gap] ]= identifier[y]
identifier[y_index] = identifier[y_index] + literal[int]
identifier[gap] = identifier[gap] // literal[int]
keyword[return] identifier[arr] | def shell_sort(arr):
""" Shell Sort
Complexity: O(n^2)
"""
n = len(arr)
# Initialize size of the gap
gap = n // 2
while gap > 0:
y_index = gap
while y_index < len(arr):
y = arr[y_index]
x_index = y_index - gap
while x_index >= 0 and y < arr[x_index]:
arr[x_index + gap] = arr[x_index]
x_index = x_index - gap # depends on [control=['while'], data=[]]
arr[x_index + gap] = y
y_index = y_index + 1 # depends on [control=['while'], data=['y_index']]
gap = gap // 2 # depends on [control=['while'], data=['gap']]
return arr |
def add_channel_cb(self, viewer, channel):
    """Callback invoked by the main interface when a channel is added.
    Parameter is channel (a bunch)."""
    fitsimage = channel.fitsimage
    settings = fitsimage.get_settings()
    # Watch the settings we care about on this viewer.
    for setting_name in ('cuts',):
        settings.get_setting(setting_name).add_callback(
            'set', self.cutset_cb, fitsimage)
    fitsimage.add_callback('transform', self.transform_cb)
    fitsimage.get_rgbmap().add_callback('changed', self.rgbmap_cb, fitsimage)
    # Seed the channel's external data with the old highlight set.
    channel.extdata.setdefault('thumbs_old_highlight', set())
constant[Called when a channel is added from the main interface.
Parameter is channel (a bunch).]
variable[fitsimage] assign[=] name[channel].fitsimage
variable[fitssettings] assign[=] call[name[fitsimage].get_settings, parameter[]]
for taget[name[name]] in starred[list[[<ast.Constant object at 0x7da1b0d56890>]]] begin[:]
call[call[name[fitssettings].get_setting, parameter[name[name]]].add_callback, parameter[constant[set], name[self].cutset_cb, name[fitsimage]]]
call[name[fitsimage].add_callback, parameter[constant[transform], name[self].transform_cb]]
variable[rgbmap] assign[=] call[name[fitsimage].get_rgbmap, parameter[]]
call[name[rgbmap].add_callback, parameter[constant[changed], name[self].rgbmap_cb, name[fitsimage]]]
call[name[channel].extdata.setdefault, parameter[constant[thumbs_old_highlight], call[name[set], parameter[list[[]]]]]] | keyword[def] identifier[add_channel_cb] ( identifier[self] , identifier[viewer] , identifier[channel] ):
literal[string]
identifier[fitsimage] = identifier[channel] . identifier[fitsimage]
identifier[fitssettings] = identifier[fitsimage] . identifier[get_settings] ()
keyword[for] identifier[name] keyword[in] [ literal[string] ]:
identifier[fitssettings] . identifier[get_setting] ( identifier[name] ). identifier[add_callback] (
literal[string] , identifier[self] . identifier[cutset_cb] , identifier[fitsimage] )
identifier[fitsimage] . identifier[add_callback] ( literal[string] , identifier[self] . identifier[transform_cb] )
identifier[rgbmap] = identifier[fitsimage] . identifier[get_rgbmap] ()
identifier[rgbmap] . identifier[add_callback] ( literal[string] , identifier[self] . identifier[rgbmap_cb] , identifier[fitsimage] )
identifier[channel] . identifier[extdata] . identifier[setdefault] ( literal[string] , identifier[set] ([])) | def add_channel_cb(self, viewer, channel):
"""Called when a channel is added from the main interface.
Parameter is channel (a bunch)."""
fitsimage = channel.fitsimage
fitssettings = fitsimage.get_settings()
for name in ['cuts']:
fitssettings.get_setting(name).add_callback('set', self.cutset_cb, fitsimage) # depends on [control=['for'], data=['name']]
fitsimage.add_callback('transform', self.transform_cb)
rgbmap = fitsimage.get_rgbmap()
rgbmap.add_callback('changed', self.rgbmap_cb, fitsimage)
# add old highlight set to channel external data
channel.extdata.setdefault('thumbs_old_highlight', set([])) |
def inverted(values, input_min=0, input_max=1):
    """
    Yields the inversion of the supplied values (*input_min* becomes
    *input_max*, *input_max* becomes *input_min*, `input_min + 0.1` becomes
    `input_max - 0.1`, and so on). Every item in *values* is assumed to lie
    between *input_min* and *input_max* (defaulting to 0 and 1), and the
    output stays within that same range. For example::
    from gpiozero import MCP3008, PWMLED
    from gpiozero.tools import inverted
    from signal import pause
    led = PWMLED(4)
    pot = MCP3008(channel=0)
    led.source = inverted(pot)
    pause()
    """
    values = _normalize(values)
    if input_min >= input_max:
        raise ValueError('input_min must be smaller than input_max')
    # Reflecting around the midpoint is just (min + max) - v.
    offset = input_min + input_max
    for value in values:
        yield offset - value
constant[
Returns the inversion of the supplied values (*input_min* becomes
*input_max*, *input_max* becomes *input_min*, `input_min + 0.1` becomes
`input_max - 0.1`, etc.). All items in *values* are assumed to be between
*input_min* and *input_max* (which default to 0 and 1 respectively), and
the output will be in the same range. For example::
from gpiozero import MCP3008, PWMLED
from gpiozero.tools import inverted
from signal import pause
led = PWMLED(4)
pot = MCP3008(channel=0)
led.source = inverted(pot)
pause()
]
variable[values] assign[=] call[name[_normalize], parameter[name[values]]]
if compare[name[input_min] greater_or_equal[>=] name[input_max]] begin[:]
<ast.Raise object at 0x7da18f720f40>
for taget[name[v]] in starred[name[values]] begin[:]
<ast.Yield object at 0x7da18f7223b0> | keyword[def] identifier[inverted] ( identifier[values] , identifier[input_min] = literal[int] , identifier[input_max] = literal[int] ):
literal[string]
identifier[values] = identifier[_normalize] ( identifier[values] )
keyword[if] identifier[input_min] >= identifier[input_max] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[for] identifier[v] keyword[in] identifier[values] :
keyword[yield] identifier[input_min] + identifier[input_max] - identifier[v] | def inverted(values, input_min=0, input_max=1):
"""
Returns the inversion of the supplied values (*input_min* becomes
*input_max*, *input_max* becomes *input_min*, `input_min + 0.1` becomes
`input_max - 0.1`, etc.). All items in *values* are assumed to be between
*input_min* and *input_max* (which default to 0 and 1 respectively), and
the output will be in the same range. For example::
from gpiozero import MCP3008, PWMLED
from gpiozero.tools import inverted
from signal import pause
led = PWMLED(4)
pot = MCP3008(channel=0)
led.source = inverted(pot)
pause()
"""
values = _normalize(values)
if input_min >= input_max:
raise ValueError('input_min must be smaller than input_max') # depends on [control=['if'], data=[]]
for v in values:
yield (input_min + input_max - v) # depends on [control=['for'], data=['v']] |
def hist(self, xdata, disp=True, **kwargs):
    '''Graphs a histogram.
    xdata: List of values to bin. Can optionally include a header, see testGraph_barAndHist.py in https://github.com/Dfenestrator/GooPyCharts for an example.
    disp: for displaying plots immediately. Set to True by default. Set to False for other operations, then use show() to display the plot.
    **kwargs: Access to other Google Charts API options. The key is the option name, the value is the option's full JS code.
    '''
    # Label header followed by the raw values, as the JS template expects.
    chart_data = [self.xlabel] + xdata
    # Serialize any extra Google Charts options as "name: value," lines.
    extra_options = ''.join(
        option + ': ' + kwargs[option] + ',\n' for option in kwargs)
    # Template substitutions (see template for where variables are inserted).
    template_args = {
        'data': str(chart_data),
        'title': self.title,
        'functionName': slugify(self.title),
        'height': self.height,
        'width': self.width,
        'logScaleFlag': 'false',
        'ylabel': self.ylabel,
        'plotType': 'Histogram',
        'numFig': self.numFig,
        'other': extra_options,
    }
    self.javascript = (
        graphPgTemplateStart + graphPgTemplate_hist + graphPgTemplateEnd
    ) % template_args
    if disp:
        self.dispFile()
constant[Graphs a histogram.
xdata: List of values to bin. Can optionally include a header, see testGraph_barAndHist.py in https://github.com/Dfenestrator/GooPyCharts for an example.
disp: for displaying plots immediately. Set to True by default. Set to False for other operations, then use show() to display the plot.
**kwargs: Access to other Google Charts API options. The key is the option name, the value is the option's full JS code.
]
variable[data] assign[=] binary_operation[list[[<ast.Attribute object at 0x7da18bc730d0>]] + name[xdata]]
variable[other] assign[=] constant[]
for taget[name[option]] in starred[name[kwargs]] begin[:]
<ast.AugAssign object at 0x7da1b0619d50>
variable[argDict] assign[=] dictionary[[<ast.Constant object at 0x7da1b061ab90>, <ast.Constant object at 0x7da1b061ba30>, <ast.Constant object at 0x7da1b0619cc0>, <ast.Constant object at 0x7da1b061b280>, <ast.Constant object at 0x7da1b06180a0>, <ast.Constant object at 0x7da1b0618670>, <ast.Constant object at 0x7da1b06192d0>, <ast.Constant object at 0x7da1b061ba60>, <ast.Constant object at 0x7da1b0619a80>, <ast.Constant object at 0x7da1b061b850>], [<ast.Call object at 0x7da1b061b2e0>, <ast.Attribute object at 0x7da1b06183d0>, <ast.Call object at 0x7da1b06188e0>, <ast.Attribute object at 0x7da1b0618d00>, <ast.Attribute object at 0x7da1b06196c0>, <ast.Constant object at 0x7da1b0619690>, <ast.Attribute object at 0x7da1b0618970>, <ast.Constant object at 0x7da1b0618eb0>, <ast.Attribute object at 0x7da1b0618460>, <ast.Name object at 0x7da1b0619ff0>]]
name[self].javascript assign[=] binary_operation[binary_operation[binary_operation[name[graphPgTemplateStart] + name[graphPgTemplate_hist]] + name[graphPgTemplateEnd]] <ast.Mod object at 0x7da2590d6920> name[argDict]]
if name[disp] begin[:]
call[name[self].dispFile, parameter[]] | keyword[def] identifier[hist] ( identifier[self] , identifier[xdata] , identifier[disp] = keyword[True] ,** identifier[kwargs] ):
literal[string]
identifier[data] =[ identifier[self] . identifier[xlabel] ]+ identifier[xdata]
identifier[other] = literal[string]
keyword[for] identifier[option] keyword[in] identifier[kwargs] :
identifier[other] += identifier[option] + literal[string] + identifier[kwargs] [ identifier[option] ]+ literal[string]
identifier[argDict] ={ literal[string] : identifier[str] ( identifier[data] ),
literal[string] : identifier[self] . identifier[title] ,
literal[string] : identifier[slugify] ( identifier[self] . identifier[title] ),
literal[string] : identifier[self] . identifier[height] ,
literal[string] : identifier[self] . identifier[width] ,
literal[string] : literal[string] ,
literal[string] : identifier[self] . identifier[ylabel] ,
literal[string] : literal[string] ,
literal[string] : identifier[self] . identifier[numFig] ,
literal[string] : identifier[other] }
identifier[self] . identifier[javascript] =( identifier[graphPgTemplateStart] + identifier[graphPgTemplate_hist] + identifier[graphPgTemplateEnd] )% identifier[argDict]
keyword[if] identifier[disp] :
identifier[self] . identifier[dispFile] () | def hist(self, xdata, disp=True, **kwargs):
"""Graphs a histogram.
xdata: List of values to bin. Can optionally include a header, see testGraph_barAndHist.py in https://github.com/Dfenestrator/GooPyCharts for an example.
disp: for displaying plots immediately. Set to True by default. Set to False for other operations, then use show() to display the plot.
**kwargs: Access to other Google Charts API options. The key is the option name, the value is the option's full JS code.
"""
#combine data into proper format
data = [self.xlabel] + xdata
#Include other options, supplied by **kwargs
other = ''
for option in kwargs:
other += option + ': ' + kwargs[option] + ',\n' # depends on [control=['for'], data=['option']]
#input argument format to template is in dictionary format (see template for where variables are inserted)
argDict = {'data': str(data), 'title': self.title, 'functionName': slugify(self.title), 'height': self.height, 'width': self.width, 'logScaleFlag': 'false', 'ylabel': self.ylabel, 'plotType': 'Histogram', 'numFig': self.numFig, 'other': other}
self.javascript = (graphPgTemplateStart + graphPgTemplate_hist + graphPgTemplateEnd) % argDict
if disp:
self.dispFile() # depends on [control=['if'], data=[]] |
def _writeMzmlIndexList(xmlWriter, spectrumIndexList, chromatogramIndexList):
    """Write the mzML ``<indexList>`` element of an indexed mzML file.

    Emits one index entry per non-empty list and writes nothing at all when
    both lists are empty.

    :param xmlWriter: xml writer object providing ``element()`` and
        ``write()`` (presumably an ``lxml.etree.xmlfile`` — confirm)
    :param spectrumIndexList: index entries for the spectrum elements;
        may be empty or None
    :param chromatogramIndexList: index entries for the chromatogram
        elements; may be empty or None
    :returns: None
    """
    # The "count" attribute of <indexList> is the number of non-empty indexes.
    counts = 0
    if spectrumIndexList:
        counts += 1
    if chromatogramIndexList:
        counts += 1
    if counts == 0:
        # Nothing to index; omit the <indexList> element entirely.
        return None
    #Create indexList node
    xmlIndexList = xmlWriter.element('indexList', {'count': str(counts)})
    # __enter__/__exit__ are called manually instead of using a "with" block
    # so the child elements and newlines can be written inside the element.
    xmlIndexList.__enter__()
    xmlWriter.write('\n')
    _writeIndexListElement(xmlWriter, 'spectrum', spectrumIndexList)
    _writeIndexListElement(xmlWriter, 'chromatogram', chromatogramIndexList)
    #Close indexList node
    xmlIndexList.__exit__(None, None, None)
    xmlWriter.write('\n')
constant[ #TODO: docstring
:param xmlWriter: #TODO: docstring
:param spectrumIndexList: #TODO: docstring
:param chromatogramIndexList: #TODO: docstring
]
variable[counts] assign[=] constant[0]
if name[spectrumIndexList] begin[:]
<ast.AugAssign object at 0x7da1b2874a30>
if name[chromatogramIndexList] begin[:]
<ast.AugAssign object at 0x7da1b28754e0>
if compare[name[counts] equal[==] constant[0]] begin[:]
return[constant[None]]
variable[xmlIndexList] assign[=] call[name[xmlWriter].element, parameter[constant[indexList], dictionary[[<ast.Constant object at 0x7da1b28752a0>], [<ast.Call object at 0x7da1b2876050>]]]]
call[name[xmlIndexList].__enter__, parameter[]]
call[name[xmlWriter].write, parameter[constant[
]]]
call[name[_writeIndexListElement], parameter[name[xmlWriter], constant[spectrum], name[spectrumIndexList]]]
call[name[_writeIndexListElement], parameter[name[xmlWriter], constant[chromatogram], name[chromatogramIndexList]]]
call[name[xmlIndexList].__exit__, parameter[constant[None], constant[None], constant[None]]]
call[name[xmlWriter].write, parameter[constant[
]]] | keyword[def] identifier[_writeMzmlIndexList] ( identifier[xmlWriter] , identifier[spectrumIndexList] , identifier[chromatogramIndexList] ):
literal[string]
identifier[counts] = literal[int]
keyword[if] identifier[spectrumIndexList] :
identifier[counts] += literal[int]
keyword[if] identifier[chromatogramIndexList] :
identifier[counts] += literal[int]
keyword[if] identifier[counts] == literal[int] :
keyword[return] keyword[None]
identifier[xmlIndexList] = identifier[xmlWriter] . identifier[element] ( literal[string] ,{ literal[string] : identifier[str] ( identifier[counts] )})
identifier[xmlIndexList] . identifier[__enter__] ()
identifier[xmlWriter] . identifier[write] ( literal[string] )
identifier[_writeIndexListElement] ( identifier[xmlWriter] , literal[string] , identifier[spectrumIndexList] )
identifier[_writeIndexListElement] ( identifier[xmlWriter] , literal[string] , identifier[chromatogramIndexList] )
identifier[xmlIndexList] . identifier[__exit__] ( keyword[None] , keyword[None] , keyword[None] )
identifier[xmlWriter] . identifier[write] ( literal[string] ) | def _writeMzmlIndexList(xmlWriter, spectrumIndexList, chromatogramIndexList):
""" #TODO: docstring
:param xmlWriter: #TODO: docstring
:param spectrumIndexList: #TODO: docstring
:param chromatogramIndexList: #TODO: docstring
"""
counts = 0
if spectrumIndexList:
counts += 1 # depends on [control=['if'], data=[]]
if chromatogramIndexList:
counts += 1 # depends on [control=['if'], data=[]]
if counts == 0:
return None # depends on [control=['if'], data=[]]
#Create indexList node
xmlIndexList = xmlWriter.element('indexList', {'count': str(counts)})
xmlIndexList.__enter__()
xmlWriter.write('\n')
_writeIndexListElement(xmlWriter, 'spectrum', spectrumIndexList)
_writeIndexListElement(xmlWriter, 'chromatogram', chromatogramIndexList)
#Close indexList node
xmlIndexList.__exit__(None, None, None)
xmlWriter.write('\n') |
def find_signature(self, data_stream, msg_signature):
    """Search the received byte stream for a message matching the expected
    response signature.

    :param data_stream: bytearray of raw bytes received from the device so far
    :param msg_signature: expected message prefix as space-separated hex byte
        values, e.g. ``'f0 7e 00'``
    :returns: tuple ``(matching_message, data_stream)`` where
        ``matching_message`` is the bytearray tail starting at the matched
        signature, or ``None`` if no complete match was found, and
        ``data_stream`` is the (possibly trimmed) buffer to reuse on the
        next call.
    """
    signature_match_index = None # The message that will be returned if it matches the signature
    msg_signature = msg_signature.split() # Split into a list of hex-string tokens
    # convert to bytearray in order to be able to compare with the messages list which contains bytearrays
    msg_signature = bytearray(int(x, 16) for x in msg_signature)
    # loop through each message returned from Russound
    index_of_last_f7 = None
    for i in range(len(data_stream)):
        if data_stream[i] == 247:
            # 247 == 0xF7; remember the position of the last one seen so
            # bytes before it can be discarded when no match is found
            # (presumably 0xF7 marks end-of-message — confirm vs. protocol)
            index_of_last_f7 = i
        # the below line checks for the matching signature, ensuring ALL bytes of the response have been received
        # (assumes a complete response is at least 24 bytes — TODO confirm)
        if (data_stream[i:i + len(msg_signature)] == msg_signature) and (len(data_stream) - i >= 24):
            signature_match_index = i
            break
    if signature_match_index is None:
        # Scrap bytes up to end of msg (to avoid searching these again).
        # NOTE(review): if no 0xF7 was seen, index_of_last_f7 is still None
        # and the slice [None:...] keeps the whole buffer unchanged.
        data_stream = data_stream[index_of_last_f7:len(data_stream)]
        matching_message = None
    else:
        matching_message = data_stream[signature_match_index:len(data_stream)]
        _LOGGER.debug("Message signature found at location: %s", signature_match_index)
    return matching_message, data_stream
constant[ Takes the stream of bytes received and looks for a message that matches the signature
of the expected response ]
variable[signature_match_index] assign[=] constant[None]
variable[msg_signature] assign[=] call[name[msg_signature].split, parameter[]]
variable[msg_signature] assign[=] call[name[bytearray], parameter[<ast.GeneratorExp object at 0x7da1b09123e0>]]
variable[index_of_last_f7] assign[=] constant[None]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[data_stream]]]]]] begin[:]
if compare[call[name[data_stream]][name[i]] equal[==] constant[247]] begin[:]
variable[index_of_last_f7] assign[=] name[i]
if <ast.BoolOp object at 0x7da1b092e680> begin[:]
variable[signature_match_index] assign[=] name[i]
break
if compare[name[signature_match_index] is constant[None]] begin[:]
variable[data_stream] assign[=] call[name[data_stream]][<ast.Slice object at 0x7da1b092e1a0>]
variable[matching_message] assign[=] constant[None]
call[name[_LOGGER].debug, parameter[constant[Message signature found at location: %s], name[signature_match_index]]]
return[tuple[[<ast.Name object at 0x7da1b092c100>, <ast.Name object at 0x7da1b092c160>]]] | keyword[def] identifier[find_signature] ( identifier[self] , identifier[data_stream] , identifier[msg_signature] ):
literal[string]
identifier[signature_match_index] = keyword[None]
identifier[msg_signature] = identifier[msg_signature] . identifier[split] ()
identifier[msg_signature] = identifier[bytearray] ( identifier[int] ( identifier[x] , literal[int] ) keyword[for] identifier[x] keyword[in] identifier[msg_signature] )
identifier[index_of_last_f7] = keyword[None]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[data_stream] )):
keyword[if] identifier[data_stream] [ identifier[i] ]== literal[int] :
identifier[index_of_last_f7] = identifier[i]
keyword[if] ( identifier[data_stream] [ identifier[i] : identifier[i] + identifier[len] ( identifier[msg_signature] )]== identifier[msg_signature] ) keyword[and] ( identifier[len] ( identifier[data_stream] )- identifier[i] >= literal[int] ):
identifier[signature_match_index] = identifier[i]
keyword[break]
keyword[if] identifier[signature_match_index] keyword[is] keyword[None] :
identifier[data_stream] = identifier[data_stream] [ identifier[index_of_last_f7] : identifier[len] ( identifier[data_stream] )]
identifier[matching_message] = keyword[None]
keyword[else] :
identifier[matching_message] = identifier[data_stream] [ identifier[signature_match_index] : identifier[len] ( identifier[data_stream] )]
identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[signature_match_index] )
keyword[return] identifier[matching_message] , identifier[data_stream] | def find_signature(self, data_stream, msg_signature):
""" Takes the stream of bytes received and looks for a message that matches the signature
of the expected response """
signature_match_index = None # The message that will be returned if it matches the signature
msg_signature = msg_signature.split() # Split into list
# convert to bytearray in order to be able to compare with the messages list which contains bytearrays
msg_signature = bytearray((int(x, 16) for x in msg_signature))
# loop through each message returned from Russound
index_of_last_f7 = None
for i in range(len(data_stream)):
if data_stream[i] == 247:
index_of_last_f7 = i # depends on [control=['if'], data=[]]
# the below line checks for the matching signature, ensuring ALL bytes of the response have been received
if data_stream[i:i + len(msg_signature)] == msg_signature and len(data_stream) - i >= 24:
signature_match_index = i
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
if signature_match_index is None:
# Scrap bytes up to end of msg (to avoid searching these again)
data_stream = data_stream[index_of_last_f7:len(data_stream)]
matching_message = None # depends on [control=['if'], data=[]]
else:
matching_message = data_stream[signature_match_index:len(data_stream)]
_LOGGER.debug('Message signature found at location: %s', signature_match_index)
return (matching_message, data_stream) |
def connectSig(self, signal):
    """
    Connect this port item on a subunit to ``signal``.
    """
    # Resolve, per direction, the currently bound signal and the list on
    # the signal that this port should be registered in.
    if self.direction == DIRECTION.IN:
        bound, attr, registry = self.src, 'src', signal.endpoints
    elif self.direction == DIRECTION.OUT:
        bound, attr, registry = self.dst, 'dst', signal.drivers
    else:
        raise NotImplementedError(self)
    # A port may only ever be associated with a single signal.
    if bound is not None:
        raise HwtSyntaxError(
            "Port %s is already associated with %r"
            % (self.name, bound))
    setattr(self, attr, signal)
    registry.append(self)
    signal.hidden = False
    signal.ctx.subUnits.add(self.unit)
constant[
Connect to port item on subunit
]
if compare[name[self].direction equal[==] name[DIRECTION].IN] begin[:]
if compare[name[self].src is_not constant[None]] begin[:]
<ast.Raise object at 0x7da1b032d060>
name[self].src assign[=] name[signal]
call[name[signal].endpoints.append, parameter[name[self]]]
name[signal].hidden assign[=] constant[False]
call[name[signal].ctx.subUnits.add, parameter[name[self].unit]] | keyword[def] identifier[connectSig] ( identifier[self] , identifier[signal] ):
literal[string]
keyword[if] identifier[self] . identifier[direction] == identifier[DIRECTION] . identifier[IN] :
keyword[if] identifier[self] . identifier[src] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[HwtSyntaxError] (
literal[string]
%( identifier[self] . identifier[name] , identifier[self] . identifier[src] ))
identifier[self] . identifier[src] = identifier[signal]
identifier[signal] . identifier[endpoints] . identifier[append] ( identifier[self] )
keyword[elif] identifier[self] . identifier[direction] == identifier[DIRECTION] . identifier[OUT] :
keyword[if] identifier[self] . identifier[dst] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[HwtSyntaxError] (
literal[string]
%( identifier[self] . identifier[name] , identifier[self] . identifier[dst] ))
identifier[self] . identifier[dst] = identifier[signal]
identifier[signal] . identifier[drivers] . identifier[append] ( identifier[self] )
keyword[else] :
keyword[raise] identifier[NotImplementedError] ( identifier[self] )
identifier[signal] . identifier[hidden] = keyword[False]
identifier[signal] . identifier[ctx] . identifier[subUnits] . identifier[add] ( identifier[self] . identifier[unit] ) | def connectSig(self, signal):
"""
Connect to port item on subunit
"""
if self.direction == DIRECTION.IN:
if self.src is not None:
raise HwtSyntaxError('Port %s is already associated with %r' % (self.name, self.src)) # depends on [control=['if'], data=[]]
self.src = signal
signal.endpoints.append(self) # depends on [control=['if'], data=[]]
elif self.direction == DIRECTION.OUT:
if self.dst is not None:
raise HwtSyntaxError('Port %s is already associated with %r' % (self.name, self.dst)) # depends on [control=['if'], data=[]]
self.dst = signal
signal.drivers.append(self) # depends on [control=['if'], data=[]]
else:
raise NotImplementedError(self)
signal.hidden = False
signal.ctx.subUnits.add(self.unit) |
def find(self, key):
    '''Get a shared variable for a parameter by name.
    Parameters
    ----------
    key : str or int
        The name of the parameter to look up, or the index of the parameter
        in our parameter list. These are both dependent on the
        implementation of the layer.
    Returns
    -------
    param : shared variable
        A shared variable containing values for the given parameter.
    Raises
    ------
    KeyError
        If a param with the given name does not exist.
    '''
    wanted = self._fmt(str(key))
    # Sentinel lets us distinguish "no match" from a param that is falsy.
    _missing = object()
    match = next(
        (param for index, param in enumerate(self._params)
         if key == index or wanted == param.name),
        _missing)
    if match is _missing:
        raise KeyError(key)
    return match
constant[Get a shared variable for a parameter by name.
Parameters
----------
key : str or int
The name of the parameter to look up, or the index of the parameter
in our parameter list. These are both dependent on the
implementation of the layer.
Returns
-------
param : shared variable
A shared variable containing values for the given parameter.
Raises
------
KeyError
If a param with the given name does not exist.
]
variable[name] assign[=] call[name[self]._fmt, parameter[call[name[str], parameter[name[key]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b02d1cc0>, <ast.Name object at 0x7da1b02d1f60>]]] in starred[call[name[enumerate], parameter[name[self]._params]]] begin[:]
if <ast.BoolOp object at 0x7da1b02d1210> begin[:]
return[name[p]]
<ast.Raise object at 0x7da1b02c0250> | keyword[def] identifier[find] ( identifier[self] , identifier[key] ):
literal[string]
identifier[name] = identifier[self] . identifier[_fmt] ( identifier[str] ( identifier[key] ))
keyword[for] identifier[i] , identifier[p] keyword[in] identifier[enumerate] ( identifier[self] . identifier[_params] ):
keyword[if] identifier[key] == identifier[i] keyword[or] identifier[name] == identifier[p] . identifier[name] :
keyword[return] identifier[p]
keyword[raise] identifier[KeyError] ( identifier[key] ) | def find(self, key):
"""Get a shared variable for a parameter by name.
Parameters
----------
key : str or int
The name of the parameter to look up, or the index of the parameter
in our parameter list. These are both dependent on the
implementation of the layer.
Returns
-------
param : shared variable
A shared variable containing values for the given parameter.
Raises
------
KeyError
If a param with the given name does not exist.
"""
name = self._fmt(str(key))
for (i, p) in enumerate(self._params):
if key == i or name == p.name:
return p # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
raise KeyError(key) |
def consume(self, limit=None):
"""Returns an iterator that waits for one message at a time."""
for total_message_count in count():
if limit and total_message_count >= limit:
raise StopIteration
if not self.channel.is_open:
raise StopIteration
self.channel.wait()
yield True | def function[consume, parameter[self, limit]]:
constant[Returns an iterator that waits for one message at a time.]
for taget[name[total_message_count]] in starred[call[name[count], parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b0fd5810> begin[:]
<ast.Raise object at 0x7da1b0fd4be0>
if <ast.UnaryOp object at 0x7da1b0fd76a0> begin[:]
<ast.Raise object at 0x7da1b0fd7d60>
call[name[self].channel.wait, parameter[]]
<ast.Yield object at 0x7da1b0fd7a00> | keyword[def] identifier[consume] ( identifier[self] , identifier[limit] = keyword[None] ):
literal[string]
keyword[for] identifier[total_message_count] keyword[in] identifier[count] ():
keyword[if] identifier[limit] keyword[and] identifier[total_message_count] >= identifier[limit] :
keyword[raise] identifier[StopIteration]
keyword[if] keyword[not] identifier[self] . identifier[channel] . identifier[is_open] :
keyword[raise] identifier[StopIteration]
identifier[self] . identifier[channel] . identifier[wait] ()
keyword[yield] keyword[True] | def consume(self, limit=None):
"""Returns an iterator that waits for one message at a time."""
for total_message_count in count():
if limit and total_message_count >= limit:
raise StopIteration # depends on [control=['if'], data=[]]
if not self.channel.is_open:
raise StopIteration # depends on [control=['if'], data=[]]
self.channel.wait()
yield True # depends on [control=['for'], data=['total_message_count']] |
def add_assay(self, name, assay):
"""
Add an assay to the material.
:param name: The name of the new assay.
:param assay: A numpy array containing the size class mass fractions
for the assay. The sequence of the assay's elements must correspond
to the sequence of the material's size classes.
"""
if not type(assay) is numpy.ndarray:
raise Exception("Invalid assay. It must be a numpy array.")
elif not assay.shape == (self.size_class_count,):
raise Exception(
"Invalid assay: It must have the same number of elements "
"as the material has size classes.")
elif name in self.assays.keys():
raise Exception(
"Invalid assay: An assay with that name already exists.")
self.assays[name] = assay | def function[add_assay, parameter[self, name, assay]]:
constant[
Add an assay to the material.
:param name: The name of the new assay.
:param assay: A numpy array containing the size class mass fractions
for the assay. The sequence of the assay's elements must correspond
to the sequence of the material's size classes.
]
if <ast.UnaryOp object at 0x7da1b00f4c10> begin[:]
<ast.Raise object at 0x7da1b00f7550>
call[name[self].assays][name[name]] assign[=] name[assay] | keyword[def] identifier[add_assay] ( identifier[self] , identifier[name] , identifier[assay] ):
literal[string]
keyword[if] keyword[not] identifier[type] ( identifier[assay] ) keyword[is] identifier[numpy] . identifier[ndarray] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[elif] keyword[not] identifier[assay] . identifier[shape] ==( identifier[self] . identifier[size_class_count] ,):
keyword[raise] identifier[Exception] (
literal[string]
literal[string] )
keyword[elif] identifier[name] keyword[in] identifier[self] . identifier[assays] . identifier[keys] ():
keyword[raise] identifier[Exception] (
literal[string] )
identifier[self] . identifier[assays] [ identifier[name] ]= identifier[assay] | def add_assay(self, name, assay):
"""
Add an assay to the material.
:param name: The name of the new assay.
:param assay: A numpy array containing the size class mass fractions
for the assay. The sequence of the assay's elements must correspond
to the sequence of the material's size classes.
"""
if not type(assay) is numpy.ndarray:
raise Exception('Invalid assay. It must be a numpy array.') # depends on [control=['if'], data=[]]
elif not assay.shape == (self.size_class_count,):
raise Exception('Invalid assay: It must have the same number of elements as the material has size classes.') # depends on [control=['if'], data=[]]
elif name in self.assays.keys():
raise Exception('Invalid assay: An assay with that name already exists.') # depends on [control=['if'], data=[]]
self.assays[name] = assay |
def _on_connection_finished(self, result):
"""Callback when the connection attempt to a BLE device has finished
This function if called when a new connection is successfully completed
Args:
event (BGAPIPacket): Connection event
"""
success, retval, context = self._parse_return(result)
conn_id = context['connection_id']
callback = context['callback']
if success is False:
callback(conn_id, self.id, False, 'Timeout opening connection')
with self.count_lock:
self.connecting_count -= 1
return
handle = retval['handle']
context['disconnect_handler'] = self._on_connection_failed
context['connect_time'] = time.time()
context['state'] = 'preparing'
self._connections[handle] = context
self.probe_services(handle, conn_id, self._probe_services_finished) | def function[_on_connection_finished, parameter[self, result]]:
constant[Callback when the connection attempt to a BLE device has finished
This function if called when a new connection is successfully completed
Args:
event (BGAPIPacket): Connection event
]
<ast.Tuple object at 0x7da20c76fc40> assign[=] call[name[self]._parse_return, parameter[name[result]]]
variable[conn_id] assign[=] call[name[context]][constant[connection_id]]
variable[callback] assign[=] call[name[context]][constant[callback]]
if compare[name[success] is constant[False]] begin[:]
call[name[callback], parameter[name[conn_id], name[self].id, constant[False], constant[Timeout opening connection]]]
with name[self].count_lock begin[:]
<ast.AugAssign object at 0x7da20c76eda0>
return[None]
variable[handle] assign[=] call[name[retval]][constant[handle]]
call[name[context]][constant[disconnect_handler]] assign[=] name[self]._on_connection_failed
call[name[context]][constant[connect_time]] assign[=] call[name[time].time, parameter[]]
call[name[context]][constant[state]] assign[=] constant[preparing]
call[name[self]._connections][name[handle]] assign[=] name[context]
call[name[self].probe_services, parameter[name[handle], name[conn_id], name[self]._probe_services_finished]] | keyword[def] identifier[_on_connection_finished] ( identifier[self] , identifier[result] ):
literal[string]
identifier[success] , identifier[retval] , identifier[context] = identifier[self] . identifier[_parse_return] ( identifier[result] )
identifier[conn_id] = identifier[context] [ literal[string] ]
identifier[callback] = identifier[context] [ literal[string] ]
keyword[if] identifier[success] keyword[is] keyword[False] :
identifier[callback] ( identifier[conn_id] , identifier[self] . identifier[id] , keyword[False] , literal[string] )
keyword[with] identifier[self] . identifier[count_lock] :
identifier[self] . identifier[connecting_count] -= literal[int]
keyword[return]
identifier[handle] = identifier[retval] [ literal[string] ]
identifier[context] [ literal[string] ]= identifier[self] . identifier[_on_connection_failed]
identifier[context] [ literal[string] ]= identifier[time] . identifier[time] ()
identifier[context] [ literal[string] ]= literal[string]
identifier[self] . identifier[_connections] [ identifier[handle] ]= identifier[context]
identifier[self] . identifier[probe_services] ( identifier[handle] , identifier[conn_id] , identifier[self] . identifier[_probe_services_finished] ) | def _on_connection_finished(self, result):
"""Callback when the connection attempt to a BLE device has finished
This function if called when a new connection is successfully completed
Args:
event (BGAPIPacket): Connection event
"""
(success, retval, context) = self._parse_return(result)
conn_id = context['connection_id']
callback = context['callback']
if success is False:
callback(conn_id, self.id, False, 'Timeout opening connection')
with self.count_lock:
self.connecting_count -= 1 # depends on [control=['with'], data=[]]
return # depends on [control=['if'], data=[]]
handle = retval['handle']
context['disconnect_handler'] = self._on_connection_failed
context['connect_time'] = time.time()
context['state'] = 'preparing'
self._connections[handle] = context
self.probe_services(handle, conn_id, self._probe_services_finished) |
def _resolve_subkeys(key, separator='.'):
"""Given a key which may actually be a nested key, return the top level
key and any nested subkeys as separate values.
Args:
key (str): A string that may or may not contain the separator.
separator (str): The namespace separator. Defaults to `.`.
Returns:
Tuple[str, str]: The key and subkey(s).
"""
subkey = None
if separator in key:
index = key.index(separator)
subkey = key[index + 1:]
key = key[:index]
return key, subkey | def function[_resolve_subkeys, parameter[key, separator]]:
constant[Given a key which may actually be a nested key, return the top level
key and any nested subkeys as separate values.
Args:
key (str): A string that may or may not contain the separator.
separator (str): The namespace separator. Defaults to `.`.
Returns:
Tuple[str, str]: The key and subkey(s).
]
variable[subkey] assign[=] constant[None]
if compare[name[separator] in name[key]] begin[:]
variable[index] assign[=] call[name[key].index, parameter[name[separator]]]
variable[subkey] assign[=] call[name[key]][<ast.Slice object at 0x7da1b28de0b0>]
variable[key] assign[=] call[name[key]][<ast.Slice object at 0x7da1b2844fa0>]
return[tuple[[<ast.Name object at 0x7da1b2846ec0>, <ast.Name object at 0x7da1b2847370>]]] | keyword[def] identifier[_resolve_subkeys] ( identifier[key] , identifier[separator] = literal[string] ):
literal[string]
identifier[subkey] = keyword[None]
keyword[if] identifier[separator] keyword[in] identifier[key] :
identifier[index] = identifier[key] . identifier[index] ( identifier[separator] )
identifier[subkey] = identifier[key] [ identifier[index] + literal[int] :]
identifier[key] = identifier[key] [: identifier[index] ]
keyword[return] identifier[key] , identifier[subkey] | def _resolve_subkeys(key, separator='.'):
"""Given a key which may actually be a nested key, return the top level
key and any nested subkeys as separate values.
Args:
key (str): A string that may or may not contain the separator.
separator (str): The namespace separator. Defaults to `.`.
Returns:
Tuple[str, str]: The key and subkey(s).
"""
subkey = None
if separator in key:
index = key.index(separator)
subkey = key[index + 1:]
key = key[:index] # depends on [control=['if'], data=['separator', 'key']]
return (key, subkey) |
def _after_submission_insertion(self, task, inputdata, debug, submission, submissionid):
"""
Called after any new submission is inserted into the database, but before starting the job. Should be overridden in subclasses.
:param task: Task related to the submission
:param inputdata: input of the student
:param debug: True, False or "ssh". See add_job.
:param submission: the new document that was inserted (do not contain _id)
:param submissionid: submission id of the submission
"""
# If we are submitting for a group, send the group (user list joined with ",") as username
if "group" not in [p.get_id() for p in task.get_problems()]: # do not overwrite
username = self._user_manager.session_username()
if task.is_group_task() and not self._user_manager.has_staff_rights_on_course(task.get_course(), username):
group = self._database.aggregations.find_one(
{"courseid": task.get_course_id(), "groups.students": username},
{"groups": {"$elemMatch": {"students": username}}})
inputdata["username"] = ','.join(group["groups"][0]["students"])
return self._delete_exceeding_submissions(self._user_manager.session_username(), task) | def function[_after_submission_insertion, parameter[self, task, inputdata, debug, submission, submissionid]]:
constant[
Called after any new submission is inserted into the database, but before starting the job. Should be overridden in subclasses.
:param task: Task related to the submission
:param inputdata: input of the student
:param debug: True, False or "ssh". See add_job.
:param submission: the new document that was inserted (do not contain _id)
:param submissionid: submission id of the submission
]
if compare[constant[group] <ast.NotIn object at 0x7da2590d7190> <ast.ListComp object at 0x7da20e9b0d00>] begin[:]
variable[username] assign[=] call[name[self]._user_manager.session_username, parameter[]]
if <ast.BoolOp object at 0x7da20e9b3e80> begin[:]
variable[group] assign[=] call[name[self]._database.aggregations.find_one, parameter[dictionary[[<ast.Constant object at 0x7da20e9b3880>, <ast.Constant object at 0x7da20e9b1420>], [<ast.Call object at 0x7da20e9b1c60>, <ast.Name object at 0x7da20e9b2800>]], dictionary[[<ast.Constant object at 0x7da20e9b0760>], [<ast.Dict object at 0x7da20e9b2500>]]]]
call[name[inputdata]][constant[username]] assign[=] call[constant[,].join, parameter[call[call[call[name[group]][constant[groups]]][constant[0]]][constant[students]]]]
return[call[name[self]._delete_exceeding_submissions, parameter[call[name[self]._user_manager.session_username, parameter[]], name[task]]]] | keyword[def] identifier[_after_submission_insertion] ( identifier[self] , identifier[task] , identifier[inputdata] , identifier[debug] , identifier[submission] , identifier[submissionid] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] [ identifier[p] . identifier[get_id] () keyword[for] identifier[p] keyword[in] identifier[task] . identifier[get_problems] ()]:
identifier[username] = identifier[self] . identifier[_user_manager] . identifier[session_username] ()
keyword[if] identifier[task] . identifier[is_group_task] () keyword[and] keyword[not] identifier[self] . identifier[_user_manager] . identifier[has_staff_rights_on_course] ( identifier[task] . identifier[get_course] (), identifier[username] ):
identifier[group] = identifier[self] . identifier[_database] . identifier[aggregations] . identifier[find_one] (
{ literal[string] : identifier[task] . identifier[get_course_id] (), literal[string] : identifier[username] },
{ literal[string] :{ literal[string] :{ literal[string] : identifier[username] }}})
identifier[inputdata] [ literal[string] ]= literal[string] . identifier[join] ( identifier[group] [ literal[string] ][ literal[int] ][ literal[string] ])
keyword[return] identifier[self] . identifier[_delete_exceeding_submissions] ( identifier[self] . identifier[_user_manager] . identifier[session_username] (), identifier[task] ) | def _after_submission_insertion(self, task, inputdata, debug, submission, submissionid):
"""
Called after any new submission is inserted into the database, but before starting the job. Should be overridden in subclasses.
:param task: Task related to the submission
:param inputdata: input of the student
:param debug: True, False or "ssh". See add_job.
:param submission: the new document that was inserted (do not contain _id)
:param submissionid: submission id of the submission
"""
# If we are submitting for a group, send the group (user list joined with ",") as username
if 'group' not in [p.get_id() for p in task.get_problems()]: # do not overwrite
username = self._user_manager.session_username()
if task.is_group_task() and (not self._user_manager.has_staff_rights_on_course(task.get_course(), username)):
group = self._database.aggregations.find_one({'courseid': task.get_course_id(), 'groups.students': username}, {'groups': {'$elemMatch': {'students': username}}})
inputdata['username'] = ','.join(group['groups'][0]['students']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return self._delete_exceeding_submissions(self._user_manager.session_username(), task) |
def children_after_parents(self, piper1, piper2):
"""
Custom compare function. Returns ``1`` if the first ``Piper`` instance
is upstream of the second ``Piper`` instance, ``-1`` if the first
``Piper`` is downstream of the second ``Piper`` and ``0`` if the two
``Pipers`` are independent.
Arguments:
- piper1(``Piper``) ``Piper`` instance.
- piper2(``Piper``) ``Piper`` instance.
"""
if piper1 in self[piper2].deep_nodes():
return 1
elif piper2 in self[piper1].deep_nodes():
return - 1
else:
return 0 | def function[children_after_parents, parameter[self, piper1, piper2]]:
constant[
Custom compare function. Returns ``1`` if the first ``Piper`` instance
is upstream of the second ``Piper`` instance, ``-1`` if the first
``Piper`` is downstream of the second ``Piper`` and ``0`` if the two
``Pipers`` are independent.
Arguments:
- piper1(``Piper``) ``Piper`` instance.
- piper2(``Piper``) ``Piper`` instance.
]
if compare[name[piper1] in call[call[name[self]][name[piper2]].deep_nodes, parameter[]]] begin[:]
return[constant[1]] | keyword[def] identifier[children_after_parents] ( identifier[self] , identifier[piper1] , identifier[piper2] ):
literal[string]
keyword[if] identifier[piper1] keyword[in] identifier[self] [ identifier[piper2] ]. identifier[deep_nodes] ():
keyword[return] literal[int]
keyword[elif] identifier[piper2] keyword[in] identifier[self] [ identifier[piper1] ]. identifier[deep_nodes] ():
keyword[return] - literal[int]
keyword[else] :
keyword[return] literal[int] | def children_after_parents(self, piper1, piper2):
"""
Custom compare function. Returns ``1`` if the first ``Piper`` instance
is upstream of the second ``Piper`` instance, ``-1`` if the first
``Piper`` is downstream of the second ``Piper`` and ``0`` if the two
``Pipers`` are independent.
Arguments:
- piper1(``Piper``) ``Piper`` instance.
- piper2(``Piper``) ``Piper`` instance.
"""
if piper1 in self[piper2].deep_nodes():
return 1 # depends on [control=['if'], data=[]]
elif piper2 in self[piper1].deep_nodes():
return -1 # depends on [control=['if'], data=[]]
else:
return 0 |
def str_to_v1_str(xml_str):
"""Convert a API v2 XML doc to v1 XML doc.
Removes elements that are only valid for v2 and changes namespace to v1.
If doc is already v1, it is returned unchanged.
Args:
xml_str : str
API v2 XML doc. E.g.: ``SystemMetadata v2``.
Returns:
str : API v1 XML doc. E.g.: ``SystemMetadata v1``.
"""
if str_is_v1(xml_str):
return xml_str
etree_obj = str_to_etree(xml_str)
strip_v2_elements(etree_obj)
etree_replace_namespace(etree_obj, d1_common.types.dataoneTypes_v1.Namespace)
return etree_to_str(etree_obj) | def function[str_to_v1_str, parameter[xml_str]]:
constant[Convert a API v2 XML doc to v1 XML doc.
Removes elements that are only valid for v2 and changes namespace to v1.
If doc is already v1, it is returned unchanged.
Args:
xml_str : str
API v2 XML doc. E.g.: ``SystemMetadata v2``.
Returns:
str : API v1 XML doc. E.g.: ``SystemMetadata v1``.
]
if call[name[str_is_v1], parameter[name[xml_str]]] begin[:]
return[name[xml_str]]
variable[etree_obj] assign[=] call[name[str_to_etree], parameter[name[xml_str]]]
call[name[strip_v2_elements], parameter[name[etree_obj]]]
call[name[etree_replace_namespace], parameter[name[etree_obj], name[d1_common].types.dataoneTypes_v1.Namespace]]
return[call[name[etree_to_str], parameter[name[etree_obj]]]] | keyword[def] identifier[str_to_v1_str] ( identifier[xml_str] ):
literal[string]
keyword[if] identifier[str_is_v1] ( identifier[xml_str] ):
keyword[return] identifier[xml_str]
identifier[etree_obj] = identifier[str_to_etree] ( identifier[xml_str] )
identifier[strip_v2_elements] ( identifier[etree_obj] )
identifier[etree_replace_namespace] ( identifier[etree_obj] , identifier[d1_common] . identifier[types] . identifier[dataoneTypes_v1] . identifier[Namespace] )
keyword[return] identifier[etree_to_str] ( identifier[etree_obj] ) | def str_to_v1_str(xml_str):
"""Convert a API v2 XML doc to v1 XML doc.
Removes elements that are only valid for v2 and changes namespace to v1.
If doc is already v1, it is returned unchanged.
Args:
xml_str : str
API v2 XML doc. E.g.: ``SystemMetadata v2``.
Returns:
str : API v1 XML doc. E.g.: ``SystemMetadata v1``.
"""
if str_is_v1(xml_str):
return xml_str # depends on [control=['if'], data=[]]
etree_obj = str_to_etree(xml_str)
strip_v2_elements(etree_obj)
etree_replace_namespace(etree_obj, d1_common.types.dataoneTypes_v1.Namespace)
return etree_to_str(etree_obj) |
def post_team_iteration(self, iteration, team_context):
"""PostTeamIteration.
Add an iteration to the team
:param :class:`<TeamSettingsIteration> <azure.devops.v5_0.work.models.TeamSettingsIteration>` iteration: Iteration to add
:param :class:`<TeamContext> <azure.devops.v5_0.work.models.TeamContext>` team_context: The team context for the operation
:rtype: :class:`<TeamSettingsIteration> <azure.devops.v5_0.work.models.TeamSettingsIteration>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
content = self._serialize.body(iteration, 'TeamSettingsIteration')
response = self._send(http_method='POST',
location_id='c9175577-28a1-4b06-9197-8636af9f64ad',
version='5.0',
route_values=route_values,
content=content)
return self._deserialize('TeamSettingsIteration', response) | def function[post_team_iteration, parameter[self, iteration, team_context]]:
constant[PostTeamIteration.
Add an iteration to the team
:param :class:`<TeamSettingsIteration> <azure.devops.v5_0.work.models.TeamSettingsIteration>` iteration: Iteration to add
:param :class:`<TeamContext> <azure.devops.v5_0.work.models.TeamContext>` team_context: The team context for the operation
:rtype: :class:`<TeamSettingsIteration> <azure.devops.v5_0.work.models.TeamSettingsIteration>`
]
variable[project] assign[=] constant[None]
variable[team] assign[=] constant[None]
if compare[name[team_context] is_not constant[None]] begin[:]
if name[team_context].project_id begin[:]
variable[project] assign[=] name[team_context].project_id
if name[team_context].team_id begin[:]
variable[team] assign[=] name[team_context].team_id
variable[route_values] assign[=] dictionary[[], []]
if compare[name[project] is_not constant[None]] begin[:]
call[name[route_values]][constant[project]] assign[=] call[name[self]._serialize.url, parameter[constant[project], name[project], constant[string]]]
if compare[name[team] is_not constant[None]] begin[:]
call[name[route_values]][constant[team]] assign[=] call[name[self]._serialize.url, parameter[constant[team], name[team], constant[string]]]
variable[content] assign[=] call[name[self]._serialize.body, parameter[name[iteration], constant[TeamSettingsIteration]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
return[call[name[self]._deserialize, parameter[constant[TeamSettingsIteration], name[response]]]] | keyword[def] identifier[post_team_iteration] ( identifier[self] , identifier[iteration] , identifier[team_context] ):
literal[string]
identifier[project] = keyword[None]
identifier[team] = keyword[None]
keyword[if] identifier[team_context] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[team_context] . identifier[project_id] :
identifier[project] = identifier[team_context] . identifier[project_id]
keyword[else] :
identifier[project] = identifier[team_context] . identifier[project]
keyword[if] identifier[team_context] . identifier[team_id] :
identifier[team] = identifier[team_context] . identifier[team_id]
keyword[else] :
identifier[team] = identifier[team_context] . identifier[team]
identifier[route_values] ={}
keyword[if] identifier[project] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[project] , literal[string] )
keyword[if] identifier[team] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[team] , literal[string] )
identifier[content] = identifier[self] . identifier[_serialize] . identifier[body] ( identifier[iteration] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[route_values] = identifier[route_values] ,
identifier[content] = identifier[content] )
keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] ) | def post_team_iteration(self, iteration, team_context):
"""PostTeamIteration.
Add an iteration to the team
:param :class:`<TeamSettingsIteration> <azure.devops.v5_0.work.models.TeamSettingsIteration>` iteration: Iteration to add
:param :class:`<TeamContext> <azure.devops.v5_0.work.models.TeamContext>` team_context: The team context for the operation
:rtype: :class:`<TeamSettingsIteration> <azure.devops.v5_0.work.models.TeamSettingsIteration>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id # depends on [control=['if'], data=[]]
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id # depends on [control=['if'], data=[]]
else:
team = team_context.team # depends on [control=['if'], data=['team_context']]
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string') # depends on [control=['if'], data=['project']]
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string') # depends on [control=['if'], data=['team']]
content = self._serialize.body(iteration, 'TeamSettingsIteration')
response = self._send(http_method='POST', location_id='c9175577-28a1-4b06-9197-8636af9f64ad', version='5.0', route_values=route_values, content=content)
return self._deserialize('TeamSettingsIteration', response) |
def road_address(self):
"""
:example 세종특별자치시 도움5로 19 (어진동)
"""
pattern = self.random_element(self.road_address_formats)
return self.generator.parse(pattern) | def function[road_address, parameter[self]]:
constant[
:example 세종특별자치시 도움5로 19 (어진동)
]
variable[pattern] assign[=] call[name[self].random_element, parameter[name[self].road_address_formats]]
return[call[name[self].generator.parse, parameter[name[pattern]]]] | keyword[def] identifier[road_address] ( identifier[self] ):
literal[string]
identifier[pattern] = identifier[self] . identifier[random_element] ( identifier[self] . identifier[road_address_formats] )
keyword[return] identifier[self] . identifier[generator] . identifier[parse] ( identifier[pattern] ) | def road_address(self):
"""
:example 세종특별자치시 도움5로 19 (어진동)
"""
pattern = self.random_element(self.road_address_formats)
return self.generator.parse(pattern) |
def folderitem(self, obj, item, index):
"""Applies new properties to the item (Client) that is currently being
rendered as a row in the list
:param obj: client to be rendered as a row in the list
:param item: dict representation of the client, suitable for the list
:param index: current position of the item within the list
:type obj: ATContentType/DexterityContentType
:type item: dict
:type index: int
:return: the dict representation of the item
:rtype: dict
"""
url = obj.absolute_url()
title = obj.Title()
item["replace"]["Title"] = get_link(url, value=title)
item["getEffectiveDate"] = self.ulocalized_time(
obj.getEffectiveDate())
item["getExpirationDate"] = self.ulocalized_time(
obj.getExpirationDate())
return item | def function[folderitem, parameter[self, obj, item, index]]:
constant[Applies new properties to the item (Client) that is currently being
rendered as a row in the list
:param obj: client to be rendered as a row in the list
:param item: dict representation of the client, suitable for the list
:param index: current position of the item within the list
:type obj: ATContentType/DexterityContentType
:type item: dict
:type index: int
:return: the dict representation of the item
:rtype: dict
]
variable[url] assign[=] call[name[obj].absolute_url, parameter[]]
variable[title] assign[=] call[name[obj].Title, parameter[]]
call[call[name[item]][constant[replace]]][constant[Title]] assign[=] call[name[get_link], parameter[name[url]]]
call[name[item]][constant[getEffectiveDate]] assign[=] call[name[self].ulocalized_time, parameter[call[name[obj].getEffectiveDate, parameter[]]]]
call[name[item]][constant[getExpirationDate]] assign[=] call[name[self].ulocalized_time, parameter[call[name[obj].getExpirationDate, parameter[]]]]
return[name[item]] | keyword[def] identifier[folderitem] ( identifier[self] , identifier[obj] , identifier[item] , identifier[index] ):
literal[string]
identifier[url] = identifier[obj] . identifier[absolute_url] ()
identifier[title] = identifier[obj] . identifier[Title] ()
identifier[item] [ literal[string] ][ literal[string] ]= identifier[get_link] ( identifier[url] , identifier[value] = identifier[title] )
identifier[item] [ literal[string] ]= identifier[self] . identifier[ulocalized_time] (
identifier[obj] . identifier[getEffectiveDate] ())
identifier[item] [ literal[string] ]= identifier[self] . identifier[ulocalized_time] (
identifier[obj] . identifier[getExpirationDate] ())
keyword[return] identifier[item] | def folderitem(self, obj, item, index):
"""Applies new properties to the item (Client) that is currently being
rendered as a row in the list
:param obj: client to be rendered as a row in the list
:param item: dict representation of the client, suitable for the list
:param index: current position of the item within the list
:type obj: ATContentType/DexterityContentType
:type item: dict
:type index: int
:return: the dict representation of the item
:rtype: dict
"""
url = obj.absolute_url()
title = obj.Title()
item['replace']['Title'] = get_link(url, value=title)
item['getEffectiveDate'] = self.ulocalized_time(obj.getEffectiveDate())
item['getExpirationDate'] = self.ulocalized_time(obj.getExpirationDate())
return item |
def create_namespaced_network_policy(self, namespace, body, **kwargs):
"""
create a NetworkPolicy
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_namespaced_network_policy(namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1beta1NetworkPolicy body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
:return: V1beta1NetworkPolicy
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_namespaced_network_policy_with_http_info(namespace, body, **kwargs)
else:
(data) = self.create_namespaced_network_policy_with_http_info(namespace, body, **kwargs)
return data | def function[create_namespaced_network_policy, parameter[self, namespace, body]]:
constant[
create a NetworkPolicy
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_namespaced_network_policy(namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1beta1NetworkPolicy body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
:return: V1beta1NetworkPolicy
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].create_namespaced_network_policy_with_http_info, parameter[name[namespace], name[body]]]] | keyword[def] identifier[create_namespaced_network_policy] ( identifier[self] , identifier[namespace] , identifier[body] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[create_namespaced_network_policy_with_http_info] ( identifier[namespace] , identifier[body] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[create_namespaced_network_policy_with_http_info] ( identifier[namespace] , identifier[body] ,** identifier[kwargs] )
keyword[return] identifier[data] | def create_namespaced_network_policy(self, namespace, body, **kwargs):
"""
create a NetworkPolicy
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_namespaced_network_policy(namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1beta1NetworkPolicy body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
:return: V1beta1NetworkPolicy
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_namespaced_network_policy_with_http_info(namespace, body, **kwargs) # depends on [control=['if'], data=[]]
else:
data = self.create_namespaced_network_policy_with_http_info(namespace, body, **kwargs)
return data |
def _publish_status(self, status, parent=None):
"""send status (busy/idle) on IOPub"""
self.session.send(self.iopub_socket,
u'status',
{u'execution_state': status},
parent=parent,
ident=self._topic('status'),
) | def function[_publish_status, parameter[self, status, parent]]:
constant[send status (busy/idle) on IOPub]
call[name[self].session.send, parameter[name[self].iopub_socket, constant[status], dictionary[[<ast.Constant object at 0x7da18ede4670>], [<ast.Name object at 0x7da18ede76a0>]]]] | keyword[def] identifier[_publish_status] ( identifier[self] , identifier[status] , identifier[parent] = keyword[None] ):
literal[string]
identifier[self] . identifier[session] . identifier[send] ( identifier[self] . identifier[iopub_socket] ,
literal[string] ,
{ literal[string] : identifier[status] },
identifier[parent] = identifier[parent] ,
identifier[ident] = identifier[self] . identifier[_topic] ( literal[string] ),
) | def _publish_status(self, status, parent=None):
"""send status (busy/idle) on IOPub"""
self.session.send(self.iopub_socket, u'status', {u'execution_state': status}, parent=parent, ident=self._topic('status')) |
def load(self, id, *args, **kwargs):
    """Load a remote resource by id.

    Runs the pre-load hook, fetches the resource, then lets the post-load
    hook transform the response before it is returned.
    """
    self._pre_load(id, *args, **kwargs)
    raw = self._load(id, *args, **kwargs)
    return self._post_load(raw, *args, **kwargs)
constant[
loads a remote resource by id
]
call[name[self]._pre_load, parameter[name[id], <ast.Starred object at 0x7da1b0b3aef0>]]
variable[response] assign[=] call[name[self]._load, parameter[name[id], <ast.Starred object at 0x7da18bc72110>]]
variable[response] assign[=] call[name[self]._post_load, parameter[name[response], <ast.Starred object at 0x7da18bc71420>]]
return[name[response]] | keyword[def] identifier[load] ( identifier[self] , identifier[id] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[_pre_load] ( identifier[id] ,* identifier[args] ,** identifier[kwargs] )
identifier[response] = identifier[self] . identifier[_load] ( identifier[id] ,* identifier[args] ,** identifier[kwargs] )
identifier[response] = identifier[self] . identifier[_post_load] ( identifier[response] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[response] | def load(self, id, *args, **kwargs):
"""
loads a remote resource by id
"""
self._pre_load(id, *args, **kwargs)
response = self._load(id, *args, **kwargs)
response = self._post_load(response, *args, **kwargs)
return response |
def get_files(*bases):
    """Yield data-file paths found under each dotted *base* directory.

    Each base is a dotted path (e.g. ``"pkg.data"``) resolved relative to
    this module's directory; yielded paths are relative to the parent of
    the base's first component, prefixed with that component.
    """
    here = os.path.dirname(__file__)
    for base in bases:
        basedir, _ = base.split(".", 1)
        target = os.path.join(here, *base.split("."))
        # Prefix length to strip: the parent of `target`, the top-level
        # package directory name, and the two joining path separators.
        strip = len(os.path.dirname(target)) + len(basedir) + 2
        for root, _dirs, files in os.walk(target):
            for filename in files:
                yield os.path.join(basedir, root, filename)[strip:]
constant[
List all files in a data directory.
]
for taget[name[base]] in starred[name[bases]] begin[:]
<ast.Tuple object at 0x7da20c6e5720> assign[=] call[name[base].split, parameter[constant[.], constant[1]]]
variable[base] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[__file__]]], <ast.Starred object at 0x7da20c6e7a30>]]
variable[rem] assign[=] binary_operation[binary_operation[call[name[len], parameter[call[name[os].path.dirname, parameter[name[base]]]]] + call[name[len], parameter[name[basedir]]]] + constant[2]]
for taget[tuple[[<ast.Name object at 0x7da20c6e59c0>, <ast.Name object at 0x7da20c6e61d0>, <ast.Name object at 0x7da20c6e5210>]]] in starred[call[name[os].walk, parameter[name[base]]]] begin[:]
for taget[name[name]] in starred[name[files]] begin[:]
<ast.Yield object at 0x7da20c6e45e0> | keyword[def] identifier[get_files] (* identifier[bases] ):
literal[string]
keyword[for] identifier[base] keyword[in] identifier[bases] :
identifier[basedir] , identifier[_] = identifier[base] . identifier[split] ( literal[string] , literal[int] )
identifier[base] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ),* identifier[base] . identifier[split] ( literal[string] ))
identifier[rem] = identifier[len] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[base] ))+ identifier[len] ( identifier[basedir] )+ literal[int]
keyword[for] identifier[root] , identifier[dirs] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[base] ):
keyword[for] identifier[name] keyword[in] identifier[files] :
keyword[yield] identifier[os] . identifier[path] . identifier[join] ( identifier[basedir] , identifier[root] , identifier[name] )[ identifier[rem] :] | def get_files(*bases):
"""
List all files in a data directory.
"""
for base in bases:
(basedir, _) = base.split('.', 1)
base = os.path.join(os.path.dirname(__file__), *base.split('.'))
rem = len(os.path.dirname(base)) + len(basedir) + 2
for (root, dirs, files) in os.walk(base):
for name in files:
yield os.path.join(basedir, root, name)[rem:] # depends on [control=['for'], data=['name']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['base']] |
def compilable_sources(self, sourcedir, absolute=False, recursive=True,
                       excludes=None):
    """
    Find all scss sources that should be compiled, aka all sources that
    are not "partials" Sass sources.

    Args:
        sourcedir (str): Directory path to scan.

    Keyword Arguments:
        absolute (bool): Returned paths will be absolute using
            ``sourcedir`` argument (if True), else return relative paths.
        recursive (bool): Switch to enabled recursive finding (if True).
            Default to True.
        excludes (list): A list of excluding patterns (glob patterns).
            Patterns are matched against the relative filepath (from its
            sourcedir). Defaults to no exclusions.

    Returns:
        list: List of source paths.
    """
    # The default used to be a shared mutable ``[]``; a ``None`` sentinel
    # avoids the mutable-default-argument pitfall with identical semantics.
    if excludes is None:
        excludes = []
    filepaths = []
    for root, dirs, files in os.walk(sourcedir):
        # Sort structure to avoid arbitrary order
        dirs.sort()
        files.sort()
        for item in files:
            # Store relative directory but drop it if at root ('.')
            relative_dir = os.path.relpath(root, sourcedir)
            if relative_dir == '.':
                relative_dir = ''
            # Matching all conditions
            absolute_filepath = os.path.join(root, item)
            conditions = {
                'sourcedir': sourcedir,
                'nopartial': True,
                'exclude_patterns': excludes,
                'excluded_libdirs': [],
            }
            if self.match_conditions(absolute_filepath, **conditions):
                relative_filepath = os.path.join(relative_dir, item)
                filepaths.append(
                    absolute_filepath if absolute else relative_filepath
                )
        # For non recursive usage, break from the first entry
        if not recursive:
            break
    return filepaths
constant[
Find all scss sources that should be compiled, aka all sources that
are not "partials" Sass sources.
Args:
sourcedir (str): Directory path to scan.
Keyword Arguments:
absolute (bool): Returned paths will be absolute using
``sourcedir`` argument (if True), else return relative paths.
recursive (bool): Switch to enabled recursive finding (if True).
Default to True.
excludes (list): A list of excluding patterns (glob patterns).
Patterns are matched against the relative filepath (from its
sourcedir).
Returns:
list: List of source paths.
]
variable[filepaths] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b094ed40>, <ast.Name object at 0x7da1b094ee90>, <ast.Name object at 0x7da1b094f1c0>]]] in starred[call[name[os].walk, parameter[name[sourcedir]]]] begin[:]
call[name[dirs].sort, parameter[]]
call[name[files].sort, parameter[]]
for taget[name[item]] in starred[name[files]] begin[:]
variable[relative_dir] assign[=] call[name[os].path.relpath, parameter[name[root], name[sourcedir]]]
if compare[name[relative_dir] equal[==] constant[.]] begin[:]
variable[relative_dir] assign[=] constant[]
variable[absolute_filepath] assign[=] call[name[os].path.join, parameter[name[root], name[item]]]
variable[conditions] assign[=] dictionary[[<ast.Constant object at 0x7da1b094fd30>, <ast.Constant object at 0x7da1b094ff40>, <ast.Constant object at 0x7da1b094d030>, <ast.Constant object at 0x7da1b094db40>], [<ast.Name object at 0x7da1b094e830>, <ast.Constant object at 0x7da1b094f580>, <ast.Name object at 0x7da1b094fc40>, <ast.List object at 0x7da1b094cc70>]]
if call[name[self].match_conditions, parameter[name[absolute_filepath]]] begin[:]
variable[relative_filepath] assign[=] call[name[os].path.join, parameter[name[relative_dir], name[item]]]
if name[absolute] begin[:]
variable[filepath] assign[=] name[absolute_filepath]
call[name[filepaths].append, parameter[name[filepath]]]
if <ast.UnaryOp object at 0x7da1b0aa5030> begin[:]
break
return[name[filepaths]] | keyword[def] identifier[compilable_sources] ( identifier[self] , identifier[sourcedir] , identifier[absolute] = keyword[False] , identifier[recursive] = keyword[True] ,
identifier[excludes] =[]):
literal[string]
identifier[filepaths] =[]
keyword[for] identifier[root] , identifier[dirs] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[sourcedir] ):
identifier[dirs] . identifier[sort] ()
identifier[files] . identifier[sort] ()
keyword[for] identifier[item] keyword[in] identifier[files] :
identifier[relative_dir] = identifier[os] . identifier[path] . identifier[relpath] ( identifier[root] , identifier[sourcedir] )
keyword[if] identifier[relative_dir] == literal[string] :
identifier[relative_dir] = literal[string]
identifier[absolute_filepath] = identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[item] )
identifier[conditions] ={
literal[string] : identifier[sourcedir] ,
literal[string] : keyword[True] ,
literal[string] : identifier[excludes] ,
literal[string] :[],
}
keyword[if] identifier[self] . identifier[match_conditions] ( identifier[absolute_filepath] ,** identifier[conditions] ):
identifier[relative_filepath] = identifier[os] . identifier[path] . identifier[join] ( identifier[relative_dir] , identifier[item] )
keyword[if] identifier[absolute] :
identifier[filepath] = identifier[absolute_filepath]
keyword[else] :
identifier[filepath] = identifier[relative_filepath]
identifier[filepaths] . identifier[append] ( identifier[filepath] )
keyword[if] keyword[not] identifier[recursive] :
keyword[break]
keyword[return] identifier[filepaths] | def compilable_sources(self, sourcedir, absolute=False, recursive=True, excludes=[]):
"""
Find all scss sources that should be compiled, aka all sources that
are not "partials" Sass sources.
Args:
sourcedir (str): Directory path to scan.
Keyword Arguments:
absolute (bool): Returned paths will be absolute using
``sourcedir`` argument (if True), else return relative paths.
recursive (bool): Switch to enabled recursive finding (if True).
Default to True.
excludes (list): A list of excluding patterns (glob patterns).
Patterns are matched against the relative filepath (from its
sourcedir).
Returns:
list: List of source paths.
"""
filepaths = []
for (root, dirs, files) in os.walk(sourcedir):
# Sort structure to avoid arbitrary order
dirs.sort()
files.sort()
for item in files:
# Store relative directory but drop it if at root ('.')
relative_dir = os.path.relpath(root, sourcedir)
if relative_dir == '.':
relative_dir = '' # depends on [control=['if'], data=['relative_dir']]
# Matching all conditions
absolute_filepath = os.path.join(root, item)
conditions = {'sourcedir': sourcedir, 'nopartial': True, 'exclude_patterns': excludes, 'excluded_libdirs': []}
if self.match_conditions(absolute_filepath, **conditions):
relative_filepath = os.path.join(relative_dir, item)
if absolute:
filepath = absolute_filepath # depends on [control=['if'], data=[]]
else:
filepath = relative_filepath
filepaths.append(filepath) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
# For non recursive usage, break from the first entry
if not recursive:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return filepaths |
def _set(self, item, value):
    """
    Helper function to keep the __setattr__ and __setitem__ calls
    KISSish

    Will only set the objects _data if the given items name is not prefixed
    with _ or if the item exists in the protected items List.

    :param item: attribute/key name being assigned
    :param value: value to store; callables are treated as attributes,
        not model data
    :return: the stored value for plain data writes, otherwise the result
        of ``object.__setattr__`` (``None``)
    """
    # object.__getattribute__ is used to bypass any overridden attribute
    # hooks on this class and reach the real instance storage directly.
    if item not in object.__getattribute__(self, "_protectedItems") \
            and item[0] != "_":
        keys = object.__getattribute__(self, "_data")
        # Plain (non-callable) values are stored in the model's _data map.
        if not hasattr(value, '__call__'):
            keys[item] = value
            return value
        # A callable named like an existing data key would shadow the data
        # entry, so refuse the assignment outright.
        if hasattr(value, '__call__') and item in keys:
            raise Exception("""Cannot set model data to a function, same \
name exists in data""")
    # Protected names, _-prefixed names, and callables not colliding with
    # data keys fall through to a regular instance attribute assignment.
    return object.__setattr__(self, item, value)
constant[
Helper function to keep the __setattr__ and __setitem__ calls
KISSish
Will only set the objects _data if the given items name is not prefixed
with _ or if the item exists in the protected items List.
]
if <ast.BoolOp object at 0x7da207f00a30> begin[:]
variable[keys] assign[=] call[name[object].__getattribute__, parameter[name[self], constant[_data]]]
if <ast.UnaryOp object at 0x7da18dc07a60> begin[:]
call[name[keys]][name[item]] assign[=] name[value]
return[name[value]]
if <ast.BoolOp object at 0x7da18dc06d10> begin[:]
<ast.Raise object at 0x7da18dc04940>
return[call[name[object].__setattr__, parameter[name[self], name[item], name[value]]]] | keyword[def] identifier[_set] ( identifier[self] , identifier[item] , identifier[value] ):
literal[string]
keyword[if] identifier[item] keyword[not] keyword[in] identifier[object] . identifier[__getattribute__] ( identifier[self] , literal[string] ) keyword[and] identifier[item] [ literal[int] ]!= literal[string] :
identifier[keys] = identifier[object] . identifier[__getattribute__] ( identifier[self] , literal[string] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[value] , literal[string] ):
identifier[keys] [ identifier[item] ]= identifier[value]
keyword[return] identifier[value]
keyword[if] identifier[hasattr] ( identifier[value] , literal[string] ) keyword[and] identifier[item] keyword[in] identifier[keys] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[return] identifier[object] . identifier[__setattr__] ( identifier[self] , identifier[item] , identifier[value] ) | def _set(self, item, value):
"""
Helper function to keep the __setattr__ and __setitem__ calls
KISSish
Will only set the objects _data if the given items name is not prefixed
with _ or if the item exists in the protected items List.
"""
if item not in object.__getattribute__(self, '_protectedItems') and item[0] != '_':
keys = object.__getattribute__(self, '_data')
if not hasattr(value, '__call__'):
keys[item] = value
return value # depends on [control=['if'], data=[]]
if hasattr(value, '__call__') and item in keys:
raise Exception('Cannot set model data to a function, same name exists in data') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return object.__setattr__(self, item, value) |
def _read_file_and_pos(self):
    """Restore the last saved log position (file name and offset) from disk.

    Reads ``self.pos_storage_filename``, which stores ``"<file>:<pos>"``.
    A missing/unreadable storage file is logged and ignored; a malformed
    position aborts the process with exit code 13.
    """
    try:
        with open(self.pos_storage_filename, 'r+') as storage:
            raw = storage.read()
            _logger.debug('Got position "%s" from file %s'
                          % (raw, self.pos_storage_filename))
            if not raw:
                # Nothing stored yet -- keep the current position.
                return
            file_part, pos_part = raw.split(':')
            try:
                file_part = str(file_part)
                pos_part = int(pos_part)
            except (ValueError, TypeError) as err:
                # An unparsable position is unrecoverable for replication.
                _logger.critical('Can not read position: %s' % err)
                sys.exit(13)
            self._log_file = file_part
            self._log_pos = pos_part
    except IOError as err:
        _logger.error(err)
constant[ Read last position from file, store as current position
]
<ast.Try object at 0x7da1b26a5120> | keyword[def] identifier[_read_file_and_pos] ( identifier[self] ):
literal[string]
keyword[try] :
keyword[with] identifier[open] ( identifier[self] . identifier[pos_storage_filename] , literal[string] ) keyword[as] identifier[f] :
identifier[_pos] = identifier[f] . identifier[read] ()
identifier[_logger] . identifier[debug] ( literal[string]
%( identifier[_pos] , identifier[self] . identifier[pos_storage_filename] ))
keyword[if] keyword[not] identifier[_pos] :
keyword[return]
identifier[log_file] , identifier[log_pos] = identifier[_pos] . identifier[split] ( literal[string] )
keyword[try] :
identifier[log_file] = identifier[str] ( identifier[log_file] )
identifier[log_pos] = identifier[int] ( identifier[log_pos] )
keyword[except] ( identifier[ValueError] , identifier[TypeError] ) keyword[as] identifier[e] :
identifier[_logger] . identifier[critical] ( literal[string] % identifier[e] )
identifier[sys] . identifier[exit] ( literal[int] )
identifier[self] . identifier[_log_file] = identifier[log_file]
identifier[self] . identifier[_log_pos] = identifier[log_pos]
keyword[except] identifier[IOError] keyword[as] identifier[e] :
identifier[_logger] . identifier[error] ( identifier[e] ) | def _read_file_and_pos(self):
""" Read last position from file, store as current position
"""
try:
with open(self.pos_storage_filename, 'r+') as f:
_pos = f.read()
_logger.debug('Got position "%s" from file %s' % (_pos, self.pos_storage_filename))
if not _pos:
return # depends on [control=['if'], data=[]]
(log_file, log_pos) = _pos.split(':')
try:
log_file = str(log_file)
log_pos = int(log_pos) # depends on [control=['try'], data=[]]
except (ValueError, TypeError) as e:
_logger.critical('Can not read position: %s' % e)
sys.exit(13) # depends on [control=['except'], data=['e']]
self._log_file = log_file
self._log_pos = log_pos # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]]
except IOError as e:
_logger.error(e) # depends on [control=['except'], data=['e']] |
def legal_inn():
    """Return a random taxation ID number for a company."""
    weights = [2, 4, 10, 3, 5, 9, 4, 6, 8]
    digits = [random.randint(1, 9) for _ in range(10)]
    # The 10th digit is a checksum over the first nine, weighted per the
    # official algorithm and reduced modulo 11, then modulo 10.
    digits[9] = sum(w * d for w, d in zip(weights, digits)) % 11 % 10
    return "".join(str(d) for d in digits)
return "".join(map(str, inn)) | def function[legal_inn, parameter[]]:
constant[Return a random taxation ID number for a company.]
variable[mask] assign[=] list[[<ast.Constant object at 0x7da1b00f90f0>, <ast.Constant object at 0x7da1b00f97e0>, <ast.Constant object at 0x7da1b00f8be0>, <ast.Constant object at 0x7da1b00fb220>, <ast.Constant object at 0x7da1b00fa0e0>, <ast.Constant object at 0x7da1b00f9900>, <ast.Constant object at 0x7da1b00f8c40>, <ast.Constant object at 0x7da1b00f9450>, <ast.Constant object at 0x7da1b00faf20>]]
variable[inn] assign[=] <ast.ListComp object at 0x7da1b00fbf40>
variable[weighted] assign[=] <ast.ListComp object at 0x7da1b00fae00>
call[name[inn]][constant[9]] assign[=] binary_operation[binary_operation[call[name[sum], parameter[name[weighted]]] <ast.Mod object at 0x7da2590d6920> constant[11]] <ast.Mod object at 0x7da2590d6920> constant[10]]
return[call[constant[].join, parameter[call[name[map], parameter[name[str], name[inn]]]]]] | keyword[def] identifier[legal_inn] ():
literal[string]
identifier[mask] =[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]
identifier[inn] =[ identifier[random] . identifier[randint] ( literal[int] , literal[int] ) keyword[for] identifier[_] keyword[in] identifier[range] ( literal[int] )]
identifier[weighted] =[ identifier[v] * identifier[mask] [ identifier[i] ] keyword[for] identifier[i] , identifier[v] keyword[in] identifier[enumerate] ( identifier[inn] [:- literal[int] ])]
identifier[inn] [ literal[int] ]= identifier[sum] ( identifier[weighted] )% literal[int] % literal[int]
keyword[return] literal[string] . identifier[join] ( identifier[map] ( identifier[str] , identifier[inn] )) | def legal_inn():
"""Return a random taxation ID number for a company."""
mask = [2, 4, 10, 3, 5, 9, 4, 6, 8]
inn = [random.randint(1, 9) for _ in range(10)]
weighted = [v * mask[i] for (i, v) in enumerate(inn[:-1])]
inn[9] = sum(weighted) % 11 % 10
return ''.join(map(str, inn)) |
def _decodeTimestamp(byteIter):
    """Decode a 7-octet semi-octet timestamp into a tz-aware datetime."""
    raw = decodeSemiOctets(byteIter, 7)
    # The trailing two digits carry the timezone information.
    tz = SmsPduTzInfo(raw[-2:])
    return datetime.strptime(raw[:-2], '%y%m%d%H%M%S').replace(tzinfo=tz)
constant[ Decodes a 7-octet timestamp ]
variable[dateStr] assign[=] call[name[decodeSemiOctets], parameter[name[byteIter], constant[7]]]
variable[timeZoneStr] assign[=] call[name[dateStr]][<ast.Slice object at 0x7da18eb564d0>]
return[call[call[name[datetime].strptime, parameter[call[name[dateStr]][<ast.Slice object at 0x7da18eb54bb0>], constant[%y%m%d%H%M%S]]].replace, parameter[]]] | keyword[def] identifier[_decodeTimestamp] ( identifier[byteIter] ):
literal[string]
identifier[dateStr] = identifier[decodeSemiOctets] ( identifier[byteIter] , literal[int] )
identifier[timeZoneStr] = identifier[dateStr] [- literal[int] :]
keyword[return] identifier[datetime] . identifier[strptime] ( identifier[dateStr] [:- literal[int] ], literal[string] ). identifier[replace] ( identifier[tzinfo] = identifier[SmsPduTzInfo] ( identifier[timeZoneStr] )) | def _decodeTimestamp(byteIter):
""" Decodes a 7-octet timestamp """
dateStr = decodeSemiOctets(byteIter, 7)
timeZoneStr = dateStr[-2:]
return datetime.strptime(dateStr[:-2], '%y%m%d%H%M%S').replace(tzinfo=SmsPduTzInfo(timeZoneStr)) |
def reply_location(
    self,
    latitude: float,
    longitude: float,
    quote: bool = None,
    disable_notification: bool = None,
    reply_to_message_id: int = None,
    reply_markup: Union[
        "pyrogram.InlineKeyboardMarkup",
        "pyrogram.ReplyKeyboardMarkup",
        "pyrogram.ReplyKeyboardRemove",
        "pyrogram.ForceReply"
    ] = None
) -> "Message":
    """Bound method *reply_location* of :obj:`Message <pyrogram.Message>`.

    Shortcut for ``client.send_location(chat_id=message.chat.id, ...)``.

    Example:
        .. code-block:: python

            message.reply_location(41.890251, 12.492373)

    Args:
        latitude (``float``): Latitude of the location.
        longitude (``float``): Longitude of the location.
        quote (``bool``, *optional*): If ``True``, send as a reply to this
            message. Ignored when *reply_to_message_id* is passed.
            Defaults to ``True`` in group chats, ``False`` in private chats.
        disable_notification (``bool``, *optional*): Send silently, with a
            soundless notification.
        reply_to_message_id (``int``, *optional*): ID of the original
            message when replying.
        reply_markup (*optional*): Inline keyboard, custom reply keyboard,
            keyboard-removal or force-reply marker.

    Returns:
        On success, the sent :obj:`Message <pyrogram.Message>` is returned.

    Raises:
        :class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error.
    """
    # Quote by default everywhere except private chats.
    should_quote = quote if quote is not None else self.chat.type != "private"
    target_id = reply_to_message_id
    if target_id is None and should_quote:
        target_id = self.message_id
    return self._client.send_location(
        chat_id=self.chat.id,
        latitude=latitude,
        longitude=longitude,
        disable_notification=disable_notification,
        reply_to_message_id=target_id,
        reply_markup=reply_markup
    )
constant[Bound method *reply_location* of :obj:`Message <pyrogram.Message>`.
Use as a shortcut for:
.. code-block:: python
client.send_location(
chat_id=message.chat.id,
latitude=41.890251,
longitude=12.492373
)
Example:
.. code-block:: python
message.reply_location(41.890251, 12.492373)
Args:
latitude (``float``):
Latitude of the location.
longitude (``float``):
Longitude of the location.
quote (``bool``, *optional*):
If ``True``, the message will be sent as a reply to this message.
If *reply_to_message_id* is passed, this parameter will be ignored.
Defaults to ``True`` in group chats and ``False`` in private chats.
disable_notification (``bool``, *optional*):
Sends the message silently.
Users will receive a notification with no sound.
reply_to_message_id (``int``, *optional*):
If the message is a reply, ID of the original message
reply_markup (:obj:`InlineKeyboardMarkup` | :obj:`ReplyKeyboardMarkup` | :obj:`ReplyKeyboardRemove` | :obj:`ForceReply`, *optional*):
Additional interface options. An object for an inline keyboard, custom reply keyboard,
instructions to remove reply keyboard or to force a reply from the user.
Returns:
On success, the sent :obj:`Message <pyrogram.Message>` is returned.
Raises:
:class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error.
]
if compare[name[quote] is constant[None]] begin[:]
variable[quote] assign[=] compare[name[self].chat.type not_equal[!=] constant[private]]
if <ast.BoolOp object at 0x7da18bc72a40> begin[:]
variable[reply_to_message_id] assign[=] name[self].message_id
return[call[name[self]._client.send_location, parameter[]]] | keyword[def] identifier[reply_location] (
identifier[self] ,
identifier[latitude] : identifier[float] ,
identifier[longitude] : identifier[float] ,
identifier[quote] : identifier[bool] = keyword[None] ,
identifier[disable_notification] : identifier[bool] = keyword[None] ,
identifier[reply_to_message_id] : identifier[int] = keyword[None] ,
identifier[reply_markup] : identifier[Union] [
literal[string] ,
literal[string] ,
literal[string] ,
literal[string]
]= keyword[None]
)-> literal[string] :
literal[string]
keyword[if] identifier[quote] keyword[is] keyword[None] :
identifier[quote] = identifier[self] . identifier[chat] . identifier[type] != literal[string]
keyword[if] identifier[reply_to_message_id] keyword[is] keyword[None] keyword[and] identifier[quote] :
identifier[reply_to_message_id] = identifier[self] . identifier[message_id]
keyword[return] identifier[self] . identifier[_client] . identifier[send_location] (
identifier[chat_id] = identifier[self] . identifier[chat] . identifier[id] ,
identifier[latitude] = identifier[latitude] ,
identifier[longitude] = identifier[longitude] ,
identifier[disable_notification] = identifier[disable_notification] ,
identifier[reply_to_message_id] = identifier[reply_to_message_id] ,
identifier[reply_markup] = identifier[reply_markup]
) | def reply_location(self, latitude: float, longitude: float, quote: bool=None, disable_notification: bool=None, reply_to_message_id: int=None, reply_markup: Union['pyrogram.InlineKeyboardMarkup', 'pyrogram.ReplyKeyboardMarkup', 'pyrogram.ReplyKeyboardRemove', 'pyrogram.ForceReply']=None) -> 'Message':
"""Bound method *reply_location* of :obj:`Message <pyrogram.Message>`.
Use as a shortcut for:
.. code-block:: python
client.send_location(
chat_id=message.chat.id,
latitude=41.890251,
longitude=12.492373
)
Example:
.. code-block:: python
message.reply_location(41.890251, 12.492373)
Args:
latitude (``float``):
Latitude of the location.
longitude (``float``):
Longitude of the location.
quote (``bool``, *optional*):
If ``True``, the message will be sent as a reply to this message.
If *reply_to_message_id* is passed, this parameter will be ignored.
Defaults to ``True`` in group chats and ``False`` in private chats.
disable_notification (``bool``, *optional*):
Sends the message silently.
Users will receive a notification with no sound.
reply_to_message_id (``int``, *optional*):
If the message is a reply, ID of the original message
reply_markup (:obj:`InlineKeyboardMarkup` | :obj:`ReplyKeyboardMarkup` | :obj:`ReplyKeyboardRemove` | :obj:`ForceReply`, *optional*):
Additional interface options. An object for an inline keyboard, custom reply keyboard,
instructions to remove reply keyboard or to force a reply from the user.
Returns:
On success, the sent :obj:`Message <pyrogram.Message>` is returned.
Raises:
:class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error.
"""
if quote is None:
quote = self.chat.type != 'private' # depends on [control=['if'], data=['quote']]
if reply_to_message_id is None and quote:
reply_to_message_id = self.message_id # depends on [control=['if'], data=[]]
return self._client.send_location(chat_id=self.chat.id, latitude=latitude, longitude=longitude, disable_notification=disable_notification, reply_to_message_id=reply_to_message_id, reply_markup=reply_markup) |
def lognorm(x, mu, sigma=1.0):
    """Evaluate the log-normal pdf at ``x`` via scipy.

    ``mu`` is passed as scipy's ``scale`` parameter and ``sigma`` as the
    shape parameter ``s`` of :func:`scipy.stats.lognorm`.
    """
    distribution = stats.lognorm(sigma, scale=mu)
    return distribution.pdf(x)
constant[ Log-normal function from scipy ]
return[call[call[name[stats].lognorm, parameter[name[sigma]]].pdf, parameter[name[x]]]] | keyword[def] identifier[lognorm] ( identifier[x] , identifier[mu] , identifier[sigma] = literal[int] ):
literal[string]
keyword[return] identifier[stats] . identifier[lognorm] ( identifier[sigma] , identifier[scale] = identifier[mu] ). identifier[pdf] ( identifier[x] ) | def lognorm(x, mu, sigma=1.0):
""" Log-normal function from scipy """
return stats.lognorm(sigma, scale=mu).pdf(x) |
def _u_distance_covariance_sqr_naive(x, y, exponent=1):
    """
    Naive unbiased estimator for distance covariance.

    Computes the unbiased estimator for distance covariance between two
    matrices, using an :math:`O(N^2)` algorithm.
    """
    # U-centered distance matrix of each sample, combined through the
    # inner product that defines the unbiased statistic.
    return u_product(
        _u_distance_matrix(x, exponent=exponent),
        _u_distance_matrix(y, exponent=exponent),
    )
constant[
Naive unbiased estimator for distance covariance.
Computes the unbiased estimator for distance covariance between two
matrices, using an :math:`O(N^2)` algorithm.
]
variable[a] assign[=] call[name[_u_distance_matrix], parameter[name[x]]]
variable[b] assign[=] call[name[_u_distance_matrix], parameter[name[y]]]
return[call[name[u_product], parameter[name[a], name[b]]]] | keyword[def] identifier[_u_distance_covariance_sqr_naive] ( identifier[x] , identifier[y] , identifier[exponent] = literal[int] ):
literal[string]
identifier[a] = identifier[_u_distance_matrix] ( identifier[x] , identifier[exponent] = identifier[exponent] )
identifier[b] = identifier[_u_distance_matrix] ( identifier[y] , identifier[exponent] = identifier[exponent] )
keyword[return] identifier[u_product] ( identifier[a] , identifier[b] ) | def _u_distance_covariance_sqr_naive(x, y, exponent=1):
"""
Naive unbiased estimator for distance covariance.
Computes the unbiased estimator for distance covariance between two
matrices, using an :math:`O(N^2)` algorithm.
"""
a = _u_distance_matrix(x, exponent=exponent)
b = _u_distance_matrix(y, exponent=exponent)
return u_product(a, b) |
def _parse_local_version(local):
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").

    Returns ``None`` when *local* is ``None``.
    """
    if local is None:
        return None
    segments = _local_version_separators.split(local)
    return tuple(int(seg) if seg.isdigit() else seg.lower() for seg in segments)
constant[
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
]
if compare[name[local] is_not constant[None]] begin[:]
return[call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da20c7c8490>]]] | keyword[def] identifier[_parse_local_version] ( identifier[local] ):
literal[string]
keyword[if] identifier[local] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[tuple] (
identifier[part] . identifier[lower] () keyword[if] keyword[not] identifier[part] . identifier[isdigit] () keyword[else] identifier[int] ( identifier[part] )
keyword[for] identifier[part] keyword[in] identifier[_local_version_separators] . identifier[split] ( identifier[local] )
) | def _parse_local_version(local):
"""
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
"""
if local is not None:
return tuple((part.lower() if not part.isdigit() else int(part) for part in _local_version_separators.split(local))) # depends on [control=['if'], data=['local']] |
def resume(env, identifier):
    """Resumes a paused virtual server."""
    # Resolve the user-supplied identifier to a concrete VS id, then resume it.
    manager = SoftLayer.VSManager(env.client)
    instance_id = helpers.resolve_id(manager.resolve_ids, identifier, 'VS')
    env.client['Virtual_Guest'].resume(id=instance_id)
constant[Resumes a paused virtual server.]
variable[vsi] assign[=] call[name[SoftLayer].VSManager, parameter[name[env].client]]
variable[vs_id] assign[=] call[name[helpers].resolve_id, parameter[name[vsi].resolve_ids, name[identifier], constant[VS]]]
call[call[name[env].client][constant[Virtual_Guest]].resume, parameter[]] | keyword[def] identifier[resume] ( identifier[env] , identifier[identifier] ):
literal[string]
identifier[vsi] = identifier[SoftLayer] . identifier[VSManager] ( identifier[env] . identifier[client] )
identifier[vs_id] = identifier[helpers] . identifier[resolve_id] ( identifier[vsi] . identifier[resolve_ids] , identifier[identifier] , literal[string] )
identifier[env] . identifier[client] [ literal[string] ]. identifier[resume] ( identifier[id] = identifier[vs_id] ) | def resume(env, identifier):
"""Resumes a paused virtual server."""
vsi = SoftLayer.VSManager(env.client)
vs_id = helpers.resolve_id(vsi.resolve_ids, identifier, 'VS')
env.client['Virtual_Guest'].resume(id=vs_id) |
def load(obj, settings_module, identifier="py", silent=False, key=None):
    """Tries to import a python module and copy its UPPERCASE names into *obj*.

    When *key* is given, only that single setting is loaded.
    """
    mod, loaded_from = get_module(obj, settings_module, silent)
    if not (mod and loaded_from):
        obj.logger.debug(
            "py_loader: %s (Ignoring, Not Found)", settings_module
        )
        return
    obj.logger.debug("py_loader: {}".format(mod))
    for name in dir(mod):
        # Only UPPERCASE attributes are treated as settings.
        if not name.isupper():
            continue
        if key is not None and key != name:
            continue
        value = getattr(mod, name)
        # Mask values coming from modules that look secret-bearing.
        shown = "*****" if "secret" in settings_module else value
        obj.logger.debug(
            "py_loader: loading %s: %s (%s)",
            name,
            shown,
            identifier,
        )
        obj.set(name, value, loader_identifier=identifier)
    obj._loaded_files.append(mod.__file__)
constant[Tries to import a python module]
<ast.Tuple object at 0x7da1b180d150> assign[=] call[name[get_module], parameter[name[obj], name[settings_module], name[silent]]]
if <ast.BoolOp object at 0x7da1b180c940> begin[:]
call[name[obj].logger.debug, parameter[call[constant[py_loader: {}].format, parameter[name[mod]]]]]
for taget[name[setting]] in starred[call[name[dir], parameter[name[mod]]]] begin[:]
if call[name[setting].isupper, parameter[]] begin[:]
if <ast.BoolOp object at 0x7da1b180d330> begin[:]
variable[setting_value] assign[=] call[name[getattr], parameter[name[mod], name[setting]]]
call[name[obj].logger.debug, parameter[constant[py_loader: loading %s: %s (%s)], name[setting], <ast.IfExp object at 0x7da1b17f9330>, name[identifier]]]
call[name[obj].set, parameter[name[setting], name[setting_value]]]
call[name[obj]._loaded_files.append, parameter[name[mod].__file__]] | keyword[def] identifier[load] ( identifier[obj] , identifier[settings_module] , identifier[identifier] = literal[string] , identifier[silent] = keyword[False] , identifier[key] = keyword[None] ):
literal[string]
identifier[mod] , identifier[loaded_from] = identifier[get_module] ( identifier[obj] , identifier[settings_module] , identifier[silent] )
keyword[if] identifier[mod] keyword[and] identifier[loaded_from] :
identifier[obj] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[mod] ))
keyword[else] :
identifier[obj] . identifier[logger] . identifier[debug] (
literal[string] , identifier[settings_module]
)
keyword[return]
keyword[for] identifier[setting] keyword[in] identifier[dir] ( identifier[mod] ):
keyword[if] identifier[setting] . identifier[isupper] ():
keyword[if] identifier[key] keyword[is] keyword[None] keyword[or] identifier[key] == identifier[setting] :
identifier[setting_value] = identifier[getattr] ( identifier[mod] , identifier[setting] )
identifier[obj] . identifier[logger] . identifier[debug] (
literal[string] ,
identifier[setting] ,
literal[string] keyword[if] literal[string] keyword[in] identifier[settings_module] keyword[else] identifier[setting_value] ,
identifier[identifier] ,
)
identifier[obj] . identifier[set] ( identifier[setting] , identifier[setting_value] , identifier[loader_identifier] = identifier[identifier] )
identifier[obj] . identifier[_loaded_files] . identifier[append] ( identifier[mod] . identifier[__file__] ) | def load(obj, settings_module, identifier='py', silent=False, key=None):
"""Tries to import a python module"""
(mod, loaded_from) = get_module(obj, settings_module, silent)
if mod and loaded_from:
obj.logger.debug('py_loader: {}'.format(mod)) # depends on [control=['if'], data=[]]
else:
obj.logger.debug('py_loader: %s (Ignoring, Not Found)', settings_module)
return
for setting in dir(mod):
if setting.isupper():
if key is None or key == setting:
setting_value = getattr(mod, setting)
obj.logger.debug('py_loader: loading %s: %s (%s)', setting, '*****' if 'secret' in settings_module else setting_value, identifier)
obj.set(setting, setting_value, loader_identifier=identifier) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['setting']]
obj._loaded_files.append(mod.__file__) |
def propagate_timezone_option(self):
    """Set our timezone value and give it too to unset satellites

    :return: None
    """
    if not self.use_timezone:
        return
    # Apply the timezone to the current process first.
    os.environ['TZ'] = self.use_timezone
    time.tzset()
    # Then push it to every satellite that has not set its own value.
    satellite_groups = (self.schedulers, self.pollers, self.brokers,
                        self.receivers, self.reactionners)
    for group in satellite_groups:
        for satellite in group:
            if satellite.use_timezone == 'NOTSET':
                setattr(satellite, 'use_timezone', self.use_timezone)
constant[Set our timezone value and give it too to unset satellites
:return: None
]
if name[self].use_timezone begin[:]
call[name[os].environ][constant[TZ]] assign[=] name[self].use_timezone
call[name[time].tzset, parameter[]]
variable[tab] assign[=] list[[<ast.Attribute object at 0x7da204620310>, <ast.Attribute object at 0x7da204620a90>, <ast.Attribute object at 0x7da204622980>, <ast.Attribute object at 0x7da2046236d0>, <ast.Attribute object at 0x7da204623a90>]]
for taget[name[sat_list]] in starred[name[tab]] begin[:]
for taget[name[sat]] in starred[name[sat_list]] begin[:]
if compare[name[sat].use_timezone equal[==] constant[NOTSET]] begin[:]
call[name[setattr], parameter[name[sat], constant[use_timezone], name[self].use_timezone]] | keyword[def] identifier[propagate_timezone_option] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[use_timezone] :
identifier[os] . identifier[environ] [ literal[string] ]= identifier[self] . identifier[use_timezone]
identifier[time] . identifier[tzset] ()
identifier[tab] =[ identifier[self] . identifier[schedulers] , identifier[self] . identifier[pollers] , identifier[self] . identifier[brokers] , identifier[self] . identifier[receivers] , identifier[self] . identifier[reactionners] ]
keyword[for] identifier[sat_list] keyword[in] identifier[tab] :
keyword[for] identifier[sat] keyword[in] identifier[sat_list] :
keyword[if] identifier[sat] . identifier[use_timezone] == literal[string] :
identifier[setattr] ( identifier[sat] , literal[string] , identifier[self] . identifier[use_timezone] ) | def propagate_timezone_option(self):
"""Set our timezone value and give it too to unset satellites
:return: None
"""
if self.use_timezone:
# first apply myself
os.environ['TZ'] = self.use_timezone
time.tzset()
tab = [self.schedulers, self.pollers, self.brokers, self.receivers, self.reactionners]
for sat_list in tab:
for sat in sat_list:
if sat.use_timezone == 'NOTSET':
setattr(sat, 'use_timezone', self.use_timezone) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sat']] # depends on [control=['for'], data=['sat_list']] # depends on [control=['if'], data=[]] |
def legend(self):
    ''' Splattable list of :class:`~bokeh.models.annotations.Legend` objects.

    '''
    all_panels = self.above + self.below + self.left + self.right + self.center
    found = [panel for panel in all_panels if isinstance(panel, Legend)]
    return _legend_attr_splat(found)
constant[ Splattable list of :class:`~bokeh.models.annotations.Legend` objects.
]
variable[panels] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[self].above + name[self].below] + name[self].left] + name[self].right] + name[self].center]
variable[legends] assign[=] <ast.ListComp object at 0x7da20c6c7fd0>
return[call[name[_legend_attr_splat], parameter[name[legends]]]] | keyword[def] identifier[legend] ( identifier[self] ):
literal[string]
identifier[panels] = identifier[self] . identifier[above] + identifier[self] . identifier[below] + identifier[self] . identifier[left] + identifier[self] . identifier[right] + identifier[self] . identifier[center]
identifier[legends] =[ identifier[obj] keyword[for] identifier[obj] keyword[in] identifier[panels] keyword[if] identifier[isinstance] ( identifier[obj] , identifier[Legend] )]
keyword[return] identifier[_legend_attr_splat] ( identifier[legends] ) | def legend(self):
""" Splattable list of :class:`~bokeh.models.annotations.Legend` objects.
"""
panels = self.above + self.below + self.left + self.right + self.center
legends = [obj for obj in panels if isinstance(obj, Legend)]
return _legend_attr_splat(legends) |
def get_access_token(self, code):
    """Get new access token."""
    try:
        # Exchange the authorization code for a token via the parent
        # session class.
        self._token = super().fetch_token(
            MINUT_TOKEN_URL,
            code=code,
            client_id=self._client_id,
            client_secret=self._client_secret,
        )
    except MissingTokenError as err:
        # Keep the previous token (possibly None) and just log the failure.
        _LOGGER.debug("Token issues: %s", err)
    return self._token
constant[Get new access token.]
<ast.Try object at 0x7da1b1120040>
return[name[self]._token] | keyword[def] identifier[get_access_token] ( identifier[self] , identifier[code] ):
literal[string]
keyword[try] :
identifier[self] . identifier[_token] = identifier[super] (). identifier[fetch_token] (
identifier[MINUT_TOKEN_URL] ,
identifier[client_id] = identifier[self] . identifier[_client_id] ,
identifier[client_secret] = identifier[self] . identifier[_client_secret] ,
identifier[code] = identifier[code] ,
)
keyword[except] identifier[MissingTokenError] keyword[as] identifier[error] :
identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[error] )
keyword[return] identifier[self] . identifier[_token] | def get_access_token(self, code):
"""Get new access token."""
try:
self._token = super().fetch_token(MINUT_TOKEN_URL, client_id=self._client_id, client_secret=self._client_secret, code=code) # depends on [control=['try'], data=[]]
# except Exception as e:
except MissingTokenError as error:
_LOGGER.debug('Token issues: %s', error) # depends on [control=['except'], data=['error']]
return self._token |
def _save_to_hdx(self, action, id_field_name, file_to_upload=None):
# type: (str, str, Optional[str]) -> None
"""Creates or updates an HDX object in HDX, saving current data and replacing with returned HDX object data
from HDX
Args:
action (str): Action to perform: 'create' or 'update'
id_field_name (str): Name of field containing HDX object identifier
file_to_upload (Optional[str]): File to upload to HDX
Returns:
None
"""
result = self._write_to_hdx(action, self.data, id_field_name, file_to_upload)
self.old_data = self.data
self.data = result | def function[_save_to_hdx, parameter[self, action, id_field_name, file_to_upload]]:
constant[Creates or updates an HDX object in HDX, saving current data and replacing with returned HDX object data
from HDX
Args:
action (str): Action to perform: 'create' or 'update'
id_field_name (str): Name of field containing HDX object identifier
file_to_upload (Optional[str]): File to upload to HDX
Returns:
None
]
variable[result] assign[=] call[name[self]._write_to_hdx, parameter[name[action], name[self].data, name[id_field_name], name[file_to_upload]]]
name[self].old_data assign[=] name[self].data
name[self].data assign[=] name[result] | keyword[def] identifier[_save_to_hdx] ( identifier[self] , identifier[action] , identifier[id_field_name] , identifier[file_to_upload] = keyword[None] ):
literal[string]
identifier[result] = identifier[self] . identifier[_write_to_hdx] ( identifier[action] , identifier[self] . identifier[data] , identifier[id_field_name] , identifier[file_to_upload] )
identifier[self] . identifier[old_data] = identifier[self] . identifier[data]
identifier[self] . identifier[data] = identifier[result] | def _save_to_hdx(self, action, id_field_name, file_to_upload=None):
# type: (str, str, Optional[str]) -> None
"Creates or updates an HDX object in HDX, saving current data and replacing with returned HDX object data\n from HDX\n\n Args:\n action (str): Action to perform: 'create' or 'update'\n id_field_name (str): Name of field containing HDX object identifier\n file_to_upload (Optional[str]): File to upload to HDX\n\n Returns:\n None\n "
result = self._write_to_hdx(action, self.data, id_field_name, file_to_upload)
self.old_data = self.data
self.data = result |
def QA_SU_save_stock_day(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    save stock_day
    Save daily stock bars into the ``stock_day`` collection, updating each
    stock code incrementally from its last stored date.

    :param client: database handle whose ``stock_day`` collection is written
    :param ui_log: log sink for the GUI (Qt) front end
    :param ui_progress: progress sink for the GUI (Qt) front end
    '''
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll_stock_day = client.stock_day
    # Compound (code, date_stamp) index keeps the per-code range queries fast.
    coll_stock_day.create_index(
        [("code",
          pymongo.ASCENDING),
         ("date_stamp",
          pymongo.ASCENDING)]
    )
    err = []  # codes that failed while building the download plan

    def __gen_param(stock_list, coll_stock_day, ip_list=[]):
        # Build one task tuple per stale stock code; tasks are spread
        # round-robin over the available server IPs.
        results = []
        count = len(ip_list)
        total = len(stock_list)
        for item in range(len(stock_list)):
            try:
                code = stock_list[item]
                QA_util_log_info(
                    '##JOB01 Now Saving STOCK_DAY==== {}'.format(str(code)),
                    ui_log
                )
                # First check whether the database already has rows for this code.
                search_cond = {'code': str(code)[0:6]}
                ref = coll_stock_day.find(search_cond)
                end_date = str(now_time())[0:10]
                ref_count = coll_stock_day.count_documents(search_cond)
                # Data already present: continue with an incremental update.
                # The guard matters because a freshly listed stock has no rows
                # yet, and indexing the cursor would use a negative index.
                if ref_count > 0:
                    # Resume from the last stored trading date.
                    start_date = ref[ref_count - 1]['date']
                else:
                    # No rows yet: download the full history from 1990-01-01.
                    start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_STOCK_DAY \n Trying updating {} from {} to {}'
                    .format(code,
                            start_date,
                            end_date),
                    ui_log
                )
                if start_date != end_date:
                    # Only schedule codes whose stored data is stale.
                    results.extend([(code, start_date, end_date, '00', 'day', ip_list[item % count]['ip'],
                                     ip_list[item % count]['port'], item, total, ui_log, ui_progress)])
            except Exception as error0:
                print('Exception:{}'.format(error0))
                err.append(code)
        return results

    # Probe the candidate data servers and keep the fastest few.
    ips = get_ip_list_by_multi_process_ping(stock_ip_list, _type='stock')[:cpu_count() * 2 + 1]
    param = __gen_param(stock_list, coll_stock_day, ips)
    ps = QA_SU_save_stock_day_parallelism(processes=cpu_count() if len(ips) >= cpu_count() else len(ips),
                                          client=client, ui_log=ui_log)
    ps.add(do_saving_work, param)
    ps.run()
    if len(err) < 1:
        QA_util_log_info('SUCCESS save stock day ^_^', ui_log)
    else:
        QA_util_log_info('ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)
constant[
save stock_day
保存日线数据
:param client:
:param ui_log: 给GUI qt 界面使用
:param ui_progress: 给GUI qt 界面使用
:param ui_progress_int_value: 给GUI qt 界面使用
]
variable[stock_list] assign[=] call[call[call[name[QA_fetch_get_stock_list], parameter[]].code.unique, parameter[]].tolist, parameter[]]
variable[coll_stock_day] assign[=] name[client].stock_day
call[name[coll_stock_day].create_index, parameter[list[[<ast.Tuple object at 0x7da1b1e1b460>, <ast.Tuple object at 0x7da1b1e1b310>]]]]
variable[err] assign[=] list[[]]
def function[__gen_param, parameter[stock_list, coll_stock_day, ip_list]]:
variable[results] assign[=] list[[]]
variable[count] assign[=] call[name[len], parameter[name[ip_list]]]
variable[total] assign[=] call[name[len], parameter[name[stock_list]]]
for taget[name[item]] in starred[call[name[range], parameter[call[name[len], parameter[name[stock_list]]]]]] begin[:]
<ast.Try object at 0x7da1b1f25cf0>
return[name[results]]
variable[ips] assign[=] call[call[name[get_ip_list_by_multi_process_ping], parameter[name[stock_ip_list]]]][<ast.Slice object at 0x7da1b1f26170>]
variable[param] assign[=] call[name[__gen_param], parameter[name[stock_list], name[coll_stock_day], name[ips]]]
variable[ps] assign[=] call[name[QA_SU_save_stock_day_parallelism], parameter[]]
call[name[ps].add, parameter[name[do_saving_work], name[param]]]
call[name[ps].run, parameter[]]
if compare[call[name[len], parameter[name[err]]] less[<] constant[1]] begin[:]
call[name[QA_util_log_info], parameter[constant[SUCCESS save stock day ^_^], name[ui_log]]] | keyword[def] identifier[QA_SU_save_stock_day] ( identifier[client] = identifier[DATABASE] , identifier[ui_log] = keyword[None] , identifier[ui_progress] = keyword[None] ):
literal[string]
identifier[stock_list] = identifier[QA_fetch_get_stock_list] (). identifier[code] . identifier[unique] (). identifier[tolist] ()
identifier[coll_stock_day] = identifier[client] . identifier[stock_day]
identifier[coll_stock_day] . identifier[create_index] (
[( literal[string] ,
identifier[pymongo] . identifier[ASCENDING] ),
( literal[string] ,
identifier[pymongo] . identifier[ASCENDING] )]
)
identifier[err] =[]
keyword[def] identifier[__gen_param] ( identifier[stock_list] , identifier[coll_stock_day] , identifier[ip_list] =[]):
identifier[results] =[]
identifier[count] = identifier[len] ( identifier[ip_list] )
identifier[total] = identifier[len] ( identifier[stock_list] )
keyword[for] identifier[item] keyword[in] identifier[range] ( identifier[len] ( identifier[stock_list] )):
keyword[try] :
identifier[code] = identifier[stock_list] [ identifier[item] ]
identifier[QA_util_log_info] (
literal[string] . identifier[format] ( identifier[str] ( identifier[code] )),
identifier[ui_log]
)
identifier[search_cond] ={ literal[string] : identifier[str] ( identifier[code] )[ literal[int] : literal[int] ]}
identifier[ref] = identifier[coll_stock_day] . identifier[find] ( identifier[search_cond] )
identifier[end_date] = identifier[str] ( identifier[now_time] ())[ literal[int] : literal[int] ]
identifier[ref_count] = identifier[coll_stock_day] . identifier[count_documents] ( identifier[search_cond] )
keyword[if] identifier[ref_count] > literal[int] :
identifier[start_date] = identifier[ref] [ identifier[ref_count] - literal[int] ][ literal[string] ]
keyword[else] :
identifier[start_date] = literal[string]
identifier[QA_util_log_info] (
literal[string]
. identifier[format] ( identifier[code] ,
identifier[start_date] ,
identifier[end_date] ),
identifier[ui_log]
)
keyword[if] identifier[start_date] != identifier[end_date] :
identifier[results] . identifier[extend] ([( identifier[code] , identifier[start_date] , identifier[end_date] , literal[string] , literal[string] , identifier[ip_list] [ identifier[item] % identifier[count] ][ literal[string] ],
identifier[ip_list] [ identifier[item] % identifier[count] ][ literal[string] ], identifier[item] , identifier[total] , identifier[ui_log] , identifier[ui_progress] )])
keyword[except] identifier[Exception] keyword[as] identifier[error0] :
identifier[print] ( literal[string] . identifier[format] ( identifier[error0] ))
identifier[err] . identifier[append] ( identifier[code] )
keyword[return] identifier[results]
identifier[ips] = identifier[get_ip_list_by_multi_process_ping] ( identifier[stock_ip_list] , identifier[_type] = literal[string] )[: identifier[cpu_count] ()* literal[int] + literal[int] ]
identifier[param] = identifier[__gen_param] ( identifier[stock_list] , identifier[coll_stock_day] , identifier[ips] )
identifier[ps] = identifier[QA_SU_save_stock_day_parallelism] ( identifier[processes] = identifier[cpu_count] () keyword[if] identifier[len] ( identifier[ips] )>= identifier[cpu_count] () keyword[else] identifier[len] ( identifier[ips] ),
identifier[client] = identifier[client] , identifier[ui_log] = identifier[ui_log] )
identifier[ps] . identifier[add] ( identifier[do_saving_work] , identifier[param] )
identifier[ps] . identifier[run] ()
keyword[if] identifier[len] ( identifier[err] )< literal[int] :
identifier[QA_util_log_info] ( literal[string] , identifier[ui_log] )
keyword[else] :
identifier[QA_util_log_info] ( literal[string] , identifier[ui_log] )
identifier[QA_util_log_info] ( identifier[err] , identifier[ui_log] ) | def QA_SU_save_stock_day(client=DATABASE, ui_log=None, ui_progress=None):
"""
save stock_day
保存日线数据
:param client:
:param ui_log: 给GUI qt 界面使用
:param ui_progress: 给GUI qt 界面使用
:param ui_progress_int_value: 给GUI qt 界面使用
"""
stock_list = QA_fetch_get_stock_list().code.unique().tolist()
coll_stock_day = client.stock_day
coll_stock_day.create_index([('code', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)])
err = []
# saveing result
def __gen_param(stock_list, coll_stock_day, ip_list=[]):
results = []
count = len(ip_list)
total = len(stock_list)
for item in range(len(stock_list)):
try:
code = stock_list[item]
QA_util_log_info('##JOB01 Now Saving STOCK_DAY==== {}'.format(str(code)), ui_log)
# 首选查找数据库 是否 有 这个代码的数据
search_cond = {'code': str(code)[0:6]}
ref = coll_stock_day.find(search_cond)
end_date = str(now_time())[0:10]
ref_count = coll_stock_day.count_documents(search_cond)
# 当前数据库已经包含了这个代码的数据, 继续增量更新
# 加入这个判断的原因是因为如果股票是刚上市的 数据库会没有数据 所以会有负索引问题出现
if ref_count > 0:
# 接着上次获取的日期继续更新
start_date = ref[ref_count - 1]['date'] # depends on [control=['if'], data=['ref_count']]
else:
# print("ref[ref.count() - 1]['date'] {} {}".format(ref.count(), coll_stock_day.count_documents({'code': str(code)[0:6]})))
# 当前数据库中没有这个代码的股票数据, 从1990-01-01 开始下载所有的数据
start_date = '1990-01-01'
QA_util_log_info('UPDATE_STOCK_DAY \n Trying updating {} from {} to {}'.format(code, start_date, end_date), ui_log)
if start_date != end_date:
# 更新过的,不更新
results.extend([(code, start_date, end_date, '00', 'day', ip_list[item % count]['ip'], ip_list[item % count]['port'], item, total, ui_log, ui_progress)]) # depends on [control=['if'], data=['start_date', 'end_date']] # depends on [control=['try'], data=[]]
except Exception as error0:
print('Exception:{}'.format(error0))
err.append(code) # depends on [control=['except'], data=['error0']] # depends on [control=['for'], data=['item']]
return results
ips = get_ip_list_by_multi_process_ping(stock_ip_list, _type='stock')[:cpu_count() * 2 + 1]
param = __gen_param(stock_list, coll_stock_day, ips)
ps = QA_SU_save_stock_day_parallelism(processes=cpu_count() if len(ips) >= cpu_count() else len(ips), client=client, ui_log=ui_log)
ps.add(do_saving_work, param)
ps.run()
if len(err) < 1:
QA_util_log_info('SUCCESS save stock day ^_^', ui_log) # depends on [control=['if'], data=[]]
else:
QA_util_log_info('ERROR CODE \n ', ui_log)
QA_util_log_info(err, ui_log) |
def attendee(request, form, user_id=None):
    ''' Returns a list of all manifested attendees if no attendee is specified,
    else displays the attendee manifest.

    Arguments:
        request: the current HTTP request.
        form: a validated form whose cleaned ``user`` field may identify the
            attendee to display.
        user_id: primary key of the attendee's user; falls back to the form
            value, and when neither is given the full attendee list is shown.
    '''
    if user_id is None and form.cleaned_data["user"] is not None:
        user_id = form.cleaned_data["user"]
    if user_id is None:
        return attendee_list(request)
    attendee = people.Attendee.objects.get(user__id=user_id)
    name = attendee.attendeeprofilebase.attendee_name()
    reports = []
    # Profile details: every displayable field from the attendee's profile
    # subclass (an attendee may have no profile yet).
    profile_data = []
    try:
        profile = people.AttendeeProfileBase.objects.get_subclass(
            attendee=attendee
        )
        fields = profile._meta.get_fields()
    except people.AttendeeProfileBase.DoesNotExist:
        fields = []
    exclude = set(["attendeeprofilebase_ptr", "id"])
    for field in fields:
        if field.name in exclude:
            # Internal bookkeeping fields -- not worth displaying
            continue
        if not hasattr(field, "verbose_name"):
            continue  # Not a publicly visible field
        value = getattr(profile, field.name)
        if isinstance(field, models.ManyToManyField):
            # Flatten many-to-many values into a comma-separated string
            value = ", ".join(str(i) for i in value.all())
        profile_data.append((field.verbose_name, value))
    cart = CartController.for_user(attendee.user)
    reservation = cart.cart.reservation_duration + cart.cart.time_last_updated
    profile_data.append(("Current cart reserved until", reservation))
    reports.append(ListReport("Profile", ["", ""], profile_data))
    # Quick staff actions for this attendee.
    links = []
    links.append((
        reverse(views.badge, args=[user_id]),
        "View badge",
    ))
    links.append((
        reverse(views.amend_registration, args=[user_id]),
        "Amend current cart",
    ))
    links.append((
        reverse(views.extend_reservation, args=[user_id]),
        "Extend reservation",
    ))
    reports.append(Links("Actions for " + name, links))
    # Paid and pending products
    ic = ItemController(attendee.user)
    reports.append(ListReport(
        "Paid Products",
        ["Product", "Quantity"],
        [(pq.product, pq.quantity) for pq in ic.items_purchased()],
    ))
    reports.append(ListReport(
        "Unpaid Products",
        ["Product", "Quantity"],
        [(pq.product, pq.quantity) for pq in ic.items_pending()],
    ))
    # Invoices
    invoices = commerce.Invoice.objects.filter(
        user=attendee.user,
    )
    reports.append(QuerysetReport(
        "Invoices",
        ["id", "get_status_display", "value"],
        invoices,
        headings=["Invoice ID", "Status", "Value"],
        link_view=views.invoice,
    ))
    # Credit Notes
    credit_notes = commerce.CreditNote.objects.filter(
        invoice__user=attendee.user,
    ).select_related("invoice", "creditnoteapplication", "creditnoterefund")
    reports.append(QuerysetReport(
        "Credit Notes",
        ["id", "status", "value"],
        credit_notes,
        link_view=views.credit_note,
    ))
    # All payments
    payments = commerce.PaymentBase.objects.filter(
        invoice__user=attendee.user,
    ).select_related("invoice")
    reports.append(QuerysetReport(
        "Payments",
        ["invoice__id", "id", "reference", "amount"],
        payments,
        link_view=views.invoice,
    ))
    return reports
constant[ Returns a list of all manifested attendees if no attendee is specified,
else displays the attendee manifest. ]
if <ast.BoolOp object at 0x7da18f58fdc0> begin[:]
variable[user_id] assign[=] call[name[form].cleaned_data][constant[user]]
if compare[name[user_id] is constant[None]] begin[:]
return[call[name[attendee_list], parameter[name[request]]]]
variable[attendee] assign[=] call[name[people].Attendee.objects.get, parameter[]]
variable[name] assign[=] call[name[attendee].attendeeprofilebase.attendee_name, parameter[]]
variable[reports] assign[=] list[[]]
variable[profile_data] assign[=] list[[]]
<ast.Try object at 0x7da18f58fa90>
variable[exclude] assign[=] call[name[set], parameter[list[[<ast.Constant object at 0x7da18f58c7f0>, <ast.Constant object at 0x7da18f58d2d0>]]]]
for taget[name[field]] in starred[name[fields]] begin[:]
if compare[name[field].name in name[exclude]] begin[:]
continue
if <ast.UnaryOp object at 0x7da18f58ed70> begin[:]
continue
variable[value] assign[=] call[name[getattr], parameter[name[profile], name[field].name]]
if call[name[isinstance], parameter[name[field], name[models].ManyToManyField]] begin[:]
variable[value] assign[=] call[constant[, ].join, parameter[<ast.GeneratorExp object at 0x7da18f58fd00>]]
call[name[profile_data].append, parameter[tuple[[<ast.Attribute object at 0x7da18f58cf40>, <ast.Name object at 0x7da18f58c8e0>]]]]
variable[cart] assign[=] call[name[CartController].for_user, parameter[name[attendee].user]]
variable[reservation] assign[=] binary_operation[name[cart].cart.reservation_duration + name[cart].cart.time_last_updated]
call[name[profile_data].append, parameter[tuple[[<ast.Constant object at 0x7da18f58dc00>, <ast.Name object at 0x7da18f58e6b0>]]]]
call[name[reports].append, parameter[call[name[ListReport], parameter[constant[Profile], list[[<ast.Constant object at 0x7da18f58e7d0>, <ast.Constant object at 0x7da18f58e410>]], name[profile_data]]]]]
variable[links] assign[=] list[[]]
call[name[links].append, parameter[tuple[[<ast.Call object at 0x7da18f58d9f0>, <ast.Constant object at 0x7da18f58f4c0>]]]]
call[name[links].append, parameter[tuple[[<ast.Call object at 0x7da18f58c490>, <ast.Constant object at 0x7da18f58f3a0>]]]]
call[name[links].append, parameter[tuple[[<ast.Call object at 0x7da18f58c2b0>, <ast.Constant object at 0x7da18f58e8c0>]]]]
call[name[reports].append, parameter[call[name[Links], parameter[binary_operation[constant[Actions for ] + name[name]], name[links]]]]]
variable[ic] assign[=] call[name[ItemController], parameter[name[attendee].user]]
call[name[reports].append, parameter[call[name[ListReport], parameter[constant[Paid Products], list[[<ast.Constant object at 0x7da20c990850>, <ast.Constant object at 0x7da20c990a30>]], <ast.ListComp object at 0x7da20c992020>]]]]
call[name[reports].append, parameter[call[name[ListReport], parameter[constant[Unpaid Products], list[[<ast.Constant object at 0x7da20c992c80>, <ast.Constant object at 0x7da20c991510>]], <ast.ListComp object at 0x7da20c992050>]]]]
variable[invoices] assign[=] call[name[commerce].Invoice.objects.filter, parameter[]]
call[name[reports].append, parameter[call[name[QuerysetReport], parameter[constant[Invoices], list[[<ast.Constant object at 0x7da20c993c40>, <ast.Constant object at 0x7da20c991ab0>, <ast.Constant object at 0x7da20c993f70>]], name[invoices]]]]]
variable[credit_notes] assign[=] call[call[name[commerce].CreditNote.objects.filter, parameter[]].select_related, parameter[constant[invoice], constant[creditnoteapplication], constant[creditnoterefund]]]
call[name[reports].append, parameter[call[name[QuerysetReport], parameter[constant[Credit Notes], list[[<ast.Constant object at 0x7da207f9bb20>, <ast.Constant object at 0x7da207f9a440>, <ast.Constant object at 0x7da207f99480>]], name[credit_notes]]]]]
variable[payments] assign[=] call[call[name[commerce].PaymentBase.objects.filter, parameter[]].select_related, parameter[constant[invoice]]]
call[name[reports].append, parameter[call[name[QuerysetReport], parameter[constant[Payments], list[[<ast.Constant object at 0x7da207f99bd0>, <ast.Constant object at 0x7da207f9a560>, <ast.Constant object at 0x7da207f9acb0>, <ast.Constant object at 0x7da207f9b580>]], name[payments]]]]]
return[name[reports]] | keyword[def] identifier[attendee] ( identifier[request] , identifier[form] , identifier[user_id] = keyword[None] ):
literal[string]
keyword[if] identifier[user_id] keyword[is] keyword[None] keyword[and] identifier[form] . identifier[cleaned_data] [ literal[string] ] keyword[is] keyword[not] keyword[None] :
identifier[user_id] = identifier[form] . identifier[cleaned_data] [ literal[string] ]
keyword[if] identifier[user_id] keyword[is] keyword[None] :
keyword[return] identifier[attendee_list] ( identifier[request] )
identifier[attendee] = identifier[people] . identifier[Attendee] . identifier[objects] . identifier[get] ( identifier[user__id] = identifier[user_id] )
identifier[name] = identifier[attendee] . identifier[attendeeprofilebase] . identifier[attendee_name] ()
identifier[reports] =[]
identifier[profile_data] =[]
keyword[try] :
identifier[profile] = identifier[people] . identifier[AttendeeProfileBase] . identifier[objects] . identifier[get_subclass] (
identifier[attendee] = identifier[attendee]
)
identifier[fields] = identifier[profile] . identifier[_meta] . identifier[get_fields] ()
keyword[except] identifier[people] . identifier[AttendeeProfileBase] . identifier[DoesNotExist] :
identifier[fields] =[]
identifier[exclude] = identifier[set] ([ literal[string] , literal[string] ])
keyword[for] identifier[field] keyword[in] identifier[fields] :
keyword[if] identifier[field] . identifier[name] keyword[in] identifier[exclude] :
keyword[continue]
keyword[if] keyword[not] identifier[hasattr] ( identifier[field] , literal[string] ):
keyword[continue]
identifier[value] = identifier[getattr] ( identifier[profile] , identifier[field] . identifier[name] )
keyword[if] identifier[isinstance] ( identifier[field] , identifier[models] . identifier[ManyToManyField] ):
identifier[value] = literal[string] . identifier[join] ( identifier[str] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[value] . identifier[all] ())
identifier[profile_data] . identifier[append] (( identifier[field] . identifier[verbose_name] , identifier[value] ))
identifier[cart] = identifier[CartController] . identifier[for_user] ( identifier[attendee] . identifier[user] )
identifier[reservation] = identifier[cart] . identifier[cart] . identifier[reservation_duration] + identifier[cart] . identifier[cart] . identifier[time_last_updated]
identifier[profile_data] . identifier[append] (( literal[string] , identifier[reservation] ))
identifier[reports] . identifier[append] ( identifier[ListReport] ( literal[string] ,[ literal[string] , literal[string] ], identifier[profile_data] ))
identifier[links] =[]
identifier[links] . identifier[append] ((
identifier[reverse] ( identifier[views] . identifier[badge] , identifier[args] =[ identifier[user_id] ]),
literal[string] ,
))
identifier[links] . identifier[append] ((
identifier[reverse] ( identifier[views] . identifier[amend_registration] , identifier[args] =[ identifier[user_id] ]),
literal[string] ,
))
identifier[links] . identifier[append] ((
identifier[reverse] ( identifier[views] . identifier[extend_reservation] , identifier[args] =[ identifier[user_id] ]),
literal[string] ,
))
identifier[reports] . identifier[append] ( identifier[Links] ( literal[string] + identifier[name] , identifier[links] ))
identifier[ic] = identifier[ItemController] ( identifier[attendee] . identifier[user] )
identifier[reports] . identifier[append] ( identifier[ListReport] (
literal[string] ,
[ literal[string] , literal[string] ],
[( identifier[pq] . identifier[product] , identifier[pq] . identifier[quantity] ) keyword[for] identifier[pq] keyword[in] identifier[ic] . identifier[items_purchased] ()],
))
identifier[reports] . identifier[append] ( identifier[ListReport] (
literal[string] ,
[ literal[string] , literal[string] ],
[( identifier[pq] . identifier[product] , identifier[pq] . identifier[quantity] ) keyword[for] identifier[pq] keyword[in] identifier[ic] . identifier[items_pending] ()],
))
identifier[invoices] = identifier[commerce] . identifier[Invoice] . identifier[objects] . identifier[filter] (
identifier[user] = identifier[attendee] . identifier[user] ,
)
identifier[reports] . identifier[append] ( identifier[QuerysetReport] (
literal[string] ,
[ literal[string] , literal[string] , literal[string] ],
identifier[invoices] ,
identifier[headings] =[ literal[string] , literal[string] , literal[string] ],
identifier[link_view] = identifier[views] . identifier[invoice] ,
))
identifier[credit_notes] = identifier[commerce] . identifier[CreditNote] . identifier[objects] . identifier[filter] (
identifier[invoice__user] = identifier[attendee] . identifier[user] ,
). identifier[select_related] ( literal[string] , literal[string] , literal[string] )
identifier[reports] . identifier[append] ( identifier[QuerysetReport] (
literal[string] ,
[ literal[string] , literal[string] , literal[string] ],
identifier[credit_notes] ,
identifier[link_view] = identifier[views] . identifier[credit_note] ,
))
identifier[payments] = identifier[commerce] . identifier[PaymentBase] . identifier[objects] . identifier[filter] (
identifier[invoice__user] = identifier[attendee] . identifier[user] ,
). identifier[select_related] ( literal[string] )
identifier[reports] . identifier[append] ( identifier[QuerysetReport] (
literal[string] ,
[ literal[string] , literal[string] , literal[string] , literal[string] ],
identifier[payments] ,
identifier[link_view] = identifier[views] . identifier[invoice] ,
))
keyword[return] identifier[reports] | def attendee(request, form, user_id=None):
""" Returns a list of all manifested attendees if no attendee is specified,
else displays the attendee manifest. """
if user_id is None and form.cleaned_data['user'] is not None:
user_id = form.cleaned_data['user'] # depends on [control=['if'], data=[]]
if user_id is None:
return attendee_list(request) # depends on [control=['if'], data=[]]
attendee = people.Attendee.objects.get(user__id=user_id)
name = attendee.attendeeprofilebase.attendee_name()
reports = []
profile_data = []
try:
profile = people.AttendeeProfileBase.objects.get_subclass(attendee=attendee)
fields = profile._meta.get_fields() # depends on [control=['try'], data=[]]
except people.AttendeeProfileBase.DoesNotExist:
fields = [] # depends on [control=['except'], data=[]]
exclude = set(['attendeeprofilebase_ptr', 'id'])
for field in fields:
if field.name in exclude:
# Not actually important
continue # depends on [control=['if'], data=[]]
if not hasattr(field, 'verbose_name'):
continue # Not a publicly visible field # depends on [control=['if'], data=[]]
value = getattr(profile, field.name)
if isinstance(field, models.ManyToManyField):
value = ', '.join((str(i) for i in value.all())) # depends on [control=['if'], data=[]]
profile_data.append((field.verbose_name, value)) # depends on [control=['for'], data=['field']]
cart = CartController.for_user(attendee.user)
reservation = cart.cart.reservation_duration + cart.cart.time_last_updated
profile_data.append(('Current cart reserved until', reservation))
reports.append(ListReport('Profile', ['', ''], profile_data))
links = []
links.append((reverse(views.badge, args=[user_id]), 'View badge'))
links.append((reverse(views.amend_registration, args=[user_id]), 'Amend current cart'))
links.append((reverse(views.extend_reservation, args=[user_id]), 'Extend reservation'))
reports.append(Links('Actions for ' + name, links))
# Paid and pending products
ic = ItemController(attendee.user)
reports.append(ListReport('Paid Products', ['Product', 'Quantity'], [(pq.product, pq.quantity) for pq in ic.items_purchased()]))
reports.append(ListReport('Unpaid Products', ['Product', 'Quantity'], [(pq.product, pq.quantity) for pq in ic.items_pending()]))
# Invoices
invoices = commerce.Invoice.objects.filter(user=attendee.user)
reports.append(QuerysetReport('Invoices', ['id', 'get_status_display', 'value'], invoices, headings=['Invoice ID', 'Status', 'Value'], link_view=views.invoice))
# Credit Notes
credit_notes = commerce.CreditNote.objects.filter(invoice__user=attendee.user).select_related('invoice', 'creditnoteapplication', 'creditnoterefund')
reports.append(QuerysetReport('Credit Notes', ['id', 'status', 'value'], credit_notes, link_view=views.credit_note))
# All payments
payments = commerce.PaymentBase.objects.filter(invoice__user=attendee.user).select_related('invoice')
reports.append(QuerysetReport('Payments', ['invoice__id', 'id', 'reference', 'amount'], payments, link_view=views.invoice))
return reports |
def extract_landmarks(gtf, landmarks=ALL_LANDMARKS):
"""Given an gene annotation GFF/GTF file,
# Arguments
gtf: File path or a loaded `pd.DataFrame` with columns:
seqname, feature, start, end, strand
landmarks: list or a dictionary of landmark extractors (function or name)
# Note
When landmark extractor names are used, they have to be implemented in
the module `concise.preprocessing.position`
# Returns
Dictionary of pd.DataFrames with landmark positions
(columns: seqname, position, strand)
"""
if isinstance(gtf, str):
_logger.info("Reading gtf file..")
gtf = read_gtf(gtf)
_logger.info("Done")
_logger.info("Running landmark extractors..")
# landmarks to a dictionary with a function
assert isinstance(landmarks, (list, tuple, set, dict))
if isinstance(landmarks, dict):
landmarks = {k: _get_fun(v) for k, v in landmarks.items()}
else:
landmarks = {_to_string(fn_str): _get_fun(fn_str)
for fn_str in landmarks}
r = {k: _validate_pos(v(gtf)) for k, v in landmarks.items()}
_logger.info("Done!")
return r | def function[extract_landmarks, parameter[gtf, landmarks]]:
constant[Given an gene annotation GFF/GTF file,
# Arguments
gtf: File path or a loaded `pd.DataFrame` with columns:
seqname, feature, start, end, strand
landmarks: list or a dictionary of landmark extractors (function or name)
# Note
When landmark extractor names are used, they have to be implemented in
the module `concise.preprocessing.position`
# Returns
Dictionary of pd.DataFrames with landmark positions
(columns: seqname, position, strand)
]
if call[name[isinstance], parameter[name[gtf], name[str]]] begin[:]
call[name[_logger].info, parameter[constant[Reading gtf file..]]]
variable[gtf] assign[=] call[name[read_gtf], parameter[name[gtf]]]
call[name[_logger].info, parameter[constant[Done]]]
call[name[_logger].info, parameter[constant[Running landmark extractors..]]]
assert[call[name[isinstance], parameter[name[landmarks], tuple[[<ast.Name object at 0x7da1b031c580>, <ast.Name object at 0x7da1b031d960>, <ast.Name object at 0x7da1b031ece0>, <ast.Name object at 0x7da1b031dd80>]]]]]
if call[name[isinstance], parameter[name[landmarks], name[dict]]] begin[:]
variable[landmarks] assign[=] <ast.DictComp object at 0x7da1b031ded0>
variable[r] assign[=] <ast.DictComp object at 0x7da204565660>
call[name[_logger].info, parameter[constant[Done!]]]
return[name[r]] | keyword[def] identifier[extract_landmarks] ( identifier[gtf] , identifier[landmarks] = identifier[ALL_LANDMARKS] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[gtf] , identifier[str] ):
identifier[_logger] . identifier[info] ( literal[string] )
identifier[gtf] = identifier[read_gtf] ( identifier[gtf] )
identifier[_logger] . identifier[info] ( literal[string] )
identifier[_logger] . identifier[info] ( literal[string] )
keyword[assert] identifier[isinstance] ( identifier[landmarks] ,( identifier[list] , identifier[tuple] , identifier[set] , identifier[dict] ))
keyword[if] identifier[isinstance] ( identifier[landmarks] , identifier[dict] ):
identifier[landmarks] ={ identifier[k] : identifier[_get_fun] ( identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[landmarks] . identifier[items] ()}
keyword[else] :
identifier[landmarks] ={ identifier[_to_string] ( identifier[fn_str] ): identifier[_get_fun] ( identifier[fn_str] )
keyword[for] identifier[fn_str] keyword[in] identifier[landmarks] }
identifier[r] ={ identifier[k] : identifier[_validate_pos] ( identifier[v] ( identifier[gtf] )) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[landmarks] . identifier[items] ()}
identifier[_logger] . identifier[info] ( literal[string] )
keyword[return] identifier[r] | def extract_landmarks(gtf, landmarks=ALL_LANDMARKS):
"""Given an gene annotation GFF/GTF file,
# Arguments
gtf: File path or a loaded `pd.DataFrame` with columns:
seqname, feature, start, end, strand
landmarks: list or a dictionary of landmark extractors (function or name)
# Note
When landmark extractor names are used, they have to be implemented in
the module `concise.preprocessing.position`
# Returns
Dictionary of pd.DataFrames with landmark positions
(columns: seqname, position, strand)
"""
if isinstance(gtf, str):
_logger.info('Reading gtf file..')
gtf = read_gtf(gtf)
_logger.info('Done') # depends on [control=['if'], data=[]]
_logger.info('Running landmark extractors..')
# landmarks to a dictionary with a function
assert isinstance(landmarks, (list, tuple, set, dict))
if isinstance(landmarks, dict):
landmarks = {k: _get_fun(v) for (k, v) in landmarks.items()} # depends on [control=['if'], data=[]]
else:
landmarks = {_to_string(fn_str): _get_fun(fn_str) for fn_str in landmarks}
r = {k: _validate_pos(v(gtf)) for (k, v) in landmarks.items()}
_logger.info('Done!')
return r |
def smooth(self, smoothing_factor):
"""
return a new time series which is a exponential smoothed version of the original data series.
soomth forward once, backward once, and then take the average.
:param float smoothing_factor: smoothing factor
:return: :class:`TimeSeries` object.
"""
forward_smooth = {}
backward_smooth = {}
output = {}
if self:
pre = self.values[0]
next = self.values[-1]
for key, value in self.items():
forward_smooth[key] = smoothing_factor * pre + (1 - smoothing_factor) * value
pre = forward_smooth[key]
for key, value in reversed(self.items()):
backward_smooth[key] = smoothing_factor * next + (1 - smoothing_factor) * value
next = backward_smooth[key]
for key in forward_smooth.keys():
output[key] = (forward_smooth[key] + backward_smooth[key]) / 2
return TimeSeries(output) | def function[smooth, parameter[self, smoothing_factor]]:
constant[
return a new time series which is a exponential smoothed version of the original data series.
soomth forward once, backward once, and then take the average.
:param float smoothing_factor: smoothing factor
:return: :class:`TimeSeries` object.
]
variable[forward_smooth] assign[=] dictionary[[], []]
variable[backward_smooth] assign[=] dictionary[[], []]
variable[output] assign[=] dictionary[[], []]
if name[self] begin[:]
variable[pre] assign[=] call[name[self].values][constant[0]]
variable[next] assign[=] call[name[self].values][<ast.UnaryOp object at 0x7da2054a6cb0>]
for taget[tuple[[<ast.Name object at 0x7da2054a6500>, <ast.Name object at 0x7da2054a4310>]]] in starred[call[name[self].items, parameter[]]] begin[:]
call[name[forward_smooth]][name[key]] assign[=] binary_operation[binary_operation[name[smoothing_factor] * name[pre]] + binary_operation[binary_operation[constant[1] - name[smoothing_factor]] * name[value]]]
variable[pre] assign[=] call[name[forward_smooth]][name[key]]
for taget[tuple[[<ast.Name object at 0x7da2054a4e80>, <ast.Name object at 0x7da2054a45b0>]]] in starred[call[name[reversed], parameter[call[name[self].items, parameter[]]]]] begin[:]
call[name[backward_smooth]][name[key]] assign[=] binary_operation[binary_operation[name[smoothing_factor] * name[next]] + binary_operation[binary_operation[constant[1] - name[smoothing_factor]] * name[value]]]
variable[next] assign[=] call[name[backward_smooth]][name[key]]
for taget[name[key]] in starred[call[name[forward_smooth].keys, parameter[]]] begin[:]
call[name[output]][name[key]] assign[=] binary_operation[binary_operation[call[name[forward_smooth]][name[key]] + call[name[backward_smooth]][name[key]]] / constant[2]]
return[call[name[TimeSeries], parameter[name[output]]]] | keyword[def] identifier[smooth] ( identifier[self] , identifier[smoothing_factor] ):
literal[string]
identifier[forward_smooth] ={}
identifier[backward_smooth] ={}
identifier[output] ={}
keyword[if] identifier[self] :
identifier[pre] = identifier[self] . identifier[values] [ literal[int] ]
identifier[next] = identifier[self] . identifier[values] [- literal[int] ]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[self] . identifier[items] ():
identifier[forward_smooth] [ identifier[key] ]= identifier[smoothing_factor] * identifier[pre] +( literal[int] - identifier[smoothing_factor] )* identifier[value]
identifier[pre] = identifier[forward_smooth] [ identifier[key] ]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[reversed] ( identifier[self] . identifier[items] ()):
identifier[backward_smooth] [ identifier[key] ]= identifier[smoothing_factor] * identifier[next] +( literal[int] - identifier[smoothing_factor] )* identifier[value]
identifier[next] = identifier[backward_smooth] [ identifier[key] ]
keyword[for] identifier[key] keyword[in] identifier[forward_smooth] . identifier[keys] ():
identifier[output] [ identifier[key] ]=( identifier[forward_smooth] [ identifier[key] ]+ identifier[backward_smooth] [ identifier[key] ])/ literal[int]
keyword[return] identifier[TimeSeries] ( identifier[output] ) | def smooth(self, smoothing_factor):
"""
return a new time series which is a exponential smoothed version of the original data series.
soomth forward once, backward once, and then take the average.
:param float smoothing_factor: smoothing factor
:return: :class:`TimeSeries` object.
"""
forward_smooth = {}
backward_smooth = {}
output = {}
if self:
pre = self.values[0]
next = self.values[-1]
for (key, value) in self.items():
forward_smooth[key] = smoothing_factor * pre + (1 - smoothing_factor) * value
pre = forward_smooth[key] # depends on [control=['for'], data=[]]
for (key, value) in reversed(self.items()):
backward_smooth[key] = smoothing_factor * next + (1 - smoothing_factor) * value
next = backward_smooth[key] # depends on [control=['for'], data=[]]
for key in forward_smooth.keys():
output[key] = (forward_smooth[key] + backward_smooth[key]) / 2 # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]]
return TimeSeries(output) |
def subplots(scale_x=None, scale_y=None, scale=None, **kwargs):
r'''
Run ``matplotlib.pyplot.subplots`` with ``figsize`` set to the correct multiple of the default.
:additional options:
**scale, scale_x, scale_y** (``<float>``)
Scale the figure-size (along one of the dimensions).
'''
if 'figsize' in kwargs: return plt.subplots(**kwargs)
width, height = mpl.rcParams['figure.figsize']
if scale is not None:
width *= scale
height *= scale
if scale_x is not None:
width *= scale_x
if scale_y is not None:
height *= scale_y
nrows = kwargs.pop('nrows', 1)
ncols = kwargs.pop('ncols', 1)
width = ncols * width
height = nrows * height
return plt.subplots(nrows=nrows, ncols=ncols, figsize=(width,height), **kwargs) | def function[subplots, parameter[scale_x, scale_y, scale]]:
constant[
Run ``matplotlib.pyplot.subplots`` with ``figsize`` set to the correct multiple of the default.
:additional options:
**scale, scale_x, scale_y** (``<float>``)
Scale the figure-size (along one of the dimensions).
]
if compare[constant[figsize] in name[kwargs]] begin[:]
return[call[name[plt].subplots, parameter[]]]
<ast.Tuple object at 0x7da1b1fabaf0> assign[=] call[name[mpl].rcParams][constant[figure.figsize]]
if compare[name[scale] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b1fa9000>
<ast.AugAssign object at 0x7da1b1fab310>
if compare[name[scale_x] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b1fa8d00>
if compare[name[scale_y] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b1fa9ed0>
variable[nrows] assign[=] call[name[kwargs].pop, parameter[constant[nrows], constant[1]]]
variable[ncols] assign[=] call[name[kwargs].pop, parameter[constant[ncols], constant[1]]]
variable[width] assign[=] binary_operation[name[ncols] * name[width]]
variable[height] assign[=] binary_operation[name[nrows] * name[height]]
return[call[name[plt].subplots, parameter[]]] | keyword[def] identifier[subplots] ( identifier[scale_x] = keyword[None] , identifier[scale_y] = keyword[None] , identifier[scale] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] : keyword[return] identifier[plt] . identifier[subplots] (** identifier[kwargs] )
identifier[width] , identifier[height] = identifier[mpl] . identifier[rcParams] [ literal[string] ]
keyword[if] identifier[scale] keyword[is] keyword[not] keyword[None] :
identifier[width] *= identifier[scale]
identifier[height] *= identifier[scale]
keyword[if] identifier[scale_x] keyword[is] keyword[not] keyword[None] :
identifier[width] *= identifier[scale_x]
keyword[if] identifier[scale_y] keyword[is] keyword[not] keyword[None] :
identifier[height] *= identifier[scale_y]
identifier[nrows] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[int] )
identifier[ncols] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[int] )
identifier[width] = identifier[ncols] * identifier[width]
identifier[height] = identifier[nrows] * identifier[height]
keyword[return] identifier[plt] . identifier[subplots] ( identifier[nrows] = identifier[nrows] , identifier[ncols] = identifier[ncols] , identifier[figsize] =( identifier[width] , identifier[height] ),** identifier[kwargs] ) | def subplots(scale_x=None, scale_y=None, scale=None, **kwargs):
"""
Run ``matplotlib.pyplot.subplots`` with ``figsize`` set to the correct multiple of the default.
:additional options:
**scale, scale_x, scale_y** (``<float>``)
Scale the figure-size (along one of the dimensions).
"""
if 'figsize' in kwargs:
return plt.subplots(**kwargs) # depends on [control=['if'], data=['kwargs']]
(width, height) = mpl.rcParams['figure.figsize']
if scale is not None:
width *= scale
height *= scale # depends on [control=['if'], data=['scale']]
if scale_x is not None:
width *= scale_x # depends on [control=['if'], data=['scale_x']]
if scale_y is not None:
height *= scale_y # depends on [control=['if'], data=['scale_y']]
nrows = kwargs.pop('nrows', 1)
ncols = kwargs.pop('ncols', 1)
width = ncols * width
height = nrows * height
return plt.subplots(nrows=nrows, ncols=ncols, figsize=(width, height), **kwargs) |
def _get_udf_entry(self, udf_path):
# type: (str) -> udfmod.UDFFileEntry
'''
Internal method to get the UDF File Entry for a particular path.
Parameters:
udf_path - The path on the UDF filesystem to look up the record for.
Returns:
A udfmod.UDFFileEntry object representing the path.
'''
if self._needs_reshuffle:
self._reshuffle_extents()
(ident_unused, rec) = self._find_udf_record(utils.normpath(udf_path))
if rec is None:
raise pycdlibexception.PyCdlibInvalidInput('Cannot get entry for empty UDF File Entry')
return rec | def function[_get_udf_entry, parameter[self, udf_path]]:
constant[
Internal method to get the UDF File Entry for a particular path.
Parameters:
udf_path - The path on the UDF filesystem to look up the record for.
Returns:
A udfmod.UDFFileEntry object representing the path.
]
if name[self]._needs_reshuffle begin[:]
call[name[self]._reshuffle_extents, parameter[]]
<ast.Tuple object at 0x7da1b0de1780> assign[=] call[name[self]._find_udf_record, parameter[call[name[utils].normpath, parameter[name[udf_path]]]]]
if compare[name[rec] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0de0760>
return[name[rec]] | keyword[def] identifier[_get_udf_entry] ( identifier[self] , identifier[udf_path] ):
literal[string]
keyword[if] identifier[self] . identifier[_needs_reshuffle] :
identifier[self] . identifier[_reshuffle_extents] ()
( identifier[ident_unused] , identifier[rec] )= identifier[self] . identifier[_find_udf_record] ( identifier[utils] . identifier[normpath] ( identifier[udf_path] ))
keyword[if] identifier[rec] keyword[is] keyword[None] :
keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInvalidInput] ( literal[string] )
keyword[return] identifier[rec] | def _get_udf_entry(self, udf_path):
# type: (str) -> udfmod.UDFFileEntry
'\n Internal method to get the UDF File Entry for a particular path.\n\n Parameters:\n udf_path - The path on the UDF filesystem to look up the record for.\n Returns:\n A udfmod.UDFFileEntry object representing the path.\n '
if self._needs_reshuffle:
self._reshuffle_extents() # depends on [control=['if'], data=[]]
(ident_unused, rec) = self._find_udf_record(utils.normpath(udf_path))
if rec is None:
raise pycdlibexception.PyCdlibInvalidInput('Cannot get entry for empty UDF File Entry') # depends on [control=['if'], data=[]]
return rec |
def evalrepr(self):
"""Evaluable repr"""
args = [repr(arg) for arg in get_interfaces(self.argvalues)]
param = ", ".join(args)
return "%s(%s)" % (self.parent.evalrepr, param) | def function[evalrepr, parameter[self]]:
constant[Evaluable repr]
variable[args] assign[=] <ast.ListComp object at 0x7da1afe50880>
variable[param] assign[=] call[constant[, ].join, parameter[name[args]]]
return[binary_operation[constant[%s(%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1afe50c10>, <ast.Name object at 0x7da1afe53c70>]]]] | keyword[def] identifier[evalrepr] ( identifier[self] ):
literal[string]
identifier[args] =[ identifier[repr] ( identifier[arg] ) keyword[for] identifier[arg] keyword[in] identifier[get_interfaces] ( identifier[self] . identifier[argvalues] )]
identifier[param] = literal[string] . identifier[join] ( identifier[args] )
keyword[return] literal[string] %( identifier[self] . identifier[parent] . identifier[evalrepr] , identifier[param] ) | def evalrepr(self):
"""Evaluable repr"""
args = [repr(arg) for arg in get_interfaces(self.argvalues)]
param = ', '.join(args)
return '%s(%s)' % (self.parent.evalrepr, param) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.