code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def refresh_all(force=False):
"""Remove/close all libraries and state machines and reloads them freshly from the file system
:param bool force: Force flag to avoid any checks
"""
# Resolve the two editor controllers from the GUI singleton's main window controller.
state_machines_editor_ctrl = rafcon.gui.singleton.main_window_controller.get_controller('state_machines_editor_ctrl')
states_editor_ctrl = rafcon.gui.singleton.main_window_controller.get_controller('states_editor_ctrl')
if force:
pass # no checks direct refresh
else:
# check if a state machine is still running
if not is_state_machine_stopped_to_proceed(root_window=states_editor_ctrl.get_root_window()):
# Abort the refresh entirely while a state machine is still executing.
return
# check if the a dirty flag is still set
# Collect both open and closed editor tabs so unsaved source code in either is found.
all_tabs = list(states_editor_ctrl.tabs.values())
all_tabs.extend(states_editor_ctrl.closed_tabs.values())
dirty_source_editor = [tab_dict['controller'] for tab_dict in all_tabs if
tab_dict['source_code_view_is_dirty'] is True]
if state_machine_manager.has_dirty_state_machine() or dirty_source_editor:
# Build a warning message listing every unsaved state machine and source editor.
message_string = "Are you sure you want to reload the libraries and all state machines?\n\n" \
"The following elements have been modified and not saved. " \
"These changes will get lost:"
for sm_id, sm in state_machine_manager.state_machines.items():
if sm.marked_dirty:
message_string = "%s\n* State machine #%s and name '%s'" % (
message_string, str(sm_id), sm.root_state.name)
for ctrl in dirty_source_editor:
message_string = "%s\n* Source code of state with name '%s' and path '%s'" % (
message_string, ctrl.model.state.name, ctrl.model.state.get_path())
# Ask the user to confirm; response id 1 corresponds to the first button ("Reload anyway").
dialog = RAFCONButtonDialog(message_string, ["Reload anyway", "Cancel"],
message_type=Gtk.MessageType.WARNING, parent=states_editor_ctrl.get_root_window())
response_id = dialog.run()
dialog.destroy()
if response_id == 1: # Reload anyway
pass
else:
logger.debug("Refresh canceled")
return
# All checks passed (or were skipped via force): drop loaded libraries and reload everything.
library_manager.clean_loaded_libraries()
refresh_libraries()
states_editor_ctrl.close_all_pages()
state_machines_editor_ctrl.refresh_all_state_machines() | def function[refresh_all, parameter[force]]:
constant[Remove/close all libraries and state machines and reloads them freshly from the file system
:param bool force: Force flag to avoid any checks
]
variable[state_machines_editor_ctrl] assign[=] call[name[rafcon].gui.singleton.main_window_controller.get_controller, parameter[constant[state_machines_editor_ctrl]]]
variable[states_editor_ctrl] assign[=] call[name[rafcon].gui.singleton.main_window_controller.get_controller, parameter[constant[states_editor_ctrl]]]
if name[force] begin[:]
pass
call[name[library_manager].clean_loaded_libraries, parameter[]]
call[name[refresh_libraries], parameter[]]
call[name[states_editor_ctrl].close_all_pages, parameter[]]
call[name[state_machines_editor_ctrl].refresh_all_state_machines, parameter[]] | keyword[def] identifier[refresh_all] ( identifier[force] = keyword[False] ):
literal[string]
identifier[state_machines_editor_ctrl] = identifier[rafcon] . identifier[gui] . identifier[singleton] . identifier[main_window_controller] . identifier[get_controller] ( literal[string] )
identifier[states_editor_ctrl] = identifier[rafcon] . identifier[gui] . identifier[singleton] . identifier[main_window_controller] . identifier[get_controller] ( literal[string] )
keyword[if] identifier[force] :
keyword[pass]
keyword[else] :
keyword[if] keyword[not] identifier[is_state_machine_stopped_to_proceed] ( identifier[root_window] = identifier[states_editor_ctrl] . identifier[get_root_window] ()):
keyword[return]
identifier[all_tabs] = identifier[list] ( identifier[states_editor_ctrl] . identifier[tabs] . identifier[values] ())
identifier[all_tabs] . identifier[extend] ( identifier[states_editor_ctrl] . identifier[closed_tabs] . identifier[values] ())
identifier[dirty_source_editor] =[ identifier[tab_dict] [ literal[string] ] keyword[for] identifier[tab_dict] keyword[in] identifier[all_tabs] keyword[if]
identifier[tab_dict] [ literal[string] ] keyword[is] keyword[True] ]
keyword[if] identifier[state_machine_manager] . identifier[has_dirty_state_machine] () keyword[or] identifier[dirty_source_editor] :
identifier[message_string] = literal[string] literal[string] literal[string]
keyword[for] identifier[sm_id] , identifier[sm] keyword[in] identifier[state_machine_manager] . identifier[state_machines] . identifier[items] ():
keyword[if] identifier[sm] . identifier[marked_dirty] :
identifier[message_string] = literal[string] %(
identifier[message_string] , identifier[str] ( identifier[sm_id] ), identifier[sm] . identifier[root_state] . identifier[name] )
keyword[for] identifier[ctrl] keyword[in] identifier[dirty_source_editor] :
identifier[message_string] = literal[string] %(
identifier[message_string] , identifier[ctrl] . identifier[model] . identifier[state] . identifier[name] , identifier[ctrl] . identifier[model] . identifier[state] . identifier[get_path] ())
identifier[dialog] = identifier[RAFCONButtonDialog] ( identifier[message_string] ,[ literal[string] , literal[string] ],
identifier[message_type] = identifier[Gtk] . identifier[MessageType] . identifier[WARNING] , identifier[parent] = identifier[states_editor_ctrl] . identifier[get_root_window] ())
identifier[response_id] = identifier[dialog] . identifier[run] ()
identifier[dialog] . identifier[destroy] ()
keyword[if] identifier[response_id] == literal[int] :
keyword[pass]
keyword[else] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[return]
identifier[library_manager] . identifier[clean_loaded_libraries] ()
identifier[refresh_libraries] ()
identifier[states_editor_ctrl] . identifier[close_all_pages] ()
identifier[state_machines_editor_ctrl] . identifier[refresh_all_state_machines] () | def refresh_all(force=False):
"""Remove/close all libraries and state machines and reloads them freshly from the file system
:param bool force: Force flag to avoid any checks
"""
state_machines_editor_ctrl = rafcon.gui.singleton.main_window_controller.get_controller('state_machines_editor_ctrl')
states_editor_ctrl = rafcon.gui.singleton.main_window_controller.get_controller('states_editor_ctrl')
if force:
pass # no checks direct refresh # depends on [control=['if'], data=[]]
else:
# check if a state machine is still running
if not is_state_machine_stopped_to_proceed(root_window=states_editor_ctrl.get_root_window()):
return # depends on [control=['if'], data=[]]
# check if the a dirty flag is still set
all_tabs = list(states_editor_ctrl.tabs.values())
all_tabs.extend(states_editor_ctrl.closed_tabs.values())
dirty_source_editor = [tab_dict['controller'] for tab_dict in all_tabs if tab_dict['source_code_view_is_dirty'] is True]
if state_machine_manager.has_dirty_state_machine() or dirty_source_editor:
message_string = 'Are you sure you want to reload the libraries and all state machines?\n\nThe following elements have been modified and not saved. These changes will get lost:'
for (sm_id, sm) in state_machine_manager.state_machines.items():
if sm.marked_dirty:
message_string = "%s\n* State machine #%s and name '%s'" % (message_string, str(sm_id), sm.root_state.name) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
for ctrl in dirty_source_editor:
message_string = "%s\n* Source code of state with name '%s' and path '%s'" % (message_string, ctrl.model.state.name, ctrl.model.state.get_path()) # depends on [control=['for'], data=['ctrl']]
dialog = RAFCONButtonDialog(message_string, ['Reload anyway', 'Cancel'], message_type=Gtk.MessageType.WARNING, parent=states_editor_ctrl.get_root_window())
response_id = dialog.run()
dialog.destroy()
if response_id == 1: # Reload anyway
pass # depends on [control=['if'], data=[]]
else:
logger.debug('Refresh canceled')
return # depends on [control=['if'], data=[]]
library_manager.clean_loaded_libraries()
refresh_libraries()
states_editor_ctrl.close_all_pages()
state_machines_editor_ctrl.refresh_all_state_machines() |
def add_columns(t0, t1):
"""Add columns of table t1 to table t0."""
# Iterate over every column name of the source table t1.
for colname in t1.colnames:
col = t1.columns[colname]
if colname in t0.columns:
# Skip columns t0 already has, so existing data is never overwritten.
continue
# Create an empty column with the same name/dtype, sized to t0's row count.
# NOTE(review): the column shape is deliberately not copied (see commented-out
# argument below) -- confirm multi-dimensional columns are not expected here.
new_col = Column(name=col.name, length=len(t0), dtype=col.dtype) # ,
# shape=col.shape)
t0.add_column(new_col) | def function[add_columns, parameter[t0, t1]]:
constant[Add columns of table t1 to table t0.]
for taget[name[colname]] in starred[name[t1].colnames] begin[:]
variable[col] assign[=] call[name[t1].columns][name[colname]]
if compare[name[colname] in name[t0].columns] begin[:]
continue
variable[new_col] assign[=] call[name[Column], parameter[]]
call[name[t0].add_column, parameter[name[new_col]]] | keyword[def] identifier[add_columns] ( identifier[t0] , identifier[t1] ):
literal[string]
keyword[for] identifier[colname] keyword[in] identifier[t1] . identifier[colnames] :
identifier[col] = identifier[t1] . identifier[columns] [ identifier[colname] ]
keyword[if] identifier[colname] keyword[in] identifier[t0] . identifier[columns] :
keyword[continue]
identifier[new_col] = identifier[Column] ( identifier[name] = identifier[col] . identifier[name] , identifier[length] = identifier[len] ( identifier[t0] ), identifier[dtype] = identifier[col] . identifier[dtype] )
identifier[t0] . identifier[add_column] ( identifier[new_col] ) | def add_columns(t0, t1):
"""Add columns of table t1 to table t0."""
for colname in t1.colnames:
col = t1.columns[colname]
if colname in t0.columns:
continue # depends on [control=['if'], data=[]]
new_col = Column(name=col.name, length=len(t0), dtype=col.dtype) # ,
# shape=col.shape)
t0.add_column(new_col) # depends on [control=['for'], data=['colname']] |
def populate_database():
"""Populate the database with some data useful for development."""
# Idempotence guard: if the admin user already exists, assume data is populated.
if User.fetch_by(username='admin'):
return
# Admin user
admin = User(name='Administrator', password='password',
username='admin', is_admin=True)
# Class
class_ = Class(name='CS32')
Session.add(class_)
# flush so class_.id is assigned before it is referenced below
Session.flush()
# Project
project = Project(name='Project 1', class_id=class_.id)
Session.add(project)
# flush so project.id is assigned before it is referenced below
Session.flush()
# File verification
fv = FileVerifier(filename='test.c', min_size=3, min_lines=1,
project_id=project.id)
Session.add_all([admin, fv])
try:
transaction.commit()
print('Admin user created')
except IntegrityError:
# On a constraint violation (e.g. the data already exists), discard the transaction.
transaction.abort() | def function[populate_database, parameter[]]:
constant[Populate the database with some data useful for development.]
if call[name[User].fetch_by, parameter[]] begin[:]
return[None]
variable[admin] assign[=] call[name[User], parameter[]]
variable[class_] assign[=] call[name[Class], parameter[]]
call[name[Session].add, parameter[name[class_]]]
call[name[Session].flush, parameter[]]
variable[project] assign[=] call[name[Project], parameter[]]
call[name[Session].add, parameter[name[project]]]
call[name[Session].flush, parameter[]]
variable[fv] assign[=] call[name[FileVerifier], parameter[]]
call[name[Session].add_all, parameter[list[[<ast.Name object at 0x7da18dc9b7f0>, <ast.Name object at 0x7da18dc9bd60>]]]]
<ast.Try object at 0x7da18dc9b5b0> | keyword[def] identifier[populate_database] ():
literal[string]
keyword[if] identifier[User] . identifier[fetch_by] ( identifier[username] = literal[string] ):
keyword[return]
identifier[admin] = identifier[User] ( identifier[name] = literal[string] , identifier[password] = literal[string] ,
identifier[username] = literal[string] , identifier[is_admin] = keyword[True] )
identifier[class_] = identifier[Class] ( identifier[name] = literal[string] )
identifier[Session] . identifier[add] ( identifier[class_] )
identifier[Session] . identifier[flush] ()
identifier[project] = identifier[Project] ( identifier[name] = literal[string] , identifier[class_id] = identifier[class_] . identifier[id] )
identifier[Session] . identifier[add] ( identifier[project] )
identifier[Session] . identifier[flush] ()
identifier[fv] = identifier[FileVerifier] ( identifier[filename] = literal[string] , identifier[min_size] = literal[int] , identifier[min_lines] = literal[int] ,
identifier[project_id] = identifier[project] . identifier[id] )
identifier[Session] . identifier[add_all] ([ identifier[admin] , identifier[fv] ])
keyword[try] :
identifier[transaction] . identifier[commit] ()
identifier[print] ( literal[string] )
keyword[except] identifier[IntegrityError] :
identifier[transaction] . identifier[abort] () | def populate_database():
"""Populate the database with some data useful for development."""
if User.fetch_by(username='admin'):
return # depends on [control=['if'], data=[]]
# Admin user
admin = User(name='Administrator', password='password', username='admin', is_admin=True)
# Class
class_ = Class(name='CS32')
Session.add(class_)
Session.flush()
# Project
project = Project(name='Project 1', class_id=class_.id)
Session.add(project)
Session.flush()
# File verification
fv = FileVerifier(filename='test.c', min_size=3, min_lines=1, project_id=project.id)
Session.add_all([admin, fv])
try:
transaction.commit()
print('Admin user created') # depends on [control=['try'], data=[]]
except IntegrityError:
transaction.abort() # depends on [control=['except'], data=[]] |
def _is_qstring(message):
"""Check if its a QString without adding any dep to PyQt5."""
# Compare by class-name string so PyQt5 never has to be imported here.
my_class = str(message.__class__)
# str(cls) renders as "<class 'PyQt5.QtCore.QString'>"; strip the wrapper text.
my_class_name = my_class.replace('<class \'', '').replace('\'>', '')
if my_class_name == 'PyQt5.QtCore.QString':
return True
return False | def function[_is_qstring, parameter[message]]:
constant[Check if its a QString without adding any dep to PyQt5.]
variable[my_class] assign[=] call[name[str], parameter[name[message].__class__]]
variable[my_class_name] assign[=] call[call[name[my_class].replace, parameter[constant[<class '], constant[]]].replace, parameter[constant['>], constant[]]]
if compare[name[my_class_name] equal[==] constant[PyQt5.QtCore.QString]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[_is_qstring] ( identifier[message] ):
literal[string]
identifier[my_class] = identifier[str] ( identifier[message] . identifier[__class__] )
identifier[my_class_name] = identifier[my_class] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
keyword[if] identifier[my_class_name] == literal[string] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def _is_qstring(message):
"""Check if its a QString without adding any dep to PyQt5."""
my_class = str(message.__class__)
my_class_name = my_class.replace("<class '", '').replace("'>", '')
if my_class_name == 'PyQt5.QtCore.QString':
return True # depends on [control=['if'], data=[]]
return False |
def brier_score(self):
"""
Calculate the Brier Score
"""
# Standard three-component decomposition: BS = reliability - resolution + uncertainty.
reliability, resolution, uncertainty = self.brier_score_components()
return reliability - resolution + uncertainty | def function[brier_score, parameter[self]]:
constant[
Calculate the Brier Score
]
<ast.Tuple object at 0x7da18f8129e0> assign[=] call[name[self].brier_score_components, parameter[]]
return[binary_operation[binary_operation[name[reliability] - name[resolution]] + name[uncertainty]]] | keyword[def] identifier[brier_score] ( identifier[self] ):
literal[string]
identifier[reliability] , identifier[resolution] , identifier[uncertainty] = identifier[self] . identifier[brier_score_components] ()
keyword[return] identifier[reliability] - identifier[resolution] + identifier[uncertainty] | def brier_score(self):
"""
Calculate the Brier Score
"""
(reliability, resolution, uncertainty) = self.brier_score_components()
return reliability - resolution + uncertainty |
def show_instance(name, call=None):
'''
Displays details about a particular Linode VM. Either a name or a linode_id must
be provided.
.. versionadded:: 2015.8.0
name
The name of the VM for which to display details.
CLI Example:
.. code-block:: bash
salt-cloud -a show_instance vm_name
.. note::
The ``image`` label only displays information about the VM's distribution vendor,
such as "Debian" or "RHEL" and does not display the actual image name. This is
due to a limitation of the Linode API.
'''
# This function may only be invoked as a per-VM --action, never as a --function.
if call != 'action':
raise SaltCloudException(
'The show_instance action must be called with -a or --action.'
)
# Resolve the Linode id from the VM name, then fetch its node data and IP lists.
node_id = get_linode_id_from_name(name)
node_data = get_linode(kwargs={'linode_id': node_id})
ips = get_ips(node_id)
# STATUS arrives as a numeric code; translate it to a human-readable description.
state = int(node_data['STATUS'])
ret = {'id': node_data['LINODEID'],
'image': node_data['DISTRIBUTIONVENDOR'],
'name': node_data['LABEL'],
'size': node_data['TOTALRAM'],
'state': _get_status_descr_by_id(state),
'private_ips': ips['private_ips'],
'public_ips': ips['public_ips']}
return ret | def function[show_instance, parameter[name, call]]:
constant[
Displays details about a particular Linode VM. Either a name or a linode_id must
be provided.
.. versionadded:: 2015.8.0
name
The name of the VM for which to display details.
CLI Example:
.. code-block:: bash
salt-cloud -a show_instance vm_name
.. note::
The ``image`` label only displays information about the VM's distribution vendor,
such as "Debian" or "RHEL" and does not display the actual image name. This is
due to a limitation of the Linode API.
]
if compare[name[call] not_equal[!=] constant[action]] begin[:]
<ast.Raise object at 0x7da2045662f0>
variable[node_id] assign[=] call[name[get_linode_id_from_name], parameter[name[name]]]
variable[node_data] assign[=] call[name[get_linode], parameter[]]
variable[ips] assign[=] call[name[get_ips], parameter[name[node_id]]]
variable[state] assign[=] call[name[int], parameter[call[name[node_data]][constant[STATUS]]]]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b1f3fb50>, <ast.Constant object at 0x7da1b1f3faf0>, <ast.Constant object at 0x7da1b1f3fc70>, <ast.Constant object at 0x7da1b1f3fb80>, <ast.Constant object at 0x7da1b1f3fcd0>, <ast.Constant object at 0x7da1b1f3f4c0>, <ast.Constant object at 0x7da1b1f3f760>], [<ast.Subscript object at 0x7da1b1f3feb0>, <ast.Subscript object at 0x7da1b1f3fe50>, <ast.Subscript object at 0x7da1b1f3f730>, <ast.Subscript object at 0x7da1b1f3f670>, <ast.Call object at 0x7da1b1f3fbb0>, <ast.Subscript object at 0x7da1b1f3f5b0>, <ast.Subscript object at 0x7da1b1f3f820>]]
return[name[ret]] | keyword[def] identifier[show_instance] ( identifier[name] , identifier[call] = keyword[None] ):
literal[string]
keyword[if] identifier[call] != literal[string] :
keyword[raise] identifier[SaltCloudException] (
literal[string]
)
identifier[node_id] = identifier[get_linode_id_from_name] ( identifier[name] )
identifier[node_data] = identifier[get_linode] ( identifier[kwargs] ={ literal[string] : identifier[node_id] })
identifier[ips] = identifier[get_ips] ( identifier[node_id] )
identifier[state] = identifier[int] ( identifier[node_data] [ literal[string] ])
identifier[ret] ={ literal[string] : identifier[node_data] [ literal[string] ],
literal[string] : identifier[node_data] [ literal[string] ],
literal[string] : identifier[node_data] [ literal[string] ],
literal[string] : identifier[node_data] [ literal[string] ],
literal[string] : identifier[_get_status_descr_by_id] ( identifier[state] ),
literal[string] : identifier[ips] [ literal[string] ],
literal[string] : identifier[ips] [ literal[string] ]}
keyword[return] identifier[ret] | def show_instance(name, call=None):
"""
Displays details about a particular Linode VM. Either a name or a linode_id must
be provided.
.. versionadded:: 2015.8.0
name
The name of the VM for which to display details.
CLI Example:
.. code-block:: bash
salt-cloud -a show_instance vm_name
.. note::
The ``image`` label only displays information about the VM's distribution vendor,
such as "Debian" or "RHEL" and does not display the actual image name. This is
due to a limitation of the Linode API.
"""
if call != 'action':
raise SaltCloudException('The show_instance action must be called with -a or --action.') # depends on [control=['if'], data=[]]
node_id = get_linode_id_from_name(name)
node_data = get_linode(kwargs={'linode_id': node_id})
ips = get_ips(node_id)
state = int(node_data['STATUS'])
ret = {'id': node_data['LINODEID'], 'image': node_data['DISTRIBUTIONVENDOR'], 'name': node_data['LABEL'], 'size': node_data['TOTALRAM'], 'state': _get_status_descr_by_id(state), 'private_ips': ips['private_ips'], 'public_ips': ips['public_ips']}
return ret |
def process_teffs(self, teffs, coords, s=np.array([0., 0., 1.]), t=None):
"""
Change the local effective temperatures for any values within the
"cone" defined by the spot. Any teff within the spot will have its
current value multiplied by the "relteff" factor
NOTE: the ``teffs`` array is modified in place as well as returned.
:parameter array teffs: array of teffs for computations
:parameter array coords: array of coords for computations
:t float: current time
"""
if t is None:
# then assume at t0
t = self._t0
# Direction from the body's origin towards the spot center at time t.
pointing_vector = self.pointing_vector(s,t)
logger.debug("spot.process_teffs at t={} with pointing_vector={} and radius={}".format(t, pointing_vector, self._radius))
# Cosine of the angle between each coordinate and the spot's pointing vector.
cos_alpha_coords = np.dot(coords, pointing_vector) / np.linalg.norm(coords, axis=1)
cos_alpha_spot = np.cos(self._radius)
# Points inside the spot cone subtend a smaller angle, hence a larger cosine.
filter_ = cos_alpha_coords > cos_alpha_spot
teffs[filter_] = teffs[filter_] * self._relteff
return teffs | def function[process_teffs, parameter[self, teffs, coords, s, t]]:
constant[
Change the local effective temperatures for any values within the
"cone" defined by the spot. Any teff within the spot will have its
current value multiplied by the "relteff" factor
:parameter array teffs: array of teffs for computations
:parameter array coords: array of coords for computations
:t float: current time
]
if compare[name[t] is constant[None]] begin[:]
variable[t] assign[=] name[self]._t0
variable[pointing_vector] assign[=] call[name[self].pointing_vector, parameter[name[s], name[t]]]
call[name[logger].debug, parameter[call[constant[spot.process_teffs at t={} with pointing_vector={} and radius={}].format, parameter[name[t], name[pointing_vector], name[self]._radius]]]]
variable[cos_alpha_coords] assign[=] binary_operation[call[name[np].dot, parameter[name[coords], name[pointing_vector]]] / call[name[np].linalg.norm, parameter[name[coords]]]]
variable[cos_alpha_spot] assign[=] call[name[np].cos, parameter[name[self]._radius]]
variable[filter_] assign[=] compare[name[cos_alpha_coords] greater[>] name[cos_alpha_spot]]
call[name[teffs]][name[filter_]] assign[=] binary_operation[call[name[teffs]][name[filter_]] * name[self]._relteff]
return[name[teffs]] | keyword[def] identifier[process_teffs] ( identifier[self] , identifier[teffs] , identifier[coords] , identifier[s] = identifier[np] . identifier[array] ([ literal[int] , literal[int] , literal[int] ]), identifier[t] = keyword[None] ):
literal[string]
keyword[if] identifier[t] keyword[is] keyword[None] :
identifier[t] = identifier[self] . identifier[_t0]
identifier[pointing_vector] = identifier[self] . identifier[pointing_vector] ( identifier[s] , identifier[t] )
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[t] , identifier[pointing_vector] , identifier[self] . identifier[_radius] ))
identifier[cos_alpha_coords] = identifier[np] . identifier[dot] ( identifier[coords] , identifier[pointing_vector] )/ identifier[np] . identifier[linalg] . identifier[norm] ( identifier[coords] , identifier[axis] = literal[int] )
identifier[cos_alpha_spot] = identifier[np] . identifier[cos] ( identifier[self] . identifier[_radius] )
identifier[filter_] = identifier[cos_alpha_coords] > identifier[cos_alpha_spot]
identifier[teffs] [ identifier[filter_] ]= identifier[teffs] [ identifier[filter_] ]* identifier[self] . identifier[_relteff]
keyword[return] identifier[teffs] | def process_teffs(self, teffs, coords, s=np.array([0.0, 0.0, 1.0]), t=None):
"""
Change the local effective temperatures for any values within the
"cone" defined by the spot. Any teff within the spot will have its
current value multiplied by the "relteff" factor
:parameter array teffs: array of teffs for computations
:parameter array coords: array of coords for computations
:t float: current time
"""
if t is None:
# then assume at t0
t = self._t0 # depends on [control=['if'], data=['t']]
pointing_vector = self.pointing_vector(s, t)
logger.debug('spot.process_teffs at t={} with pointing_vector={} and radius={}'.format(t, pointing_vector, self._radius))
cos_alpha_coords = np.dot(coords, pointing_vector) / np.linalg.norm(coords, axis=1)
cos_alpha_spot = np.cos(self._radius)
filter_ = cos_alpha_coords > cos_alpha_spot
teffs[filter_] = teffs[filter_] * self._relteff
return teffs |
def delete(self):
"""Delete this document and any counterpart document"""
# Remove the draft copy (if one exists) matching this document's uid.
with self.draft_context():
draft = self.one(Q._uid == self._uid)
if draft:
super(PublisherFrame, draft).delete()
# Remove the published copy (if one exists) as well.
with self.published_context():
published = self.one(Q._uid == self._uid)
if published:
super(PublisherFrame, published).delete() | def function[delete, parameter[self]]:
constant[Delete this document and any counterpart document]
with call[name[self].draft_context, parameter[]] begin[:]
variable[draft] assign[=] call[name[self].one, parameter[compare[name[Q]._uid equal[==] name[self]._uid]]]
if name[draft] begin[:]
call[call[name[super], parameter[name[PublisherFrame], name[draft]]].delete, parameter[]]
with call[name[self].published_context, parameter[]] begin[:]
variable[published] assign[=] call[name[self].one, parameter[compare[name[Q]._uid equal[==] name[self]._uid]]]
if name[published] begin[:]
call[call[name[super], parameter[name[PublisherFrame], name[published]]].delete, parameter[]] | keyword[def] identifier[delete] ( identifier[self] ):
literal[string]
keyword[with] identifier[self] . identifier[draft_context] ():
identifier[draft] = identifier[self] . identifier[one] ( identifier[Q] . identifier[_uid] == identifier[self] . identifier[_uid] )
keyword[if] identifier[draft] :
identifier[super] ( identifier[PublisherFrame] , identifier[draft] ). identifier[delete] ()
keyword[with] identifier[self] . identifier[published_context] ():
identifier[published] = identifier[self] . identifier[one] ( identifier[Q] . identifier[_uid] == identifier[self] . identifier[_uid] )
keyword[if] identifier[published] :
identifier[super] ( identifier[PublisherFrame] , identifier[published] ). identifier[delete] () | def delete(self):
"""Delete this document and any counterpart document"""
with self.draft_context():
draft = self.one(Q._uid == self._uid)
if draft:
super(PublisherFrame, draft).delete() # depends on [control=['if'], data=[]] # depends on [control=['with'], data=[]]
with self.published_context():
published = self.one(Q._uid == self._uid)
if published:
super(PublisherFrame, published).delete() # depends on [control=['if'], data=[]] # depends on [control=['with'], data=[]] |
def configure_deletefor(self, ns, definition):
"""
Register a delete-for relation endpoint.
The definition's func should be a delete function, which must:
- accept kwargs for path data
- return truthy/falsey
:param ns: the namespace
:param definition: the endpoint definition
"""
@self.add_route(ns.relation_path, Operation.DeleteFor, ns)
@wraps(definition.func)
def delete(**path_data):
headers = dict()
response_data = dict()
# presumably require_response_data raises when the delete function
# returns a falsey value -- confirm against its definition
require_response_data(definition.func(**path_data))
# Let the definition populate any custom response headers.
definition.header_func(headers, response_data)
response_format = self.negotiate_response_content(definition.response_formats)
# Empty body: a successful DeleteFor conveys only its status code and headers.
return dump_response_data(
"",
None,
status_code=Operation.DeleteFor.value.default_code,
headers=headers,
response_format=response_format,
)
delete.__doc__ = "Delete a {} relative to a {}".format(pluralize(ns.object_name), ns.subject_name) | def function[configure_deletefor, parameter[self, ns, definition]]:
constant[
Register a delete-for relation endpoint.
The definition's func should be a delete function, which must:
- accept kwargs for path data
- return truthy/falsey
:param ns: the namespace
:param definition: the endpoint definition
]
def function[delete, parameter[]]:
variable[headers] assign[=] call[name[dict], parameter[]]
variable[response_data] assign[=] call[name[dict], parameter[]]
call[name[require_response_data], parameter[call[name[definition].func, parameter[]]]]
call[name[definition].header_func, parameter[name[headers], name[response_data]]]
variable[response_format] assign[=] call[name[self].negotiate_response_content, parameter[name[definition].response_formats]]
return[call[name[dump_response_data], parameter[constant[], constant[None]]]]
name[delete].__doc__ assign[=] call[constant[Delete a {} relative to a {}].format, parameter[call[name[pluralize], parameter[name[ns].object_name]], name[ns].subject_name]] | keyword[def] identifier[configure_deletefor] ( identifier[self] , identifier[ns] , identifier[definition] ):
literal[string]
@ identifier[self] . identifier[add_route] ( identifier[ns] . identifier[relation_path] , identifier[Operation] . identifier[DeleteFor] , identifier[ns] )
@ identifier[wraps] ( identifier[definition] . identifier[func] )
keyword[def] identifier[delete] (** identifier[path_data] ):
identifier[headers] = identifier[dict] ()
identifier[response_data] = identifier[dict] ()
identifier[require_response_data] ( identifier[definition] . identifier[func] (** identifier[path_data] ))
identifier[definition] . identifier[header_func] ( identifier[headers] , identifier[response_data] )
identifier[response_format] = identifier[self] . identifier[negotiate_response_content] ( identifier[definition] . identifier[response_formats] )
keyword[return] identifier[dump_response_data] (
literal[string] ,
keyword[None] ,
identifier[status_code] = identifier[Operation] . identifier[DeleteFor] . identifier[value] . identifier[default_code] ,
identifier[headers] = identifier[headers] ,
identifier[response_format] = identifier[response_format] ,
)
identifier[delete] . identifier[__doc__] = literal[string] . identifier[format] ( identifier[pluralize] ( identifier[ns] . identifier[object_name] ), identifier[ns] . identifier[subject_name] ) | def configure_deletefor(self, ns, definition):
"""
Register a delete-for relation endpoint.
The definition's func should be a delete function, which must:
- accept kwargs for path data
- return truthy/falsey
:param ns: the namespace
:param definition: the endpoint definition
"""
@self.add_route(ns.relation_path, Operation.DeleteFor, ns)
@wraps(definition.func)
def delete(**path_data):
headers = dict()
response_data = dict()
require_response_data(definition.func(**path_data))
definition.header_func(headers, response_data)
response_format = self.negotiate_response_content(definition.response_formats)
return dump_response_data('', None, status_code=Operation.DeleteFor.value.default_code, headers=headers, response_format=response_format)
delete.__doc__ = 'Delete a {} relative to a {}'.format(pluralize(ns.object_name), ns.subject_name) |
def loadPCD(filename, c="gold", alpha=1):
    """Load a `.pcd` (Point Cloud Data) file and return an ``Actor(vtkActor)``.

    Only ASCII-encoded PCD files are supported: points are read from the
    section that follows the ``DATA ascii`` header line, up to the count
    declared on the ``POINTS`` header line.

    :param str filename: path to the ``.pcd`` file
    :param c: color name/value understood by ``colors.getColor``
    :param float alpha: opacity of the resulting actor
    :return: ``Actor`` on success, ``None`` if the file does not exist,
        ``False`` if VTK fails to produce the polydata
    """
    if not os.path.exists(filename):
        colors.printc("~noentry Error in loadPCD: Cannot find file", filename, c=1)
        return None
    # ASCII PCD files are small; read all lines at once. The context
    # manager guarantees the handle is closed even if parsing fails.
    with open(filename, "r") as f:
        lines = f.readlines()
    start = False        # becomes True once the "DATA ascii" header is seen
    pts = []
    N, expN = 0, 0       # points read so far / point count declared in header
    for text in lines:
        if start:
            if N >= expN:
                break
            cols = text.split()
            pts.append([float(cols[0]), float(cols[1]), float(cols[2])])
            N += 1
        if not start and "POINTS" in text:
            expN = int(text.split()[1])
        if not start and "DATA ascii" in text:
            start = True
    if expN != N:
        colors.printc("~!? Mismatch in pcd file", expN, len(pts), c="red")
    src = vtk.vtkPointSource()
    src.SetNumberOfPoints(len(pts))
    src.Update()
    poly = src.GetOutput()
    # Bail out BEFORE touching the points: the original checked `poly`
    # only after dereferencing it in the loop, which would have crashed
    # first if the polydata were ever missing.
    if not poly:
        colors.printc("~noentry Unable to load", filename, c="red")
        return False
    for i, p in enumerate(pts):
        poly.GetPoints().SetPoint(i, p)
    actor = Actor(poly, colors.getColor(c), alpha)
    actor.GetProperty().SetPointSize(4)
return actor | def function[loadPCD, parameter[filename, c, alpha]]:
constant[Return ``vtkActor`` from `Point Cloud` file format. Return an ``Actor(vtkActor)`` object.]
if <ast.UnaryOp object at 0x7da2044c3ac0> begin[:]
call[name[colors].printc, parameter[constant[~noentry Error in loadPCD: Cannot find file], name[filename]]]
return[constant[None]]
variable[f] assign[=] call[name[open], parameter[name[filename], constant[r]]]
variable[lines] assign[=] call[name[f].readlines, parameter[]]
call[name[f].close, parameter[]]
variable[start] assign[=] constant[False]
variable[pts] assign[=] list[[]]
<ast.Tuple object at 0x7da2044c2170> assign[=] tuple[[<ast.Constant object at 0x7da2044c1e10>, <ast.Constant object at 0x7da2044c2da0>]]
for taget[name[text]] in starred[name[lines]] begin[:]
if name[start] begin[:]
if compare[name[N] greater_or_equal[>=] name[expN]] begin[:]
break
variable[l] assign[=] call[name[text].split, parameter[]]
call[name[pts].append, parameter[list[[<ast.Call object at 0x7da2044c2d40>, <ast.Call object at 0x7da2044c0be0>, <ast.Call object at 0x7da2044c0e50>]]]]
<ast.AugAssign object at 0x7da2044c21d0>
if <ast.BoolOp object at 0x7da2044c3430> begin[:]
variable[expN] assign[=] call[name[int], parameter[call[call[name[text].split, parameter[]]][constant[1]]]]
if <ast.BoolOp object at 0x7da2044c2e60> begin[:]
variable[start] assign[=] constant[True]
if compare[name[expN] not_equal[!=] name[N]] begin[:]
call[name[colors].printc, parameter[constant[~!? Mismatch in pcd file], name[expN], call[name[len], parameter[name[pts]]]]]
variable[src] assign[=] call[name[vtk].vtkPointSource, parameter[]]
call[name[src].SetNumberOfPoints, parameter[call[name[len], parameter[name[pts]]]]]
call[name[src].Update, parameter[]]
variable[poly] assign[=] call[name[src].GetOutput, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da2044c0160>, <ast.Name object at 0x7da2044c2c20>]]] in starred[call[name[enumerate], parameter[name[pts]]]] begin[:]
call[call[name[poly].GetPoints, parameter[]].SetPoint, parameter[name[i], name[p]]]
if <ast.UnaryOp object at 0x7da2044c3fa0> begin[:]
call[name[colors].printc, parameter[constant[~noentry Unable to load], name[filename]]]
return[constant[False]]
variable[actor] assign[=] call[name[Actor], parameter[name[poly], call[name[colors].getColor, parameter[name[c]]], name[alpha]]]
call[call[name[actor].GetProperty, parameter[]].SetPointSize, parameter[constant[4]]]
return[name[actor]] | keyword[def] identifier[loadPCD] ( identifier[filename] , identifier[c] = literal[string] , identifier[alpha] = literal[int] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[filename] ):
identifier[colors] . identifier[printc] ( literal[string] , identifier[filename] , identifier[c] = literal[int] )
keyword[return] keyword[None]
identifier[f] = identifier[open] ( identifier[filename] , literal[string] )
identifier[lines] = identifier[f] . identifier[readlines] ()
identifier[f] . identifier[close] ()
identifier[start] = keyword[False]
identifier[pts] =[]
identifier[N] , identifier[expN] = literal[int] , literal[int]
keyword[for] identifier[text] keyword[in] identifier[lines] :
keyword[if] identifier[start] :
keyword[if] identifier[N] >= identifier[expN] :
keyword[break]
identifier[l] = identifier[text] . identifier[split] ()
identifier[pts] . identifier[append] ([ identifier[float] ( identifier[l] [ literal[int] ]), identifier[float] ( identifier[l] [ literal[int] ]), identifier[float] ( identifier[l] [ literal[int] ])])
identifier[N] += literal[int]
keyword[if] keyword[not] identifier[start] keyword[and] literal[string] keyword[in] identifier[text] :
identifier[expN] = identifier[int] ( identifier[text] . identifier[split] ()[ literal[int] ])
keyword[if] keyword[not] identifier[start] keyword[and] literal[string] keyword[in] identifier[text] :
identifier[start] = keyword[True]
keyword[if] identifier[expN] != identifier[N] :
identifier[colors] . identifier[printc] ( literal[string] , identifier[expN] , identifier[len] ( identifier[pts] ), identifier[c] = literal[string] )
identifier[src] = identifier[vtk] . identifier[vtkPointSource] ()
identifier[src] . identifier[SetNumberOfPoints] ( identifier[len] ( identifier[pts] ))
identifier[src] . identifier[Update] ()
identifier[poly] = identifier[src] . identifier[GetOutput] ()
keyword[for] identifier[i] , identifier[p] keyword[in] identifier[enumerate] ( identifier[pts] ):
identifier[poly] . identifier[GetPoints] (). identifier[SetPoint] ( identifier[i] , identifier[p] )
keyword[if] keyword[not] identifier[poly] :
identifier[colors] . identifier[printc] ( literal[string] , identifier[filename] , identifier[c] = literal[string] )
keyword[return] keyword[False]
identifier[actor] = identifier[Actor] ( identifier[poly] , identifier[colors] . identifier[getColor] ( identifier[c] ), identifier[alpha] )
identifier[actor] . identifier[GetProperty] (). identifier[SetPointSize] ( literal[int] )
keyword[return] identifier[actor] | def loadPCD(filename, c='gold', alpha=1):
"""Return ``vtkActor`` from `Point Cloud` file format. Return an ``Actor(vtkActor)`` object."""
if not os.path.exists(filename):
colors.printc('~noentry Error in loadPCD: Cannot find file', filename, c=1)
return None # depends on [control=['if'], data=[]]
f = open(filename, 'r')
lines = f.readlines()
f.close()
start = False
pts = []
(N, expN) = (0, 0)
for text in lines:
if start:
if N >= expN:
break # depends on [control=['if'], data=[]]
l = text.split()
pts.append([float(l[0]), float(l[1]), float(l[2])])
N += 1 # depends on [control=['if'], data=[]]
if not start and 'POINTS' in text:
expN = int(text.split()[1]) # depends on [control=['if'], data=[]]
if not start and 'DATA ascii' in text:
start = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['text']]
if expN != N:
colors.printc('~!? Mismatch in pcd file', expN, len(pts), c='red') # depends on [control=['if'], data=['expN']]
src = vtk.vtkPointSource()
src.SetNumberOfPoints(len(pts))
src.Update()
poly = src.GetOutput()
for (i, p) in enumerate(pts):
poly.GetPoints().SetPoint(i, p) # depends on [control=['for'], data=[]]
if not poly:
colors.printc('~noentry Unable to load', filename, c='red')
return False # depends on [control=['if'], data=[]]
actor = Actor(poly, colors.getColor(c), alpha)
actor.GetProperty().SetPointSize(4)
return actor |
def _node_add_without_peer_list(self, node_sum, child_other):
        '''_node_add_without_peer_list
        Low-level api: Apply delta child_other to node_sum when there is no peer
        of child_other can be found under node_sum. child_other is a list node.
        Element node_sum will be modified during the process.
        Parameters
        ----------
        node_sum : `Element`
            A config node in a config tree.
        child_other : `Element`
            A child of a config node in another config tree. This child has no
            peer under node_sum.
        Returns
        -------
        None
            There is no return of this method.
        '''
        # The schema node tells us whether this list is 'ordered-by user';
        # only then is the insert attribute on child_other honored.
        s_node = self.device.get_schema_node(child_other)
        # Work on a deep copy so attribute stripping via _del_attrib does
        # not mutate the caller's tree.
        e = deepcopy(child_other)
        scope = node_sum.getchildren()
        # Existing entries of the same list under node_sum, in document order.
        siblings = self._get_sequence(scope, child_other.tag, node_sum)
        if s_node.get('ordered-by') == 'user' and \
                child_other.get(insert_tag) is not None:
            if child_other.get(insert_tag) == 'first':
                # Place before the first existing entry, or append if none.
                if siblings:
                    siblings[0].addprevious(self._del_attrib(e))
                else:
                    node_sum.append(self._del_attrib(e))
            elif child_other.get(insert_tag) == 'last':
                # Place after the last existing entry, or append if none.
                if siblings:
                    siblings[-1].addnext(self._del_attrib(e))
                else:
                    node_sum.append(self._del_attrib(e))
            elif child_other.get(insert_tag) == 'before':
                # 'before' requires a key attribute naming the anchor entry.
                if child_other.get(key_tag) is None:
                    _inserterror('before', self.device.get_xpath(child_other),
                                 'key')
                sibling = node_sum.find(child_other.tag +
                                        child_other.get(key_tag),
                                        namespaces=child_other.nsmap)
                if sibling is None:
                    # Anchor entry does not exist under node_sum.
                    path = self.device.get_xpath(child_other)
                    key = child_other.get(key_tag)
                    _inserterror('before', path, 'key', key)
                sibling.addprevious(self._del_attrib(e))
            elif child_other.get(insert_tag) == 'after':
                # 'after' requires a key attribute naming the anchor entry.
                if child_other.get(key_tag) is None:
                    _inserterror('after', self.device.get_xpath(child_other),
                                 'key')
                sibling = node_sum.find(child_other.tag +
                                        child_other.get(key_tag),
                                        namespaces=child_other.nsmap)
                if sibling is None:
                    # Anchor entry does not exist under node_sum.
                    path = self.device.get_xpath(child_other)
                    key = child_other.get(key_tag)
                    _inserterror('after', path, 'key', key)
                sibling.addnext(self._del_attrib(e))
        else:
            # No user ordering requested: append at the end of the sequence.
            if siblings:
                siblings[-1].addnext(self._del_attrib(e))
            else:
node_sum.append(self._del_attrib(e)) | def function[_node_add_without_peer_list, parameter[self, node_sum, child_other]]:
constant[_node_add_without_peer_list
Low-level api: Apply delta child_other to node_sum when there is no peer
of child_other can be found under node_sum. child_other is a list node.
Element node_sum will be modified during the process.
Parameters
----------
node_sum : `Element`
A config node in a config tree.
child_other : `Element`
A child of a config node in another config tree. This child has no
peer under node_sum.
Returns
-------
None
There is no return of this method.
]
variable[s_node] assign[=] call[name[self].device.get_schema_node, parameter[name[child_other]]]
variable[e] assign[=] call[name[deepcopy], parameter[name[child_other]]]
variable[scope] assign[=] call[name[node_sum].getchildren, parameter[]]
variable[siblings] assign[=] call[name[self]._get_sequence, parameter[name[scope], name[child_other].tag, name[node_sum]]]
if <ast.BoolOp object at 0x7da1b2650910> begin[:]
if compare[call[name[child_other].get, parameter[name[insert_tag]]] equal[==] constant[first]] begin[:]
if name[siblings] begin[:]
call[call[name[siblings]][constant[0]].addprevious, parameter[call[name[self]._del_attrib, parameter[name[e]]]]] | keyword[def] identifier[_node_add_without_peer_list] ( identifier[self] , identifier[node_sum] , identifier[child_other] ):
literal[string]
identifier[s_node] = identifier[self] . identifier[device] . identifier[get_schema_node] ( identifier[child_other] )
identifier[e] = identifier[deepcopy] ( identifier[child_other] )
identifier[scope] = identifier[node_sum] . identifier[getchildren] ()
identifier[siblings] = identifier[self] . identifier[_get_sequence] ( identifier[scope] , identifier[child_other] . identifier[tag] , identifier[node_sum] )
keyword[if] identifier[s_node] . identifier[get] ( literal[string] )== literal[string] keyword[and] identifier[child_other] . identifier[get] ( identifier[insert_tag] ) keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[child_other] . identifier[get] ( identifier[insert_tag] )== literal[string] :
keyword[if] identifier[siblings] :
identifier[siblings] [ literal[int] ]. identifier[addprevious] ( identifier[self] . identifier[_del_attrib] ( identifier[e] ))
keyword[else] :
identifier[node_sum] . identifier[append] ( identifier[self] . identifier[_del_attrib] ( identifier[e] ))
keyword[elif] identifier[child_other] . identifier[get] ( identifier[insert_tag] )== literal[string] :
keyword[if] identifier[siblings] :
identifier[siblings] [- literal[int] ]. identifier[addnext] ( identifier[self] . identifier[_del_attrib] ( identifier[e] ))
keyword[else] :
identifier[node_sum] . identifier[append] ( identifier[self] . identifier[_del_attrib] ( identifier[e] ))
keyword[elif] identifier[child_other] . identifier[get] ( identifier[insert_tag] )== literal[string] :
keyword[if] identifier[child_other] . identifier[get] ( identifier[key_tag] ) keyword[is] keyword[None] :
identifier[_inserterror] ( literal[string] , identifier[self] . identifier[device] . identifier[get_xpath] ( identifier[child_other] ),
literal[string] )
identifier[sibling] = identifier[node_sum] . identifier[find] ( identifier[child_other] . identifier[tag] +
identifier[child_other] . identifier[get] ( identifier[key_tag] ),
identifier[namespaces] = identifier[child_other] . identifier[nsmap] )
keyword[if] identifier[sibling] keyword[is] keyword[None] :
identifier[path] = identifier[self] . identifier[device] . identifier[get_xpath] ( identifier[child_other] )
identifier[key] = identifier[child_other] . identifier[get] ( identifier[key_tag] )
identifier[_inserterror] ( literal[string] , identifier[path] , literal[string] , identifier[key] )
identifier[sibling] . identifier[addprevious] ( identifier[self] . identifier[_del_attrib] ( identifier[e] ))
keyword[elif] identifier[child_other] . identifier[get] ( identifier[insert_tag] )== literal[string] :
keyword[if] identifier[child_other] . identifier[get] ( identifier[key_tag] ) keyword[is] keyword[None] :
identifier[_inserterror] ( literal[string] , identifier[self] . identifier[device] . identifier[get_xpath] ( identifier[child_other] ),
literal[string] )
identifier[sibling] = identifier[node_sum] . identifier[find] ( identifier[child_other] . identifier[tag] +
identifier[child_other] . identifier[get] ( identifier[key_tag] ),
identifier[namespaces] = identifier[child_other] . identifier[nsmap] )
keyword[if] identifier[sibling] keyword[is] keyword[None] :
identifier[path] = identifier[self] . identifier[device] . identifier[get_xpath] ( identifier[child_other] )
identifier[key] = identifier[child_other] . identifier[get] ( identifier[key_tag] )
identifier[_inserterror] ( literal[string] , identifier[path] , literal[string] , identifier[key] )
identifier[sibling] . identifier[addnext] ( identifier[self] . identifier[_del_attrib] ( identifier[e] ))
keyword[else] :
keyword[if] identifier[siblings] :
identifier[siblings] [- literal[int] ]. identifier[addnext] ( identifier[self] . identifier[_del_attrib] ( identifier[e] ))
keyword[else] :
identifier[node_sum] . identifier[append] ( identifier[self] . identifier[_del_attrib] ( identifier[e] )) | def _node_add_without_peer_list(self, node_sum, child_other):
"""_node_add_without_peer_list
Low-level api: Apply delta child_other to node_sum when there is no peer
of child_other can be found under node_sum. child_other is a list node.
Element node_sum will be modified during the process.
Parameters
----------
node_sum : `Element`
A config node in a config tree.
child_other : `Element`
A child of a config node in another config tree. This child has no
peer under node_sum.
Returns
-------
None
There is no return of this method.
"""
s_node = self.device.get_schema_node(child_other)
e = deepcopy(child_other)
scope = node_sum.getchildren()
siblings = self._get_sequence(scope, child_other.tag, node_sum)
if s_node.get('ordered-by') == 'user' and child_other.get(insert_tag) is not None:
if child_other.get(insert_tag) == 'first':
if siblings:
siblings[0].addprevious(self._del_attrib(e)) # depends on [control=['if'], data=[]]
else:
node_sum.append(self._del_attrib(e)) # depends on [control=['if'], data=[]]
elif child_other.get(insert_tag) == 'last':
if siblings:
siblings[-1].addnext(self._del_attrib(e)) # depends on [control=['if'], data=[]]
else:
node_sum.append(self._del_attrib(e)) # depends on [control=['if'], data=[]]
elif child_other.get(insert_tag) == 'before':
if child_other.get(key_tag) is None:
_inserterror('before', self.device.get_xpath(child_other), 'key') # depends on [control=['if'], data=[]]
sibling = node_sum.find(child_other.tag + child_other.get(key_tag), namespaces=child_other.nsmap)
if sibling is None:
path = self.device.get_xpath(child_other)
key = child_other.get(key_tag)
_inserterror('before', path, 'key', key) # depends on [control=['if'], data=[]]
sibling.addprevious(self._del_attrib(e)) # depends on [control=['if'], data=[]]
elif child_other.get(insert_tag) == 'after':
if child_other.get(key_tag) is None:
_inserterror('after', self.device.get_xpath(child_other), 'key') # depends on [control=['if'], data=[]]
sibling = node_sum.find(child_other.tag + child_other.get(key_tag), namespaces=child_other.nsmap)
if sibling is None:
path = self.device.get_xpath(child_other)
key = child_other.get(key_tag)
_inserterror('after', path, 'key', key) # depends on [control=['if'], data=[]]
sibling.addnext(self._del_attrib(e)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif siblings:
siblings[-1].addnext(self._del_attrib(e)) # depends on [control=['if'], data=[]]
else:
node_sum.append(self._del_attrib(e)) |
def _build_specs(self, specs, kwargs, fp_precision):
        """
        Returns the specs, the remaining kwargs and whether or not the
        constructor was called with kwarg or explicit specs.
        """
        if specs is None:
            # No explicit specs: the spec is implied by whichever keyword
            # arguments are not declared parameters of this object.
            overridden = param.ParamOverrides(self, kwargs,
                                              allow_extra_keywords=True)
            extra = overridden.extra_keywords()
            # Keep only the keywords that were NOT consumed as spec data.
            kwargs = {key: val for (key, val) in kwargs.items()
                      if key not in extra}
            rounded = list(self.round_floats([extra], fp_precision))
            if not extra:
                return [], kwargs, True
            return rounded, kwargs, False
return list(self.round_floats(specs, fp_precision)), kwargs, True | def function[_build_specs, parameter[self, specs, kwargs, fp_precision]]:
constant[
Returns the specs, the remaining kwargs and whether or not the
constructor was called with kwarg or explicit specs.
]
if compare[name[specs] is constant[None]] begin[:]
variable[overrides] assign[=] call[name[param].ParamOverrides, parameter[name[self], name[kwargs]]]
variable[extra_kwargs] assign[=] call[name[overrides].extra_keywords, parameter[]]
variable[kwargs] assign[=] call[name[dict], parameter[<ast.ListComp object at 0x7da1afe3bc70>]]
variable[rounded_specs] assign[=] call[name[list], parameter[call[name[self].round_floats, parameter[list[[<ast.Name object at 0x7da1afe3a3e0>]], name[fp_precision]]]]]
if compare[name[extra_kwargs] equal[==] dictionary[[], []]] begin[:]
return[tuple[[<ast.List object at 0x7da1afe3a200>, <ast.Name object at 0x7da1afe39ab0>, <ast.Constant object at 0x7da1afe394b0>]]]
return[tuple[[<ast.Call object at 0x7da1afe39720>, <ast.Name object at 0x7da1afe39540>, <ast.Constant object at 0x7da1afe38fd0>]]] | keyword[def] identifier[_build_specs] ( identifier[self] , identifier[specs] , identifier[kwargs] , identifier[fp_precision] ):
literal[string]
keyword[if] identifier[specs] keyword[is] keyword[None] :
identifier[overrides] = identifier[param] . identifier[ParamOverrides] ( identifier[self] , identifier[kwargs] ,
identifier[allow_extra_keywords] = keyword[True] )
identifier[extra_kwargs] = identifier[overrides] . identifier[extra_keywords] ()
identifier[kwargs] = identifier[dict] ([( identifier[k] , identifier[v] ) keyword[for] ( identifier[k] , identifier[v] ) keyword[in] identifier[kwargs] . identifier[items] ()
keyword[if] identifier[k] keyword[not] keyword[in] identifier[extra_kwargs] ])
identifier[rounded_specs] = identifier[list] ( identifier[self] . identifier[round_floats] ([ identifier[extra_kwargs] ],
identifier[fp_precision] ))
keyword[if] identifier[extra_kwargs] =={}: keyword[return] [], identifier[kwargs] , keyword[True]
keyword[else] : keyword[return] identifier[rounded_specs] , identifier[kwargs] , keyword[False]
keyword[return] identifier[list] ( identifier[self] . identifier[round_floats] ( identifier[specs] , identifier[fp_precision] )), identifier[kwargs] , keyword[True] | def _build_specs(self, specs, kwargs, fp_precision):
"""
Returns the specs, the remaining kwargs and whether or not the
constructor was called with kwarg or explicit specs.
"""
if specs is None:
overrides = param.ParamOverrides(self, kwargs, allow_extra_keywords=True)
extra_kwargs = overrides.extra_keywords()
kwargs = dict([(k, v) for (k, v) in kwargs.items() if k not in extra_kwargs])
rounded_specs = list(self.round_floats([extra_kwargs], fp_precision))
if extra_kwargs == {}:
return ([], kwargs, True) # depends on [control=['if'], data=[]]
else:
return (rounded_specs, kwargs, False) # depends on [control=['if'], data=[]]
return (list(self.round_floats(specs, fp_precision)), kwargs, True) |
def parse(self, string):
        """
        Split a runtime path assignment into its component paths.

        :param string string: runtime path string, e.g. ``runtimepath=a,b``
        :return: list of runtime paths
        :rtype: list of string
        """
        # Strip surrounding whitespace, then split off the variable name.
        lhs, sep, values = string.strip().partition('=')
        # Only a well-formed 'runtimepath=' assignment is accepted.
        assert (lhs, sep) == ('runtimepath', '=')
return values.split(',') | def function[parse, parameter[self, string]]:
constant[
Parse runtime path representation to list.
:param string string: runtime path string
:return: list of runtime paths
:rtype: list of string
]
<ast.Tuple object at 0x7da2049627d0> assign[=] call[call[name[string].strip, parameter[]].partition, parameter[constant[=]]]
assert[compare[name[var] equal[==] constant[runtimepath]]]
assert[compare[name[eq] equal[==] constant[=]]]
return[call[name[values].split, parameter[constant[,]]]] | keyword[def] identifier[parse] ( identifier[self] , identifier[string] ):
literal[string]
identifier[var] , identifier[eq] , identifier[values] = identifier[string] . identifier[strip] (). identifier[partition] ( literal[string] )
keyword[assert] identifier[var] == literal[string]
keyword[assert] identifier[eq] == literal[string]
keyword[return] identifier[values] . identifier[split] ( literal[string] ) | def parse(self, string):
"""
Parse runtime path representation to list.
:param string string: runtime path string
:return: list of runtime paths
:rtype: list of string
"""
(var, eq, values) = string.strip().partition('=')
assert var == 'runtimepath'
assert eq == '='
return values.split(',') |
def run(self, scale, accuracy='integrate', **kwargs):
        """Return the Wilson coefficients (as wcxf.WC instance) evolved to the
        scale `scale`.

        Parameters:
        - `scale`: scale in GeV
        - accuracy: whether to use the numerical solution to the RGE
        ('integrate', the default, slow but precise) or the leading logarithmic
        approximation ('leadinglog', approximate but much faster).
        """
        # Validate up front so an unknown accuracy fails before any work.
        if accuracy not in ('integrate', 'leadinglog'):
            raise ValueError("'{}' is not a valid value of 'accuracy' (must be either 'integrate' or 'leadinglog').".format(accuracy))
        if accuracy == 'leadinglog':
            C_out = self._rgevolve_leadinglog(scale)
        else:
            C_out = self._rgevolve(scale, **kwargs)
return self._to_wcxf(C_out, scale) | def function[run, parameter[self, scale, accuracy]]:
constant[Return the Wilson coefficients (as wcxf.WC instance) evolved to the
scale `scale`.
Parameters:
- `scale`: scale in GeV
- accuracy: whether to use the numerical solution to the RGE
('integrate', the default, slow but precise) or the leading logarithmic
approximation ('leadinglog', approximate but much faster).
]
if compare[name[accuracy] equal[==] constant[integrate]] begin[:]
variable[C_out] assign[=] call[name[self]._rgevolve, parameter[name[scale]]]
return[call[name[self]._to_wcxf, parameter[name[C_out], name[scale]]]] | keyword[def] identifier[run] ( identifier[self] , identifier[scale] , identifier[accuracy] = literal[string] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[accuracy] == literal[string] :
identifier[C_out] = identifier[self] . identifier[_rgevolve] ( identifier[scale] ,** identifier[kwargs] )
keyword[elif] identifier[accuracy] == literal[string] :
identifier[C_out] = identifier[self] . identifier[_rgevolve_leadinglog] ( identifier[scale] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[accuracy] ))
keyword[return] identifier[self] . identifier[_to_wcxf] ( identifier[C_out] , identifier[scale] ) | def run(self, scale, accuracy='integrate', **kwargs):
"""Return the Wilson coefficients (as wcxf.WC instance) evolved to the
scale `scale`.
Parameters:
- `scale`: scale in GeV
- accuracy: whether to use the numerical solution to the RGE
('integrate', the default, slow but precise) or the leading logarithmic
approximation ('leadinglog', approximate but much faster).
"""
if accuracy == 'integrate':
C_out = self._rgevolve(scale, **kwargs) # depends on [control=['if'], data=[]]
elif accuracy == 'leadinglog':
C_out = self._rgevolve_leadinglog(scale) # depends on [control=['if'], data=[]]
else:
raise ValueError("'{}' is not a valid value of 'accuracy' (must be either 'integrate' or 'leadinglog').".format(accuracy))
return self._to_wcxf(C_out, scale) |
def get_edge_values_from_dict(self, node_value_dict=None, include_stem=True):
        """
        Map values onto tree edges by clade and return them in plot order.

        Keys of ``node_value_dict`` identify nodes either by integer ``idx``
        or by a tip name (str) / tuple of tip names; the mapped value is
        applied to that node's stem edge (when ``include_stem`` is True) and
        to every descendant edge. The returned list is ordered for the
        ``edge_colors`` / ``edge_widths`` arguments of draw(); edges that
        match no key are left as ``None``.

        Note: identifying clades by tip names is safer than by idx, since
        tree transformations (e.g., rooting) can change idx assignments.
        To target a single specific internal edge it is easier to use
        tre.get_edge_values() and set the entry manually.
        """
        # position of every edge in plot order, keyed by node idx
        edge_pos = {idx: pos for (pos, idx) in enumerate(self.get_edge_values())}
        values = [None] * self._coords.edges.shape[0]
        if node_value_dict is None:
            return values
        # normalize mapping keys: tip-name strings/tuples become node idxs
        by_idx = {}
        for key, val in node_value_dict.items():
            if isinstance(key, (str, tuple)):
                key = fuzzy_match_tipnames(self, key, None, None, True, False).idx
            by_idx[key] = val
        # walk the tree and paint each matched clade (stem + descendants)
        for node in self.treenode.traverse("levelorder"):
            if node.idx not in by_idx:
                continue
            val = by_idx[node.idx]
            if include_stem and not node.is_root():
                values[edge_pos[node.idx]] = val
            for desc in node.get_descendants():
                values[edge_pos[desc.idx]] = val
return values | def function[get_edge_values_from_dict, parameter[self, node_value_dict, include_stem]]:
constant[
Enter a dictionary mapping node 'idx' or tuple of tipnames to values
that you want mapped to the stem and descendant edges that node.
Edge values are returned in proper plot order to be entered to the
edge_colors or edge_widths arguments to draw(). To see node idx values
use node_labels=True in draw(). If dictionary keys are integers it is
assumed they are node idxs.
Note: it is safer to use tip labels to identify clades than node idxs
since tree tranformations (e.g., rooting) can change the mapping of
idx values to nodes on the tree.
This function is most convenient for applying values to clades. To
instead map values to specific edges (e.g., a single internal edge)
it will be easier to use tre.get_edge_values() and then to set the
values of the internal edges manually.
Example 1:
tre = toytree.tree("((a,b),(c,d));")
tre.get_edge_values_from_dict({5: 'green', 6: 'red'})
# ['green', 'green', 'green', 'red', 'red', 'red']
Example 2:
tre = toytree.tree("((a,b),(c,d));")
tre.get_edge_values_from_dict({(a, b): 'green', (c, d): 'red'})
# ['green', 'green', 'green', 'red', 'red', 'red']
]
variable[idxs] assign[=] <ast.DictComp object at 0x7da20c6c6320>
variable[values] assign[=] binary_operation[list[[<ast.Constant object at 0x7da20c6c5a20>]] * call[name[self]._coords.edges.shape][constant[0]]]
if compare[name[node_value_dict] is constant[None]] begin[:]
return[name[values]]
variable[rmap] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da20c6c6440>, <ast.Name object at 0x7da20c6c7850>]]] in starred[call[name[node_value_dict].items, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[key], tuple[[<ast.Name object at 0x7da20c6c5600>, <ast.Name object at 0x7da20c6c6740>]]]] begin[:]
variable[node] assign[=] call[name[fuzzy_match_tipnames], parameter[name[self], name[key], constant[None], constant[None], constant[True], constant[False]]]
call[name[rmap]][name[node].idx] assign[=] name[val]
variable[node_value_dict] assign[=] name[rmap]
for taget[name[node]] in starred[call[name[self].treenode.traverse, parameter[constant[levelorder]]]] begin[:]
if compare[name[node].idx in name[node_value_dict]] begin[:]
if name[include_stem] begin[:]
if <ast.UnaryOp object at 0x7da20c6c6650> begin[:]
call[name[values]][call[name[idxs]][name[node].idx]] assign[=] call[name[node_value_dict]][name[node].idx]
for taget[name[desc]] in starred[call[name[node].get_descendants, parameter[]]] begin[:]
call[name[values]][call[name[idxs]][name[desc].idx]] assign[=] call[name[node_value_dict]][name[node].idx]
return[name[values]] | keyword[def] identifier[get_edge_values_from_dict] ( identifier[self] , identifier[node_value_dict] = keyword[None] , identifier[include_stem] = keyword[True] ):
literal[string]
identifier[idxs] ={ identifier[j] : identifier[i] keyword[for] ( identifier[i] , identifier[j] ) keyword[in] identifier[enumerate] ( identifier[self] . identifier[get_edge_values] ())}
identifier[values] =[ keyword[None] ]* identifier[self] . identifier[_coords] . identifier[edges] . identifier[shape] [ literal[int] ]
keyword[if] identifier[node_value_dict] keyword[is] keyword[None] :
keyword[return] identifier[values]
identifier[rmap] ={}
keyword[for] ( identifier[key] , identifier[val] ) keyword[in] identifier[node_value_dict] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[key] ,( identifier[str] , identifier[tuple] )):
identifier[node] = identifier[fuzzy_match_tipnames] ( identifier[self] , identifier[key] , keyword[None] , keyword[None] , keyword[True] , keyword[False] )
identifier[rmap] [ identifier[node] . identifier[idx] ]= identifier[val]
keyword[else] :
identifier[rmap] [ identifier[key] ]= identifier[val]
identifier[node_value_dict] = identifier[rmap]
keyword[for] identifier[node] keyword[in] identifier[self] . identifier[treenode] . identifier[traverse] ( literal[string] ):
keyword[if] identifier[node] . identifier[idx] keyword[in] identifier[node_value_dict] :
keyword[if] identifier[include_stem] :
keyword[if] keyword[not] identifier[node] . identifier[is_root] ():
identifier[values] [ identifier[idxs] [ identifier[node] . identifier[idx] ]]= identifier[node_value_dict] [ identifier[node] . identifier[idx] ]
keyword[for] identifier[desc] keyword[in] identifier[node] . identifier[get_descendants] ():
identifier[values] [ identifier[idxs] [ identifier[desc] . identifier[idx] ]]= identifier[node_value_dict] [ identifier[node] . identifier[idx] ]
keyword[return] identifier[values] | def get_edge_values_from_dict(self, node_value_dict=None, include_stem=True):
"""
Enter a dictionary mapping node 'idx' or tuple of tipnames to values
that you want mapped to the stem and descendant edges that node.
Edge values are returned in proper plot order to be entered to the
edge_colors or edge_widths arguments to draw(). To see node idx values
use node_labels=True in draw(). If dictionary keys are integers it is
assumed they are node idxs.
Note: it is safer to use tip labels to identify clades than node idxs
since tree tranformations (e.g., rooting) can change the mapping of
idx values to nodes on the tree.
This function is most convenient for applying values to clades. To
instead map values to specific edges (e.g., a single internal edge)
it will be easier to use tre.get_edge_values() and then to set the
values of the internal edges manually.
Example 1:
tre = toytree.tree("((a,b),(c,d));")
tre.get_edge_values_from_dict({5: 'green', 6: 'red'})
# ['green', 'green', 'green', 'red', 'red', 'red']
Example 2:
tre = toytree.tree("((a,b),(c,d));")
tre.get_edge_values_from_dict({(a, b): 'green', (c, d): 'red'})
# ['green', 'green', 'green', 'red', 'red', 'red']
"""
# map node idxs to the order in which edges are plotted
idxs = {j: i for (i, j) in enumerate(self.get_edge_values())}
values = [None] * self._coords.edges.shape[0]
if node_value_dict is None:
return values # depends on [control=['if'], data=[]]
# convert tipname lists to node idxs
rmap = {}
for (key, val) in node_value_dict.items():
if isinstance(key, (str, tuple)):
node = fuzzy_match_tipnames(self, key, None, None, True, False)
rmap[node.idx] = val # depends on [control=['if'], data=[]]
else:
rmap[key] = val # depends on [control=['for'], data=[]]
node_value_dict = rmap
# map over tree
for node in self.treenode.traverse('levelorder'):
if node.idx in node_value_dict:
# add value to stem edge
if include_stem:
if not node.is_root():
values[idxs[node.idx]] = node_value_dict[node.idx] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# add value to descendants edges
for desc in node.get_descendants():
values[idxs[desc.idx]] = node_value_dict[node.idx] # depends on [control=['for'], data=['desc']] # depends on [control=['if'], data=['node_value_dict']] # depends on [control=['for'], data=['node']]
return values |
def dump(self, key):
    """
    Retrieve the object indexed by *key* and return it JSON-serialized.

    :param key: Key under which the object is stored.
    :returns: The JSON string for the stored object, or ``None`` when the
        lookup yields a falsy value (e.g. nothing stored under *key*).
    """
    # The original wrapped this in a bare ``try: ... except: raise`` which is
    # a transparent no-op (and a bare-except anti-pattern): any exception
    # raised by ``get`` or ``json.dumps`` still propagates unchanged.
    obj = self.get(key)
    return json.dumps(obj) if obj else None
constant[
Retrieve object indexed by <key> and return it serialized
]
<ast.Try object at 0x7da1b13976a0> | keyword[def] identifier[dump] ( identifier[self] , identifier[key] ):
literal[string]
keyword[try] :
identifier[obj] = identifier[self] . identifier[get] ( identifier[key] )
keyword[if] identifier[obj] :
keyword[return] identifier[json] . identifier[dumps] ( identifier[obj] )
keyword[else] :
keyword[return] keyword[None]
keyword[except] :
keyword[raise] | def dump(self, key):
"""
Retrieve object indexed by <key> and return it serialized
"""
try:
obj = self.get(key)
if obj:
return json.dumps(obj) # depends on [control=['if'], data=[]]
else:
return None # depends on [control=['try'], data=[]]
except:
raise # depends on [control=['except'], data=[]] |
async def cluster_meet(self, node_id, host, port):
    """
    Ask the cluster node identified by *node_id* to handshake with the
    node listening at *host*:*port*.

    The command is routed to the specified node only.
    """
    command_args = ('CLUSTER MEET', host, port)
    return await self.execute_command(*command_args, node_id=node_id)
literal[string]
keyword[return] keyword[await] identifier[self] . identifier[execute_command] ( literal[string] , identifier[host] , identifier[port] , identifier[node_id] = identifier[node_id] ) | async def cluster_meet(self, node_id, host, port):
"""
Force a node cluster to handshake with another node.
Sends to specefied node
"""
return await self.execute_command('CLUSTER MEET', host, port, node_id=node_id) |
def convert(self, request, response, data):
    """
    Render the port component of the request for the access log.

    :param request: The webob Request object describing the
                    request.
    :param response: The webob Response object describing the
                     response (not consulted here).
    :param data: The data dictionary returned by the prepare()
                 method (not consulted here).
    :returns: The selected port as a string, or ``"-"`` when it is
              unavailable or the modifier is unrecognized.
    """
    which = self.modifier.param
    # Default / canonical / local all mean the server-side port.
    if which in (None, 'canonical', 'local'):
        return str(request.environ['SERVER_PORT'])
    if which == 'remote':
        return str(request.environ.get('REMOTE_PORT', '-'))
    return "-"
constant[
Performs the desired Conversion.
:param request: The webob Request object describing the
request.
:param response: The webob Response object describing the
response.
:param data: The data dictionary returned by the prepare()
method.
:returns: A string, the results of which are the desired
conversion.
]
if compare[name[self].modifier.param in tuple[[<ast.Constant object at 0x7da2054a72b0>, <ast.Constant object at 0x7da2054a6b60>, <ast.Constant object at 0x7da2054a5240>]]] begin[:]
return[call[name[str], parameter[call[name[request].environ][constant[SERVER_PORT]]]]]
return[constant[-]] | keyword[def] identifier[convert] ( identifier[self] , identifier[request] , identifier[response] , identifier[data] ):
literal[string]
keyword[if] identifier[self] . identifier[modifier] . identifier[param] keyword[in] ( keyword[None] , literal[string] , literal[string] ):
keyword[return] identifier[str] ( identifier[request] . identifier[environ] [ literal[string] ])
keyword[elif] identifier[self] . identifier[modifier] . identifier[param] == literal[string] :
keyword[return] identifier[str] ( identifier[request] . identifier[environ] . identifier[get] ( literal[string] , literal[string] ))
keyword[return] literal[string] | def convert(self, request, response, data):
"""
Performs the desired Conversion.
:param request: The webob Request object describing the
request.
:param response: The webob Response object describing the
response.
:param data: The data dictionary returned by the prepare()
method.
:returns: A string, the results of which are the desired
conversion.
"""
if self.modifier.param in (None, 'canonical', 'local'):
return str(request.environ['SERVER_PORT']) # depends on [control=['if'], data=[]]
elif self.modifier.param == 'remote':
return str(request.environ.get('REMOTE_PORT', '-')) # depends on [control=['if'], data=[]]
return '-' |
def apply_fit_filters(self, choosers, alternatives):
    """
    Restrict `choosers` and `alternatives` to the rows used for fitting.

    Delegates to the parent-class implementation unchanged.

    Parameters
    ----------
    choosers : pandas.DataFrame
        Table describing the agents making choices, e.g. households.
    alternatives : pandas.DataFrame
        Table describing the things from which agents are choosing,
        e.g. buildings.

    Returns
    -------
    filtered_choosers, filtered_alts : pandas.DataFrame
    """
    parent = super(SegmentedMNLDiscreteChoiceModel, self)
    return parent.apply_fit_filters(choosers, alternatives)
constant[
Filter `choosers` and `alternatives` for fitting.
Parameters
----------
choosers : pandas.DataFrame
Table describing the agents making choices, e.g. households.
alternatives : pandas.DataFrame
Table describing the things from which agents are choosing,
e.g. buildings.
Returns
-------
filtered_choosers, filtered_alts : pandas.DataFrame
]
return[call[call[name[super], parameter[name[SegmentedMNLDiscreteChoiceModel], name[self]]].apply_fit_filters, parameter[name[choosers], name[alternatives]]]] | keyword[def] identifier[apply_fit_filters] ( identifier[self] , identifier[choosers] , identifier[alternatives] ):
literal[string]
keyword[return] identifier[super] ( identifier[SegmentedMNLDiscreteChoiceModel] , identifier[self] ). identifier[apply_fit_filters] (
identifier[choosers] , identifier[alternatives] ) | def apply_fit_filters(self, choosers, alternatives):
"""
Filter `choosers` and `alternatives` for fitting.
Parameters
----------
choosers : pandas.DataFrame
Table describing the agents making choices, e.g. households.
alternatives : pandas.DataFrame
Table describing the things from which agents are choosing,
e.g. buildings.
Returns
-------
filtered_choosers, filtered_alts : pandas.DataFrame
"""
return super(SegmentedMNLDiscreteChoiceModel, self).apply_fit_filters(choosers, alternatives) |
def mercator_transform(data, lat_bounds, origin='upper', height_out=None):
    """
    Transform an image computed in (longitude, latitude) coordinates into
    a Mercator-projection image.

    Parameters
    ----------
    data: numpy array or equivalent list-like object.
        Must be NxM (mono), NxMx3 (RGB) or NxMx4 (RGBA)
    lat_bounds : length 2 tuple
        Minimal and maximal value of the latitude of the image.
        Bounds are clipped to [-85.051128779806589, 85.051128779806589],
        the Web-Mercator latitude limits.
    origin : ['upper' | 'lower'], optional, default 'upper'
        Place the [0,0] index of the array in the upper left or lower left
        corner of the axes. Any value other than 'upper' is treated as
        'lower'.
    height_out : int, default None
        The expected height of the output.
        If None, the height of the input is used.

    Returns
    -------
    numpy.ndarray of shape (height_out, width, n_layers), each column
    resampled onto a grid uniform in Mercator-projected latitude.

    See https://en.wikipedia.org/wiki/Web_Mercator for more details.
    """
    import numpy as np

    def mercator(x):
        # Mercator latitude in degrees (inverse Gudermannian).
        return np.arcsinh(np.tan(x * np.pi / 180.)) * 180. / np.pi

    array = np.atleast_3d(data).copy()
    height, width, nblayers = array.shape

    lat_min = max(lat_bounds[0], -85.051128779806589)
    lat_max = min(lat_bounds[1], 85.051128779806589)
    if height_out is None:
        height_out = height

    # Work with row 0 at the bottom; flip now and flip back at the end.
    if origin == 'upper':
        array = array[::-1, :, :]

    # Latitudes of the input pixel centers (uniform in latitude) and of the
    # output pixel centers (uniform in Mercator-projected latitude).
    lats = (lat_min + np.linspace(0.5 / height, 1. - 0.5 / height, height) *
            (lat_max - lat_min))
    latslats = (mercator(lat_min) +
                np.linspace(0.5 / height_out, 1. - 0.5 / height_out,
                            height_out) *
                (mercator(lat_max) - mercator(lat_min)))

    # Hoisted loop invariant: the original recomputed mercator(lats) for
    # every (column, layer) pair, repeating the transcendental math
    # width * nblayers times for the same result.
    mercator_lats = mercator(lats)

    out = np.zeros((height_out, width, nblayers))
    for i in range(width):
        for j in range(nblayers):
            # np.interp is 1-D only, hence the per-column/per-layer loop.
            out[:, i, j] = np.interp(latslats, mercator_lats, array[:, i, j])

    if origin == 'upper':
        out = out[::-1, :, :]
    return out
constant[
Transforms an image computed in (longitude,latitude) coordinates into
the a Mercator projection image.
Parameters
----------
data: numpy array or equivalent list-like object.
Must be NxM (mono), NxMx3 (RGB) or NxMx4 (RGBA)
lat_bounds : length 2 tuple
Minimal and maximal value of the latitude of the image.
Bounds must be between -85.051128779806589 and 85.051128779806589
otherwise they will be clipped to that values.
origin : ['upper' | 'lower'], optional, default 'upper'
Place the [0,0] index of the array in the upper left or lower left
corner of the axes.
height_out : int, default None
The expected height of the output.
If None, the height of the input is used.
See https://en.wikipedia.org/wiki/Web_Mercator for more details.
]
import module[numpy] as alias[np]
def function[mercator, parameter[x]]:
return[binary_operation[binary_operation[call[name[np].arcsinh, parameter[call[name[np].tan, parameter[binary_operation[binary_operation[name[x] * name[np].pi] / constant[180.0]]]]]] * constant[180.0]] / name[np].pi]]
variable[array] assign[=] call[call[name[np].atleast_3d, parameter[name[data]]].copy, parameter[]]
<ast.Tuple object at 0x7da18f7212a0> assign[=] name[array].shape
variable[lat_min] assign[=] call[name[max], parameter[call[name[lat_bounds]][constant[0]], <ast.UnaryOp object at 0x7da18f723070>]]
variable[lat_max] assign[=] call[name[min], parameter[call[name[lat_bounds]][constant[1]], constant[85.05112877980659]]]
if compare[name[height_out] is constant[None]] begin[:]
variable[height_out] assign[=] name[height]
if compare[name[origin] equal[==] constant[upper]] begin[:]
variable[array] assign[=] call[name[array]][tuple[[<ast.Slice object at 0x7da18f720730>, <ast.Slice object at 0x7da18f7214e0>, <ast.Slice object at 0x7da18f720460>]]]
variable[lats] assign[=] binary_operation[name[lat_min] + binary_operation[call[name[np].linspace, parameter[binary_operation[constant[0.5] / name[height]], binary_operation[constant[1.0] - binary_operation[constant[0.5] / name[height]]], name[height]]] * binary_operation[name[lat_max] - name[lat_min]]]]
variable[latslats] assign[=] binary_operation[call[name[mercator], parameter[name[lat_min]]] + binary_operation[call[name[np].linspace, parameter[binary_operation[constant[0.5] / name[height_out]], binary_operation[constant[1.0] - binary_operation[constant[0.5] / name[height_out]]], name[height_out]]] * binary_operation[call[name[mercator], parameter[name[lat_max]]] - call[name[mercator], parameter[name[lat_min]]]]]]
variable[out] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da18f7205e0>, <ast.Name object at 0x7da18f720b50>, <ast.Name object at 0x7da18f721180>]]]]
for taget[name[i]] in starred[call[name[range], parameter[name[width]]]] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[name[nblayers]]]] begin[:]
call[name[out]][tuple[[<ast.Slice object at 0x7da18f721810>, <ast.Name object at 0x7da18f720fd0>, <ast.Name object at 0x7da18f7217e0>]]] assign[=] call[name[np].interp, parameter[name[latslats], call[name[mercator], parameter[name[lats]]], call[name[array]][tuple[[<ast.Slice object at 0x7da18f7212d0>, <ast.Name object at 0x7da18f721690>, <ast.Name object at 0x7da20e9b2830>]]]]]
if compare[name[origin] equal[==] constant[upper]] begin[:]
variable[out] assign[=] call[name[out]][tuple[[<ast.Slice object at 0x7da20e9b33a0>, <ast.Slice object at 0x7da20e9b2860>, <ast.Slice object at 0x7da20e9b0610>]]]
return[name[out]] | keyword[def] identifier[mercator_transform] ( identifier[data] , identifier[lat_bounds] , identifier[origin] = literal[string] , identifier[height_out] = keyword[None] ):
literal[string]
keyword[import] identifier[numpy] keyword[as] identifier[np]
keyword[def] identifier[mercator] ( identifier[x] ):
keyword[return] identifier[np] . identifier[arcsinh] ( identifier[np] . identifier[tan] ( identifier[x] * identifier[np] . identifier[pi] / literal[int] ))* literal[int] / identifier[np] . identifier[pi]
identifier[array] = identifier[np] . identifier[atleast_3d] ( identifier[data] ). identifier[copy] ()
identifier[height] , identifier[width] , identifier[nblayers] = identifier[array] . identifier[shape]
identifier[lat_min] = identifier[max] ( identifier[lat_bounds] [ literal[int] ],- literal[int] )
identifier[lat_max] = identifier[min] ( identifier[lat_bounds] [ literal[int] ], literal[int] )
keyword[if] identifier[height_out] keyword[is] keyword[None] :
identifier[height_out] = identifier[height]
keyword[if] identifier[origin] == literal[string] :
identifier[array] = identifier[array] [::- literal[int] ,:,:]
identifier[lats] =( identifier[lat_min] + identifier[np] . identifier[linspace] ( literal[int] / identifier[height] , literal[int] - literal[int] / identifier[height] , identifier[height] )*
( identifier[lat_max] - identifier[lat_min] ))
identifier[latslats] =( identifier[mercator] ( identifier[lat_min] )+
identifier[np] . identifier[linspace] ( literal[int] / identifier[height_out] , literal[int] - literal[int] / identifier[height_out] , identifier[height_out] )*
( identifier[mercator] ( identifier[lat_max] )- identifier[mercator] ( identifier[lat_min] )))
identifier[out] = identifier[np] . identifier[zeros] (( identifier[height_out] , identifier[width] , identifier[nblayers] ))
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[width] ):
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[nblayers] ):
identifier[out] [:, identifier[i] , identifier[j] ]= identifier[np] . identifier[interp] ( identifier[latslats] , identifier[mercator] ( identifier[lats] ), identifier[array] [:, identifier[i] , identifier[j] ])
keyword[if] identifier[origin] == literal[string] :
identifier[out] = identifier[out] [::- literal[int] ,:,:]
keyword[return] identifier[out] | def mercator_transform(data, lat_bounds, origin='upper', height_out=None):
"""
Transforms an image computed in (longitude,latitude) coordinates into
the a Mercator projection image.
Parameters
----------
data: numpy array or equivalent list-like object.
Must be NxM (mono), NxMx3 (RGB) or NxMx4 (RGBA)
lat_bounds : length 2 tuple
Minimal and maximal value of the latitude of the image.
Bounds must be between -85.051128779806589 and 85.051128779806589
otherwise they will be clipped to that values.
origin : ['upper' | 'lower'], optional, default 'upper'
Place the [0,0] index of the array in the upper left or lower left
corner of the axes.
height_out : int, default None
The expected height of the output.
If None, the height of the input is used.
See https://en.wikipedia.org/wiki/Web_Mercator for more details.
"""
import numpy as np
def mercator(x):
return np.arcsinh(np.tan(x * np.pi / 180.0)) * 180.0 / np.pi
array = np.atleast_3d(data).copy()
(height, width, nblayers) = array.shape
lat_min = max(lat_bounds[0], -85.05112877980659)
lat_max = min(lat_bounds[1], 85.05112877980659)
if height_out is None:
height_out = height # depends on [control=['if'], data=['height_out']]
# Eventually flip the image
if origin == 'upper':
array = array[::-1, :, :] # depends on [control=['if'], data=[]]
lats = lat_min + np.linspace(0.5 / height, 1.0 - 0.5 / height, height) * (lat_max - lat_min)
latslats = mercator(lat_min) + np.linspace(0.5 / height_out, 1.0 - 0.5 / height_out, height_out) * (mercator(lat_max) - mercator(lat_min))
out = np.zeros((height_out, width, nblayers))
for i in range(width):
for j in range(nblayers):
out[:, i, j] = np.interp(latslats, mercator(lats), array[:, i, j]) # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
# Eventually flip the image.
if origin == 'upper':
out = out[::-1, :, :] # depends on [control=['if'], data=[]]
return out |
def add_method_drop_down(self, col_number, col_label):
    """
    Install the drop-down-menu options for a magic_method_codes column.

    Age tables are offered the age-method vocabulary as-is; every other
    table type gets a copy of the age methods merged with the general
    method vocabulary.
    """
    vocab = self.contribution.vocab
    if self.data_type == 'ages':
        methods = vocab.age_methods
    else:
        methods = vocab.age_methods.copy()
        methods.update(vocab.methods)
    self.choices[col_number] = (methods, True)
constant[
Add drop-down-menu options for magic_method_codes columns
]
if compare[name[self].data_type equal[==] constant[ages]] begin[:]
variable[method_list] assign[=] name[self].contribution.vocab.age_methods
call[name[self].choices][name[col_number]] assign[=] tuple[[<ast.Name object at 0x7da1b044f550>, <ast.Constant object at 0x7da1b044d0f0>]] | keyword[def] identifier[add_method_drop_down] ( identifier[self] , identifier[col_number] , identifier[col_label] ):
literal[string]
keyword[if] identifier[self] . identifier[data_type] == literal[string] :
identifier[method_list] = identifier[self] . identifier[contribution] . identifier[vocab] . identifier[age_methods]
keyword[else] :
identifier[method_list] = identifier[self] . identifier[contribution] . identifier[vocab] . identifier[age_methods] . identifier[copy] ()
identifier[method_list] . identifier[update] ( identifier[self] . identifier[contribution] . identifier[vocab] . identifier[methods] )
identifier[self] . identifier[choices] [ identifier[col_number] ]=( identifier[method_list] , keyword[True] ) | def add_method_drop_down(self, col_number, col_label):
"""
Add drop-down-menu options for magic_method_codes columns
"""
if self.data_type == 'ages':
method_list = self.contribution.vocab.age_methods # depends on [control=['if'], data=[]]
else:
method_list = self.contribution.vocab.age_methods.copy()
method_list.update(self.contribution.vocab.methods)
self.choices[col_number] = (method_list, True) |
def edit_instance_view(request, semester, pk, profile=None):
    """
    View for a manager to edit the details of a particular WorkshiftInstance.

    :param request: The current HTTP request.
    :param semester: The semester the instance belongs to.
    :param pk: Primary key of the WorkshiftInstance to edit.
    :param profile: Unused here; presumably accepted for URL-dispatch
        signature compatibility — confirm against the URLconf.
    :returns: A redirect on permission failure, successful save, or delete;
        otherwise the rendered "edit_instance.html" page.
    """
    instance = get_object_or_404(WorkshiftInstance, pk=pk)
    # Manager shifts may only be edited by a president (or superuser);
    # ordinary shifts by anyone who can manage this pool.
    if instance.weekly_workshift and instance.weekly_workshift.is_manager_shift:
        president = Manager.objects.filter(
            incumbent__user=request.user,
            president=True
        ).count() > 0
        can_edit = request.user.is_superuser or president
        message = MESSAGES["PRESIDENTS_ONLY"]
    else:
        can_edit = utils.can_manage(
            request.user, semester=semester, pool=instance.pool,
        )
        message = MESSAGES["ADMINS_ONLY"]
    if not can_edit:
        # No permission: flash the relevant error and bounce back to the
        # semester overview.
        messages.add_message(request, messages.ERROR, message)
        return HttpResponseRedirect(semester.get_view_url())
    page_name = "Edit " + instance.title
    # Bind POST data only when this submission is the "edit" action, so a
    # plain GET (or a "delete" POST) renders an unbound form.
    edit_form = WorkshiftInstanceForm(
        data=request.POST if "edit" in request.POST else None,
        instance=instance,
        semester=semester,
        edit_hours=False,
    )
    if "delete" in request.POST:
        # Deletion takes precedence over editing; redirect to the manage
        # page afterwards.
        instance.delete()
        return HttpResponseRedirect(wurl(
            "workshift:manage",
            sem_url=semester.sem_url,
        ))
    elif edit_form.is_valid():
        instance = edit_form.save()
        return HttpResponseRedirect(instance.get_view_url())
    return render_to_response("edit_instance.html", {
        "page_name": page_name,
        "instance": instance,
        "edit_form": edit_form,
    }, context_instance=RequestContext(request)) | def function[edit_instance_view, parameter[request, semester, pk, profile]]:
constant[
View for a manager to edit the details of a particular WorkshiftInstance.
]
variable[instance] assign[=] call[name[get_object_or_404], parameter[name[WorkshiftInstance]]]
if <ast.BoolOp object at 0x7da1b149ff40> begin[:]
variable[president] assign[=] compare[call[call[name[Manager].objects.filter, parameter[]].count, parameter[]] greater[>] constant[0]]
variable[can_edit] assign[=] <ast.BoolOp object at 0x7da1b149cc70>
variable[message] assign[=] call[name[MESSAGES]][constant[PRESIDENTS_ONLY]]
if <ast.UnaryOp object at 0x7da1b149f2b0> begin[:]
call[name[messages].add_message, parameter[name[request], name[messages].ERROR, name[message]]]
return[call[name[HttpResponseRedirect], parameter[call[name[semester].get_view_url, parameter[]]]]]
variable[page_name] assign[=] binary_operation[constant[Edit ] + name[instance].title]
variable[edit_form] assign[=] call[name[WorkshiftInstanceForm], parameter[]]
if compare[constant[delete] in name[request].POST] begin[:]
call[name[instance].delete, parameter[]]
return[call[name[HttpResponseRedirect], parameter[call[name[wurl], parameter[constant[workshift:manage]]]]]]
return[call[name[render_to_response], parameter[constant[edit_instance.html], dictionary[[<ast.Constant object at 0x7da1b1455c60>, <ast.Constant object at 0x7da1b1455360>, <ast.Constant object at 0x7da1b1454940>], [<ast.Name object at 0x7da1b1455840>, <ast.Name object at 0x7da1b1455b70>, <ast.Name object at 0x7da1b1454e50>]]]]] | keyword[def] identifier[edit_instance_view] ( identifier[request] , identifier[semester] , identifier[pk] , identifier[profile] = keyword[None] ):
literal[string]
identifier[instance] = identifier[get_object_or_404] ( identifier[WorkshiftInstance] , identifier[pk] = identifier[pk] )
keyword[if] identifier[instance] . identifier[weekly_workshift] keyword[and] identifier[instance] . identifier[weekly_workshift] . identifier[is_manager_shift] :
identifier[president] = identifier[Manager] . identifier[objects] . identifier[filter] (
identifier[incumbent__user] = identifier[request] . identifier[user] ,
identifier[president] = keyword[True]
). identifier[count] ()> literal[int]
identifier[can_edit] = identifier[request] . identifier[user] . identifier[is_superuser] keyword[or] identifier[president]
identifier[message] = identifier[MESSAGES] [ literal[string] ]
keyword[else] :
identifier[can_edit] = identifier[utils] . identifier[can_manage] (
identifier[request] . identifier[user] , identifier[semester] = identifier[semester] , identifier[pool] = identifier[instance] . identifier[pool] ,
)
identifier[message] = identifier[MESSAGES] [ literal[string] ]
keyword[if] keyword[not] identifier[can_edit] :
identifier[messages] . identifier[add_message] ( identifier[request] , identifier[messages] . identifier[ERROR] , identifier[message] )
keyword[return] identifier[HttpResponseRedirect] ( identifier[semester] . identifier[get_view_url] ())
identifier[page_name] = literal[string] + identifier[instance] . identifier[title]
identifier[edit_form] = identifier[WorkshiftInstanceForm] (
identifier[data] = identifier[request] . identifier[POST] keyword[if] literal[string] keyword[in] identifier[request] . identifier[POST] keyword[else] keyword[None] ,
identifier[instance] = identifier[instance] ,
identifier[semester] = identifier[semester] ,
identifier[edit_hours] = keyword[False] ,
)
keyword[if] literal[string] keyword[in] identifier[request] . identifier[POST] :
identifier[instance] . identifier[delete] ()
keyword[return] identifier[HttpResponseRedirect] ( identifier[wurl] (
literal[string] ,
identifier[sem_url] = identifier[semester] . identifier[sem_url] ,
))
keyword[elif] identifier[edit_form] . identifier[is_valid] ():
identifier[instance] = identifier[edit_form] . identifier[save] ()
keyword[return] identifier[HttpResponseRedirect] ( identifier[instance] . identifier[get_view_url] ())
keyword[return] identifier[render_to_response] ( literal[string] ,{
literal[string] : identifier[page_name] ,
literal[string] : identifier[instance] ,
literal[string] : identifier[edit_form] ,
}, identifier[context_instance] = identifier[RequestContext] ( identifier[request] )) | def edit_instance_view(request, semester, pk, profile=None):
"""
View for a manager to edit the details of a particular WorkshiftInstance.
"""
instance = get_object_or_404(WorkshiftInstance, pk=pk)
if instance.weekly_workshift and instance.weekly_workshift.is_manager_shift:
president = Manager.objects.filter(incumbent__user=request.user, president=True).count() > 0
can_edit = request.user.is_superuser or president
message = MESSAGES['PRESIDENTS_ONLY'] # depends on [control=['if'], data=[]]
else:
can_edit = utils.can_manage(request.user, semester=semester, pool=instance.pool)
message = MESSAGES['ADMINS_ONLY']
if not can_edit:
messages.add_message(request, messages.ERROR, message)
return HttpResponseRedirect(semester.get_view_url()) # depends on [control=['if'], data=[]]
page_name = 'Edit ' + instance.title
edit_form = WorkshiftInstanceForm(data=request.POST if 'edit' in request.POST else None, instance=instance, semester=semester, edit_hours=False)
if 'delete' in request.POST:
instance.delete()
return HttpResponseRedirect(wurl('workshift:manage', sem_url=semester.sem_url)) # depends on [control=['if'], data=[]]
elif edit_form.is_valid():
instance = edit_form.save()
return HttpResponseRedirect(instance.get_view_url()) # depends on [control=['if'], data=[]]
return render_to_response('edit_instance.html', {'page_name': page_name, 'instance': instance, 'edit_form': edit_form}, context_instance=RequestContext(request)) |
def _build_pub_key_auth(self, context, nonce, auth_token, public_key):
    """
    Build the TSRequest for [MS-CSSP] 3.1.5 Processing Events and
    Sequencing Rules - Step 3.
    https://msdn.microsoft.com/en-us/library/cc226791.aspx

    Sends the final SPNEGO token to the server when one is still pending
    and fills in the pubKeyAuth field in the form required by the
    negotiated protocol version:

    * versions 2 to 4: the server's public key, wrapped by the
      authenticated context;
    * versions 5 to 6: a SHA-256 hash over a magic string, the client
      nonce and the public key, wrapped by the authenticated context,
      with the nonce echoed in the TSRequest for the server's own
      verification.

    :param context: The authenticated context
    :param nonce: If versions 5+, the nonce to use in the hash, else None
    :param auth_token: If NTLM, this is the last msg (authenticate msg) to
        send in the same request
    :param public_key: The server's public key
    :return: The TSRequest as a byte string to send to the server
    """
    ts_request = TSRequest()

    if auth_token is not None:
        token = NegoToken()
        token['negoToken'] = auth_token
        ts_request['negoTokens'].append(token)

    if nonce is None:
        # Pre-version-5 behaviour: wrap the raw public key.
        pub_value = public_key
    else:
        ts_request['clientNonce'] = nonce
        digest_input = (b"CredSSP Client-To-Server Binding Hash\x00" +
                        nonce + public_key)
        pub_value = hashlib.sha256(digest_input).digest()

    ts_request['pubKeyAuth'] = context.wrap(pub_value)
    return encoder.encode(ts_request)
constant[
[MS-CSSP] 3.1.5 Processing Events and Sequencing Rules - Step 3
https://msdn.microsoft.com/en-us/library/cc226791.aspx
This step sends the final SPNEGO token to the server if required and
computes the value for the pubKeyAuth field for the protocol version
negotiated.
The format of the pubKeyAuth field depends on the version that the
server supports.
For version 2 to 4:
The pubKeyAuth field is just wrapped using the authenticated context
For versions 5 to 6:
The pubKeyAuth is a sha256 hash of the server's public key plus a nonce
and a magic string value. This hash is wrapped using the authenticated
context and the nonce is added to the TSRequest alongside the nonce
used in the hash calcs.
:param context: The authenticated context
:param nonce: If versions 5+, the nonce to use in the hash
:param auth_token: If NTLM, this is the last msg (authenticate msg) to
send in the same request
:param public_key: The server's public key
:return: The TSRequest as a byte string to send to the server
]
variable[ts_request] assign[=] call[name[TSRequest], parameter[]]
if compare[name[auth_token] is_not constant[None]] begin[:]
variable[nego_token] assign[=] call[name[NegoToken], parameter[]]
call[name[nego_token]][constant[negoToken]] assign[=] name[auth_token]
call[call[name[ts_request]][constant[negoTokens]].append, parameter[name[nego_token]]]
if compare[name[nonce] is_not constant[None]] begin[:]
call[name[ts_request]][constant[clientNonce]] assign[=] name[nonce]
variable[hash_input] assign[=] binary_operation[binary_operation[constant[b'CredSSP Client-To-Server Binding Hash\x00'] + name[nonce]] + name[public_key]]
variable[pub_value] assign[=] call[call[name[hashlib].sha256, parameter[name[hash_input]]].digest, parameter[]]
variable[enc_public_key] assign[=] call[name[context].wrap, parameter[name[pub_value]]]
call[name[ts_request]][constant[pubKeyAuth]] assign[=] name[enc_public_key]
return[call[name[encoder].encode, parameter[name[ts_request]]]] | keyword[def] identifier[_build_pub_key_auth] ( identifier[self] , identifier[context] , identifier[nonce] , identifier[auth_token] , identifier[public_key] ):
literal[string]
identifier[ts_request] = identifier[TSRequest] ()
keyword[if] identifier[auth_token] keyword[is] keyword[not] keyword[None] :
identifier[nego_token] = identifier[NegoToken] ()
identifier[nego_token] [ literal[string] ]= identifier[auth_token]
identifier[ts_request] [ literal[string] ]. identifier[append] ( identifier[nego_token] )
keyword[if] identifier[nonce] keyword[is] keyword[not] keyword[None] :
identifier[ts_request] [ literal[string] ]= identifier[nonce]
identifier[hash_input] = literal[string] + identifier[nonce] + identifier[public_key]
identifier[pub_value] = identifier[hashlib] . identifier[sha256] ( identifier[hash_input] ). identifier[digest] ()
keyword[else] :
identifier[pub_value] = identifier[public_key]
identifier[enc_public_key] = identifier[context] . identifier[wrap] ( identifier[pub_value] )
identifier[ts_request] [ literal[string] ]= identifier[enc_public_key]
keyword[return] identifier[encoder] . identifier[encode] ( identifier[ts_request] ) | def _build_pub_key_auth(self, context, nonce, auth_token, public_key):
"""
[MS-CSSP] 3.1.5 Processing Events and Sequencing Rules - Step 3
https://msdn.microsoft.com/en-us/library/cc226791.aspx
This step sends the final SPNEGO token to the server if required and
computes the value for the pubKeyAuth field for the protocol version
negotiated.
The format of the pubKeyAuth field depends on the version that the
server supports.
For version 2 to 4:
The pubKeyAuth field is just wrapped using the authenticated context
For versions 5 to 6:
The pubKeyAuth is a sha256 hash of the server's public key plus a nonce
and a magic string value. This hash is wrapped using the authenticated
context and the nonce is added to the TSRequest alongside the nonce
used in the hash calcs.
:param context: The authenticated context
:param nonce: If versions 5+, the nonce to use in the hash
:param auth_token: If NTLM, this is the last msg (authenticate msg) to
send in the same request
:param public_key: The server's public key
:return: The TSRequest as a byte string to send to the server
"""
ts_request = TSRequest()
if auth_token is not None:
nego_token = NegoToken()
nego_token['negoToken'] = auth_token
ts_request['negoTokens'].append(nego_token) # depends on [control=['if'], data=['auth_token']]
if nonce is not None:
ts_request['clientNonce'] = nonce
hash_input = b'CredSSP Client-To-Server Binding Hash\x00' + nonce + public_key
pub_value = hashlib.sha256(hash_input).digest() # depends on [control=['if'], data=['nonce']]
else:
pub_value = public_key
enc_public_key = context.wrap(pub_value)
ts_request['pubKeyAuth'] = enc_public_key
return encoder.encode(ts_request) |
def betting_market_update(
self,
betting_market_id,
payout_condition,
description,
group_id="0.0.0",
account=None,
**kwargs
):
""" Update an event group. This needs to be **proposed**.
:param str betting_market_id: Id of the betting market to update
:param list payout_condition: Internationalized names, e.g.
``[['de', 'Foo'], ['en', 'bar']]``
:param list description: Internationalized descriptions, e.g.
``[['de', 'Foo'], ['en', 'bar']]``
:param str group_id: Group ID to create the market for (defaults to
*relative* id ``0.0.0``)
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
"""
assert isinstance(payout_condition, list)
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account)
market = BettingMarket(betting_market_id)
if group_id[0] == "1":
# Test if object exists
BettingMarketGroup(group_id)
else:
# Test if object is proposed
test_proposal_in_buffer(
kwargs.get("append_to", self.propbuffer),
"betting_market_group_create",
group_id,
)
op = operations.Betting_market_update(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"betting_market_id": market["id"],
"new_group_id": group_id,
"new_description": description,
"new_payout_condition": payout_condition,
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs) | def function[betting_market_update, parameter[self, betting_market_id, payout_condition, description, group_id, account]]:
constant[ Update an event group. This needs to be **proposed**.
:param str betting_market_id: Id of the betting market to update
:param list payout_condition: Internationalized names, e.g.
``[['de', 'Foo'], ['en', 'bar']]``
:param list description: Internationalized descriptions, e.g.
``[['de', 'Foo'], ['en', 'bar']]``
:param str group_id: Group ID to create the market for (defaults to
*relative* id ``0.0.0``)
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
]
assert[call[name[isinstance], parameter[name[payout_condition], name[list]]]]
if <ast.UnaryOp object at 0x7da1b1083b50> begin[:]
if compare[constant[default_account] in name[self].config] begin[:]
variable[account] assign[=] call[name[self].config][constant[default_account]]
if <ast.UnaryOp object at 0x7da1b10804f0> begin[:]
<ast.Raise object at 0x7da1b10800d0>
variable[account] assign[=] call[name[Account], parameter[name[account]]]
variable[market] assign[=] call[name[BettingMarket], parameter[name[betting_market_id]]]
if compare[call[name[group_id]][constant[0]] equal[==] constant[1]] begin[:]
call[name[BettingMarketGroup], parameter[name[group_id]]]
variable[op] assign[=] call[name[operations].Betting_market_update, parameter[]]
return[call[name[self].finalizeOp, parameter[name[op], call[name[account]][constant[name]], constant[active]]]] | keyword[def] identifier[betting_market_update] (
identifier[self] ,
identifier[betting_market_id] ,
identifier[payout_condition] ,
identifier[description] ,
identifier[group_id] = literal[string] ,
identifier[account] = keyword[None] ,
** identifier[kwargs]
):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[payout_condition] , identifier[list] )
keyword[if] keyword[not] identifier[account] :
keyword[if] literal[string] keyword[in] identifier[self] . identifier[config] :
identifier[account] = identifier[self] . identifier[config] [ literal[string] ]
keyword[if] keyword[not] identifier[account] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[account] = identifier[Account] ( identifier[account] )
identifier[market] = identifier[BettingMarket] ( identifier[betting_market_id] )
keyword[if] identifier[group_id] [ literal[int] ]== literal[string] :
identifier[BettingMarketGroup] ( identifier[group_id] )
keyword[else] :
identifier[test_proposal_in_buffer] (
identifier[kwargs] . identifier[get] ( literal[string] , identifier[self] . identifier[propbuffer] ),
literal[string] ,
identifier[group_id] ,
)
identifier[op] = identifier[operations] . identifier[Betting_market_update] (
**{
literal[string] :{ literal[string] : literal[int] , literal[string] : literal[string] },
literal[string] : identifier[market] [ literal[string] ],
literal[string] : identifier[group_id] ,
literal[string] : identifier[description] ,
literal[string] : identifier[payout_condition] ,
literal[string] : identifier[self] . identifier[prefix] ,
}
)
keyword[return] identifier[self] . identifier[finalizeOp] ( identifier[op] , identifier[account] [ literal[string] ], literal[string] ,** identifier[kwargs] ) | def betting_market_update(self, betting_market_id, payout_condition, description, group_id='0.0.0', account=None, **kwargs):
""" Update an event group. This needs to be **proposed**.
:param str betting_market_id: Id of the betting market to update
:param list payout_condition: Internationalized names, e.g.
``[['de', 'Foo'], ['en', 'bar']]``
:param list description: Internationalized descriptions, e.g.
``[['de', 'Foo'], ['en', 'bar']]``
:param str group_id: Group ID to create the market for (defaults to
*relative* id ``0.0.0``)
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
"""
assert isinstance(payout_condition, list)
if not account:
if 'default_account' in self.config:
account = self.config['default_account'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not account:
raise ValueError('You need to provide an account') # depends on [control=['if'], data=[]]
account = Account(account)
market = BettingMarket(betting_market_id)
if group_id[0] == '1':
# Test if object exists
BettingMarketGroup(group_id) # depends on [control=['if'], data=[]]
else:
# Test if object is proposed
test_proposal_in_buffer(kwargs.get('append_to', self.propbuffer), 'betting_market_group_create', group_id)
op = operations.Betting_market_update(**{'fee': {'amount': 0, 'asset_id': '1.3.0'}, 'betting_market_id': market['id'], 'new_group_id': group_id, 'new_description': description, 'new_payout_condition': payout_condition, 'prefix': self.prefix})
return self.finalizeOp(op, account['name'], 'active', **kwargs) |
def attributes_to_dict(attributes):
"""Transform a dictionary of attribute instances into a list of Json
objects, i.e., list of key-value pairs.
Parameters
----------
attributes : dict(Attribute)
Dictionary of attribute instances
Returns
-------
list(dict(name:..., value:...))
List of key-value pairs.
"""
result = []
for key in attributes:
result.append({
'name' : key,
'value' : attributes[key].value
})
return result | def function[attributes_to_dict, parameter[attributes]]:
constant[Transform a dictionary of attribute instances into a list of Json
objects, i.e., list of key-value pairs.
Parameters
----------
attributes : dict(Attribute)
Dictionary of attribute instances
Returns
-------
list(dict(name:..., value:...))
List of key-value pairs.
]
variable[result] assign[=] list[[]]
for taget[name[key]] in starred[name[attributes]] begin[:]
call[name[result].append, parameter[dictionary[[<ast.Constant object at 0x7da1b143e440>, <ast.Constant object at 0x7da1b143ef50>], [<ast.Name object at 0x7da1b143ee30>, <ast.Attribute object at 0x7da1b143ed40>]]]]
return[name[result]] | keyword[def] identifier[attributes_to_dict] ( identifier[attributes] ):
literal[string]
identifier[result] =[]
keyword[for] identifier[key] keyword[in] identifier[attributes] :
identifier[result] . identifier[append] ({
literal[string] : identifier[key] ,
literal[string] : identifier[attributes] [ identifier[key] ]. identifier[value]
})
keyword[return] identifier[result] | def attributes_to_dict(attributes):
"""Transform a dictionary of attribute instances into a list of Json
objects, i.e., list of key-value pairs.
Parameters
----------
attributes : dict(Attribute)
Dictionary of attribute instances
Returns
-------
list(dict(name:..., value:...))
List of key-value pairs.
"""
result = []
for key in attributes:
result.append({'name': key, 'value': attributes[key].value}) # depends on [control=['for'], data=['key']]
return result |
def partitionBy(*cols):
"""
Creates a :class:`WindowSpec` with the partitioning defined.
"""
sc = SparkContext._active_spark_context
jspec = sc._jvm.org.apache.spark.sql.expressions.Window.partitionBy(_to_java_cols(cols))
return WindowSpec(jspec) | def function[partitionBy, parameter[]]:
constant[
Creates a :class:`WindowSpec` with the partitioning defined.
]
variable[sc] assign[=] name[SparkContext]._active_spark_context
variable[jspec] assign[=] call[name[sc]._jvm.org.apache.spark.sql.expressions.Window.partitionBy, parameter[call[name[_to_java_cols], parameter[name[cols]]]]]
return[call[name[WindowSpec], parameter[name[jspec]]]] | keyword[def] identifier[partitionBy] (* identifier[cols] ):
literal[string]
identifier[sc] = identifier[SparkContext] . identifier[_active_spark_context]
identifier[jspec] = identifier[sc] . identifier[_jvm] . identifier[org] . identifier[apache] . identifier[spark] . identifier[sql] . identifier[expressions] . identifier[Window] . identifier[partitionBy] ( identifier[_to_java_cols] ( identifier[cols] ))
keyword[return] identifier[WindowSpec] ( identifier[jspec] ) | def partitionBy(*cols):
"""
Creates a :class:`WindowSpec` with the partitioning defined.
"""
sc = SparkContext._active_spark_context
jspec = sc._jvm.org.apache.spark.sql.expressions.Window.partitionBy(_to_java_cols(cols))
return WindowSpec(jspec) |
def _clear_policy(self, lambda_name):
"""
Remove obsolete policy statements to prevent policy from bloating over the limit after repeated updates.
"""
try:
policy_response = self.lambda_client.get_policy(
FunctionName=lambda_name
)
if policy_response['ResponseMetadata']['HTTPStatusCode'] == 200:
statement = json.loads(policy_response['Policy'])['Statement']
for s in statement:
delete_response = self.lambda_client.remove_permission(
FunctionName=lambda_name,
StatementId=s['Sid']
)
if delete_response['ResponseMetadata']['HTTPStatusCode'] != 204:
logger.error('Failed to delete an obsolete policy statement: {}'.format(policy_response))
else:
logger.debug('Failed to load Lambda function policy: {}'.format(policy_response))
except ClientError as e:
if e.args[0].find('ResourceNotFoundException') > -1:
logger.debug('No policy found, must be first run.')
else:
logger.error('Unexpected client error {}'.format(e.args[0])) | def function[_clear_policy, parameter[self, lambda_name]]:
constant[
Remove obsolete policy statements to prevent policy from bloating over the limit after repeated updates.
]
<ast.Try object at 0x7da1b1f727a0> | keyword[def] identifier[_clear_policy] ( identifier[self] , identifier[lambda_name] ):
literal[string]
keyword[try] :
identifier[policy_response] = identifier[self] . identifier[lambda_client] . identifier[get_policy] (
identifier[FunctionName] = identifier[lambda_name]
)
keyword[if] identifier[policy_response] [ literal[string] ][ literal[string] ]== literal[int] :
identifier[statement] = identifier[json] . identifier[loads] ( identifier[policy_response] [ literal[string] ])[ literal[string] ]
keyword[for] identifier[s] keyword[in] identifier[statement] :
identifier[delete_response] = identifier[self] . identifier[lambda_client] . identifier[remove_permission] (
identifier[FunctionName] = identifier[lambda_name] ,
identifier[StatementId] = identifier[s] [ literal[string] ]
)
keyword[if] identifier[delete_response] [ literal[string] ][ literal[string] ]!= literal[int] :
identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[policy_response] ))
keyword[else] :
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[policy_response] ))
keyword[except] identifier[ClientError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[args] [ literal[int] ]. identifier[find] ( literal[string] )>- literal[int] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[else] :
identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[e] . identifier[args] [ literal[int] ])) | def _clear_policy(self, lambda_name):
"""
Remove obsolete policy statements to prevent policy from bloating over the limit after repeated updates.
"""
try:
policy_response = self.lambda_client.get_policy(FunctionName=lambda_name)
if policy_response['ResponseMetadata']['HTTPStatusCode'] == 200:
statement = json.loads(policy_response['Policy'])['Statement']
for s in statement:
delete_response = self.lambda_client.remove_permission(FunctionName=lambda_name, StatementId=s['Sid'])
if delete_response['ResponseMetadata']['HTTPStatusCode'] != 204:
logger.error('Failed to delete an obsolete policy statement: {}'.format(policy_response)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['s']] # depends on [control=['if'], data=[]]
else:
logger.debug('Failed to load Lambda function policy: {}'.format(policy_response)) # depends on [control=['try'], data=[]]
except ClientError as e:
if e.args[0].find('ResourceNotFoundException') > -1:
logger.debug('No policy found, must be first run.') # depends on [control=['if'], data=[]]
else:
logger.error('Unexpected client error {}'.format(e.args[0])) # depends on [control=['except'], data=['e']] |
def cpu_throttling(self, cpu_throttling):
"""
Sets the percentage of CPU allowed.
:param cpu_throttling: integer
"""
log.info('QEMU VM "{name}" [{id}] has set the percentage of CPU allowed to {cpu}'.format(name=self._name,
id=self._id,
cpu=cpu_throttling))
self._cpu_throttling = cpu_throttling
self._stop_cpulimit()
if cpu_throttling:
self._set_cpu_throttling() | def function[cpu_throttling, parameter[self, cpu_throttling]]:
constant[
Sets the percentage of CPU allowed.
:param cpu_throttling: integer
]
call[name[log].info, parameter[call[constant[QEMU VM "{name}" [{id}] has set the percentage of CPU allowed to {cpu}].format, parameter[]]]]
name[self]._cpu_throttling assign[=] name[cpu_throttling]
call[name[self]._stop_cpulimit, parameter[]]
if name[cpu_throttling] begin[:]
call[name[self]._set_cpu_throttling, parameter[]] | keyword[def] identifier[cpu_throttling] ( identifier[self] , identifier[cpu_throttling] ):
literal[string]
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[_name] ,
identifier[id] = identifier[self] . identifier[_id] ,
identifier[cpu] = identifier[cpu_throttling] ))
identifier[self] . identifier[_cpu_throttling] = identifier[cpu_throttling]
identifier[self] . identifier[_stop_cpulimit] ()
keyword[if] identifier[cpu_throttling] :
identifier[self] . identifier[_set_cpu_throttling] () | def cpu_throttling(self, cpu_throttling):
"""
Sets the percentage of CPU allowed.
:param cpu_throttling: integer
"""
log.info('QEMU VM "{name}" [{id}] has set the percentage of CPU allowed to {cpu}'.format(name=self._name, id=self._id, cpu=cpu_throttling))
self._cpu_throttling = cpu_throttling
self._stop_cpulimit()
if cpu_throttling:
self._set_cpu_throttling() # depends on [control=['if'], data=[]] |
def write_last_and_beds(pf, GenePositions, ContigStarts):
"""
Write LAST file, query and subject BED files.
"""
qbedfile = pf + "tigs.bed"
sbedfile = pf + "chr.bed"
lastfile = "{}tigs.{}chr.last".format(pf, pf)
qbedfw = open(qbedfile, "w")
sbedfw = open(sbedfile, "w")
lastfw = open(lastfile, "w")
GeneContigs = np.searchsorted(ContigStarts, GenePositions) - 1
for i, (c, gstart) in enumerate(zip(GeneContigs, GenePositions)):
gene = "gene{:05d}".format(i)
tig = "tig{:04d}".format(c)
start = ContigStarts[c]
cstart = gstart - start
print("\t".join(str(x) for x in
(tig, cstart, cstart + 1, gene)), file=qbedfw)
print("\t".join(str(x) for x in
("chr1", gstart, gstart + 1, gene)), file=sbedfw)
lastatoms = [gene, gene, 100] + [0] * 8 + [100]
print("\t".join(str(x) for x in lastatoms), file=lastfw)
qbedfw.close()
sbedfw.close()
lastfw.close() | def function[write_last_and_beds, parameter[pf, GenePositions, ContigStarts]]:
constant[
Write LAST file, query and subject BED files.
]
variable[qbedfile] assign[=] binary_operation[name[pf] + constant[tigs.bed]]
variable[sbedfile] assign[=] binary_operation[name[pf] + constant[chr.bed]]
variable[lastfile] assign[=] call[constant[{}tigs.{}chr.last].format, parameter[name[pf], name[pf]]]
variable[qbedfw] assign[=] call[name[open], parameter[name[qbedfile], constant[w]]]
variable[sbedfw] assign[=] call[name[open], parameter[name[sbedfile], constant[w]]]
variable[lastfw] assign[=] call[name[open], parameter[name[lastfile], constant[w]]]
variable[GeneContigs] assign[=] binary_operation[call[name[np].searchsorted, parameter[name[ContigStarts], name[GenePositions]]] - constant[1]]
for taget[tuple[[<ast.Name object at 0x7da1b09001f0>, <ast.Tuple object at 0x7da1b0902050>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[GeneContigs], name[GenePositions]]]]]] begin[:]
variable[gene] assign[=] call[constant[gene{:05d}].format, parameter[name[i]]]
variable[tig] assign[=] call[constant[tig{:04d}].format, parameter[name[c]]]
variable[start] assign[=] call[name[ContigStarts]][name[c]]
variable[cstart] assign[=] binary_operation[name[gstart] - name[start]]
call[name[print], parameter[call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da1b09011b0>]]]]
call[name[print], parameter[call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da1b0901240>]]]]
variable[lastatoms] assign[=] binary_operation[binary_operation[list[[<ast.Name object at 0x7da1b0927700>, <ast.Name object at 0x7da1b0926fe0>, <ast.Constant object at 0x7da1b0927550>]] + binary_operation[list[[<ast.Constant object at 0x7da1b0927d60>]] * constant[8]]] + list[[<ast.Constant object at 0x7da1b09271f0>]]]
call[name[print], parameter[call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da1b0927340>]]]]
call[name[qbedfw].close, parameter[]]
call[name[sbedfw].close, parameter[]]
call[name[lastfw].close, parameter[]] | keyword[def] identifier[write_last_and_beds] ( identifier[pf] , identifier[GenePositions] , identifier[ContigStarts] ):
literal[string]
identifier[qbedfile] = identifier[pf] + literal[string]
identifier[sbedfile] = identifier[pf] + literal[string]
identifier[lastfile] = literal[string] . identifier[format] ( identifier[pf] , identifier[pf] )
identifier[qbedfw] = identifier[open] ( identifier[qbedfile] , literal[string] )
identifier[sbedfw] = identifier[open] ( identifier[sbedfile] , literal[string] )
identifier[lastfw] = identifier[open] ( identifier[lastfile] , literal[string] )
identifier[GeneContigs] = identifier[np] . identifier[searchsorted] ( identifier[ContigStarts] , identifier[GenePositions] )- literal[int]
keyword[for] identifier[i] ,( identifier[c] , identifier[gstart] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[GeneContigs] , identifier[GenePositions] )):
identifier[gene] = literal[string] . identifier[format] ( identifier[i] )
identifier[tig] = literal[string] . identifier[format] ( identifier[c] )
identifier[start] = identifier[ContigStarts] [ identifier[c] ]
identifier[cstart] = identifier[gstart] - identifier[start]
identifier[print] ( literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in]
( identifier[tig] , identifier[cstart] , identifier[cstart] + literal[int] , identifier[gene] )), identifier[file] = identifier[qbedfw] )
identifier[print] ( literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in]
( literal[string] , identifier[gstart] , identifier[gstart] + literal[int] , identifier[gene] )), identifier[file] = identifier[sbedfw] )
identifier[lastatoms] =[ identifier[gene] , identifier[gene] , literal[int] ]+[ literal[int] ]* literal[int] +[ literal[int] ]
identifier[print] ( literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[lastatoms] ), identifier[file] = identifier[lastfw] )
identifier[qbedfw] . identifier[close] ()
identifier[sbedfw] . identifier[close] ()
identifier[lastfw] . identifier[close] () | def write_last_and_beds(pf, GenePositions, ContigStarts):
"""
Write LAST file, query and subject BED files.
"""
qbedfile = pf + 'tigs.bed'
sbedfile = pf + 'chr.bed'
lastfile = '{}tigs.{}chr.last'.format(pf, pf)
qbedfw = open(qbedfile, 'w')
sbedfw = open(sbedfile, 'w')
lastfw = open(lastfile, 'w')
GeneContigs = np.searchsorted(ContigStarts, GenePositions) - 1
for (i, (c, gstart)) in enumerate(zip(GeneContigs, GenePositions)):
gene = 'gene{:05d}'.format(i)
tig = 'tig{:04d}'.format(c)
start = ContigStarts[c]
cstart = gstart - start
print('\t'.join((str(x) for x in (tig, cstart, cstart + 1, gene))), file=qbedfw)
print('\t'.join((str(x) for x in ('chr1', gstart, gstart + 1, gene))), file=sbedfw)
lastatoms = [gene, gene, 100] + [0] * 8 + [100]
print('\t'.join((str(x) for x in lastatoms)), file=lastfw) # depends on [control=['for'], data=[]]
qbedfw.close()
sbedfw.close()
lastfw.close() |
def get_bank(self, *args, **kwargs):
"""Pass through to provider BankLookupSession.get_bank"""
# Implemented from kitosid template for -
# osid.resource.BinLookupSession.get_bin
return Bank(
self._provider_manager,
self._get_provider_session('bank_lookup_session').get_bank(*args, **kwargs),
self._runtime,
self._proxy) | def function[get_bank, parameter[self]]:
constant[Pass through to provider BankLookupSession.get_bank]
return[call[name[Bank], parameter[name[self]._provider_manager, call[call[name[self]._get_provider_session, parameter[constant[bank_lookup_session]]].get_bank, parameter[<ast.Starred object at 0x7da18f58e830>]], name[self]._runtime, name[self]._proxy]]] | keyword[def] identifier[get_bank] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[Bank] (
identifier[self] . identifier[_provider_manager] ,
identifier[self] . identifier[_get_provider_session] ( literal[string] ). identifier[get_bank] (* identifier[args] ,** identifier[kwargs] ),
identifier[self] . identifier[_runtime] ,
identifier[self] . identifier[_proxy] ) | def get_bank(self, *args, **kwargs):
"""Pass through to provider BankLookupSession.get_bank"""
# Implemented from kitosid template for -
# osid.resource.BinLookupSession.get_bin
return Bank(self._provider_manager, self._get_provider_session('bank_lookup_session').get_bank(*args, **kwargs), self._runtime, self._proxy) |
def slices_to_layers(G_coupling,
slice_attr='slice',
vertex_id_attr='id',
edge_type_attr='type',
weight_attr='weight'):
""" Convert a coupling graph of slices to layers of graphs.
This function converts a graph of slices to layers so that they can be used
with this package. This function assumes that the slices are represented by
nodes in ``G_coupling``, and stored in the attribute ``slice_attr``. In other
words, ``G_coupling.vs[slice_attr]`` should contain :class:`ig.Graph` s . The
slices will be converted to layers, and nodes in different slices will be
coupled if the two slices are connected in ``G_coupling``. Nodes in two
connected slices are identified on the basis of the ``vertex_id_attr``, i.e.
if two nodes in two connected slices have an identical value of the
``vertex_id_attr`` they will be coupled. The ``vertex_id_attr`` should hence
be unique in each slice. The weight of the coupling is determined by the
weight of this link in ``G_coupling``, as determined by the ``weight_attr``.
Parameters
----------
G_coupling : :class:`ig.Graph`
The graph connecting the different slices.
slice_attr : string
The vertex attribute which contains the slices.
edge_type_attr : string
The edge attribute to use for indicating the type of link (``interslice``
or ``intraslice``).
weight_attr : string
The edge attribute used to indicate the (coupling) weight.
Returns
-------
G_layers : list of :class:`ig.Graph`
A list of slices converted to layers.
G_interslice : :class:`ig.Graph`
The interslice coupling layer.
G : :class:`ig.Graph`
The complete graph containing all layers and interslice couplings.
Notes
-----
The distinction between slices and layers is not easy to grasp. Slices in
this context refer to graphs that somehow represents different aspects of a
network. The simplest example is probably slices that represents time: there
are different snapshots network across time, and each snapshot is considered
a slice. Some nodes may drop out of the network over time, while others enter
the network. Edges may change over time, or the weight of the links may
change over time. This is just the simplest example of a slice, and there may
be different, more complex possibilities. Below an example with three time
slices:
.. image:: figures/slices.png
Now in order to optimise partitions across these different slices, we
represent them slightly differently, namely as layers. The idea of layers is
that all graphs always are defined on the same set of nodes, and that only
the links differ for different layers. We thus create new nodes as
combinations of original nodes and slices. For example, if node 1 existed in
both slice 1 and in slice 2, we will thus create two nodes to build the
layers: a node 1-1 and a node 1-2. Additionally, if the slices are connected
in the slice graph, the two nodes would also be connected, so there would be
a linke between node 1-1 and 1-2. Different slices will then correspond to
different layers: each layer only contains the link for that particular
slice. In addition, for methods such as :class:`CPMVertexPartition`,
so-called ``node_sizes`` are required, and for them to properly function,
they should be set to 0 (which is handled appropriately in this function, and
stored in the vertex attribute ``node_size``). We thus obtain equally many
layers as we have slices, and we need one more layer for representing the
interslice couplings. For the example provided above, we thus obtain the
following:
.. image:: figures/layers_separate.png
The idea of doing community detection with slices is further detailed in [1].
References
----------
.. [1] Mucha, P. J., Richardson, T., Macon, K., Porter, M. A., & Onnela,
J.-P. (2010). Community structure in time-dependent, multiscale, and
multiplex networks. Science, 328(5980), 876-8.
`10.1126/science.1184819 <http://doi.org/10.1126/science.1184819>`_
See Also
--------
:func:`find_partition_temporal`
:func:`time_slices_to_layers`
"""
if not slice_attr in G_coupling.vertex_attributes():
raise ValueError("Could not find the vertex attribute {0} in the coupling graph.".format(slice_attr))
if not weight_attr in G_coupling.edge_attributes():
raise ValueError("Could not find the edge attribute {0} in the coupling graph.".format(weight_attr))
# Create disjoint union of the time graphs
for v_slice in G_coupling.vs:
H = v_slice[slice_attr]
H.vs[slice_attr] = v_slice.index
if not vertex_id_attr in H.vertex_attributes():
raise ValueError("Could not find the vertex attribute {0} to identify nodes in different slices.".format(vertex_id_attr ))
if not weight_attr in H.edge_attributes():
H.es[weight_attr] = 1
G = disjoint_union_attrs(G_coupling.vs[slice_attr])
G.es[edge_type_attr] = 'intraslice'
for v_slice in G_coupling.vs:
for u_slice in v_slice.neighbors(mode=_ig.OUT):
if v_slice.index < u_slice.index or G_coupling.is_directed():
nodes_v = G.vs.select(lambda v: v[slice_attr]==v_slice.index)[vertex_id_attr]
if len(set(nodes_v)) != len(nodes_v):
err = '\n'.join(
['\t{0} {1} times'.format(item, count) for item, count in Counter(nodes_v).items() if count > 1]
)
raise ValueError('No unique IDs for slice {0}, require unique IDs:\n{1}'.format(v_slice.index, err))
nodes_u = G.vs.select(lambda v: v[slice_attr]==u_slice.index)[vertex_id_attr]
if len(set(nodes_u)) != len(nodes_u):
err = '\n'.join(
['\t{0} {1} times'.format(item, count) for item, count in Counter(nodes_u).items() if count > 1]
)
raise ValueError('No unique IDs for slice {0}, require unique IDs:\n{1}'.format(u_slice.index, err))
common_nodes = set(nodes_v).intersection(set(nodes_u))
nodes_v = sorted([v for v in G.vs if v[slice_attr] == v_slice.index and v[vertex_id_attr] in common_nodes], key=lambda v: v[vertex_id_attr])
nodes_u = sorted([v for v in G.vs if v[slice_attr] == u_slice.index and v[vertex_id_attr] in common_nodes], key=lambda v: v[vertex_id_attr])
edges = zip(nodes_v, nodes_u)
e_start = G.ecount()
G.add_edges(edges)
e_end = G.ecount()
e_idx = range(e_start, e_end)
interslice_weight = G_coupling.es[G_coupling.get_eid(v_slice, u_slice)][weight_attr]
if not interslice_weight is None:
G.es[e_idx][weight_attr] = interslice_weight
G.es[e_idx][edge_type_attr] = 'interslice'
# Convert aggregate graph to individual layers for each time slice.
G_layers = [None]*G_coupling.vcount()
for v_slice in G_coupling.vs:
H = G.subgraph_edges(G.es.select(_within=[v.index for v in G.vs if v[slice_attr] == v_slice.index]), delete_vertices=False)
H.vs['node_size'] = [1 if v[slice_attr] == v_slice.index else 0 for v in H.vs]
G_layers[v_slice.index] = H
# Create one graph for the interslice links.
G_interslice = G.subgraph_edges(G.es.select(type_eq='interslice'), delete_vertices=False)
G_interslice.vs['node_size'] = 0
return G_layers, G_interslice, G | def function[slices_to_layers, parameter[G_coupling, slice_attr, vertex_id_attr, edge_type_attr, weight_attr]]:
constant[ Convert a coupling graph of slices to layers of graphs.
This function converts a graph of slices to layers so that they can be used
with this package. This function assumes that the slices are represented by
nodes in ``G_coupling``, and stored in the attribute ``slice_attr``. In other
words, ``G_coupling.vs[slice_attr]`` should contain :class:`ig.Graph` s . The
slices will be converted to layers, and nodes in different slices will be
coupled if the two slices are connected in ``G_coupling``. Nodes in two
connected slices are identified on the basis of the ``vertex_id_attr``, i.e.
if two nodes in two connected slices have an identical value of the
``vertex_id_attr`` they will be coupled. The ``vertex_id_attr`` should hence
be unique in each slice. The weight of the coupling is determined by the
weight of this link in ``G_coupling``, as determined by the ``weight_attr``.
Parameters
----------
G_coupling : :class:`ig.Graph`
The graph connecting the different slices.
slice_attr : string
The vertex attribute which contains the slices.
edge_type_attr : string
The edge attribute to use for indicating the type of link (``interslice``
or ``intraslice``).
weight_attr : string
The edge attribute used to indicate the (coupling) weight.
Returns
-------
G_layers : list of :class:`ig.Graph`
A list of slices converted to layers.
G_interslice : :class:`ig.Graph`
The interslice coupling layer.
G : :class:`ig.Graph`
The complete graph containing all layers and interslice couplings.
Notes
-----
The distinction between slices and layers is not easy to grasp. Slices in
this context refer to graphs that somehow represents different aspects of a
network. The simplest example is probably slices that represents time: there
are different snapshots network across time, and each snapshot is considered
a slice. Some nodes may drop out of the network over time, while others enter
the network. Edges may change over time, or the weight of the links may
change over time. This is just the simplest example of a slice, and there may
be different, more complex possibilities. Below an example with three time
slices:
.. image:: figures/slices.png
Now in order to optimise partitions across these different slices, we
represent them slightly differently, namely as layers. The idea of layers is
that all graphs always are defined on the same set of nodes, and that only
the links differ for different layers. We thus create new nodes as
combinations of original nodes and slices. For example, if node 1 existed in
both slice 1 and in slice 2, we will thus create two nodes to build the
layers: a node 1-1 and a node 1-2. Additionally, if the slices are connected
in the slice graph, the two nodes would also be connected, so there would be
a linke between node 1-1 and 1-2. Different slices will then correspond to
different layers: each layer only contains the link for that particular
slice. In addition, for methods such as :class:`CPMVertexPartition`,
so-called ``node_sizes`` are required, and for them to properly function,
they should be set to 0 (which is handled appropriately in this function, and
stored in the vertex attribute ``node_size``). We thus obtain equally many
layers as we have slices, and we need one more layer for representing the
interslice couplings. For the example provided above, we thus obtain the
following:
.. image:: figures/layers_separate.png
The idea of doing community detection with slices is further detailed in [1].
References
----------
.. [1] Mucha, P. J., Richardson, T., Macon, K., Porter, M. A., & Onnela,
J.-P. (2010). Community structure in time-dependent, multiscale, and
multiplex networks. Science, 328(5980), 876-8.
`10.1126/science.1184819 <http://doi.org/10.1126/science.1184819>`_
See Also
--------
:func:`find_partition_temporal`
:func:`time_slices_to_layers`
]
if <ast.UnaryOp object at 0x7da18f09d420> begin[:]
<ast.Raise object at 0x7da18f09f310>
if <ast.UnaryOp object at 0x7da18f09db40> begin[:]
<ast.Raise object at 0x7da18f09d510>
for taget[name[v_slice]] in starred[name[G_coupling].vs] begin[:]
variable[H] assign[=] call[name[v_slice]][name[slice_attr]]
call[name[H].vs][name[slice_attr]] assign[=] name[v_slice].index
if <ast.UnaryOp object at 0x7da18f09e6b0> begin[:]
<ast.Raise object at 0x7da18f09e4a0>
if <ast.UnaryOp object at 0x7da18f09cee0> begin[:]
call[name[H].es][name[weight_attr]] assign[=] constant[1]
variable[G] assign[=] call[name[disjoint_union_attrs], parameter[call[name[G_coupling].vs][name[slice_attr]]]]
call[name[G].es][name[edge_type_attr]] assign[=] constant[intraslice]
for taget[name[v_slice]] in starred[name[G_coupling].vs] begin[:]
for taget[name[u_slice]] in starred[call[name[v_slice].neighbors, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da18f09f8e0> begin[:]
variable[nodes_v] assign[=] call[call[name[G].vs.select, parameter[<ast.Lambda object at 0x7da18f09f940>]]][name[vertex_id_attr]]
if compare[call[name[len], parameter[call[name[set], parameter[name[nodes_v]]]]] not_equal[!=] call[name[len], parameter[name[nodes_v]]]] begin[:]
variable[err] assign[=] call[constant[
].join, parameter[<ast.ListComp object at 0x7da18f09e740>]]
<ast.Raise object at 0x7da18f09cc70>
variable[nodes_u] assign[=] call[call[name[G].vs.select, parameter[<ast.Lambda object at 0x7da18f09fc40>]]][name[vertex_id_attr]]
if compare[call[name[len], parameter[call[name[set], parameter[name[nodes_u]]]]] not_equal[!=] call[name[len], parameter[name[nodes_u]]]] begin[:]
variable[err] assign[=] call[constant[
].join, parameter[<ast.ListComp object at 0x7da18f09ce20>]]
<ast.Raise object at 0x7da18f09e5f0>
variable[common_nodes] assign[=] call[call[name[set], parameter[name[nodes_v]]].intersection, parameter[call[name[set], parameter[name[nodes_u]]]]]
variable[nodes_v] assign[=] call[name[sorted], parameter[<ast.ListComp object at 0x7da18f09f580>]]
variable[nodes_u] assign[=] call[name[sorted], parameter[<ast.ListComp object at 0x7da1b2346fe0>]]
variable[edges] assign[=] call[name[zip], parameter[name[nodes_v], name[nodes_u]]]
variable[e_start] assign[=] call[name[G].ecount, parameter[]]
call[name[G].add_edges, parameter[name[edges]]]
variable[e_end] assign[=] call[name[G].ecount, parameter[]]
variable[e_idx] assign[=] call[name[range], parameter[name[e_start], name[e_end]]]
variable[interslice_weight] assign[=] call[call[name[G_coupling].es][call[name[G_coupling].get_eid, parameter[name[v_slice], name[u_slice]]]]][name[weight_attr]]
if <ast.UnaryOp object at 0x7da1b2345090> begin[:]
call[call[name[G].es][name[e_idx]]][name[weight_attr]] assign[=] name[interslice_weight]
call[call[name[G].es][name[e_idx]]][name[edge_type_attr]] assign[=] constant[interslice]
variable[G_layers] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b2346bc0>]] * call[name[G_coupling].vcount, parameter[]]]
for taget[name[v_slice]] in starred[name[G_coupling].vs] begin[:]
variable[H] assign[=] call[name[G].subgraph_edges, parameter[call[name[G].es.select, parameter[]]]]
call[name[H].vs][constant[node_size]] assign[=] <ast.ListComp object at 0x7da1b23448b0>
call[name[G_layers]][name[v_slice].index] assign[=] name[H]
variable[G_interslice] assign[=] call[name[G].subgraph_edges, parameter[call[name[G].es.select, parameter[]]]]
call[name[G_interslice].vs][constant[node_size]] assign[=] constant[0]
return[tuple[[<ast.Name object at 0x7da18eb576d0>, <ast.Name object at 0x7da18eb570a0>, <ast.Name object at 0x7da18eb55d50>]]] | keyword[def] identifier[slices_to_layers] ( identifier[G_coupling] ,
identifier[slice_attr] = literal[string] ,
identifier[vertex_id_attr] = literal[string] ,
identifier[edge_type_attr] = literal[string] ,
identifier[weight_attr] = literal[string] ):
literal[string]
keyword[if] keyword[not] identifier[slice_attr] keyword[in] identifier[G_coupling] . identifier[vertex_attributes] ():
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[slice_attr] ))
keyword[if] keyword[not] identifier[weight_attr] keyword[in] identifier[G_coupling] . identifier[edge_attributes] ():
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[weight_attr] ))
keyword[for] identifier[v_slice] keyword[in] identifier[G_coupling] . identifier[vs] :
identifier[H] = identifier[v_slice] [ identifier[slice_attr] ]
identifier[H] . identifier[vs] [ identifier[slice_attr] ]= identifier[v_slice] . identifier[index]
keyword[if] keyword[not] identifier[vertex_id_attr] keyword[in] identifier[H] . identifier[vertex_attributes] ():
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[vertex_id_attr] ))
keyword[if] keyword[not] identifier[weight_attr] keyword[in] identifier[H] . identifier[edge_attributes] ():
identifier[H] . identifier[es] [ identifier[weight_attr] ]= literal[int]
identifier[G] = identifier[disjoint_union_attrs] ( identifier[G_coupling] . identifier[vs] [ identifier[slice_attr] ])
identifier[G] . identifier[es] [ identifier[edge_type_attr] ]= literal[string]
keyword[for] identifier[v_slice] keyword[in] identifier[G_coupling] . identifier[vs] :
keyword[for] identifier[u_slice] keyword[in] identifier[v_slice] . identifier[neighbors] ( identifier[mode] = identifier[_ig] . identifier[OUT] ):
keyword[if] identifier[v_slice] . identifier[index] < identifier[u_slice] . identifier[index] keyword[or] identifier[G_coupling] . identifier[is_directed] ():
identifier[nodes_v] = identifier[G] . identifier[vs] . identifier[select] ( keyword[lambda] identifier[v] : identifier[v] [ identifier[slice_attr] ]== identifier[v_slice] . identifier[index] )[ identifier[vertex_id_attr] ]
keyword[if] identifier[len] ( identifier[set] ( identifier[nodes_v] ))!= identifier[len] ( identifier[nodes_v] ):
identifier[err] = literal[string] . identifier[join] (
[ literal[string] . identifier[format] ( identifier[item] , identifier[count] ) keyword[for] identifier[item] , identifier[count] keyword[in] identifier[Counter] ( identifier[nodes_v] ). identifier[items] () keyword[if] identifier[count] > literal[int] ]
)
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[v_slice] . identifier[index] , identifier[err] ))
identifier[nodes_u] = identifier[G] . identifier[vs] . identifier[select] ( keyword[lambda] identifier[v] : identifier[v] [ identifier[slice_attr] ]== identifier[u_slice] . identifier[index] )[ identifier[vertex_id_attr] ]
keyword[if] identifier[len] ( identifier[set] ( identifier[nodes_u] ))!= identifier[len] ( identifier[nodes_u] ):
identifier[err] = literal[string] . identifier[join] (
[ literal[string] . identifier[format] ( identifier[item] , identifier[count] ) keyword[for] identifier[item] , identifier[count] keyword[in] identifier[Counter] ( identifier[nodes_u] ). identifier[items] () keyword[if] identifier[count] > literal[int] ]
)
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[u_slice] . identifier[index] , identifier[err] ))
identifier[common_nodes] = identifier[set] ( identifier[nodes_v] ). identifier[intersection] ( identifier[set] ( identifier[nodes_u] ))
identifier[nodes_v] = identifier[sorted] ([ identifier[v] keyword[for] identifier[v] keyword[in] identifier[G] . identifier[vs] keyword[if] identifier[v] [ identifier[slice_attr] ]== identifier[v_slice] . identifier[index] keyword[and] identifier[v] [ identifier[vertex_id_attr] ] keyword[in] identifier[common_nodes] ], identifier[key] = keyword[lambda] identifier[v] : identifier[v] [ identifier[vertex_id_attr] ])
identifier[nodes_u] = identifier[sorted] ([ identifier[v] keyword[for] identifier[v] keyword[in] identifier[G] . identifier[vs] keyword[if] identifier[v] [ identifier[slice_attr] ]== identifier[u_slice] . identifier[index] keyword[and] identifier[v] [ identifier[vertex_id_attr] ] keyword[in] identifier[common_nodes] ], identifier[key] = keyword[lambda] identifier[v] : identifier[v] [ identifier[vertex_id_attr] ])
identifier[edges] = identifier[zip] ( identifier[nodes_v] , identifier[nodes_u] )
identifier[e_start] = identifier[G] . identifier[ecount] ()
identifier[G] . identifier[add_edges] ( identifier[edges] )
identifier[e_end] = identifier[G] . identifier[ecount] ()
identifier[e_idx] = identifier[range] ( identifier[e_start] , identifier[e_end] )
identifier[interslice_weight] = identifier[G_coupling] . identifier[es] [ identifier[G_coupling] . identifier[get_eid] ( identifier[v_slice] , identifier[u_slice] )][ identifier[weight_attr] ]
keyword[if] keyword[not] identifier[interslice_weight] keyword[is] keyword[None] :
identifier[G] . identifier[es] [ identifier[e_idx] ][ identifier[weight_attr] ]= identifier[interslice_weight]
identifier[G] . identifier[es] [ identifier[e_idx] ][ identifier[edge_type_attr] ]= literal[string]
identifier[G_layers] =[ keyword[None] ]* identifier[G_coupling] . identifier[vcount] ()
keyword[for] identifier[v_slice] keyword[in] identifier[G_coupling] . identifier[vs] :
identifier[H] = identifier[G] . identifier[subgraph_edges] ( identifier[G] . identifier[es] . identifier[select] ( identifier[_within] =[ identifier[v] . identifier[index] keyword[for] identifier[v] keyword[in] identifier[G] . identifier[vs] keyword[if] identifier[v] [ identifier[slice_attr] ]== identifier[v_slice] . identifier[index] ]), identifier[delete_vertices] = keyword[False] )
identifier[H] . identifier[vs] [ literal[string] ]=[ literal[int] keyword[if] identifier[v] [ identifier[slice_attr] ]== identifier[v_slice] . identifier[index] keyword[else] literal[int] keyword[for] identifier[v] keyword[in] identifier[H] . identifier[vs] ]
identifier[G_layers] [ identifier[v_slice] . identifier[index] ]= identifier[H]
identifier[G_interslice] = identifier[G] . identifier[subgraph_edges] ( identifier[G] . identifier[es] . identifier[select] ( identifier[type_eq] = literal[string] ), identifier[delete_vertices] = keyword[False] )
identifier[G_interslice] . identifier[vs] [ literal[string] ]= literal[int]
keyword[return] identifier[G_layers] , identifier[G_interslice] , identifier[G] | def slices_to_layers(G_coupling, slice_attr='slice', vertex_id_attr='id', edge_type_attr='type', weight_attr='weight'):
""" Convert a coupling graph of slices to layers of graphs.
This function converts a graph of slices to layers so that they can be used
with this package. This function assumes that the slices are represented by
nodes in ``G_coupling``, and stored in the attribute ``slice_attr``. In other
words, ``G_coupling.vs[slice_attr]`` should contain :class:`ig.Graph` s . The
slices will be converted to layers, and nodes in different slices will be
coupled if the two slices are connected in ``G_coupling``. Nodes in two
connected slices are identified on the basis of the ``vertex_id_attr``, i.e.
if two nodes in two connected slices have an identical value of the
``vertex_id_attr`` they will be coupled. The ``vertex_id_attr`` should hence
be unique in each slice. The weight of the coupling is determined by the
weight of this link in ``G_coupling``, as determined by the ``weight_attr``.
Parameters
----------
G_coupling : :class:`ig.Graph`
The graph connecting the different slices.
slice_attr : string
The vertex attribute which contains the slices.
edge_type_attr : string
The edge attribute to use for indicating the type of link (``interslice``
or ``intraslice``).
weight_attr : string
The edge attribute used to indicate the (coupling) weight.
Returns
-------
G_layers : list of :class:`ig.Graph`
A list of slices converted to layers.
G_interslice : :class:`ig.Graph`
The interslice coupling layer.
G : :class:`ig.Graph`
The complete graph containing all layers and interslice couplings.
Notes
-----
The distinction between slices and layers is not easy to grasp. Slices in
this context refer to graphs that somehow represents different aspects of a
network. The simplest example is probably slices that represents time: there
are different snapshots network across time, and each snapshot is considered
a slice. Some nodes may drop out of the network over time, while others enter
the network. Edges may change over time, or the weight of the links may
change over time. This is just the simplest example of a slice, and there may
be different, more complex possibilities. Below an example with three time
slices:
.. image:: figures/slices.png
Now in order to optimise partitions across these different slices, we
represent them slightly differently, namely as layers. The idea of layers is
that all graphs always are defined on the same set of nodes, and that only
the links differ for different layers. We thus create new nodes as
combinations of original nodes and slices. For example, if node 1 existed in
both slice 1 and in slice 2, we will thus create two nodes to build the
layers: a node 1-1 and a node 1-2. Additionally, if the slices are connected
in the slice graph, the two nodes would also be connected, so there would be
a linke between node 1-1 and 1-2. Different slices will then correspond to
different layers: each layer only contains the link for that particular
slice. In addition, for methods such as :class:`CPMVertexPartition`,
so-called ``node_sizes`` are required, and for them to properly function,
they should be set to 0 (which is handled appropriately in this function, and
stored in the vertex attribute ``node_size``). We thus obtain equally many
layers as we have slices, and we need one more layer for representing the
interslice couplings. For the example provided above, we thus obtain the
following:
.. image:: figures/layers_separate.png
The idea of doing community detection with slices is further detailed in [1].
References
----------
.. [1] Mucha, P. J., Richardson, T., Macon, K., Porter, M. A., & Onnela,
J.-P. (2010). Community structure in time-dependent, multiscale, and
multiplex networks. Science, 328(5980), 876-8.
`10.1126/science.1184819 <http://doi.org/10.1126/science.1184819>`_
See Also
--------
:func:`find_partition_temporal`
:func:`time_slices_to_layers`
"""
if not slice_attr in G_coupling.vertex_attributes():
raise ValueError('Could not find the vertex attribute {0} in the coupling graph.'.format(slice_attr)) # depends on [control=['if'], data=[]]
if not weight_attr in G_coupling.edge_attributes():
raise ValueError('Could not find the edge attribute {0} in the coupling graph.'.format(weight_attr)) # depends on [control=['if'], data=[]]
# Create disjoint union of the time graphs
for v_slice in G_coupling.vs:
H = v_slice[slice_attr]
H.vs[slice_attr] = v_slice.index
if not vertex_id_attr in H.vertex_attributes():
raise ValueError('Could not find the vertex attribute {0} to identify nodes in different slices.'.format(vertex_id_attr)) # depends on [control=['if'], data=[]]
if not weight_attr in H.edge_attributes():
H.es[weight_attr] = 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['v_slice']]
G = disjoint_union_attrs(G_coupling.vs[slice_attr])
G.es[edge_type_attr] = 'intraslice'
for v_slice in G_coupling.vs:
for u_slice in v_slice.neighbors(mode=_ig.OUT):
if v_slice.index < u_slice.index or G_coupling.is_directed():
nodes_v = G.vs.select(lambda v: v[slice_attr] == v_slice.index)[vertex_id_attr]
if len(set(nodes_v)) != len(nodes_v):
err = '\n'.join(['\t{0} {1} times'.format(item, count) for (item, count) in Counter(nodes_v).items() if count > 1])
raise ValueError('No unique IDs for slice {0}, require unique IDs:\n{1}'.format(v_slice.index, err)) # depends on [control=['if'], data=[]]
nodes_u = G.vs.select(lambda v: v[slice_attr] == u_slice.index)[vertex_id_attr]
if len(set(nodes_u)) != len(nodes_u):
err = '\n'.join(['\t{0} {1} times'.format(item, count) for (item, count) in Counter(nodes_u).items() if count > 1])
raise ValueError('No unique IDs for slice {0}, require unique IDs:\n{1}'.format(u_slice.index, err)) # depends on [control=['if'], data=[]]
common_nodes = set(nodes_v).intersection(set(nodes_u))
nodes_v = sorted([v for v in G.vs if v[slice_attr] == v_slice.index and v[vertex_id_attr] in common_nodes], key=lambda v: v[vertex_id_attr])
nodes_u = sorted([v for v in G.vs if v[slice_attr] == u_slice.index and v[vertex_id_attr] in common_nodes], key=lambda v: v[vertex_id_attr])
edges = zip(nodes_v, nodes_u)
e_start = G.ecount()
G.add_edges(edges)
e_end = G.ecount()
e_idx = range(e_start, e_end)
interslice_weight = G_coupling.es[G_coupling.get_eid(v_slice, u_slice)][weight_attr]
if not interslice_weight is None:
G.es[e_idx][weight_attr] = interslice_weight # depends on [control=['if'], data=[]]
G.es[e_idx][edge_type_attr] = 'interslice' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['u_slice']] # depends on [control=['for'], data=['v_slice']]
# Convert aggregate graph to individual layers for each time slice.
G_layers = [None] * G_coupling.vcount()
for v_slice in G_coupling.vs:
H = G.subgraph_edges(G.es.select(_within=[v.index for v in G.vs if v[slice_attr] == v_slice.index]), delete_vertices=False)
H.vs['node_size'] = [1 if v[slice_attr] == v_slice.index else 0 for v in H.vs]
G_layers[v_slice.index] = H # depends on [control=['for'], data=['v_slice']]
# Create one graph for the interslice links.
G_interslice = G.subgraph_edges(G.es.select(type_eq='interslice'), delete_vertices=False)
G_interslice.vs['node_size'] = 0
return (G_layers, G_interslice, G) |
def process_input(self):
"""Called when socket is read-ready"""
try:
pyngus.read_socket_input(self.connection, self.socket)
except Exception as e:
LOG.error("Exception on socket read: %s", str(e))
self.connection.close_input()
self.connection.close()
self.connection.process(time.time()) | def function[process_input, parameter[self]]:
constant[Called when socket is read-ready]
<ast.Try object at 0x7da1b0241db0>
call[name[self].connection.process, parameter[call[name[time].time, parameter[]]]] | keyword[def] identifier[process_input] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[pyngus] . identifier[read_socket_input] ( identifier[self] . identifier[connection] , identifier[self] . identifier[socket] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[LOG] . identifier[error] ( literal[string] , identifier[str] ( identifier[e] ))
identifier[self] . identifier[connection] . identifier[close_input] ()
identifier[self] . identifier[connection] . identifier[close] ()
identifier[self] . identifier[connection] . identifier[process] ( identifier[time] . identifier[time] ()) | def process_input(self):
"""Called when socket is read-ready"""
try:
pyngus.read_socket_input(self.connection, self.socket) # depends on [control=['try'], data=[]]
except Exception as e:
LOG.error('Exception on socket read: %s', str(e))
self.connection.close_input()
self.connection.close() # depends on [control=['except'], data=['e']]
self.connection.process(time.time()) |
def intercept(actions: dict={}):
"""
Decorates a function and handles any exceptions that may rise.
Args:
actions: A dictionary ``<exception type>: <action>``. Available actions\
are :class:`raises` and :class:`returns`.
Returns:
Any value declared using a :class:`returns` action.
Raises:
AnyException: if AnyException is declared together with a
:class:`raises` action.
InterceptorError: if the decorator is called with something different
from a :class:`returns` or :class:`raises` action.
Interceptors can be declared inline to return a value or raise an exception
when the declared exception is risen:
>>> @intercept({
... TypeError: returns('intercepted!')
... })
... def fails(foo):
... if foo:
... raise TypeError('inner exception')
... return 'ok'
>>> fails(False)
'ok'
>>> fails(True)
'intercepted!'
>>> @intercept({
... TypeError: raises(Exception('intercepted!'))
... })
... def fail():
... raise TypeError('inner exception')
>>> fail()
Traceback (most recent call last):
...
Exception: intercepted!
But they can also be declared and then used later on:
>>> intercept0r = intercept({
... TypeError: returns('intercepted!')
... })
>>> @intercept0r
... def fail():
... raise TypeError('raising error')
>>> fail()
'intercepted!'
You can declare also an action that captures the risen exception by passing
a callable to the action. This is useful to create a custom error message:
>>> @intercept({
... TypeError: returns(lambda e: 'intercepted {}'.format(e))
... })
... def fail():
... raise TypeError('inner exception')
>>> fail()
'intercepted inner exception'
Or to convert captured exceptions into custom errors:
>>> class CustomError(Exception):
... pass
>>> @intercept({
... TypeError: raises(lambda e: CustomError(e))
... })
... def fail():
... raise TypeError('inner exception')
>>> fail()
Traceback (most recent call last):
...
intercept.CustomError: inner exception
"""
for action in actions.values():
if type(action) is not returns and type(action) is not raises:
raise InterceptorError('Actions must be declared as `returns` or `raises`')
def decorated(f):
def wrapped(*args, **kargs):
try:
return f(*args, **kargs)
except Exception as e:
if e.__class__ in actions:
return actions[e.__class__](e)
else:
raise
return wrapped
return decorated | def function[intercept, parameter[actions]]:
constant[
Decorates a function and handles any exceptions that may rise.
Args:
actions: A dictionary ``<exception type>: <action>``. Available actions are :class:`raises` and :class:`returns`.
Returns:
Any value declared using a :class:`returns` action.
Raises:
AnyException: if AnyException is declared together with a
:class:`raises` action.
InterceptorError: if the decorator is called with something different
from a :class:`returns` or :class:`raises` action.
Interceptors can be declared inline to return a value or raise an exception
when the declared exception is risen:
>>> @intercept({
... TypeError: returns('intercepted!')
... })
... def fails(foo):
... if foo:
... raise TypeError('inner exception')
... return 'ok'
>>> fails(False)
'ok'
>>> fails(True)
'intercepted!'
>>> @intercept({
... TypeError: raises(Exception('intercepted!'))
... })
... def fail():
... raise TypeError('inner exception')
>>> fail()
Traceback (most recent call last):
...
Exception: intercepted!
But they can also be declared and then used later on:
>>> intercept0r = intercept({
... TypeError: returns('intercepted!')
... })
>>> @intercept0r
... def fail():
... raise TypeError('raising error')
>>> fail()
'intercepted!'
You can declare also an action that captures the risen exception by passing
a callable to the action. This is useful to create a custom error message:
>>> @intercept({
... TypeError: returns(lambda e: 'intercepted {}'.format(e))
... })
... def fail():
... raise TypeError('inner exception')
>>> fail()
'intercepted inner exception'
Or to convert captured exceptions into custom errors:
>>> class CustomError(Exception):
... pass
>>> @intercept({
... TypeError: raises(lambda e: CustomError(e))
... })
... def fail():
... raise TypeError('inner exception')
>>> fail()
Traceback (most recent call last):
...
intercept.CustomError: inner exception
]
for taget[name[action]] in starred[call[name[actions].values, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da20c6c40d0> begin[:]
<ast.Raise object at 0x7da20c6c5a80>
def function[decorated, parameter[f]]:
def function[wrapped, parameter[]]:
<ast.Try object at 0x7da20c6c42e0>
return[name[wrapped]]
return[name[decorated]] | keyword[def] identifier[intercept] ( identifier[actions] : identifier[dict] ={}):
literal[string]
keyword[for] identifier[action] keyword[in] identifier[actions] . identifier[values] ():
keyword[if] identifier[type] ( identifier[action] ) keyword[is] keyword[not] identifier[returns] keyword[and] identifier[type] ( identifier[action] ) keyword[is] keyword[not] identifier[raises] :
keyword[raise] identifier[InterceptorError] ( literal[string] )
keyword[def] identifier[decorated] ( identifier[f] ):
keyword[def] identifier[wrapped] (* identifier[args] ,** identifier[kargs] ):
keyword[try] :
keyword[return] identifier[f] (* identifier[args] ,** identifier[kargs] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[__class__] keyword[in] identifier[actions] :
keyword[return] identifier[actions] [ identifier[e] . identifier[__class__] ]( identifier[e] )
keyword[else] :
keyword[raise]
keyword[return] identifier[wrapped]
keyword[return] identifier[decorated] | def intercept(actions: dict={}):
"""
Decorates a function and handles any exceptions that may rise.
Args:
actions: A dictionary ``<exception type>: <action>``. Available actions are :class:`raises` and :class:`returns`.
Returns:
Any value declared using a :class:`returns` action.
Raises:
AnyException: if AnyException is declared together with a
:class:`raises` action.
InterceptorError: if the decorator is called with something different
from a :class:`returns` or :class:`raises` action.
Interceptors can be declared inline to return a value or raise an exception
when the declared exception is risen:
>>> @intercept({
... TypeError: returns('intercepted!')
... })
... def fails(foo):
... if foo:
... raise TypeError('inner exception')
... return 'ok'
>>> fails(False)
'ok'
>>> fails(True)
'intercepted!'
>>> @intercept({
... TypeError: raises(Exception('intercepted!'))
... })
... def fail():
... raise TypeError('inner exception')
>>> fail()
Traceback (most recent call last):
...
Exception: intercepted!
But they can also be declared and then used later on:
>>> intercept0r = intercept({
... TypeError: returns('intercepted!')
... })
>>> @intercept0r
... def fail():
... raise TypeError('raising error')
>>> fail()
'intercepted!'
You can declare also an action that captures the risen exception by passing
a callable to the action. This is useful to create a custom error message:
>>> @intercept({
... TypeError: returns(lambda e: 'intercepted {}'.format(e))
... })
... def fail():
... raise TypeError('inner exception')
>>> fail()
'intercepted inner exception'
Or to convert captured exceptions into custom errors:
>>> class CustomError(Exception):
... pass
>>> @intercept({
... TypeError: raises(lambda e: CustomError(e))
... })
... def fail():
... raise TypeError('inner exception')
>>> fail()
Traceback (most recent call last):
...
intercept.CustomError: inner exception
"""
for action in actions.values():
if type(action) is not returns and type(action) is not raises:
raise InterceptorError('Actions must be declared as `returns` or `raises`') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['action']]
def decorated(f):
def wrapped(*args, **kargs):
try:
return f(*args, **kargs) # depends on [control=['try'], data=[]]
except Exception as e:
if e.__class__ in actions:
return actions[e.__class__](e) # depends on [control=['if'], data=['actions']]
else:
raise # depends on [control=['except'], data=['e']]
return wrapped
return decorated |
def ensure_s3_bucket(s3_client, bucket_name, bucket_region):
"""Ensure an s3 bucket exists, if it does not then create it.
Args:
s3_client (:class:`botocore.client.Client`): An s3 client used to
verify and create the bucket.
bucket_name (str): The bucket being checked/created.
bucket_region (str, optional): The region to create the bucket in. If
not provided, will be determined by s3_client's region.
"""
try:
s3_client.head_bucket(Bucket=bucket_name)
except botocore.exceptions.ClientError as e:
if e.response['Error']['Message'] == "Not Found":
logger.debug("Creating bucket %s.", bucket_name)
create_args = {"Bucket": bucket_name}
location_constraint = s3_bucket_location_constraint(
bucket_region
)
if location_constraint:
create_args["CreateBucketConfiguration"] = {
"LocationConstraint": location_constraint
}
s3_client.create_bucket(**create_args)
elif e.response['Error']['Message'] == "Forbidden":
logger.exception("Access denied for bucket %s. Did " +
"you remember to use a globally unique name?",
bucket_name)
raise
else:
logger.exception("Error creating bucket %s. Error %s",
bucket_name, e.response)
raise | def function[ensure_s3_bucket, parameter[s3_client, bucket_name, bucket_region]]:
constant[Ensure an s3 bucket exists, if it does not then create it.
Args:
s3_client (:class:`botocore.client.Client`): An s3 client used to
verify and create the bucket.
bucket_name (str): The bucket being checked/created.
bucket_region (str, optional): The region to create the bucket in. If
not provided, will be determined by s3_client's region.
]
<ast.Try object at 0x7da18dc072e0> | keyword[def] identifier[ensure_s3_bucket] ( identifier[s3_client] , identifier[bucket_name] , identifier[bucket_region] ):
literal[string]
keyword[try] :
identifier[s3_client] . identifier[head_bucket] ( identifier[Bucket] = identifier[bucket_name] )
keyword[except] identifier[botocore] . identifier[exceptions] . identifier[ClientError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[response] [ literal[string] ][ literal[string] ]== literal[string] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[bucket_name] )
identifier[create_args] ={ literal[string] : identifier[bucket_name] }
identifier[location_constraint] = identifier[s3_bucket_location_constraint] (
identifier[bucket_region]
)
keyword[if] identifier[location_constraint] :
identifier[create_args] [ literal[string] ]={
literal[string] : identifier[location_constraint]
}
identifier[s3_client] . identifier[create_bucket] (** identifier[create_args] )
keyword[elif] identifier[e] . identifier[response] [ literal[string] ][ literal[string] ]== literal[string] :
identifier[logger] . identifier[exception] ( literal[string] +
literal[string] ,
identifier[bucket_name] )
keyword[raise]
keyword[else] :
identifier[logger] . identifier[exception] ( literal[string] ,
identifier[bucket_name] , identifier[e] . identifier[response] )
keyword[raise] | def ensure_s3_bucket(s3_client, bucket_name, bucket_region):
"""Ensure an s3 bucket exists, if it does not then create it.
Args:
s3_client (:class:`botocore.client.Client`): An s3 client used to
verify and create the bucket.
bucket_name (str): The bucket being checked/created.
bucket_region (str, optional): The region to create the bucket in. If
not provided, will be determined by s3_client's region.
"""
try:
s3_client.head_bucket(Bucket=bucket_name) # depends on [control=['try'], data=[]]
except botocore.exceptions.ClientError as e:
if e.response['Error']['Message'] == 'Not Found':
logger.debug('Creating bucket %s.', bucket_name)
create_args = {'Bucket': bucket_name}
location_constraint = s3_bucket_location_constraint(bucket_region)
if location_constraint:
create_args['CreateBucketConfiguration'] = {'LocationConstraint': location_constraint} # depends on [control=['if'], data=[]]
s3_client.create_bucket(**create_args) # depends on [control=['if'], data=[]]
elif e.response['Error']['Message'] == 'Forbidden':
logger.exception('Access denied for bucket %s. Did ' + 'you remember to use a globally unique name?', bucket_name)
raise # depends on [control=['if'], data=[]]
else:
logger.exception('Error creating bucket %s. Error %s', bucket_name, e.response)
raise # depends on [control=['except'], data=['e']] |
def is_ready(self, node_id, metadata_priority=True):
    """Check whether a node is ready to send more requests.

    Besides the connection-level checks, this can also report a node as
    not-ready while a metadata refresh is pending or due, so that the
    metadata request itself gets first priority.

    Arguments:
        node_id (int): id of the node to check
        metadata_priority (bool): Mark node as not-ready if a metadata
            refresh is required. Default: True

    Returns:
        bool: True if the node is ready and metadata is not refreshing
    """
    if not self._can_send_request(node_id):
        return False

    # Block regular traffic while metadata is refreshing (or has expired,
    # i.e. ttl == 0) so the refresh can be sent first.
    if metadata_priority and (self._metadata_refresh_in_progress
                              or self.cluster.ttl() == 0):
        return False

    return True
constant[Check whether a node is ready to send more requests.
In addition to connection-level checks, this method also is used to
block additional requests from being sent during a metadata refresh.
Arguments:
node_id (int): id of the node to check
metadata_priority (bool): Mark node as not-ready if a metadata
refresh is required. Default: True
Returns:
bool: True if the node is ready and metadata is not refreshing
]
if <ast.UnaryOp object at 0x7da1b1c70940> begin[:]
return[constant[False]]
if name[metadata_priority] begin[:]
if name[self]._metadata_refresh_in_progress begin[:]
return[constant[False]]
if compare[call[name[self].cluster.ttl, parameter[]] equal[==] constant[0]] begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[is_ready] ( identifier[self] , identifier[node_id] , identifier[metadata_priority] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_can_send_request] ( identifier[node_id] ):
keyword[return] keyword[False]
keyword[if] identifier[metadata_priority] :
keyword[if] identifier[self] . identifier[_metadata_refresh_in_progress] :
keyword[return] keyword[False]
keyword[if] identifier[self] . identifier[cluster] . identifier[ttl] ()== literal[int] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def is_ready(self, node_id, metadata_priority=True):
"""Check whether a node is ready to send more requests.
In addition to connection-level checks, this method also is used to
block additional requests from being sent during a metadata refresh.
Arguments:
node_id (int): id of the node to check
metadata_priority (bool): Mark node as not-ready if a metadata
refresh is required. Default: True
Returns:
bool: True if the node is ready and metadata is not refreshing
"""
if not self._can_send_request(node_id):
return False # depends on [control=['if'], data=[]]
# if we need to update our metadata now declare all requests unready to
# make metadata requests first priority
if metadata_priority:
if self._metadata_refresh_in_progress:
return False # depends on [control=['if'], data=[]]
if self.cluster.ttl() == 0:
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return True |
def register_new_edge(edge_id, first_char_index, last_char_index, source_node_id, dest_node_id):
    """Factory method, registers new edge.

    Emits an ``Edge.Created`` event, builds the entity from that event,
    publishes the event and returns the new entity.
    """
    created = Edge.Created(
        originator_id=edge_id,
        first_char_index=first_char_index,
        last_char_index=last_char_index,
        source_node_id=source_node_id,
        dest_node_id=dest_node_id,
    )
    # Mutate first: if building the entity fails, the event is never published.
    edge = Edge.mutate(event=created)
    publish(created)
    return edge
constant[Factory method, registers new edge.
]
variable[event] assign[=] call[name[Edge].Created, parameter[]]
variable[entity] assign[=] call[name[Edge].mutate, parameter[]]
call[name[publish], parameter[name[event]]]
return[name[entity]] | keyword[def] identifier[register_new_edge] ( identifier[edge_id] , identifier[first_char_index] , identifier[last_char_index] , identifier[source_node_id] , identifier[dest_node_id] ):
literal[string]
identifier[event] = identifier[Edge] . identifier[Created] (
identifier[originator_id] = identifier[edge_id] ,
identifier[first_char_index] = identifier[first_char_index] ,
identifier[last_char_index] = identifier[last_char_index] ,
identifier[source_node_id] = identifier[source_node_id] ,
identifier[dest_node_id] = identifier[dest_node_id] ,
)
identifier[entity] = identifier[Edge] . identifier[mutate] ( identifier[event] = identifier[event] )
identifier[publish] ( identifier[event] )
keyword[return] identifier[entity] | def register_new_edge(edge_id, first_char_index, last_char_index, source_node_id, dest_node_id):
"""Factory method, registers new edge.
"""
event = Edge.Created(originator_id=edge_id, first_char_index=first_char_index, last_char_index=last_char_index, source_node_id=source_node_id, dest_node_id=dest_node_id)
entity = Edge.mutate(event=event)
publish(event)
return entity |
def mine_block(self, *args: Any, **kwargs: Any) -> BaseBlock:
    """
    Mines the current block. Proxies to the current Virtual Machine.
    See VM. :meth:`~eth.vm.base.VM.mine_block`
    """
    block = self.get_vm(self.header).mine_block(*args, **kwargs)
    # Validate before persisting, then advance the chain head to a new
    # header built on top of the freshly mined block.
    self.validate_block(block)
    self.chaindb.persist_block(block)
    self.header = self.create_header_from_parent(block.header)
    return block
constant[
Mines the current block. Proxies to the current Virtual Machine.
See VM. :meth:`~eth.vm.base.VM.mine_block`
]
variable[mined_block] assign[=] call[call[name[self].get_vm, parameter[name[self].header]].mine_block, parameter[<ast.Starred object at 0x7da1b1600fd0>]]
call[name[self].validate_block, parameter[name[mined_block]]]
call[name[self].chaindb.persist_block, parameter[name[mined_block]]]
name[self].header assign[=] call[name[self].create_header_from_parent, parameter[name[mined_block].header]]
return[name[mined_block]] | keyword[def] identifier[mine_block] ( identifier[self] ,* identifier[args] : identifier[Any] ,** identifier[kwargs] : identifier[Any] )-> identifier[BaseBlock] :
literal[string]
identifier[mined_block] = identifier[self] . identifier[get_vm] ( identifier[self] . identifier[header] ). identifier[mine_block] (* identifier[args] ,** identifier[kwargs] )
identifier[self] . identifier[validate_block] ( identifier[mined_block] )
identifier[self] . identifier[chaindb] . identifier[persist_block] ( identifier[mined_block] )
identifier[self] . identifier[header] = identifier[self] . identifier[create_header_from_parent] ( identifier[mined_block] . identifier[header] )
keyword[return] identifier[mined_block] | def mine_block(self, *args: Any, **kwargs: Any) -> BaseBlock:
"""
Mines the current block. Proxies to the current Virtual Machine.
See VM. :meth:`~eth.vm.base.VM.mine_block`
"""
mined_block = self.get_vm(self.header).mine_block(*args, **kwargs)
self.validate_block(mined_block)
self.chaindb.persist_block(mined_block)
self.header = self.create_header_from_parent(mined_block.header)
return mined_block |
def getAllExports(self, plugin_list=None):
    """
    Return all the stats to be exported (list).

    Default behavior is to export the stats of every loaded plugin.
    If plugin_list is provided, only export the stats of the plugins
    whose names appear in that list.
    """
    if plugin_list is None:
        # All plugins should be exported
        plugin_list = self._plugins
    # Iterate over the requested plugin names only.  The previous code
    # iterated over self._plugins and silently ignored plugin_list, so
    # the documented filtering never happened.
    return [self._plugins[p].get_export() for p in plugin_list]
constant[
Return all the stats to be exported (list).
Default behavor is to export all the stat
if plugin_list is provided, only export stats of given plugin (list)
]
if compare[name[plugin_list] is constant[None]] begin[:]
variable[plugin_list] assign[=] name[self]._plugins
return[<ast.ListComp object at 0x7da1b21e13c0>] | keyword[def] identifier[getAllExports] ( identifier[self] , identifier[plugin_list] = keyword[None] ):
literal[string]
keyword[if] identifier[plugin_list] keyword[is] keyword[None] :
identifier[plugin_list] = identifier[self] . identifier[_plugins]
keyword[return] [ identifier[self] . identifier[_plugins] [ identifier[p] ]. identifier[get_export] () keyword[for] identifier[p] keyword[in] identifier[self] . identifier[_plugins] ] | def getAllExports(self, plugin_list=None):
"""
Return all the stats to be exported (list).
Default behavor is to export all the stat
if plugin_list is provided, only export stats of given plugin (list)
"""
if plugin_list is None:
# All plugins should be exported
plugin_list = self._plugins # depends on [control=['if'], data=['plugin_list']]
return [self._plugins[p].get_export() for p in self._plugins] |
def _get_slave_timeout(self, dpid, port):
"""get the timeout time at some port of some datapath."""
slave = self._get_slave(dpid, port)
if slave:
return slave['timeout']
else:
return 0 | def function[_get_slave_timeout, parameter[self, dpid, port]]:
constant[get the timeout time at some port of some datapath.]
variable[slave] assign[=] call[name[self]._get_slave, parameter[name[dpid], name[port]]]
if name[slave] begin[:]
return[call[name[slave]][constant[timeout]]] | keyword[def] identifier[_get_slave_timeout] ( identifier[self] , identifier[dpid] , identifier[port] ):
literal[string]
identifier[slave] = identifier[self] . identifier[_get_slave] ( identifier[dpid] , identifier[port] )
keyword[if] identifier[slave] :
keyword[return] identifier[slave] [ literal[string] ]
keyword[else] :
keyword[return] literal[int] | def _get_slave_timeout(self, dpid, port):
"""get the timeout time at some port of some datapath."""
slave = self._get_slave(dpid, port)
if slave:
return slave['timeout'] # depends on [control=['if'], data=[]]
else:
return 0 |
def smembers(self, key, *, encoding=_NOTSET):
    """Get all the members in a set.

    Issues the Redis ``SMEMBERS`` command for ``key``; ``encoding`` is
    forwarded to the connection's reply decoder.
    """
    command = b'SMEMBERS'
    return self.execute(command, key, encoding=encoding)
constant[Get all the members in a set.]
return[call[name[self].execute, parameter[constant[b'SMEMBERS'], name[key]]]] | keyword[def] identifier[smembers] ( identifier[self] , identifier[key] ,*, identifier[encoding] = identifier[_NOTSET] ):
literal[string]
keyword[return] identifier[self] . identifier[execute] ( literal[string] , identifier[key] , identifier[encoding] = identifier[encoding] ) | def smembers(self, key, *, encoding=_NOTSET):
"""Get all the members in a set."""
return self.execute(b'SMEMBERS', key, encoding=encoding) |
def _prevent_core_dump(cls):
"""Prevent the process from generating a core dump."""
try:
# Try to get the current limit
resource.getrlimit(resource.RLIMIT_CORE)
except ValueError:
# System doesn't support the RLIMIT_CORE resource limit
return
else:
# Set the soft and hard limits for core dump size to zero
resource.setrlimit(resource.RLIMIT_CORE, (0, 0)) | def function[_prevent_core_dump, parameter[cls]]:
constant[Prevent the process from generating a core dump.]
<ast.Try object at 0x7da18bccbcd0> | keyword[def] identifier[_prevent_core_dump] ( identifier[cls] ):
literal[string]
keyword[try] :
identifier[resource] . identifier[getrlimit] ( identifier[resource] . identifier[RLIMIT_CORE] )
keyword[except] identifier[ValueError] :
keyword[return]
keyword[else] :
identifier[resource] . identifier[setrlimit] ( identifier[resource] . identifier[RLIMIT_CORE] ,( literal[int] , literal[int] )) | def _prevent_core_dump(cls):
"""Prevent the process from generating a core dump."""
try:
# Try to get the current limit
resource.getrlimit(resource.RLIMIT_CORE) # depends on [control=['try'], data=[]]
except ValueError:
# System doesn't support the RLIMIT_CORE resource limit
return # depends on [control=['except'], data=[]]
else:
# Set the soft and hard limits for core dump size to zero
resource.setrlimit(resource.RLIMIT_CORE, (0, 0)) |
def summary(args):
    """
    %prog summary gffile fastafile

    Print summary stats, including:
    - Gene/Exon/Intron
        - Number
        - Average size (bp)
        - Median size (bp)
        - Total length (Mb)
        - % of genome
        - % GC
    """
    p = OptionParser(summary.__doc__)
    opts, args = p.parse_args(args)

    if len(args) != 2:
        sys.exit(not p.print_help())

    gff_file, ref = args
    fasta = Fasta(ref)
    db = make_index(gff_file)

    def subseq(chrom, start, stop):
        # Pull the raw sequence for one (chrom, start, stop) interval.
        return fasta.sequence({'chr': chrom, 'start': start, 'stop': stop})

    # Collected sequences, used below for the %GC columns.
    geneseqs, exonseqs, intronseqs = [], [], []
    for gene in db.features_of_type("gene"):
        geneseqs.append(subseq(gene.chrom, gene.start, gene.stop))
        # De-duplicate exon intervals shared between isoforms.
        exons = list(set((c.chrom, c.start, c.stop)
                         for c in db.children(gene.id, 2)
                         if c.featuretype == "exon"))
        exonseqs.extend(subseq(*exon) for exon in exons)
        # Introns are the gaps interleaved between the exon intervals.
        intronseqs.extend(subseq(*intron) for intron in range_interleave(exons))

    report = {}
    for label, seqs in zip(("Gene", "Exon", "Intron"),
                           (geneseqs, exonseqs, intronseqs)):
        stats = SummaryStats([len(x) for x in seqs], dtype="int")
        report[label, "Number"] = stats.size
        report[label, "Average size (bp)"] = stats.mean
        report[label, "Median size (bp)"] = stats.median
        report[label, "Total length (Mb)"] = human_size(stats.sum, precision=0, target="Mb")
        report[label, "% of genome"] = percentage(stats.sum, fasta.totalsize, precision=0, mode=-1)
        report[label, "% GC"] = gc(seqs)

    print(tabulate(report), file=sys.stderr)
constant[
%prog summary gffile fastafile
Print summary stats, including:
- Gene/Exon/Intron
- Number
- Average size (bp)
- Median size (bp)
- Total length (Mb)
- % of genome
- % GC
]
variable[p] assign[=] call[name[OptionParser], parameter[name[summary].__doc__]]
<ast.Tuple object at 0x7da18f00f970> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[2]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da18f00cee0>]]
<ast.Tuple object at 0x7da18f00e830> assign[=] name[args]
variable[s] assign[=] call[name[Fasta], parameter[name[ref]]]
variable[g] assign[=] call[name[make_index], parameter[name[gff_file]]]
<ast.Tuple object at 0x7da18f00c1c0> assign[=] tuple[[<ast.List object at 0x7da18f00c5b0>, <ast.List object at 0x7da18f00f190>, <ast.List object at 0x7da18f00c250>]]
for taget[name[f]] in starred[call[name[g].features_of_type, parameter[constant[gene]]]] begin[:]
variable[fid] assign[=] name[f].id
variable[fseq] assign[=] call[name[s].sequence, parameter[dictionary[[<ast.Constant object at 0x7da18f00d0c0>, <ast.Constant object at 0x7da18f00c2b0>, <ast.Constant object at 0x7da18f00d240>], [<ast.Attribute object at 0x7da18f00e6b0>, <ast.Attribute object at 0x7da18f00f5e0>, <ast.Attribute object at 0x7da18f00e9b0>]]]]
call[name[geneseqs].append, parameter[name[fseq]]]
variable[exons] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da204622740>]]
variable[exons] assign[=] call[name[list], parameter[name[exons]]]
for taget[tuple[[<ast.Name object at 0x7da2046237f0>, <ast.Name object at 0x7da204620a90>, <ast.Name object at 0x7da204620ac0>]]] in starred[name[exons]] begin[:]
variable[fseq] assign[=] call[name[s].sequence, parameter[dictionary[[<ast.Constant object at 0x7da204620e50>, <ast.Constant object at 0x7da204621fc0>, <ast.Constant object at 0x7da204623f70>], [<ast.Name object at 0x7da204620a30>, <ast.Name object at 0x7da2046225f0>, <ast.Name object at 0x7da204620bb0>]]]]
call[name[exonseqs].append, parameter[name[fseq]]]
variable[introns] assign[=] call[name[range_interleave], parameter[name[exons]]]
for taget[tuple[[<ast.Name object at 0x7da204623250>, <ast.Name object at 0x7da2046216f0>, <ast.Name object at 0x7da204623910>]]] in starred[name[introns]] begin[:]
variable[fseq] assign[=] call[name[s].sequence, parameter[dictionary[[<ast.Constant object at 0x7da204623160>, <ast.Constant object at 0x7da204623a00>, <ast.Constant object at 0x7da2046236d0>], [<ast.Name object at 0x7da204623c40>, <ast.Name object at 0x7da204620e20>, <ast.Name object at 0x7da204623af0>]]]]
call[name[intronseqs].append, parameter[name[fseq]]]
variable[r] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da204620f70>, <ast.Name object at 0x7da204623460>]]] in starred[call[name[zip], parameter[tuple[[<ast.Constant object at 0x7da204620790>, <ast.Constant object at 0x7da2046212a0>, <ast.Constant object at 0x7da2046238b0>]], tuple[[<ast.Name object at 0x7da2046227a0>, <ast.Name object at 0x7da204622020>, <ast.Name object at 0x7da204623b80>]]]]] begin[:]
variable[tsizes] assign[=] <ast.ListComp object at 0x7da204620fd0>
variable[tsummary] assign[=] call[name[SummaryStats], parameter[name[tsizes]]]
call[name[r]][tuple[[<ast.Name object at 0x7da204623d30>, <ast.Constant object at 0x7da204620f10>]]] assign[=] name[tsummary].size
call[name[r]][tuple[[<ast.Name object at 0x7da204621690>, <ast.Constant object at 0x7da204621990>]]] assign[=] name[tsummary].mean
call[name[r]][tuple[[<ast.Name object at 0x7da204622470>, <ast.Constant object at 0x7da204620cd0>]]] assign[=] name[tsummary].median
call[name[r]][tuple[[<ast.Name object at 0x7da2046213f0>, <ast.Constant object at 0x7da2046201f0>]]] assign[=] call[name[human_size], parameter[name[tsummary].sum]]
call[name[r]][tuple[[<ast.Name object at 0x7da204621750>, <ast.Constant object at 0x7da204621f30>]]] assign[=] call[name[percentage], parameter[name[tsummary].sum, name[s].totalsize]]
call[name[r]][tuple[[<ast.Name object at 0x7da2046219c0>, <ast.Constant object at 0x7da204620d00>]]] assign[=] call[name[gc], parameter[name[tseqs]]]
call[name[print], parameter[call[name[tabulate], parameter[name[r]]]]] | keyword[def] identifier[summary] ( identifier[args] ):
literal[string]
identifier[p] = identifier[OptionParser] ( identifier[summary] . identifier[__doc__] )
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[gff_file] , identifier[ref] = identifier[args]
identifier[s] = identifier[Fasta] ( identifier[ref] )
identifier[g] = identifier[make_index] ( identifier[gff_file] )
identifier[geneseqs] , identifier[exonseqs] , identifier[intronseqs] =[],[],[]
keyword[for] identifier[f] keyword[in] identifier[g] . identifier[features_of_type] ( literal[string] ):
identifier[fid] = identifier[f] . identifier[id]
identifier[fseq] = identifier[s] . identifier[sequence] ({ literal[string] : identifier[f] . identifier[chrom] , literal[string] : identifier[f] . identifier[start] , literal[string] : identifier[f] . identifier[stop] })
identifier[geneseqs] . identifier[append] ( identifier[fseq] )
identifier[exons] = identifier[set] (( identifier[c] . identifier[chrom] , identifier[c] . identifier[start] , identifier[c] . identifier[stop] ) keyword[for] identifier[c] keyword[in] identifier[g] . identifier[children] ( identifier[fid] , literal[int] ) keyword[if] identifier[c] . identifier[featuretype] == literal[string] )
identifier[exons] = identifier[list] ( identifier[exons] )
keyword[for] identifier[chrom] , identifier[start] , identifier[stop] keyword[in] identifier[exons] :
identifier[fseq] = identifier[s] . identifier[sequence] ({ literal[string] : identifier[chrom] , literal[string] : identifier[start] , literal[string] : identifier[stop] })
identifier[exonseqs] . identifier[append] ( identifier[fseq] )
identifier[introns] = identifier[range_interleave] ( identifier[exons] )
keyword[for] identifier[chrom] , identifier[start] , identifier[stop] keyword[in] identifier[introns] :
identifier[fseq] = identifier[s] . identifier[sequence] ({ literal[string] : identifier[chrom] , literal[string] : identifier[start] , literal[string] : identifier[stop] })
identifier[intronseqs] . identifier[append] ( identifier[fseq] )
identifier[r] ={}
keyword[for] identifier[t] , identifier[tseqs] keyword[in] identifier[zip] (( literal[string] , literal[string] , literal[string] ),( identifier[geneseqs] , identifier[exonseqs] , identifier[intronseqs] )):
identifier[tsizes] =[ identifier[len] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[tseqs] ]
identifier[tsummary] = identifier[SummaryStats] ( identifier[tsizes] , identifier[dtype] = literal[string] )
identifier[r] [ identifier[t] , literal[string] ]= identifier[tsummary] . identifier[size]
identifier[r] [ identifier[t] , literal[string] ]= identifier[tsummary] . identifier[mean]
identifier[r] [ identifier[t] , literal[string] ]= identifier[tsummary] . identifier[median]
identifier[r] [ identifier[t] , literal[string] ]= identifier[human_size] ( identifier[tsummary] . identifier[sum] , identifier[precision] = literal[int] , identifier[target] = literal[string] )
identifier[r] [ identifier[t] , literal[string] ]= identifier[percentage] ( identifier[tsummary] . identifier[sum] , identifier[s] . identifier[totalsize] , identifier[precision] = literal[int] , identifier[mode] =- literal[int] )
identifier[r] [ identifier[t] , literal[string] ]= identifier[gc] ( identifier[tseqs] )
identifier[print] ( identifier[tabulate] ( identifier[r] ), identifier[file] = identifier[sys] . identifier[stderr] ) | def summary(args):
"""
%prog summary gffile fastafile
Print summary stats, including:
- Gene/Exon/Intron
- Number
- Average size (bp)
- Median size (bp)
- Total length (Mb)
- % of genome
- % GC
"""
p = OptionParser(summary.__doc__)
(opts, args) = p.parse_args(args)
if len(args) != 2:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(gff_file, ref) = args
s = Fasta(ref)
g = make_index(gff_file)
(geneseqs, exonseqs, intronseqs) = ([], [], []) # Calc % GC
for f in g.features_of_type('gene'):
fid = f.id
fseq = s.sequence({'chr': f.chrom, 'start': f.start, 'stop': f.stop})
geneseqs.append(fseq)
exons = set(((c.chrom, c.start, c.stop) for c in g.children(fid, 2) if c.featuretype == 'exon'))
exons = list(exons)
for (chrom, start, stop) in exons:
fseq = s.sequence({'chr': chrom, 'start': start, 'stop': stop})
exonseqs.append(fseq) # depends on [control=['for'], data=[]]
introns = range_interleave(exons)
for (chrom, start, stop) in introns:
fseq = s.sequence({'chr': chrom, 'start': start, 'stop': stop})
intronseqs.append(fseq) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['f']]
r = {} # Report
for (t, tseqs) in zip(('Gene', 'Exon', 'Intron'), (geneseqs, exonseqs, intronseqs)):
tsizes = [len(x) for x in tseqs]
tsummary = SummaryStats(tsizes, dtype='int')
r[t, 'Number'] = tsummary.size
r[t, 'Average size (bp)'] = tsummary.mean
r[t, 'Median size (bp)'] = tsummary.median
r[t, 'Total length (Mb)'] = human_size(tsummary.sum, precision=0, target='Mb')
r[t, '% of genome'] = percentage(tsummary.sum, s.totalsize, precision=0, mode=-1)
r[t, '% GC'] = gc(tseqs) # depends on [control=['for'], data=[]]
print(tabulate(r), file=sys.stderr) |
def create_plugin(self, name, plugin_data_dir, gzip=False):
    """
    Create a new plugin.

    Args:
        name (string): The name of the plugin. The ``:latest`` tag is
            optional, and is the default if omitted.
        plugin_data_dir (string): Path to the plugin data directory.
            Plugin data directory must contain the ``config.json``
            manifest file and the ``rootfs`` directory.
        gzip (bool): Compress the context using gzip. Default: False

    Returns:
        ``True`` if successful
    """
    endpoint = self._url('/plugins/create')
    # Bundle the plugin directory (config.json + rootfs) into a tar
    # archive and stream it to the daemon as the request body.
    context_files = set(utils.build.walk(plugin_data_dir, []))
    with utils.create_archive(root=plugin_data_dir, gzip=gzip,
                              files=context_files) as archive:
        response = self._post(endpoint, params={'name': name}, data=archive)
    self._raise_for_status(response)
    return True
constant[
Create a new plugin.
Args:
name (string): The name of the plugin. The ``:latest`` tag is
optional, and is the default if omitted.
plugin_data_dir (string): Path to the plugin data directory.
Plugin data directory must contain the ``config.json``
manifest file and the ``rootfs`` directory.
gzip (bool): Compress the context using gzip. Default: False
Returns:
``True`` if successful
]
variable[url] assign[=] call[name[self]._url, parameter[constant[/plugins/create]]]
with call[name[utils].create_archive, parameter[]] begin[:]
variable[res] assign[=] call[name[self]._post, parameter[name[url]]]
call[name[self]._raise_for_status, parameter[name[res]]]
return[constant[True]] | keyword[def] identifier[create_plugin] ( identifier[self] , identifier[name] , identifier[plugin_data_dir] , identifier[gzip] = keyword[False] ):
literal[string]
identifier[url] = identifier[self] . identifier[_url] ( literal[string] )
keyword[with] identifier[utils] . identifier[create_archive] (
identifier[root] = identifier[plugin_data_dir] , identifier[gzip] = identifier[gzip] ,
identifier[files] = identifier[set] ( identifier[utils] . identifier[build] . identifier[walk] ( identifier[plugin_data_dir] ,[]))
) keyword[as] identifier[archv] :
identifier[res] = identifier[self] . identifier[_post] ( identifier[url] , identifier[params] ={ literal[string] : identifier[name] }, identifier[data] = identifier[archv] )
identifier[self] . identifier[_raise_for_status] ( identifier[res] )
keyword[return] keyword[True] | def create_plugin(self, name, plugin_data_dir, gzip=False):
"""
Create a new plugin.
Args:
name (string): The name of the plugin. The ``:latest`` tag is
optional, and is the default if omitted.
plugin_data_dir (string): Path to the plugin data directory.
Plugin data directory must contain the ``config.json``
manifest file and the ``rootfs`` directory.
gzip (bool): Compress the context using gzip. Default: False
Returns:
``True`` if successful
"""
url = self._url('/plugins/create')
with utils.create_archive(root=plugin_data_dir, gzip=gzip, files=set(utils.build.walk(plugin_data_dir, []))) as archv:
res = self._post(url, params={'name': name}, data=archv) # depends on [control=['with'], data=['archv']]
self._raise_for_status(res)
return True |
def cast(self, dtype: tf.DType) -> 'TensorFluent':
    '''Returns a TensorFluent for the cast operation with given `dtype`.

    Args:
        dtype: The output's data type.

    Returns:
        A TensorFluent wrapping the cast operation.
    '''
    # Fast path: nothing to do when the fluent already has this dtype.
    if self.dtype == dtype:
        return self
    casted = tf.cast(self.tensor, dtype)
    return TensorFluent(casted, self.scope.as_list(), batch=self.batch)
constant[Returns a TensorFluent for the cast operation with given `dtype`.
Args:
dtype: The output's data type.
Returns:
A TensorFluent wrapping the cast operation.
]
if compare[name[self].dtype equal[==] name[dtype]] begin[:]
return[name[self]]
variable[t] assign[=] call[name[tf].cast, parameter[name[self].tensor, name[dtype]]]
variable[scope] assign[=] call[name[self].scope.as_list, parameter[]]
variable[batch] assign[=] name[self].batch
return[call[name[TensorFluent], parameter[name[t], name[scope]]]] | keyword[def] identifier[cast] ( identifier[self] , identifier[dtype] : identifier[tf] . identifier[DType] )-> literal[string] :
literal[string]
keyword[if] identifier[self] . identifier[dtype] == identifier[dtype] :
keyword[return] identifier[self]
identifier[t] = identifier[tf] . identifier[cast] ( identifier[self] . identifier[tensor] , identifier[dtype] )
identifier[scope] = identifier[self] . identifier[scope] . identifier[as_list] ()
identifier[batch] = identifier[self] . identifier[batch]
keyword[return] identifier[TensorFluent] ( identifier[t] , identifier[scope] , identifier[batch] = identifier[batch] ) | def cast(self, dtype: tf.DType) -> 'TensorFluent':
"""Returns a TensorFluent for the cast operation with given `dtype`.
Args:
dtype: The output's data type.
Returns:
A TensorFluent wrapping the cast operation.
"""
if self.dtype == dtype:
return self # depends on [control=['if'], data=[]]
t = tf.cast(self.tensor, dtype)
scope = self.scope.as_list()
batch = self.batch
return TensorFluent(t, scope, batch=batch) |
def _model_predict(self, h, recalculate=False, fit_once=True):
""" Outputs ensemble model predictions for out-of-sample data
Parameters
----------
h : int
How many steps at the end of the series to run the ensemble on
recalculate: boolean
Whether to recalculate the predictions or not
fit_once : boolean
Whether to fit the model once at the beginning, or with every iteration
Returns
----------
- pd.DataFrame of the model predictions, index of dates
"""
if len(self.model_predictions) == 0 or h != self.h or recalculate is True:
for no, model in enumerate(self.model_list):
if no == 0:
model.fit()
result = model.predict(h)
self.predict_index = result.index
result.columns = [model.model_name]
else:
model.fit()
new_frame = model.predict(h)
new_frame.columns = [model.model_name]
result = pd.concat([result,new_frame], axis=1)
self.model_predictions = result
self.h = h
return result, self.predict_index
else:
return self.model_predictions, self.predict_index | def function[_model_predict, parameter[self, h, recalculate, fit_once]]:
constant[ Outputs ensemble model predictions for out-of-sample data
Parameters
----------
h : int
How many steps at the end of the series to run the ensemble on
recalculate: boolean
Whether to recalculate the predictions or not
fit_once : boolean
Whether to fit the model once at the beginning, or with every iteration
Returns
----------
- pd.DataFrame of the model predictions, index of dates
]
if <ast.BoolOp object at 0x7da18f00d510> begin[:]
for taget[tuple[[<ast.Name object at 0x7da18f00f0a0>, <ast.Name object at 0x7da18f00f190>]]] in starred[call[name[enumerate], parameter[name[self].model_list]]] begin[:]
if compare[name[no] equal[==] constant[0]] begin[:]
call[name[model].fit, parameter[]]
variable[result] assign[=] call[name[model].predict, parameter[name[h]]]
name[self].predict_index assign[=] name[result].index
name[result].columns assign[=] list[[<ast.Attribute object at 0x7da18f00ffd0>]]
name[self].model_predictions assign[=] name[result]
name[self].h assign[=] name[h]
return[tuple[[<ast.Name object at 0x7da18f00d450>, <ast.Attribute object at 0x7da18f00f7c0>]]] | keyword[def] identifier[_model_predict] ( identifier[self] , identifier[h] , identifier[recalculate] = keyword[False] , identifier[fit_once] = keyword[True] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[model_predictions] )== literal[int] keyword[or] identifier[h] != identifier[self] . identifier[h] keyword[or] identifier[recalculate] keyword[is] keyword[True] :
keyword[for] identifier[no] , identifier[model] keyword[in] identifier[enumerate] ( identifier[self] . identifier[model_list] ):
keyword[if] identifier[no] == literal[int] :
identifier[model] . identifier[fit] ()
identifier[result] = identifier[model] . identifier[predict] ( identifier[h] )
identifier[self] . identifier[predict_index] = identifier[result] . identifier[index]
identifier[result] . identifier[columns] =[ identifier[model] . identifier[model_name] ]
keyword[else] :
identifier[model] . identifier[fit] ()
identifier[new_frame] = identifier[model] . identifier[predict] ( identifier[h] )
identifier[new_frame] . identifier[columns] =[ identifier[model] . identifier[model_name] ]
identifier[result] = identifier[pd] . identifier[concat] ([ identifier[result] , identifier[new_frame] ], identifier[axis] = literal[int] )
identifier[self] . identifier[model_predictions] = identifier[result]
identifier[self] . identifier[h] = identifier[h]
keyword[return] identifier[result] , identifier[self] . identifier[predict_index]
keyword[else] :
keyword[return] identifier[self] . identifier[model_predictions] , identifier[self] . identifier[predict_index] | def _model_predict(self, h, recalculate=False, fit_once=True):
""" Outputs ensemble model predictions for out-of-sample data
Parameters
----------
h : int
How many steps at the end of the series to run the ensemble on
recalculate: boolean
Whether to recalculate the predictions or not
fit_once : boolean
Whether to fit the model once at the beginning, or with every iteration
Returns
----------
- pd.DataFrame of the model predictions, index of dates
"""
if len(self.model_predictions) == 0 or h != self.h or recalculate is True:
for (no, model) in enumerate(self.model_list):
if no == 0:
model.fit()
result = model.predict(h)
self.predict_index = result.index
result.columns = [model.model_name] # depends on [control=['if'], data=[]]
else:
model.fit()
new_frame = model.predict(h)
new_frame.columns = [model.model_name]
result = pd.concat([result, new_frame], axis=1) # depends on [control=['for'], data=[]]
self.model_predictions = result
self.h = h
return (result, self.predict_index) # depends on [control=['if'], data=[]]
else:
return (self.model_predictions, self.predict_index) |
def summary(self):
""" Get the entry's summary text """
if self.get('Summary'):
return self.get('Summary')
body, more, is_markdown = self._entry_content
return TrueCallableProxy(
self._get_summary,
body or more) if is_markdown else CallableProxy(None) | def function[summary, parameter[self]]:
constant[ Get the entry's summary text ]
if call[name[self].get, parameter[constant[Summary]]] begin[:]
return[call[name[self].get, parameter[constant[Summary]]]]
<ast.Tuple object at 0x7da2041d9810> assign[=] name[self]._entry_content
return[<ast.IfExp object at 0x7da2041db430>] | keyword[def] identifier[summary] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[get] ( literal[string] )
identifier[body] , identifier[more] , identifier[is_markdown] = identifier[self] . identifier[_entry_content]
keyword[return] identifier[TrueCallableProxy] (
identifier[self] . identifier[_get_summary] ,
identifier[body] keyword[or] identifier[more] ) keyword[if] identifier[is_markdown] keyword[else] identifier[CallableProxy] ( keyword[None] ) | def summary(self):
""" Get the entry's summary text """
if self.get('Summary'):
return self.get('Summary') # depends on [control=['if'], data=[]]
(body, more, is_markdown) = self._entry_content
return TrueCallableProxy(self._get_summary, body or more) if is_markdown else CallableProxy(None) |
def edit_caption(
self,
caption: str,
parse_mode: str = "",
reply_markup: Union[
"pyrogram.InlineKeyboardMarkup",
"pyrogram.ReplyKeyboardMarkup",
"pyrogram.ReplyKeyboardRemove",
"pyrogram.ForceReply"
] = None
) -> "Message":
"""Bound method *edit_caption* of :obj:`Message <pyrogram.Message>`
Use as a shortcut for:
.. code-block:: python
client.edit_message_caption(
chat_id=message.chat.id,
message_id=message.message_id,
caption="hello"
)
Example:
.. code-block:: python
message.edit_caption("hello")
Args:
caption (``str``):
New caption of the message.
parse_mode (``str``, *optional*):
Use :obj:`MARKDOWN <pyrogram.ParseMode.MARKDOWN>` or :obj:`HTML <pyrogram.ParseMode.HTML>`
if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in your message.
Defaults to Markdown.
reply_markup (:obj:`InlineKeyboardMarkup`, *optional*):
An InlineKeyboardMarkup object.
Returns:
On success, the edited :obj:`Message <pyrogram.Message>` is returned.
Raises:
:class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error.
"""
return self._client.edit_message_caption(
chat_id=self.chat.id,
message_id=self.message_id,
caption=caption,
parse_mode=parse_mode,
reply_markup=reply_markup
) | def function[edit_caption, parameter[self, caption, parse_mode, reply_markup]]:
constant[Bound method *edit_caption* of :obj:`Message <pyrogram.Message>`
Use as a shortcut for:
.. code-block:: python
client.edit_message_caption(
chat_id=message.chat.id,
message_id=message.message_id,
caption="hello"
)
Example:
.. code-block:: python
message.edit_caption("hello")
Args:
caption (``str``):
New caption of the message.
parse_mode (``str``, *optional*):
Use :obj:`MARKDOWN <pyrogram.ParseMode.MARKDOWN>` or :obj:`HTML <pyrogram.ParseMode.HTML>`
if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in your message.
Defaults to Markdown.
reply_markup (:obj:`InlineKeyboardMarkup`, *optional*):
An InlineKeyboardMarkup object.
Returns:
On success, the edited :obj:`Message <pyrogram.Message>` is returned.
Raises:
:class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error.
]
return[call[name[self]._client.edit_message_caption, parameter[]]] | keyword[def] identifier[edit_caption] (
identifier[self] ,
identifier[caption] : identifier[str] ,
identifier[parse_mode] : identifier[str] = literal[string] ,
identifier[reply_markup] : identifier[Union] [
literal[string] ,
literal[string] ,
literal[string] ,
literal[string]
]= keyword[None]
)-> literal[string] :
literal[string]
keyword[return] identifier[self] . identifier[_client] . identifier[edit_message_caption] (
identifier[chat_id] = identifier[self] . identifier[chat] . identifier[id] ,
identifier[message_id] = identifier[self] . identifier[message_id] ,
identifier[caption] = identifier[caption] ,
identifier[parse_mode] = identifier[parse_mode] ,
identifier[reply_markup] = identifier[reply_markup]
) | def edit_caption(self, caption: str, parse_mode: str='', reply_markup: Union['pyrogram.InlineKeyboardMarkup', 'pyrogram.ReplyKeyboardMarkup', 'pyrogram.ReplyKeyboardRemove', 'pyrogram.ForceReply']=None) -> 'Message':
"""Bound method *edit_caption* of :obj:`Message <pyrogram.Message>`
Use as a shortcut for:
.. code-block:: python
client.edit_message_caption(
chat_id=message.chat.id,
message_id=message.message_id,
caption="hello"
)
Example:
.. code-block:: python
message.edit_caption("hello")
Args:
caption (``str``):
New caption of the message.
parse_mode (``str``, *optional*):
Use :obj:`MARKDOWN <pyrogram.ParseMode.MARKDOWN>` or :obj:`HTML <pyrogram.ParseMode.HTML>`
if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in your message.
Defaults to Markdown.
reply_markup (:obj:`InlineKeyboardMarkup`, *optional*):
An InlineKeyboardMarkup object.
Returns:
On success, the edited :obj:`Message <pyrogram.Message>` is returned.
Raises:
:class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error.
"""
return self._client.edit_message_caption(chat_id=self.chat.id, message_id=self.message_id, caption=caption, parse_mode=parse_mode, reply_markup=reply_markup) |
def split_by_number_of_utterances(self, proportions={}, separate_issuers=False):
"""
Split the corpus into subsets with the given number of utterances.
The corpus gets splitted into len(proportions) parts, so the number of utterances are
distributed according to the proportions.
Args:
proportions (dict): A dictionary containing the relative size of the target subsets.
The key is an identifier for the subset.
separate_issuers (bool): If True it makes sure that all utterances of an issuer are in the same subset.
Returns:
(dict): A dictionary containing the subsets with the identifier from the input as key.
Example::
>>> spl = Splitter(corpus)
>>> corpus.num_utterances
100
>>> subsets = spl.split_by_number_of_utterances(proportions={
>>> "train" : 0.6,
>>> "dev" : 0.2,
>>> "test" : 0.2
>>> })
>>> print(subsets)
{'dev': <audiomate.corpus.subview.Subview at 0x104ce7400>,
'test': <audiomate.corpus.subview.Subview at 0x104ce74e0>,
'train': <audiomate.corpus.subview.Subview at 0x104ce7438>}
>>> subsets['train'].num_utterances
60
>>> subsets['test'].num_utterances
20
"""
if separate_issuers:
# Count number of utterances per issuer
issuer_utt_count = collections.defaultdict(int)
issuer_utts = collections.defaultdict(list)
for utterance in self.corpus.utterances.values():
issuer_utt_count[utterance.issuer.idx] += 1
issuer_utts[utterance.issuer.idx].append(utterance.idx)
issuer_utt_count = {k: {'count': int(v)} for k, v in issuer_utt_count.items()}
# Split with total utt duration per issuer as weight
issuer_splits = utils.get_identifiers_splitted_by_weights(issuer_utt_count,
proportions=proportions)
# Collect utterances of all issuers per split
splits = collections.defaultdict(list)
for split_idx, issuer_ids in issuer_splits.items():
for issuer_idx in issuer_ids:
splits[split_idx].extend(issuer_utts[issuer_idx])
else:
utterance_idxs = sorted(list(self.corpus.utterances.keys()))
self.rand.shuffle(utterance_idxs)
splits = utils.split_identifiers(identifiers=utterance_idxs,
proportions=proportions)
return self._subviews_from_utterance_splits(splits) | def function[split_by_number_of_utterances, parameter[self, proportions, separate_issuers]]:
constant[
Split the corpus into subsets with the given number of utterances.
The corpus gets splitted into len(proportions) parts, so the number of utterances are
distributed according to the proportions.
Args:
proportions (dict): A dictionary containing the relative size of the target subsets.
The key is an identifier for the subset.
separate_issuers (bool): If True it makes sure that all utterances of an issuer are in the same subset.
Returns:
(dict): A dictionary containing the subsets with the identifier from the input as key.
Example::
>>> spl = Splitter(corpus)
>>> corpus.num_utterances
100
>>> subsets = spl.split_by_number_of_utterances(proportions={
>>> "train" : 0.6,
>>> "dev" : 0.2,
>>> "test" : 0.2
>>> })
>>> print(subsets)
{'dev': <audiomate.corpus.subview.Subview at 0x104ce7400>,
'test': <audiomate.corpus.subview.Subview at 0x104ce74e0>,
'train': <audiomate.corpus.subview.Subview at 0x104ce7438>}
>>> subsets['train'].num_utterances
60
>>> subsets['test'].num_utterances
20
]
if name[separate_issuers] begin[:]
variable[issuer_utt_count] assign[=] call[name[collections].defaultdict, parameter[name[int]]]
variable[issuer_utts] assign[=] call[name[collections].defaultdict, parameter[name[list]]]
for taget[name[utterance]] in starred[call[name[self].corpus.utterances.values, parameter[]]] begin[:]
<ast.AugAssign object at 0x7da1b0b807c0>
call[call[name[issuer_utts]][name[utterance].issuer.idx].append, parameter[name[utterance].idx]]
variable[issuer_utt_count] assign[=] <ast.DictComp object at 0x7da1b0b41060>
variable[issuer_splits] assign[=] call[name[utils].get_identifiers_splitted_by_weights, parameter[name[issuer_utt_count]]]
variable[splits] assign[=] call[name[collections].defaultdict, parameter[name[list]]]
for taget[tuple[[<ast.Name object at 0x7da1b0b82aa0>, <ast.Name object at 0x7da1b0b810c0>]]] in starred[call[name[issuer_splits].items, parameter[]]] begin[:]
for taget[name[issuer_idx]] in starred[name[issuer_ids]] begin[:]
call[call[name[splits]][name[split_idx]].extend, parameter[call[name[issuer_utts]][name[issuer_idx]]]]
return[call[name[self]._subviews_from_utterance_splits, parameter[name[splits]]]] | keyword[def] identifier[split_by_number_of_utterances] ( identifier[self] , identifier[proportions] ={}, identifier[separate_issuers] = keyword[False] ):
literal[string]
keyword[if] identifier[separate_issuers] :
identifier[issuer_utt_count] = identifier[collections] . identifier[defaultdict] ( identifier[int] )
identifier[issuer_utts] = identifier[collections] . identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[utterance] keyword[in] identifier[self] . identifier[corpus] . identifier[utterances] . identifier[values] ():
identifier[issuer_utt_count] [ identifier[utterance] . identifier[issuer] . identifier[idx] ]+= literal[int]
identifier[issuer_utts] [ identifier[utterance] . identifier[issuer] . identifier[idx] ]. identifier[append] ( identifier[utterance] . identifier[idx] )
identifier[issuer_utt_count] ={ identifier[k] :{ literal[string] : identifier[int] ( identifier[v] )} keyword[for] identifier[k] , identifier[v] keyword[in] identifier[issuer_utt_count] . identifier[items] ()}
identifier[issuer_splits] = identifier[utils] . identifier[get_identifiers_splitted_by_weights] ( identifier[issuer_utt_count] ,
identifier[proportions] = identifier[proportions] )
identifier[splits] = identifier[collections] . identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[split_idx] , identifier[issuer_ids] keyword[in] identifier[issuer_splits] . identifier[items] ():
keyword[for] identifier[issuer_idx] keyword[in] identifier[issuer_ids] :
identifier[splits] [ identifier[split_idx] ]. identifier[extend] ( identifier[issuer_utts] [ identifier[issuer_idx] ])
keyword[else] :
identifier[utterance_idxs] = identifier[sorted] ( identifier[list] ( identifier[self] . identifier[corpus] . identifier[utterances] . identifier[keys] ()))
identifier[self] . identifier[rand] . identifier[shuffle] ( identifier[utterance_idxs] )
identifier[splits] = identifier[utils] . identifier[split_identifiers] ( identifier[identifiers] = identifier[utterance_idxs] ,
identifier[proportions] = identifier[proportions] )
keyword[return] identifier[self] . identifier[_subviews_from_utterance_splits] ( identifier[splits] ) | def split_by_number_of_utterances(self, proportions={}, separate_issuers=False):
"""
Split the corpus into subsets with the given number of utterances.
The corpus gets splitted into len(proportions) parts, so the number of utterances are
distributed according to the proportions.
Args:
proportions (dict): A dictionary containing the relative size of the target subsets.
The key is an identifier for the subset.
separate_issuers (bool): If True it makes sure that all utterances of an issuer are in the same subset.
Returns:
(dict): A dictionary containing the subsets with the identifier from the input as key.
Example::
>>> spl = Splitter(corpus)
>>> corpus.num_utterances
100
>>> subsets = spl.split_by_number_of_utterances(proportions={
>>> "train" : 0.6,
>>> "dev" : 0.2,
>>> "test" : 0.2
>>> })
>>> print(subsets)
{'dev': <audiomate.corpus.subview.Subview at 0x104ce7400>,
'test': <audiomate.corpus.subview.Subview at 0x104ce74e0>,
'train': <audiomate.corpus.subview.Subview at 0x104ce7438>}
>>> subsets['train'].num_utterances
60
>>> subsets['test'].num_utterances
20
"""
if separate_issuers:
# Count number of utterances per issuer
issuer_utt_count = collections.defaultdict(int)
issuer_utts = collections.defaultdict(list)
for utterance in self.corpus.utterances.values():
issuer_utt_count[utterance.issuer.idx] += 1
issuer_utts[utterance.issuer.idx].append(utterance.idx) # depends on [control=['for'], data=['utterance']]
issuer_utt_count = {k: {'count': int(v)} for (k, v) in issuer_utt_count.items()}
# Split with total utt duration per issuer as weight
issuer_splits = utils.get_identifiers_splitted_by_weights(issuer_utt_count, proportions=proportions)
# Collect utterances of all issuers per split
splits = collections.defaultdict(list)
for (split_idx, issuer_ids) in issuer_splits.items():
for issuer_idx in issuer_ids:
splits[split_idx].extend(issuer_utts[issuer_idx]) # depends on [control=['for'], data=['issuer_idx']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
utterance_idxs = sorted(list(self.corpus.utterances.keys()))
self.rand.shuffle(utterance_idxs)
splits = utils.split_identifiers(identifiers=utterance_idxs, proportions=proportions)
return self._subviews_from_utterance_splits(splits) |
def get_nova_endpoint(cls, json_resp, nova_api_version=None):
"""
Parse the service catalog returned by the Identity API for an endpoint matching
the Nova service with the requested version
Sends a CRITICAL service check when no viable candidates are found in the Catalog
"""
nova_version = nova_api_version or DEFAULT_NOVA_API_VERSION
catalog = json_resp.get('token', {}).get('catalog', [])
nova_match = 'novav21' if nova_version == V21_NOVA_API_VERSION else 'nova'
for entry in catalog:
if entry['name'] == nova_match or 'Compute' in entry['name']:
# Collect any endpoints on the public or internal interface
valid_endpoints = {}
for ep in entry['endpoints']:
interface = ep.get('interface', '')
if interface in ['public', 'internal']:
valid_endpoints[interface] = ep['url']
if valid_endpoints:
# Favor public endpoints over internal
nova_endpoint = valid_endpoints.get("public", valid_endpoints.get("internal"))
return nova_endpoint
else:
raise MissingNovaEndpoint() | def function[get_nova_endpoint, parameter[cls, json_resp, nova_api_version]]:
constant[
Parse the service catalog returned by the Identity API for an endpoint matching
the Nova service with the requested version
Sends a CRITICAL service check when no viable candidates are found in the Catalog
]
variable[nova_version] assign[=] <ast.BoolOp object at 0x7da20c7c8370>
variable[catalog] assign[=] call[call[name[json_resp].get, parameter[constant[token], dictionary[[], []]]].get, parameter[constant[catalog], list[[]]]]
variable[nova_match] assign[=] <ast.IfExp object at 0x7da20c7c9690>
for taget[name[entry]] in starred[name[catalog]] begin[:]
if <ast.BoolOp object at 0x7da20c7ca3e0> begin[:]
variable[valid_endpoints] assign[=] dictionary[[], []]
for taget[name[ep]] in starred[call[name[entry]][constant[endpoints]]] begin[:]
variable[interface] assign[=] call[name[ep].get, parameter[constant[interface], constant[]]]
if compare[name[interface] in list[[<ast.Constant object at 0x7da20c7ca920>, <ast.Constant object at 0x7da20c7ca8f0>]]] begin[:]
call[name[valid_endpoints]][name[interface]] assign[=] call[name[ep]][constant[url]]
if name[valid_endpoints] begin[:]
variable[nova_endpoint] assign[=] call[name[valid_endpoints].get, parameter[constant[public], call[name[valid_endpoints].get, parameter[constant[internal]]]]]
return[name[nova_endpoint]] | keyword[def] identifier[get_nova_endpoint] ( identifier[cls] , identifier[json_resp] , identifier[nova_api_version] = keyword[None] ):
literal[string]
identifier[nova_version] = identifier[nova_api_version] keyword[or] identifier[DEFAULT_NOVA_API_VERSION]
identifier[catalog] = identifier[json_resp] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ,[])
identifier[nova_match] = literal[string] keyword[if] identifier[nova_version] == identifier[V21_NOVA_API_VERSION] keyword[else] literal[string]
keyword[for] identifier[entry] keyword[in] identifier[catalog] :
keyword[if] identifier[entry] [ literal[string] ]== identifier[nova_match] keyword[or] literal[string] keyword[in] identifier[entry] [ literal[string] ]:
identifier[valid_endpoints] ={}
keyword[for] identifier[ep] keyword[in] identifier[entry] [ literal[string] ]:
identifier[interface] = identifier[ep] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[interface] keyword[in] [ literal[string] , literal[string] ]:
identifier[valid_endpoints] [ identifier[interface] ]= identifier[ep] [ literal[string] ]
keyword[if] identifier[valid_endpoints] :
identifier[nova_endpoint] = identifier[valid_endpoints] . identifier[get] ( literal[string] , identifier[valid_endpoints] . identifier[get] ( literal[string] ))
keyword[return] identifier[nova_endpoint]
keyword[else] :
keyword[raise] identifier[MissingNovaEndpoint] () | def get_nova_endpoint(cls, json_resp, nova_api_version=None):
"""
Parse the service catalog returned by the Identity API for an endpoint matching
the Nova service with the requested version
Sends a CRITICAL service check when no viable candidates are found in the Catalog
"""
nova_version = nova_api_version or DEFAULT_NOVA_API_VERSION
catalog = json_resp.get('token', {}).get('catalog', [])
nova_match = 'novav21' if nova_version == V21_NOVA_API_VERSION else 'nova'
for entry in catalog:
if entry['name'] == nova_match or 'Compute' in entry['name']:
# Collect any endpoints on the public or internal interface
valid_endpoints = {}
for ep in entry['endpoints']:
interface = ep.get('interface', '')
if interface in ['public', 'internal']:
valid_endpoints[interface] = ep['url'] # depends on [control=['if'], data=['interface']] # depends on [control=['for'], data=['ep']]
if valid_endpoints:
# Favor public endpoints over internal
nova_endpoint = valid_endpoints.get('public', valid_endpoints.get('internal'))
return nova_endpoint # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['entry']]
else:
raise MissingNovaEndpoint() |
def hot_questions(self):
"""获取话题下热门的问题
:return: 话题下的热门动态中的问题,按热门度顺序返回生成器
:rtype: Question.Iterable
"""
from .question import Question
hot_questions_url = Topic_Hot_Questions_Url.format(self.id)
params = {'start': 0, '_xsrf': self.xsrf}
res = self._session.get(hot_questions_url)
soup = BeautifulSoup(res.content)
while True:
questions_duplicate = soup.find_all('a', class_='question_link')
# 如果话题下无问题,则直接返回
if len(questions_duplicate) == 0:
return
# 去除重复的问题
questions = list(set(questions_duplicate))
questions.sort(key=self._get_score, reverse=True)
last_score = soup.find_all(
'div', class_='feed-item')[-1]['data-score']
for q in questions:
question_url = Zhihu_URL + q['href']
question_title = q.text.strip()
question = Question(question_url, question_title,
session=self._session)
yield question
params['offset'] = last_score
res = self._session.post(hot_questions_url, data=params)
gotten_feed_num = res.json()['msg'][0]
# 如果得到问题数量为0则返回
if gotten_feed_num == 0:
return
soup = BeautifulSoup(res.json()['msg'][1]) | def function[hot_questions, parameter[self]]:
constant[获取话题下热门的问题
:return: 话题下的热门动态中的问题,按热门度顺序返回生成器
:rtype: Question.Iterable
]
from relative_module[question] import module[Question]
variable[hot_questions_url] assign[=] call[name[Topic_Hot_Questions_Url].format, parameter[name[self].id]]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18f09f400>, <ast.Constant object at 0x7da18f09dbd0>], [<ast.Constant object at 0x7da18f09c340>, <ast.Attribute object at 0x7da18f09d5d0>]]
variable[res] assign[=] call[name[self]._session.get, parameter[name[hot_questions_url]]]
variable[soup] assign[=] call[name[BeautifulSoup], parameter[name[res].content]]
while constant[True] begin[:]
variable[questions_duplicate] assign[=] call[name[soup].find_all, parameter[constant[a]]]
if compare[call[name[len], parameter[name[questions_duplicate]]] equal[==] constant[0]] begin[:]
return[None]
variable[questions] assign[=] call[name[list], parameter[call[name[set], parameter[name[questions_duplicate]]]]]
call[name[questions].sort, parameter[]]
variable[last_score] assign[=] call[call[call[name[soup].find_all, parameter[constant[div]]]][<ast.UnaryOp object at 0x7da18f09db40>]][constant[data-score]]
for taget[name[q]] in starred[name[questions]] begin[:]
variable[question_url] assign[=] binary_operation[name[Zhihu_URL] + call[name[q]][constant[href]]]
variable[question_title] assign[=] call[name[q].text.strip, parameter[]]
variable[question] assign[=] call[name[Question], parameter[name[question_url], name[question_title]]]
<ast.Yield object at 0x7da204960640>
call[name[params]][constant[offset]] assign[=] name[last_score]
variable[res] assign[=] call[name[self]._session.post, parameter[name[hot_questions_url]]]
variable[gotten_feed_num] assign[=] call[call[call[name[res].json, parameter[]]][constant[msg]]][constant[0]]
if compare[name[gotten_feed_num] equal[==] constant[0]] begin[:]
return[None]
variable[soup] assign[=] call[name[BeautifulSoup], parameter[call[call[call[name[res].json, parameter[]]][constant[msg]]][constant[1]]]] | keyword[def] identifier[hot_questions] ( identifier[self] ):
literal[string]
keyword[from] . identifier[question] keyword[import] identifier[Question]
identifier[hot_questions_url] = identifier[Topic_Hot_Questions_Url] . identifier[format] ( identifier[self] . identifier[id] )
identifier[params] ={ literal[string] : literal[int] , literal[string] : identifier[self] . identifier[xsrf] }
identifier[res] = identifier[self] . identifier[_session] . identifier[get] ( identifier[hot_questions_url] )
identifier[soup] = identifier[BeautifulSoup] ( identifier[res] . identifier[content] )
keyword[while] keyword[True] :
identifier[questions_duplicate] = identifier[soup] . identifier[find_all] ( literal[string] , identifier[class_] = literal[string] )
keyword[if] identifier[len] ( identifier[questions_duplicate] )== literal[int] :
keyword[return]
identifier[questions] = identifier[list] ( identifier[set] ( identifier[questions_duplicate] ))
identifier[questions] . identifier[sort] ( identifier[key] = identifier[self] . identifier[_get_score] , identifier[reverse] = keyword[True] )
identifier[last_score] = identifier[soup] . identifier[find_all] (
literal[string] , identifier[class_] = literal[string] )[- literal[int] ][ literal[string] ]
keyword[for] identifier[q] keyword[in] identifier[questions] :
identifier[question_url] = identifier[Zhihu_URL] + identifier[q] [ literal[string] ]
identifier[question_title] = identifier[q] . identifier[text] . identifier[strip] ()
identifier[question] = identifier[Question] ( identifier[question_url] , identifier[question_title] ,
identifier[session] = identifier[self] . identifier[_session] )
keyword[yield] identifier[question]
identifier[params] [ literal[string] ]= identifier[last_score]
identifier[res] = identifier[self] . identifier[_session] . identifier[post] ( identifier[hot_questions_url] , identifier[data] = identifier[params] )
identifier[gotten_feed_num] = identifier[res] . identifier[json] ()[ literal[string] ][ literal[int] ]
keyword[if] identifier[gotten_feed_num] == literal[int] :
keyword[return]
identifier[soup] = identifier[BeautifulSoup] ( identifier[res] . identifier[json] ()[ literal[string] ][ literal[int] ]) | def hot_questions(self):
"""获取话题下热门的问题
:return: 话题下的热门动态中的问题,按热门度顺序返回生成器
:rtype: Question.Iterable
"""
from .question import Question
hot_questions_url = Topic_Hot_Questions_Url.format(self.id)
params = {'start': 0, '_xsrf': self.xsrf}
res = self._session.get(hot_questions_url)
soup = BeautifulSoup(res.content)
while True:
questions_duplicate = soup.find_all('a', class_='question_link')
# 如果话题下无问题,则直接返回
if len(questions_duplicate) == 0:
return # depends on [control=['if'], data=[]]
# 去除重复的问题
questions = list(set(questions_duplicate))
questions.sort(key=self._get_score, reverse=True)
last_score = soup.find_all('div', class_='feed-item')[-1]['data-score']
for q in questions:
question_url = Zhihu_URL + q['href']
question_title = q.text.strip()
question = Question(question_url, question_title, session=self._session)
yield question # depends on [control=['for'], data=['q']]
params['offset'] = last_score
res = self._session.post(hot_questions_url, data=params)
gotten_feed_num = res.json()['msg'][0]
# 如果得到问题数量为0则返回
if gotten_feed_num == 0:
return # depends on [control=['if'], data=[]]
soup = BeautifulSoup(res.json()['msg'][1]) # depends on [control=['while'], data=[]] |
def dis(msg, msg_nocr, section, errmsg, x=None, start_line=-1, end_line=None,
relative_pos = False, highlight='light', start_offset=0, end_offset=None,
include_header=False):
"""Disassemble classes, methods, functions, or code.
With no argument, disassemble the last traceback.
"""
lasti = -1
if x is None:
distb()
return None, None
if start_offset is None:
start_offset = 0
mess = ''
if start_line > 1:
mess += "from line %d " % start_line
elif start_offset > 1:
mess = "from offset %d " % start_offset
if end_line:
mess += "to line %d" % end_line
elif end_offset:
mess += "to offset %d" % end_offset
sectioned = False
# Try to dogpaddle to the code object for the type setting x
if hasattr(types, 'InstanceType') and isinstance(x, types.InstanceType):
x = x.__class__
if inspect.ismethod(x):
section("Disassembly of %s: %s" % (x, mess))
sectioned = True
x = x.im_func
elif inspect.isfunction(x) or inspect.isgeneratorfunction(x):
section("Disassembly of %s: %s" % (x, mess))
x = x.func_code
sectioned = True
elif inspect.isgenerator(x):
section("Disassembly of %s: %s" % (x, mess))
frame = x.gi_frame
lasti = frame.f_last_i
x = x.gi_code
sectioned = True
elif inspect.isframe(x):
section("Disassembly of %s: %s" % (x, mess))
sectioned = True
if hasattr(x, 'f_lasti'):
lasti = x.f_lasti
if lasti == -1: lasti = 0
pass
opc = get_opcode(PYTHON_VERSION, IS_PYPY)
x = x.f_code
if include_header:
header_lines = Bytecode(x, opc).info().split("\n")
header = '\n'.join([format_token(Mformat.Comment, h) for h in header_lines])
msg(header)
pass
elif inspect.iscode(x):
pass
if hasattr(x, '__dict__'): # Class or module
items = sorted(x.__dict__.items())
for name, x1 in items:
if isinstance(x1, _have_code):
if not sectioned:
section("Disassembly of %s: " % x)
try:
dis(msg, msg_nocr, section, errmsg, x1,
start_line=start_line, end_line=end_line,
relative_pos = relative_pos)
msg("")
except TypeError:
_, msg, _ = sys.exc_info()
errmsg("Sorry:", msg)
pass
pass
pass
pass
elif hasattr(x, 'co_code'): # Code object
if not sectioned:
section("Disassembly of %s: " % x)
return disassemble(msg, msg_nocr, section, x, lasti=lasti,
start_line=start_line, end_line=end_line,
relative_pos = relative_pos,
highlight = highlight,
start_offset = start_offset,
end_offset = end_offset)
elif isinstance(x, str): # Source code
return disassemble_string(msg, msg_nocr, x,)
else:
errmsg("Don't know how to disassemble %s objects." %
type(x).__name__)
return None, None | def function[dis, parameter[msg, msg_nocr, section, errmsg, x, start_line, end_line, relative_pos, highlight, start_offset, end_offset, include_header]]:
constant[Disassemble classes, methods, functions, or code.
With no argument, disassemble the last traceback.
]
variable[lasti] assign[=] <ast.UnaryOp object at 0x7da1b05b2950>
if compare[name[x] is constant[None]] begin[:]
call[name[distb], parameter[]]
return[tuple[[<ast.Constant object at 0x7da1b05b2710>, <ast.Constant object at 0x7da1b05b26e0>]]]
if compare[name[start_offset] is constant[None]] begin[:]
variable[start_offset] assign[=] constant[0]
variable[mess] assign[=] constant[]
if compare[name[start_line] greater[>] constant[1]] begin[:]
<ast.AugAssign object at 0x7da1b05b23e0>
if name[end_line] begin[:]
<ast.AugAssign object at 0x7da1b05b20e0>
variable[sectioned] assign[=] constant[False]
if <ast.BoolOp object at 0x7da1b05b1de0> begin[:]
variable[x] assign[=] name[x].__class__
if call[name[inspect].ismethod, parameter[name[x]]] begin[:]
call[name[section], parameter[binary_operation[constant[Disassembly of %s: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b05b1930>, <ast.Name object at 0x7da1b05b1900>]]]]]
variable[sectioned] assign[=] constant[True]
variable[x] assign[=] name[x].im_func
if call[name[hasattr], parameter[name[x], constant[__dict__]]] begin[:]
variable[items] assign[=] call[name[sorted], parameter[call[name[x].__dict__.items, parameter[]]]]
for taget[tuple[[<ast.Name object at 0x7da20c992f50>, <ast.Name object at 0x7da20c991e40>]]] in starred[name[items]] begin[:]
if call[name[isinstance], parameter[name[x1], name[_have_code]]] begin[:]
if <ast.UnaryOp object at 0x7da20c9909d0> begin[:]
call[name[section], parameter[binary_operation[constant[Disassembly of %s: ] <ast.Mod object at 0x7da2590d6920> name[x]]]]
<ast.Try object at 0x7da20c9911b0>
pass
pass
pass
return[tuple[[<ast.Constant object at 0x7da18f721660>, <ast.Constant object at 0x7da18f721930>]]] | keyword[def] identifier[dis] ( identifier[msg] , identifier[msg_nocr] , identifier[section] , identifier[errmsg] , identifier[x] = keyword[None] , identifier[start_line] =- literal[int] , identifier[end_line] = keyword[None] ,
identifier[relative_pos] = keyword[False] , identifier[highlight] = literal[string] , identifier[start_offset] = literal[int] , identifier[end_offset] = keyword[None] ,
identifier[include_header] = keyword[False] ):
literal[string]
identifier[lasti] =- literal[int]
keyword[if] identifier[x] keyword[is] keyword[None] :
identifier[distb] ()
keyword[return] keyword[None] , keyword[None]
keyword[if] identifier[start_offset] keyword[is] keyword[None] :
identifier[start_offset] = literal[int]
identifier[mess] = literal[string]
keyword[if] identifier[start_line] > literal[int] :
identifier[mess] += literal[string] % identifier[start_line]
keyword[elif] identifier[start_offset] > literal[int] :
identifier[mess] = literal[string] % identifier[start_offset]
keyword[if] identifier[end_line] :
identifier[mess] += literal[string] % identifier[end_line]
keyword[elif] identifier[end_offset] :
identifier[mess] += literal[string] % identifier[end_offset]
identifier[sectioned] = keyword[False]
keyword[if] identifier[hasattr] ( identifier[types] , literal[string] ) keyword[and] identifier[isinstance] ( identifier[x] , identifier[types] . identifier[InstanceType] ):
identifier[x] = identifier[x] . identifier[__class__]
keyword[if] identifier[inspect] . identifier[ismethod] ( identifier[x] ):
identifier[section] ( literal[string] %( identifier[x] , identifier[mess] ))
identifier[sectioned] = keyword[True]
identifier[x] = identifier[x] . identifier[im_func]
keyword[elif] identifier[inspect] . identifier[isfunction] ( identifier[x] ) keyword[or] identifier[inspect] . identifier[isgeneratorfunction] ( identifier[x] ):
identifier[section] ( literal[string] %( identifier[x] , identifier[mess] ))
identifier[x] = identifier[x] . identifier[func_code]
identifier[sectioned] = keyword[True]
keyword[elif] identifier[inspect] . identifier[isgenerator] ( identifier[x] ):
identifier[section] ( literal[string] %( identifier[x] , identifier[mess] ))
identifier[frame] = identifier[x] . identifier[gi_frame]
identifier[lasti] = identifier[frame] . identifier[f_last_i]
identifier[x] = identifier[x] . identifier[gi_code]
identifier[sectioned] = keyword[True]
keyword[elif] identifier[inspect] . identifier[isframe] ( identifier[x] ):
identifier[section] ( literal[string] %( identifier[x] , identifier[mess] ))
identifier[sectioned] = keyword[True]
keyword[if] identifier[hasattr] ( identifier[x] , literal[string] ):
identifier[lasti] = identifier[x] . identifier[f_lasti]
keyword[if] identifier[lasti] ==- literal[int] : identifier[lasti] = literal[int]
keyword[pass]
identifier[opc] = identifier[get_opcode] ( identifier[PYTHON_VERSION] , identifier[IS_PYPY] )
identifier[x] = identifier[x] . identifier[f_code]
keyword[if] identifier[include_header] :
identifier[header_lines] = identifier[Bytecode] ( identifier[x] , identifier[opc] ). identifier[info] (). identifier[split] ( literal[string] )
identifier[header] = literal[string] . identifier[join] ([ identifier[format_token] ( identifier[Mformat] . identifier[Comment] , identifier[h] ) keyword[for] identifier[h] keyword[in] identifier[header_lines] ])
identifier[msg] ( identifier[header] )
keyword[pass]
keyword[elif] identifier[inspect] . identifier[iscode] ( identifier[x] ):
keyword[pass]
keyword[if] identifier[hasattr] ( identifier[x] , literal[string] ):
identifier[items] = identifier[sorted] ( identifier[x] . identifier[__dict__] . identifier[items] ())
keyword[for] identifier[name] , identifier[x1] keyword[in] identifier[items] :
keyword[if] identifier[isinstance] ( identifier[x1] , identifier[_have_code] ):
keyword[if] keyword[not] identifier[sectioned] :
identifier[section] ( literal[string] % identifier[x] )
keyword[try] :
identifier[dis] ( identifier[msg] , identifier[msg_nocr] , identifier[section] , identifier[errmsg] , identifier[x1] ,
identifier[start_line] = identifier[start_line] , identifier[end_line] = identifier[end_line] ,
identifier[relative_pos] = identifier[relative_pos] )
identifier[msg] ( literal[string] )
keyword[except] identifier[TypeError] :
identifier[_] , identifier[msg] , identifier[_] = identifier[sys] . identifier[exc_info] ()
identifier[errmsg] ( literal[string] , identifier[msg] )
keyword[pass]
keyword[pass]
keyword[pass]
keyword[pass]
keyword[elif] identifier[hasattr] ( identifier[x] , literal[string] ):
keyword[if] keyword[not] identifier[sectioned] :
identifier[section] ( literal[string] % identifier[x] )
keyword[return] identifier[disassemble] ( identifier[msg] , identifier[msg_nocr] , identifier[section] , identifier[x] , identifier[lasti] = identifier[lasti] ,
identifier[start_line] = identifier[start_line] , identifier[end_line] = identifier[end_line] ,
identifier[relative_pos] = identifier[relative_pos] ,
identifier[highlight] = identifier[highlight] ,
identifier[start_offset] = identifier[start_offset] ,
identifier[end_offset] = identifier[end_offset] )
keyword[elif] identifier[isinstance] ( identifier[x] , identifier[str] ):
keyword[return] identifier[disassemble_string] ( identifier[msg] , identifier[msg_nocr] , identifier[x] ,)
keyword[else] :
identifier[errmsg] ( literal[string] %
identifier[type] ( identifier[x] ). identifier[__name__] )
keyword[return] keyword[None] , keyword[None] | def dis(msg, msg_nocr, section, errmsg, x=None, start_line=-1, end_line=None, relative_pos=False, highlight='light', start_offset=0, end_offset=None, include_header=False):
"""Disassemble classes, methods, functions, or code.
With no argument, disassemble the last traceback.
"""
lasti = -1
if x is None:
distb()
return (None, None) # depends on [control=['if'], data=[]]
if start_offset is None:
start_offset = 0 # depends on [control=['if'], data=['start_offset']]
mess = ''
if start_line > 1:
mess += 'from line %d ' % start_line # depends on [control=['if'], data=['start_line']]
elif start_offset > 1:
mess = 'from offset %d ' % start_offset # depends on [control=['if'], data=['start_offset']]
if end_line:
mess += 'to line %d' % end_line # depends on [control=['if'], data=[]]
elif end_offset:
mess += 'to offset %d' % end_offset # depends on [control=['if'], data=[]]
sectioned = False
# Try to dogpaddle to the code object for the type setting x
if hasattr(types, 'InstanceType') and isinstance(x, types.InstanceType):
x = x.__class__ # depends on [control=['if'], data=[]]
if inspect.ismethod(x):
section('Disassembly of %s: %s' % (x, mess))
sectioned = True
x = x.im_func # depends on [control=['if'], data=[]]
elif inspect.isfunction(x) or inspect.isgeneratorfunction(x):
section('Disassembly of %s: %s' % (x, mess))
x = x.func_code
sectioned = True # depends on [control=['if'], data=[]]
elif inspect.isgenerator(x):
section('Disassembly of %s: %s' % (x, mess))
frame = x.gi_frame
lasti = frame.f_last_i
x = x.gi_code
sectioned = True # depends on [control=['if'], data=[]]
elif inspect.isframe(x):
section('Disassembly of %s: %s' % (x, mess))
sectioned = True
if hasattr(x, 'f_lasti'):
lasti = x.f_lasti
if lasti == -1:
lasti = 0 # depends on [control=['if'], data=['lasti']]
pass # depends on [control=['if'], data=[]]
opc = get_opcode(PYTHON_VERSION, IS_PYPY)
x = x.f_code
if include_header:
header_lines = Bytecode(x, opc).info().split('\n')
header = '\n'.join([format_token(Mformat.Comment, h) for h in header_lines])
msg(header) # depends on [control=['if'], data=[]]
pass # depends on [control=['if'], data=[]]
elif inspect.iscode(x):
pass # depends on [control=['if'], data=[]]
if hasattr(x, '__dict__'): # Class or module
items = sorted(x.__dict__.items())
for (name, x1) in items:
if isinstance(x1, _have_code):
if not sectioned:
section('Disassembly of %s: ' % x) # depends on [control=['if'], data=[]]
try:
dis(msg, msg_nocr, section, errmsg, x1, start_line=start_line, end_line=end_line, relative_pos=relative_pos)
msg('') # depends on [control=['try'], data=[]]
except TypeError:
(_, msg, _) = sys.exc_info()
errmsg('Sorry:', msg)
pass # depends on [control=['except'], data=[]]
pass # depends on [control=['if'], data=[]]
pass # depends on [control=['for'], data=[]]
pass # depends on [control=['if'], data=[]]
elif hasattr(x, 'co_code'): # Code object
if not sectioned:
section('Disassembly of %s: ' % x) # depends on [control=['if'], data=[]]
return disassemble(msg, msg_nocr, section, x, lasti=lasti, start_line=start_line, end_line=end_line, relative_pos=relative_pos, highlight=highlight, start_offset=start_offset, end_offset=end_offset) # depends on [control=['if'], data=[]]
elif isinstance(x, str): # Source code
return disassemble_string(msg, msg_nocr, x) # depends on [control=['if'], data=[]]
else:
errmsg("Don't know how to disassemble %s objects." % type(x).__name__)
return (None, None) |
def discover_thread(callback,
                    timeout=5,
                    include_invisible=False,
                    interface_addr=None):
    """Launch the discovery worker in its own thread and return it.

    The returned ``StoppableThread`` runs ``_discover_thread`` with the
    supplied arguments; the caller owns the thread handle (and,
    presumably, may stop it through the ``StoppableThread`` API —
    confirm against that class).
    """
    worker = StoppableThread(
        target=_discover_thread,
        args=(callback, timeout, include_invisible, interface_addr),
    )
    worker.start()
    return worker
constant[ Return a started thread with a discovery callback. ]
variable[thread] assign[=] call[name[StoppableThread], parameter[]]
call[name[thread].start, parameter[]]
return[name[thread]] | keyword[def] identifier[discover_thread] ( identifier[callback] ,
identifier[timeout] = literal[int] ,
identifier[include_invisible] = keyword[False] ,
identifier[interface_addr] = keyword[None] ):
literal[string]
identifier[thread] = identifier[StoppableThread] (
identifier[target] = identifier[_discover_thread] ,
identifier[args] =( identifier[callback] , identifier[timeout] , identifier[include_invisible] , identifier[interface_addr] ))
identifier[thread] . identifier[start] ()
keyword[return] identifier[thread] | def discover_thread(callback, timeout=5, include_invisible=False, interface_addr=None):
""" Return a started thread with a discovery callback. """
thread = StoppableThread(target=_discover_thread, args=(callback, timeout, include_invisible, interface_addr))
thread.start()
return thread |
def get_changes_between_builds(self, project, from_build_id=None, to_build_id=None, top=None):
"""GetChangesBetweenBuilds.
[Preview API] Gets the changes made to the repository between two given builds.
:param str project: Project ID or project name
:param int from_build_id: The ID of the first build.
:param int to_build_id: The ID of the last build.
:param int top: The maximum number of changes to return.
:rtype: [Change]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if from_build_id is not None:
query_parameters['fromBuildId'] = self._serialize.query('from_build_id', from_build_id, 'int')
if to_build_id is not None:
query_parameters['toBuildId'] = self._serialize.query('to_build_id', to_build_id, 'int')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
response = self._send(http_method='GET',
location_id='f10f0ea5-18a1-43ec-a8fb-2042c7be9b43',
version='5.0-preview.2',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[Change]', self._unwrap_collection(response)) | def function[get_changes_between_builds, parameter[self, project, from_build_id, to_build_id, top]]:
constant[GetChangesBetweenBuilds.
[Preview API] Gets the changes made to the repository between two given builds.
:param str project: Project ID or project name
:param int from_build_id: The ID of the first build.
:param int to_build_id: The ID of the last build.
:param int top: The maximum number of changes to return.
:rtype: [Change]
]
variable[route_values] assign[=] dictionary[[], []]
if compare[name[project] is_not constant[None]] begin[:]
call[name[route_values]][constant[project]] assign[=] call[name[self]._serialize.url, parameter[constant[project], name[project], constant[str]]]
variable[query_parameters] assign[=] dictionary[[], []]
if compare[name[from_build_id] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[fromBuildId]] assign[=] call[name[self]._serialize.query, parameter[constant[from_build_id], name[from_build_id], constant[int]]]
if compare[name[to_build_id] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[toBuildId]] assign[=] call[name[self]._serialize.query, parameter[constant[to_build_id], name[to_build_id], constant[int]]]
if compare[name[top] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[$top]] assign[=] call[name[self]._serialize.query, parameter[constant[top], name[top], constant[int]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
return[call[name[self]._deserialize, parameter[constant[[Change]], call[name[self]._unwrap_collection, parameter[name[response]]]]]] | keyword[def] identifier[get_changes_between_builds] ( identifier[self] , identifier[project] , identifier[from_build_id] = keyword[None] , identifier[to_build_id] = keyword[None] , identifier[top] = keyword[None] ):
literal[string]
identifier[route_values] ={}
keyword[if] identifier[project] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[project] , literal[string] )
identifier[query_parameters] ={}
keyword[if] identifier[from_build_id] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[from_build_id] , literal[string] )
keyword[if] identifier[to_build_id] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[to_build_id] , literal[string] )
keyword[if] identifier[top] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[top] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[route_values] = identifier[route_values] ,
identifier[query_parameters] = identifier[query_parameters] )
keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[self] . identifier[_unwrap_collection] ( identifier[response] )) | def get_changes_between_builds(self, project, from_build_id=None, to_build_id=None, top=None):
"""GetChangesBetweenBuilds.
[Preview API] Gets the changes made to the repository between two given builds.
:param str project: Project ID or project name
:param int from_build_id: The ID of the first build.
:param int to_build_id: The ID of the last build.
:param int top: The maximum number of changes to return.
:rtype: [Change]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str') # depends on [control=['if'], data=['project']]
query_parameters = {}
if from_build_id is not None:
query_parameters['fromBuildId'] = self._serialize.query('from_build_id', from_build_id, 'int') # depends on [control=['if'], data=['from_build_id']]
if to_build_id is not None:
query_parameters['toBuildId'] = self._serialize.query('to_build_id', to_build_id, 'int') # depends on [control=['if'], data=['to_build_id']]
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int') # depends on [control=['if'], data=['top']]
response = self._send(http_method='GET', location_id='f10f0ea5-18a1-43ec-a8fb-2042c7be9b43', version='5.0-preview.2', route_values=route_values, query_parameters=query_parameters)
return self._deserialize('[Change]', self._unwrap_collection(response)) |
def singleton(the_class):
    """
    Decorator for a class to make a singleton out of it.

    Instances are cached per constructor-argument combination: calls with
    the same positional and keyword arguments return the same object.
    Keyword arguments are canonicalised by sorting their items, so
    ``C(a=1, b=2)`` and ``C(b=2, a=1)`` share one instance (the previous
    ``str(kwargs)`` key was sensitive to keyword order).  Positional
    arguments must be hashable, as before.

    @type the_class: class
    @param the_class: the class that should work as a singleton
    @rtype: decorator
    @return: decorator
    """
    import functools

    class_instances = {}

    @functools.wraps(the_class)
    def get_instance(*args, **kwargs):
        """
        Creating or just return the one and only class instance.
        The singleton depends on the parameters used in __init__
        @type args: list
        @param args: positional arguments of the constructor.
        @type kwargs: dict
        @param kwargs: named parameters of the constructor.
        @rtype: decorated class type
        @return: singleton instance of decorated class.
        """
        # Sorting the items makes the key independent of keyword order;
        # str() keeps unhashable keyword values usable as part of the key.
        key = (args, str(sorted(kwargs.items())))
        if key not in class_instances:
            class_instances[key] = the_class(*args, **kwargs)
        return class_instances[key]

    return get_instance
constant[
Decorator for a class to make a singleton out of it.
@type the_class: class
@param the_class: the class that should work as a singleton
@rtype: decorator
@return: decorator
]
variable[class_instances] assign[=] dictionary[[], []]
def function[get_instance, parameter[]]:
constant[
Creating or just return the one and only class instance.
The singleton depends on the parameters used in __init__
@type args: list
@param args: positional arguments of the constructor.
@type kwargs: dict
@param kwargs: named parameters of the constructor.
@rtype: decorated class type
@return: singleton instance of decorated class.
]
variable[key] assign[=] tuple[[<ast.Name object at 0x7da20eb29cc0>, <ast.Name object at 0x7da18dc990c0>, <ast.Call object at 0x7da18dc98cd0>]]
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[class_instances]] begin[:]
call[name[class_instances]][name[key]] assign[=] call[name[the_class], parameter[<ast.Starred object at 0x7da18dc98af0>]]
return[call[name[class_instances]][name[key]]]
return[name[get_instance]] | keyword[def] identifier[singleton] ( identifier[the_class] ):
literal[string]
identifier[class_instances] ={}
keyword[def] identifier[get_instance] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[key] =( identifier[the_class] , identifier[args] , identifier[str] ( identifier[kwargs] ))
keyword[if] identifier[key] keyword[not] keyword[in] identifier[class_instances] :
identifier[class_instances] [ identifier[key] ]= identifier[the_class] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[class_instances] [ identifier[key] ]
keyword[return] identifier[get_instance] | def singleton(the_class):
"""
Decorator for a class to make a singleton out of it.
@type the_class: class
@param the_class: the class that should work as a singleton
@rtype: decorator
@return: decorator
"""
class_instances = {}
def get_instance(*args, **kwargs):
"""
Creating or just return the one and only class instance.
The singleton depends on the parameters used in __init__
@type args: list
@param args: positional arguments of the constructor.
@type kwargs: dict
@param kwargs: named parameters of the constructor.
@rtype: decorated class type
@return: singleton instance of decorated class.
"""
key = (the_class, args, str(kwargs))
if key not in class_instances:
class_instances[key] = the_class(*args, **kwargs) # depends on [control=['if'], data=['key', 'class_instances']]
return class_instances[key]
return get_instance |
async def create_payment_address(wallet_handle: int,
                                 payment_method: str,
                                 config: str) -> str:
    """
    Create a payment address for the specified payment method.

    Generates the private part of a payment address and stores it in a
    secure place (ideally as a secret in the libindy wallet — see the
    crypto module).  The payment method must be able to resolve this
    secret from the fully resolvable payment address format.

    :param wallet_handle: wallet handle (created by open_wallet).
    :param payment_method: Payment method to use (for example, 'sov').
    :param config: payment address config as json:
        {
            seed: <str>, // allows deterministic creation of payment address
        }
    :return: payment_address: public identifier of payment address in fully
        resolvable payment address format.
    """
    logger = logging.getLogger(__name__)
    logger.debug("create_payment_address: >>> wallet_handle: %r, payment_method: %r, config: %r",
                 wallet_handle,
                 payment_method,
                 config)

    # The ctypes callback is created lazily once and cached on the function
    # object so every call reuses the same C-compatible completion handler.
    if not hasattr(create_payment_address, "cb"):
        logger.debug("create_payment_address: Creating callback")
        create_payment_address.cb = create_cb(CFUNCTYPE(None, c_int32, c_int32, c_char_p))

    request_result = await do_call('indy_create_payment_address',
                                   c_int32(wallet_handle),
                                   c_char_p(payment_method.encode('utf-8')),
                                   c_char_p(config.encode('utf-8')),
                                   create_payment_address.cb)

    res = request_result.decode()
    logger.debug("create_payment_address: <<< res: %r", res)
    return res
identifier[payment_method] : identifier[str] ,
identifier[config] : identifier[str] )-> identifier[str] :
literal[string]
identifier[logger] = identifier[logging] . identifier[getLogger] ( identifier[__name__] )
identifier[logger] . identifier[debug] ( literal[string] ,
identifier[wallet_handle] ,
identifier[payment_method] ,
identifier[config] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[create_payment_address] , literal[string] ):
identifier[logger] . identifier[debug] ( literal[string] )
identifier[create_payment_address] . identifier[cb] = identifier[create_cb] ( identifier[CFUNCTYPE] ( keyword[None] , identifier[c_int32] , identifier[c_int32] , identifier[c_char_p] ))
identifier[c_wallet_handle] = identifier[c_int32] ( identifier[wallet_handle] )
identifier[c_payment_method] = identifier[c_char_p] ( identifier[payment_method] . identifier[encode] ( literal[string] ))
identifier[config] = identifier[c_char_p] ( identifier[config] . identifier[encode] ( literal[string] ))
identifier[request_result] = keyword[await] identifier[do_call] ( literal[string] ,
identifier[c_wallet_handle] ,
identifier[c_payment_method] ,
identifier[config] ,
identifier[create_payment_address] . identifier[cb] )
identifier[res] = identifier[request_result] . identifier[decode] ()
identifier[logger] . identifier[debug] ( literal[string] , identifier[res] )
keyword[return] identifier[res] | async def create_payment_address(wallet_handle: int, payment_method: str, config: str) -> str:
"""
Create the payment address for specified payment method
This method generates private part of payment address
and stores it in a secure place. Ideally it should be
secret in libindy wallet (see crypto module).
Note that payment method should be able to resolve this
secret by fully resolvable payment address format.
:param wallet_handle: wallet handle (created by open_wallet).
:param payment_method: Payment method to use (for example, 'sov').
:param config: payment address config as json:
{
seed: <str>, // allows deterministic creation of payment address
}
:return: payment_address: public identifier of payment address in fully resolvable payment address format.
"""
logger = logging.getLogger(__name__)
logger.debug('create_payment_address: >>> wallet_handle: %r, payment_method: %r, config: %r', wallet_handle, payment_method, config)
if not hasattr(create_payment_address, 'cb'):
logger.debug('create_payment_address: Creating callback')
create_payment_address.cb = create_cb(CFUNCTYPE(None, c_int32, c_int32, c_char_p)) # depends on [control=['if'], data=[]]
c_wallet_handle = c_int32(wallet_handle)
c_payment_method = c_char_p(payment_method.encode('utf-8'))
config = c_char_p(config.encode('utf-8'))
request_result = await do_call('indy_create_payment_address', c_wallet_handle, c_payment_method, config, create_payment_address.cb)
res = request_result.decode()
logger.debug('create_payment_address: <<< res: %r', res)
return res |
def rlstsq(coef_mat, ordinate, order=1, alpha=-1, cross=False):
    """
    Least Squares Minimization using Tikhonov regularization.

    Includes method for robust generalized cross-validation.

    Args:
        coef_mat (numpy.ndarray):
            Coefficient matrix with shape ``(M, N)``.
        ordinate (numpy.ndarray):
            Ordinate or "dependent variable" values with shape ``(M,)`` or
            ``(M, K)``. If ``ordinate`` is two-dimensional, the least-squares
            solution is calculated for each of the ``K`` columns of
            ``ordinate``.
        order (int, numpy.ndarray, None):
            If 0, 1 or 2, the order of the Tikhonov regularization
            (identity, first- or second-order finite differences).  If
            `numpy.ndarray`, it is used directly as regularization matrix.
            If None, no regularization is applied.
        alpha (float):
            Dampening parameter.  If negative (and ``order`` is not None),
            the value is chosen by robust generalised cross-validation.
        cross (bool):
            Use leave-one-out cross validation: solve with each row removed
            and return the element-wise median of the solutions.

    Returns:
        (numpy.ndarray):
            Solution with shape ``(N,)`` or ``(N, K)``.

    Raises:
        ValueError:
            If an explicit regularization matrix does not have ``N``
            columns (scalar and single-element regularizers are allowed).
    """
    coef_mat = numpy.asarray(coef_mat)
    ordinate = numpy.asarray(ordinate)
    dim1, dim2 = coef_mat.shape

    if cross:
        # Leave-one-out: recurse with each observation removed, then take
        # the element-wise median of the per-fold solutions for robustness.
        out = numpy.empty((dim1, dim2) + ordinate.shape[1:])
        for idx in range(dim1):
            out[idx] = rlstsq(
                numpy.delete(coef_mat, idx, axis=0),
                numpy.delete(ordinate, idx, axis=0),
                order, alpha, False,
            )
        return numpy.median(out, 0)

    # Build the regularization matrix.  Non-scalar `order` is handled first:
    # the previous `order == 0` dispatch raised "truth value is ambiguous"
    # for multi-element arrays, contradicting the documented interface.
    if order is None:
        tikhmat = numpy.zeros(1)
    elif not numpy.isscalar(order):
        tikhmat = numpy.asarray(order)
    elif order == 0:
        tikhmat = numpy.eye(dim2)
    elif order == 1:
        # First-order finite differences.
        tikhmat = numpy.zeros((dim2-1, dim2))
        tikhmat[:, :-1] -= numpy.eye(dim2-1)
        tikhmat[:, 1:] += numpy.eye(dim2-1)
    elif order == 2:
        # Second-order finite differences.
        tikhmat = numpy.zeros((dim2-2, dim2))
        tikhmat[:, :-2] += numpy.eye(dim2-2)
        tikhmat[:, 1:-1] -= 2*numpy.eye(dim2-2)
        tikhmat[:, 2:] += numpy.eye(dim2-2)
    else:
        # Any other scalar is used directly as a regularization weight.
        tikhmat = numpy.asarray(order)

    # Explicit check instead of `assert` (stripped under -O).  The shape
    # exemptions are tested first so a 0-d regularizer no longer trips an
    # IndexError on `shape[-1]`.
    if tikhmat.shape not in ((), (1,)) and tikhmat.shape[-1] != dim2:
        raise ValueError(
            "regularization matrix shape %s does not match coefficient "
            "matrix with %d columns" % (tikhmat.shape, dim2))

    if alpha < 0 and order is not None:
        # Robust generalized cross-validation (RGCV): score each candidate
        # alpha and keep the minimizer; gamma blends in a robustness term.
        gamma = 0.1

        def rgcv_error(alpha_):
            """Calculate the RGCV score for one candidate dampening value."""
            if alpha_ <= 0:
                return numpy.inf
            reg_normal = numpy.dot(coef_mat.T, coef_mat) + alpha_*(
                numpy.dot(tikhmat.T, tikhmat))
            try:
                solver = numpy.dot(linalg.inv(reg_normal), coef_mat.T)
            except linalg.LinAlgError:
                return numpy.inf
            abscissas = numpy.dot(solver, ordinate)
            res2 = numpy.sum((numpy.dot(coef_mat, abscissas)-ordinate)**2)
            hat_mat = numpy.dot(coef_mat, solver)
            skew = dim1*res2/numpy.trace(numpy.eye(dim1)-hat_mat)**2
            mu2 = numpy.sum(hat_mat*hat_mat.T)/dim1
            return (gamma + (1-gamma)*mu2)*skew

        alphas = 10.**-numpy.arange(0, 16)
        evals = numpy.array([rgcv_error(alpha_) for alpha_ in alphas])
        alpha = alphas[numpy.argmin(evals)]

    out = linalg.inv(
        numpy.dot(coef_mat.T, coef_mat) + alpha*numpy.dot(tikhmat.T, tikhmat))
    out = numpy.dot(out, numpy.dot(coef_mat.T, ordinate))
    return out
constant[
Least Squares Minimization using Tikhonov regularization.
Includes method for robust generalized cross-validation.
Args:
coef_mat (numpy.ndarray):
Coefficient matrix with shape ``(M, N)``.
ordinate (numpy.ndarray):
Ordinate or "dependent variable" values with shape ``(M,)`` or
``(M, K)``. If ``ordinate`` is two-dimensional, the least-squares
solution is calculated for each of the ``K`` columns of
``ordinate``.
order (int, numpy.ndarray):
If int, it is the order of Tikhonov regularization. If
`numpy.ndarray`, it will be used as regularization matrix.
alpha (float):
Lower threshold for the dampening parameter. The real value is
calculated using generalised cross validation.
cross (bool):
Use cross validation to estimate alpha value.
]
variable[coef_mat] assign[=] call[name[numpy].array, parameter[name[coef_mat]]]
variable[ordinate] assign[=] call[name[numpy].array, parameter[name[ordinate]]]
<ast.Tuple object at 0x7da18dc05a20> assign[=] name[coef_mat].shape
if name[cross] begin[:]
variable[out] assign[=] call[name[numpy].empty, parameter[binary_operation[tuple[[<ast.Name object at 0x7da18dc07070>, <ast.Name object at 0x7da18dc05e70>]] + call[name[ordinate].shape][<ast.Slice object at 0x7da18dc064d0>]]]]
variable[coef_mat_] assign[=] call[name[numpy].empty, parameter[tuple[[<ast.BinOp object at 0x7da18dc04af0>, <ast.Name object at 0x7da18dc07010>]]]]
variable[ordinate_] assign[=] call[name[numpy].empty, parameter[binary_operation[tuple[[<ast.BinOp object at 0x7da18dc04910>]] + call[name[ordinate].shape][<ast.Slice object at 0x7da18dc05870>]]]]
for taget[name[i]] in starred[call[name[range], parameter[name[dim1]]]] begin[:]
call[name[coef_mat_]][<ast.Slice object at 0x7da18dc05d50>] assign[=] call[name[coef_mat]][<ast.Slice object at 0x7da18dc04e50>]
call[name[coef_mat_]][<ast.Slice object at 0x7da18dc07340>] assign[=] call[name[coef_mat]][<ast.Slice object at 0x7da18dc07460>]
call[name[ordinate_]][<ast.Slice object at 0x7da18dc07be0>] assign[=] call[name[ordinate]][<ast.Slice object at 0x7da18dc063e0>]
call[name[ordinate_]][<ast.Slice object at 0x7da18dc07eb0>] assign[=] call[name[ordinate]][<ast.Slice object at 0x7da18dc07d90>]
call[name[out]][name[i]] assign[=] call[name[rlstsq], parameter[name[coef_mat_], name[ordinate_], name[order], name[alpha], constant[False]]]
return[call[name[numpy].median, parameter[name[out], constant[0]]]]
if compare[name[order] equal[==] constant[0]] begin[:]
variable[tikhmat] assign[=] call[name[numpy].eye, parameter[name[dim2]]]
if <ast.BoolOp object at 0x7da18dc04c10> begin[:]
variable[gamma] assign[=] constant[0.1]
def function[rgcv_error, parameter[alpha]]:
constant[Calculate Tikhonov dampening parameter.]
if compare[name[alpha] less_or_equal[<=] constant[0]] begin[:]
return[name[numpy].inf]
variable[coef_mat_] assign[=] binary_operation[call[name[numpy].dot, parameter[name[coef_mat].T, name[coef_mat]]] + binary_operation[name[alpha] * call[name[numpy].dot, parameter[name[tikhmat].T, name[tikhmat]]]]]
<ast.Try object at 0x7da20c992bf0>
variable[abscissas] assign[=] call[name[numpy].dot, parameter[name[coef_mat_], name[ordinate]]]
variable[res2] assign[=] call[name[numpy].sum, parameter[binary_operation[binary_operation[call[name[numpy].dot, parameter[name[coef_mat], name[abscissas]]] - name[ordinate]] ** constant[2]]]]
variable[coef_mat_2] assign[=] call[name[numpy].dot, parameter[name[coef_mat], name[coef_mat_]]]
variable[skew] assign[=] binary_operation[binary_operation[name[dim1] * name[res2]] / binary_operation[call[name[numpy].trace, parameter[binary_operation[call[name[numpy].eye, parameter[name[dim1]]] - name[coef_mat_2]]]] ** constant[2]]]
variable[mu2] assign[=] binary_operation[call[name[numpy].sum, parameter[binary_operation[name[coef_mat_2] * name[coef_mat_2].T]]] / name[dim1]]
return[binary_operation[binary_operation[name[gamma] + binary_operation[binary_operation[constant[1] - name[gamma]] * name[mu2]]] * name[skew]]]
variable[alphas] assign[=] binary_operation[constant[10.0] ** <ast.UnaryOp object at 0x7da20c991630>]
variable[evals] assign[=] call[name[numpy].array, parameter[<ast.ListComp object at 0x7da20c9916c0>]]
variable[alpha] assign[=] call[name[alphas]][call[name[numpy].argmin, parameter[name[evals]]]]
variable[out] assign[=] call[name[linalg].inv, parameter[binary_operation[call[name[numpy].dot, parameter[name[coef_mat].T, name[coef_mat]]] + binary_operation[name[alpha] * call[name[numpy].dot, parameter[name[tikhmat].T, name[tikhmat]]]]]]]
variable[out] assign[=] call[name[numpy].dot, parameter[name[out], call[name[numpy].dot, parameter[name[coef_mat].T, name[ordinate]]]]]
return[name[out]] | keyword[def] identifier[rlstsq] ( identifier[coef_mat] , identifier[ordinate] , identifier[order] = literal[int] , identifier[alpha] =- literal[int] , identifier[cross] = keyword[False] ):
literal[string]
identifier[coef_mat] = identifier[numpy] . identifier[array] ( identifier[coef_mat] )
identifier[ordinate] = identifier[numpy] . identifier[array] ( identifier[ordinate] )
identifier[dim1] , identifier[dim2] = identifier[coef_mat] . identifier[shape]
keyword[if] identifier[cross] :
identifier[out] = identifier[numpy] . identifier[empty] (( identifier[dim1] , identifier[dim2] )+ identifier[ordinate] . identifier[shape] [ literal[int] :])
identifier[coef_mat_] = identifier[numpy] . identifier[empty] (( identifier[dim1] - literal[int] , identifier[dim2] ))
identifier[ordinate_] = identifier[numpy] . identifier[empty] (( identifier[dim1] - literal[int] ,)+ identifier[ordinate] . identifier[shape] [ literal[int] :])
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[dim1] ):
identifier[coef_mat_] [: identifier[i] ]= identifier[coef_mat] [: identifier[i] ]
identifier[coef_mat_] [ identifier[i] :]= identifier[coef_mat] [ identifier[i] + literal[int] :]
identifier[ordinate_] [: identifier[i] ]= identifier[ordinate] [: identifier[i] ]
identifier[ordinate_] [ identifier[i] :]= identifier[ordinate] [ identifier[i] + literal[int] :]
identifier[out] [ identifier[i] ]= identifier[rlstsq] ( identifier[coef_mat_] , identifier[ordinate_] , identifier[order] , identifier[alpha] , keyword[False] )
keyword[return] identifier[numpy] . identifier[median] ( identifier[out] , literal[int] )
keyword[if] identifier[order] == literal[int] :
identifier[tikhmat] = identifier[numpy] . identifier[eye] ( identifier[dim2] )
keyword[elif] identifier[order] == literal[int] :
identifier[tikhmat] = identifier[numpy] . identifier[zeros] (( identifier[dim2] - literal[int] , identifier[dim2] ))
identifier[tikhmat] [:,:- literal[int] ]-= identifier[numpy] . identifier[eye] ( identifier[dim2] - literal[int] )
identifier[tikhmat] [:, literal[int] :]+= identifier[numpy] . identifier[eye] ( identifier[dim2] - literal[int] )
keyword[elif] identifier[order] == literal[int] :
identifier[tikhmat] = identifier[numpy] . identifier[zeros] (( identifier[dim2] - literal[int] , identifier[dim2] ))
identifier[tikhmat] [:,:- literal[int] ]+= identifier[numpy] . identifier[eye] ( identifier[dim2] - literal[int] )
identifier[tikhmat] [:, literal[int] :- literal[int] ]-= literal[int] * identifier[numpy] . identifier[eye] ( identifier[dim2] - literal[int] )
identifier[tikhmat] [:, literal[int] :]+= identifier[numpy] . identifier[eye] ( identifier[dim2] - literal[int] )
keyword[elif] identifier[order] keyword[is] keyword[None] :
identifier[tikhmat] = identifier[numpy] . identifier[zeros] ( literal[int] )
keyword[else] :
identifier[tikhmat] = identifier[numpy] . identifier[array] ( identifier[order] )
keyword[assert] identifier[tikhmat] . identifier[shape] [- literal[int] ]== identifier[dim2] keyword[or] identifier[tikhmat] . identifier[shape] keyword[in] ((),( literal[int] ,))
keyword[if] identifier[alpha] < literal[int] keyword[and] identifier[order] keyword[is] keyword[not] keyword[None] :
identifier[gamma] = literal[int]
keyword[def] identifier[rgcv_error] ( identifier[alpha] ):
literal[string]
keyword[if] identifier[alpha] <= literal[int] :
keyword[return] identifier[numpy] . identifier[inf]
identifier[coef_mat_] = identifier[numpy] . identifier[dot] (
identifier[coef_mat] . identifier[T] , identifier[coef_mat] )+ identifier[alpha] *( identifier[numpy] . identifier[dot] ( identifier[tikhmat] . identifier[T] , identifier[tikhmat] ))
keyword[try] :
identifier[coef_mat_] = identifier[numpy] . identifier[dot] ( identifier[linalg] . identifier[inv] ( identifier[coef_mat_] ), identifier[coef_mat] . identifier[T] )
keyword[except] identifier[linalg] . identifier[LinAlgError] :
keyword[return] identifier[numpy] . identifier[inf]
identifier[abscissas] = identifier[numpy] . identifier[dot] ( identifier[coef_mat_] , identifier[ordinate] )
identifier[res2] = identifier[numpy] . identifier[sum] (( identifier[numpy] . identifier[dot] ( identifier[coef_mat] , identifier[abscissas] )- identifier[ordinate] )** literal[int] )
identifier[coef_mat_2] = identifier[numpy] . identifier[dot] ( identifier[coef_mat] , identifier[coef_mat_] )
identifier[skew] = identifier[dim1] * identifier[res2] / identifier[numpy] . identifier[trace] ( identifier[numpy] . identifier[eye] ( identifier[dim1] )- identifier[coef_mat_2] )** literal[int]
identifier[mu2] = identifier[numpy] . identifier[sum] ( identifier[coef_mat_2] * identifier[coef_mat_2] . identifier[T] )/ identifier[dim1]
keyword[return] ( identifier[gamma] +( literal[int] - identifier[gamma] )* identifier[mu2] )* identifier[skew]
identifier[alphas] = literal[int] **- identifier[numpy] . identifier[arange] ( literal[int] , literal[int] )
identifier[evals] = identifier[numpy] . identifier[array] ([ identifier[rgcv_error] ( identifier[alpha] ) keyword[for] identifier[alpha] keyword[in] identifier[alphas] ])
identifier[alpha] = identifier[alphas] [ identifier[numpy] . identifier[argmin] ( identifier[evals] )]
identifier[out] = identifier[linalg] . identifier[inv] (
identifier[numpy] . identifier[dot] ( identifier[coef_mat] . identifier[T] , identifier[coef_mat] )+ identifier[alpha] * identifier[numpy] . identifier[dot] ( identifier[tikhmat] . identifier[T] , identifier[tikhmat] ))
identifier[out] = identifier[numpy] . identifier[dot] ( identifier[out] , identifier[numpy] . identifier[dot] ( identifier[coef_mat] . identifier[T] , identifier[ordinate] ))
keyword[return] identifier[out] | def rlstsq(coef_mat, ordinate, order=1, alpha=-1, cross=False):
"""
Least Squares Minimization using Tikhonov regularization.
Includes method for robust generalized cross-validation.
Args:
coef_mat (numpy.ndarray):
Coefficient matrix with shape ``(M, N)``.
ordinate (numpy.ndarray):
Ordinate or "dependent variable" values with shape ``(M,)`` or
``(M, K)``. If ``ordinate`` is two-dimensional, the least-squares
solution is calculated for each of the ``K`` columns of
``ordinate``.
order (int, numpy.ndarray):
If int, it is the order of Tikhonov regularization. If
`numpy.ndarray`, it will be used as regularization matrix.
alpha (float):
Lower threshold for the dampening parameter. The real value is
calculated using generalised cross validation.
cross (bool):
Use cross validation to estimate alpha value.
"""
coef_mat = numpy.array(coef_mat)
ordinate = numpy.array(ordinate)
(dim1, dim2) = coef_mat.shape
if cross:
out = numpy.empty((dim1, dim2) + ordinate.shape[1:])
coef_mat_ = numpy.empty((dim1 - 1, dim2))
ordinate_ = numpy.empty((dim1 - 1,) + ordinate.shape[1:])
for i in range(dim1):
coef_mat_[:i] = coef_mat[:i]
coef_mat_[i:] = coef_mat[i + 1:]
ordinate_[:i] = ordinate[:i]
ordinate_[i:] = ordinate[i + 1:]
out[i] = rlstsq(coef_mat_, ordinate_, order, alpha, False) # depends on [control=['for'], data=['i']]
return numpy.median(out, 0) # depends on [control=['if'], data=[]]
if order == 0:
tikhmat = numpy.eye(dim2) # depends on [control=['if'], data=[]]
elif order == 1:
tikhmat = numpy.zeros((dim2 - 1, dim2))
tikhmat[:, :-1] -= numpy.eye(dim2 - 1)
tikhmat[:, 1:] += numpy.eye(dim2 - 1) # depends on [control=['if'], data=[]]
elif order == 2:
tikhmat = numpy.zeros((dim2 - 2, dim2))
tikhmat[:, :-2] += numpy.eye(dim2 - 2)
tikhmat[:, 1:-1] -= 2 * numpy.eye(dim2 - 2)
tikhmat[:, 2:] += numpy.eye(dim2 - 2) # depends on [control=['if'], data=[]]
elif order is None:
tikhmat = numpy.zeros(1) # depends on [control=['if'], data=[]]
else:
tikhmat = numpy.array(order)
assert tikhmat.shape[-1] == dim2 or tikhmat.shape in ((), (1,))
if alpha < 0 and order is not None:
gamma = 0.1
def rgcv_error(alpha):
"""Calculate Tikhonov dampening parameter."""
if alpha <= 0:
return numpy.inf # depends on [control=['if'], data=[]]
coef_mat_ = numpy.dot(coef_mat.T, coef_mat) + alpha * numpy.dot(tikhmat.T, tikhmat)
try:
coef_mat_ = numpy.dot(linalg.inv(coef_mat_), coef_mat.T) # depends on [control=['try'], data=[]]
except linalg.LinAlgError:
return numpy.inf # depends on [control=['except'], data=[]]
abscissas = numpy.dot(coef_mat_, ordinate)
res2 = numpy.sum((numpy.dot(coef_mat, abscissas) - ordinate) ** 2)
coef_mat_2 = numpy.dot(coef_mat, coef_mat_)
skew = dim1 * res2 / numpy.trace(numpy.eye(dim1) - coef_mat_2) ** 2
mu2 = numpy.sum(coef_mat_2 * coef_mat_2.T) / dim1
return (gamma + (1 - gamma) * mu2) * skew
alphas = 10.0 ** (-numpy.arange(0, 16))
evals = numpy.array([rgcv_error(alpha) for alpha in alphas])
alpha = alphas[numpy.argmin(evals)] # depends on [control=['if'], data=[]]
out = linalg.inv(numpy.dot(coef_mat.T, coef_mat) + alpha * numpy.dot(tikhmat.T, tikhmat))
out = numpy.dot(out, numpy.dot(coef_mat.T, ordinate))
return out |
def author_from_git(self):
    """Return the author name from the local git configuration.

    Runs ``git config --get user.name`` and stores the result on
    ``self.author``. ``self.author`` is left as ``None`` when git is
    not installed, the command fails, or no user name is configured.

    :return: the configured author name, or ``None``.
    """
    self.author = None
    try:
        # getdefaultlocale() may return (None, None); fall back to UTF-8.
        encoding = locale.getdefaultlocale()[1] or "utf-8"
        # Launch git and capture its stdout.
        cmd = Popen(["git", "config", "--get", "user.name"], stdout=PIPE)
        # communicate() returns a (stdout, stderr) tuple of bytes; the
        # previous code called .decode() on the tuple itself, which
        # raised AttributeError whenever git actually ran.  A leftover
        # ipdb.set_trace() debug breakpoint has also been removed.
        stdoutdata, _ = cmd.communicate()
        author = stdoutdata.decode(encoding).rstrip(os.linesep)
        if author:
            self.author = author
    except (CalledProcessError, OSError):
        # git missing or the command failed -- best effort, keep None.
        pass
    return self.author
constant[ Get the author name from git information. ]
name[self].author assign[=] constant[None]
<ast.Try object at 0x7da1b14602e0>
return[name[self].author] | keyword[def] identifier[author_from_git] ( identifier[self] ):
literal[string]
identifier[self] . identifier[author] = keyword[None]
keyword[try] :
identifier[encoding] = identifier[locale] . identifier[getdefaultlocale] ()[ literal[int] ]
identifier[cmd] = identifier[Popen] ([ literal[string] , literal[string] , literal[string] , literal[string] ], identifier[stdout] = identifier[PIPE] )
identifier[stdoutdata] = identifier[cmd] . identifier[communicate] (). identifier[decode] ( identifier[encoding] )
keyword[if] ( identifier[stdoutdata] [ literal[int] ]):
keyword[import] identifier[ipdb] ; identifier[ipdb] . identifier[set_trace] ()
identifier[author] = identifier[stdoutdata] [ literal[int] ]. identifier[rstrip] ( identifier[os] . identifier[linesep] )
identifier[self] . identifier[author] = identifier[author]
keyword[except] identifier[ImportError] :
keyword[pass]
keyword[except] identifier[CalledProcessError] :
keyword[pass]
keyword[except] identifier[OSError] :
keyword[pass]
keyword[return] identifier[self] . identifier[author] | def author_from_git(self):
""" Get the author name from git information. """
self.author = None
try:
encoding = locale.getdefaultlocale()[1]
# launch git command and get answer
cmd = Popen(['git', 'config', '--get', 'user.name'], stdout=PIPE)
stdoutdata = cmd.communicate().decode(encoding)
if stdoutdata[0]:
import ipdb
ipdb.set_trace()
author = stdoutdata[0].rstrip(os.linesep)
self.author = author #.decode('utf8') # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except ImportError:
pass # depends on [control=['except'], data=[]]
except CalledProcessError:
pass # depends on [control=['except'], data=[]]
except OSError:
pass # depends on [control=['except'], data=[]]
return self.author |
def _set_interface_counters(self, v, load=False):
    """
    Setter method for interface_counters, mapped from YANG variable /interface_statistics_state/interface_counters (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_interface_counters is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_interface_counters() directly.
    YANG Description: Interface counters
    """
    # NOTE(review): auto-generated pyangbind setter -- code intentionally
    # left byte-identical; only comments added.
    if hasattr(v, "_utype"):
      # Unwrap a value that was wrapped by a previous YANGDynClass pass
      # so it can be re-coerced into the proper YANG container type.
      v = v._utype(v)
    try:
      # Coerce ``v`` into the generated interface_counters container class.
      t = YANGDynClass(v,base=interface_counters.interface_counters, is_container='container', presence=False, yang_name="interface-counters", rest_name="interface-counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'interface-interface-counters', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-interface-operational', defining_module='brocade-interface-operational', yang_type='container', is_config=False)
    except (TypeError, ValueError):
      # Reject values that cannot be coerced into the YANG container type.
      raise ValueError({
        'error-string': """interface_counters must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=interface_counters.interface_counters, is_container='container', presence=False, yang_name="interface-counters", rest_name="interface-counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'interface-interface-counters', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-interface-operational', defining_module='brocade-interface-operational', yang_type='container', is_config=False)""",
      })
    # Store the coerced container on the name-mangled backing attribute.
    self.__interface_counters = t
    if hasattr(self, '_set'):
      # Propagate the change notification when the parent supports it.
self._set() | def function[_set_interface_counters, parameter[self, v, load]]:
constant[
Setter method for interface_counters, mapped from YANG variable /interface_statistics_state/interface_counters (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_interface_counters is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_interface_counters() directly.
YANG Description: Interface counters
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da2054a4100>
name[self].__interface_counters assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_interface_counters] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[interface_counters] . identifier[interface_counters] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[False] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__interface_counters] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_interface_counters(self, v, load=False):
"""
Setter method for interface_counters, mapped from YANG variable /interface_statistics_state/interface_counters (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_interface_counters is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_interface_counters() directly.
YANG Description: Interface counters
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=interface_counters.interface_counters, is_container='container', presence=False, yang_name='interface-counters', rest_name='interface-counters', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'interface-interface-counters', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-interface-operational', defining_module='brocade-interface-operational', yang_type='container', is_config=False) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'interface_counters must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=interface_counters.interface_counters, is_container=\'container\', presence=False, yang_name="interface-counters", rest_name="interface-counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'callpoint\': u\'interface-interface-counters\', u\'cli-suppress-show-path\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-interface-operational\', defining_module=\'brocade-interface-operational\', yang_type=\'container\', is_config=False)'}) # depends on [control=['except'], data=[]]
self.__interface_counters = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def get_default_gwf_api():
    """Return the name of the preferred GWF library.

    Each known API in ``APIS`` is tried in preference order; the first
    one whose backing library imports successfully is returned.

    Returns
    -------
    str
        The name of the first importable GWF API (e.g. ``'framecpp'``
        or ``'lalframe'``).

    Raises
    ------
    ImportError
        If none of the third-party GWF libraries can be imported.
    """
    for api_name in APIS:
        try:
            import_gwf_library(api_name)
            return api_name
        except ImportError:
            # This backend is unavailable; fall through to the next one.
            pass
    raise ImportError("no GWF API available, please install a third-party GWF "
                      "library ({}) and try again".format(', '.join(APIS)))
constant[Return the preferred GWF library
Examples
--------
If you have |LDAStools.frameCPP|_ installed:
>>> from gwpy.timeseries.io.gwf import get_default_gwf_api
>>> get_default_gwf_api()
'framecpp'
Or, if you don't have |lalframe|_:
>>> get_default_gwf_api()
'lalframe'
Otherwise:
>>> get_default_gwf_api()
ImportError: no GWF API available, please install a third-party GWF
library (framecpp, lalframe) and try again
]
for taget[name[lib]] in starred[name[APIS]] begin[:]
<ast.Try object at 0x7da20e9b2860>
<ast.Raise object at 0x7da204564220> | keyword[def] identifier[get_default_gwf_api] ():
literal[string]
keyword[for] identifier[lib] keyword[in] identifier[APIS] :
keyword[try] :
identifier[import_gwf_library] ( identifier[lib] )
keyword[except] identifier[ImportError] :
keyword[continue]
keyword[else] :
keyword[return] identifier[lib]
keyword[raise] identifier[ImportError] ( literal[string]
literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[APIS] ))) | def get_default_gwf_api():
"""Return the preferred GWF library
Examples
--------
If you have |LDAStools.frameCPP|_ installed:
>>> from gwpy.timeseries.io.gwf import get_default_gwf_api
>>> get_default_gwf_api()
'framecpp'
Or, if you don't have |lalframe|_:
>>> get_default_gwf_api()
'lalframe'
Otherwise:
>>> get_default_gwf_api()
ImportError: no GWF API available, please install a third-party GWF
library (framecpp, lalframe) and try again
"""
for lib in APIS:
try:
import_gwf_library(lib) # depends on [control=['try'], data=[]]
except ImportError:
continue # depends on [control=['except'], data=[]]
else:
return lib # depends on [control=['for'], data=['lib']]
raise ImportError('no GWF API available, please install a third-party GWF library ({}) and try again'.format(', '.join(APIS))) |
def parse(parser=None, args=None):
    """Parse the command-line arguments, returning an argparse namespace.

    Other projects can call this with their own ``ArgumentParser``
    (built with a losser parser from ``make_parser()`` as parent) to get
    losser's custom post-processing on top of argparse's ``parse_args()``.

    :param parser: optional pre-built argument parser; a default one is
        created with ``make_parser()`` when omitted.
    :param args: optional argument list (argparse falls back to
        ``sys.argv[1:]`` when this is ``None``).
    :raises CommandLineExit: when argparse wants the process to exit
        without an error (e.g. ``--help``); help text has already been
        printed and the exit status is in the exception's ``.code``.
    :raises CommandLineError: if something went wrong during parsing;
        a non-empty ``.message`` holds an error not yet printed.
    """
    parser = parser or make_parser()
    try:
        parsed_args = parser.parse_args(args)
    except SystemExit as err:
        # argparse exits the process directly; surface that to callers
        # as an exception they can handle.
        raise CommandLineExit(err.code)
    if hasattr(parsed_args, "columns"):
        columns = parsed_args.columns
    else:
        columns = collections.OrderedDict()
        parsed_args.columns = columns
    for title, spec in columns.items():
        if "pattern" not in spec:
            raise ColumnWithoutPatternError(
                'Column "{0}" needs a pattern'.format(title))
        pattern = spec["pattern"]
        # Collapse length-1 pattern lists into plain strings.
        if len(pattern) == 1:
            spec["pattern"] = pattern[0]
    have_inline = bool(columns)
    have_file = bool(parsed_args.columns_file)
    if have_inline and have_file:
        raise ColumnsAndColumnsFileError(
            "You can't use the --column and --columns options together (yet)")
    if not have_inline:
        if not have_file:
            # In the future all JSON fields may simply become CSV columns
            # when none are specified; until then this is an error.
            raise NoColumnsError(
                "You must give either a --columns or at least one "
                "-c/--column argument")
        parsed_args.columns = parsed_args.columns_file
    return parsed_args
constant[Parse the command line arguments, return an argparse namespace object.
Other projects can call this function and pass in their own ArgumentParser
object (which should have a losser ArgumentParser from make_parser() above
as parent) to do the argument parsing and get the result (this does some
custom post-processing, beyond what argparse's parse_args() does). For
example::
parent_parser = losser.cli.make_parser(...)
parser = argparse.ArgumentParser(parents=[parent_parser])
parser.add_argument(...)
try:
parsed_args = losser.cli.parse(parser=parser)
except losser.cli.CommandLineError as err:
...
:raises CommandLineError: If something went wrong during command-line
parsing. If the exception has a non-empty .message attribute it
contains an error message that hasn't been printed to stdout yet,
otherwise any error message has already been printed.
:raises CommandLineExit: If the result of command-line parsing means that
the command should exit without continuing, but this is not because of
an error (for example if the user passed --help). Any help text will
already have been written to stdout, the exit code that the process
should exit with is in the exception's .code attribute.
CommandLineExit is a subclass of CommandLineError above.
]
if <ast.UnaryOp object at 0x7da1b2428a30> begin[:]
variable[parser] assign[=] call[name[make_parser], parameter[]]
<ast.Try object at 0x7da1b2429330>
<ast.Try object at 0x7da1b2429e70>
for taget[tuple[[<ast.Name object at 0x7da1b242a0e0>, <ast.Name object at 0x7da1b2428520>]]] in starred[call[name[columns].items, parameter[]]] begin[:]
if compare[constant[pattern] <ast.NotIn object at 0x7da2590d7190> name[spec]] begin[:]
<ast.Raise object at 0x7da1b242b850>
if compare[call[name[len], parameter[call[name[spec]][constant[pattern]]]] equal[==] constant[1]] begin[:]
call[name[spec]][constant[pattern]] assign[=] call[call[name[spec]][constant[pattern]]][constant[0]]
if <ast.BoolOp object at 0x7da1b242b0a0> begin[:]
<ast.Raise object at 0x7da1b242b070>
return[name[parsed_args]] | keyword[def] identifier[parse] ( identifier[parser] = keyword[None] , identifier[args] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[parser] :
identifier[parser] = identifier[make_parser] ()
keyword[try] :
identifier[parsed_args] = identifier[parser] . identifier[parse_args] ( identifier[args] )
keyword[except] identifier[SystemExit] keyword[as] identifier[err] :
keyword[raise] identifier[CommandLineExit] ( identifier[err] . identifier[code] )
keyword[try] :
identifier[columns] = identifier[parsed_args] . identifier[columns]
keyword[except] identifier[AttributeError] :
identifier[columns] = identifier[collections] . identifier[OrderedDict] ()
identifier[parsed_args] . identifier[columns] = identifier[columns]
keyword[for] identifier[title] , identifier[spec] keyword[in] identifier[columns] . identifier[items] ():
keyword[if] literal[string] keyword[not] keyword[in] identifier[spec] :
keyword[raise] identifier[ColumnWithoutPatternError] (
literal[string] . identifier[format] ( identifier[title] ))
keyword[if] identifier[len] ( identifier[spec] [ literal[string] ])== literal[int] :
identifier[spec] [ literal[string] ]= identifier[spec] [ literal[string] ][ literal[int] ]
keyword[if] identifier[columns] keyword[and] identifier[parsed_args] . identifier[columns_file] :
keyword[raise] identifier[ColumnsAndColumnsFileError] (
literal[string] )
keyword[elif] identifier[parsed_args] . identifier[columns_file] keyword[and] keyword[not] identifier[columns] :
identifier[parsed_args] . identifier[columns] = identifier[parsed_args] . identifier[columns_file]
keyword[elif] ( keyword[not] identifier[columns] ) keyword[and] ( keyword[not] identifier[parsed_args] . identifier[columns_file] ):
keyword[raise] identifier[NoColumnsError] (
literal[string]
literal[string] )
keyword[else] :
keyword[assert] identifier[columns]
keyword[return] identifier[parsed_args] | def parse(parser=None, args=None):
"""Parse the command line arguments, return an argparse namespace object.
Other projects can call this function and pass in their own ArgumentParser
object (which should have a losser ArgumentParser from make_parser() above
as parent) to do the argument parsing and get the result (this does some
custom post-processing, beyond what argparse's parse_args() does). For
example::
parent_parser = losser.cli.make_parser(...)
parser = argparse.ArgumentParser(parents=[parent_parser])
parser.add_argument(...)
try:
parsed_args = losser.cli.parse(parser=parser)
except losser.cli.CommandLineError as err:
...
:raises CommandLineError: If something went wrong during command-line
parsing. If the exception has a non-empty .message attribute it
contains an error message that hasn't been printed to stdout yet,
otherwise any error message has already been printed.
:raises CommandLineExit: If the result of command-line parsing means that
the command should exit without continuing, but this is not because of
an error (for example if the user passed --help). Any help text will
already have been written to stdout, the exit code that the process
should exit with is in the exception's .code attribute.
CommandLineExit is a subclass of CommandLineError above.
"""
if not parser:
parser = make_parser() # depends on [control=['if'], data=[]]
try:
parsed_args = parser.parse_args(args) # depends on [control=['try'], data=[]]
except SystemExit as err:
raise CommandLineExit(err.code) # depends on [control=['except'], data=['err']]
try:
columns = parsed_args.columns # depends on [control=['try'], data=[]]
except AttributeError:
columns = collections.OrderedDict()
parsed_args.columns = columns # depends on [control=['except'], data=[]]
for (title, spec) in columns.items():
if 'pattern' not in spec:
raise ColumnWithoutPatternError('Column "{0}" needs a pattern'.format(title)) # depends on [control=['if'], data=[]]
# Change length-1 patterns into strings (not lists of one string).
if len(spec['pattern']) == 1:
spec['pattern'] = spec['pattern'][0] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if columns and parsed_args.columns_file:
raise ColumnsAndColumnsFileError("You can't use the --column and --columns options together (yet)") # depends on [control=['if'], data=[]]
elif parsed_args.columns_file and (not columns):
parsed_args.columns = parsed_args.columns_file # depends on [control=['if'], data=[]]
elif not columns and (not parsed_args.columns_file):
# Crash if no columns specified.
# In the future we'll support simply converting all JSON fields to CSV
# columns if no columns are specified, and this will be removed.
raise NoColumnsError('You must give either a --columns or at least one -c/--column argument') # depends on [control=['if'], data=[]]
else:
assert columns
return parsed_args |
def verifypartialmatch(self, window_name, object_name, partial_text):
    """
    Verify that the object's text partially matches the given pattern.

    @param window_name: Window name to type in, either full name,
    LDTP's name convention, or a Unix glob.
    @type window_name: string
    @param object_name: Object name to type in, either full name,
    LDTP's name convention, or a Unix glob.
    @type object_name: string
    @param partial_text: Partial text to match, as a Unix glob (the
    glob is anchored at the end of the text, so use e.g. "*foo*").
    @type partial_text: string
    @return: 1 on success, 0 on mismatch or any lookup failure.
    @rtype: integer
    """
    try:
        # fnmatch.translate turns the glob into a regex anchored at the
        # end of the string; re.search lets the match start anywhere.
        if re.search(fnmatch.translate(partial_text),
                     self.gettextvalue(window_name,
                                       object_name)):
            return 1
    except Exception:
        # Best-effort LDTP API: any failure (missing window/object,
        # etc.) is reported as a plain mismatch.  Narrowed from a bare
        # ``except:`` so KeyboardInterrupt/SystemExit still propagate.
        pass
    return 0
constant[
Verify partial text
@param window_name: Window name to type in, either full name,
LDTP's name convention, or a Unix glob.
@type window_name: string
@param object_name: Object name to type in, either full name,
LDTP's name convention, or a Unix glob.
@type object_name: string
@param partial_text: Partial text to match
@type object_name: string
@return: 1 on success.
@rtype: integer
]
<ast.Try object at 0x7da18f09d5a0>
return[constant[0]] | keyword[def] identifier[verifypartialmatch] ( identifier[self] , identifier[window_name] , identifier[object_name] , identifier[partial_text] ):
literal[string]
keyword[try] :
keyword[if] identifier[re] . identifier[search] ( identifier[fnmatch] . identifier[translate] ( identifier[partial_text] ),
identifier[self] . identifier[gettextvalue] ( identifier[window_name] ,
identifier[object_name] )):
keyword[return] literal[int]
keyword[except] :
keyword[pass]
keyword[return] literal[int] | def verifypartialmatch(self, window_name, object_name, partial_text):
"""
Verify partial text
@param window_name: Window name to type in, either full name,
LDTP's name convention, or a Unix glob.
@type window_name: string
@param object_name: Object name to type in, either full name,
LDTP's name convention, or a Unix glob.
@type object_name: string
@param partial_text: Partial text to match
@type object_name: string
@return: 1 on success.
@rtype: integer
"""
try:
if re.search(fnmatch.translate(partial_text), self.gettextvalue(window_name, object_name)):
return 1 # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
return 0 |
def serialize_workflow(self):
"""
Serializes the current WF.
Returns:
WF state data.
"""
self.workflow.refresh_waiting_tasks()
return CompactWorkflowSerializer().serialize_workflow(self.workflow,
include_spec=False) | def function[serialize_workflow, parameter[self]]:
constant[
Serializes the current WF.
Returns:
WF state data.
]
call[name[self].workflow.refresh_waiting_tasks, parameter[]]
return[call[call[name[CompactWorkflowSerializer], parameter[]].serialize_workflow, parameter[name[self].workflow]]] | keyword[def] identifier[serialize_workflow] ( identifier[self] ):
literal[string]
identifier[self] . identifier[workflow] . identifier[refresh_waiting_tasks] ()
keyword[return] identifier[CompactWorkflowSerializer] (). identifier[serialize_workflow] ( identifier[self] . identifier[workflow] ,
identifier[include_spec] = keyword[False] ) | def serialize_workflow(self):
"""
Serializes the current WF.
Returns:
WF state data.
"""
self.workflow.refresh_waiting_tasks()
return CompactWorkflowSerializer().serialize_workflow(self.workflow, include_spec=False) |
def is_keyword_version_supported(
keyword_version, inasafe_version=inasafe_keyword_version):
"""Check if the keyword version is supported by this InaSAFE version.
.. versionadded: 3.3
:param keyword_version: String representation of the keyword version.
:type keyword_version: str
:param inasafe_version: String representation of InaSAFE's version.
:type inasafe_version: str
:returns: True if supported, otherwise False.
:rtype: bool
"""
def minor_version(version):
"""Obtain minor version of a version (x.y)
:param version: Version string.
:type version: str
:returns: Minor version.
:rtype: str
"""
version_split = version.split('.')
return version_split[0] + '.' + version_split[1]
# Convert to minor version.
keyword_version = minor_version(keyword_version)
inasafe_version = minor_version(inasafe_version)
if inasafe_version == keyword_version:
return True
if inasafe_version in list(keyword_version_compatibilities.keys()):
if keyword_version in keyword_version_compatibilities[inasafe_version]:
return True
else:
return False
else:
return False | def function[is_keyword_version_supported, parameter[keyword_version, inasafe_version]]:
constant[Check if the keyword version is supported by this InaSAFE version.
.. versionadded: 3.3
:param keyword_version: String representation of the keyword version.
:type keyword_version: str
:param inasafe_version: String representation of InaSAFE's version.
:type inasafe_version: str
:returns: True if supported, otherwise False.
:rtype: bool
]
def function[minor_version, parameter[version]]:
constant[Obtain minor version of a version (x.y)
:param version: Version string.
:type version: str
:returns: Minor version.
:rtype: str
]
variable[version_split] assign[=] call[name[version].split, parameter[constant[.]]]
return[binary_operation[binary_operation[call[name[version_split]][constant[0]] + constant[.]] + call[name[version_split]][constant[1]]]]
variable[keyword_version] assign[=] call[name[minor_version], parameter[name[keyword_version]]]
variable[inasafe_version] assign[=] call[name[minor_version], parameter[name[inasafe_version]]]
if compare[name[inasafe_version] equal[==] name[keyword_version]] begin[:]
return[constant[True]]
if compare[name[inasafe_version] in call[name[list], parameter[call[name[keyword_version_compatibilities].keys, parameter[]]]]] begin[:]
if compare[name[keyword_version] in call[name[keyword_version_compatibilities]][name[inasafe_version]]] begin[:]
return[constant[True]] | keyword[def] identifier[is_keyword_version_supported] (
identifier[keyword_version] , identifier[inasafe_version] = identifier[inasafe_keyword_version] ):
literal[string]
keyword[def] identifier[minor_version] ( identifier[version] ):
literal[string]
identifier[version_split] = identifier[version] . identifier[split] ( literal[string] )
keyword[return] identifier[version_split] [ literal[int] ]+ literal[string] + identifier[version_split] [ literal[int] ]
identifier[keyword_version] = identifier[minor_version] ( identifier[keyword_version] )
identifier[inasafe_version] = identifier[minor_version] ( identifier[inasafe_version] )
keyword[if] identifier[inasafe_version] == identifier[keyword_version] :
keyword[return] keyword[True]
keyword[if] identifier[inasafe_version] keyword[in] identifier[list] ( identifier[keyword_version_compatibilities] . identifier[keys] ()):
keyword[if] identifier[keyword_version] keyword[in] identifier[keyword_version_compatibilities] [ identifier[inasafe_version] ]:
keyword[return] keyword[True]
keyword[else] :
keyword[return] keyword[False]
keyword[else] :
keyword[return] keyword[False] | def is_keyword_version_supported(keyword_version, inasafe_version=inasafe_keyword_version):
"""Check if the keyword version is supported by this InaSAFE version.
.. versionadded: 3.3
:param keyword_version: String representation of the keyword version.
:type keyword_version: str
:param inasafe_version: String representation of InaSAFE's version.
:type inasafe_version: str
:returns: True if supported, otherwise False.
:rtype: bool
"""
def minor_version(version):
"""Obtain minor version of a version (x.y)
:param version: Version string.
:type version: str
:returns: Minor version.
:rtype: str
"""
version_split = version.split('.')
return version_split[0] + '.' + version_split[1]
# Convert to minor version.
keyword_version = minor_version(keyword_version)
inasafe_version = minor_version(inasafe_version)
if inasafe_version == keyword_version:
return True # depends on [control=['if'], data=[]]
if inasafe_version in list(keyword_version_compatibilities.keys()):
if keyword_version in keyword_version_compatibilities[inasafe_version]:
return True # depends on [control=['if'], data=[]]
else:
return False # depends on [control=['if'], data=['inasafe_version']]
else:
return False |
def insert(self, i, x):
"""s.insert(i, x) same as s[i:i] = [x]
Raises TypeError if x isn't a string."""
if not isinstance(x, str):
raise TypeError(
'Members of this object must be strings. '
'You supplied \"%s\"' % type(x))
list.insert(self, i, x) | def function[insert, parameter[self, i, x]]:
constant[s.insert(i, x) same as s[i:i] = [x]
Raises TypeError if x isn't a string.]
if <ast.UnaryOp object at 0x7da20c6aa6b0> begin[:]
<ast.Raise object at 0x7da20c6ab070>
call[name[list].insert, parameter[name[self], name[i], name[x]]] | keyword[def] identifier[insert] ( identifier[self] , identifier[i] , identifier[x] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[x] , identifier[str] ):
keyword[raise] identifier[TypeError] (
literal[string]
literal[string] % identifier[type] ( identifier[x] ))
identifier[list] . identifier[insert] ( identifier[self] , identifier[i] , identifier[x] ) | def insert(self, i, x):
"""s.insert(i, x) same as s[i:i] = [x]
Raises TypeError if x isn't a string."""
if not isinstance(x, str):
raise TypeError('Members of this object must be strings. You supplied "%s"' % type(x)) # depends on [control=['if'], data=[]]
list.insert(self, i, x) |
def uninstall(self,
bug: Bug,
force: bool = False,
noprune: bool = False
) -> None:
"""
Uninstalls all Docker images associated with this bug.
See: `BuildManager.uninstall`
"""
self.__installation.build.uninstall(bug.image,
force=force,
noprune=noprune) | def function[uninstall, parameter[self, bug, force, noprune]]:
constant[
Uninstalls all Docker images associated with this bug.
See: `BuildManager.uninstall`
]
call[name[self].__installation.build.uninstall, parameter[name[bug].image]] | keyword[def] identifier[uninstall] ( identifier[self] ,
identifier[bug] : identifier[Bug] ,
identifier[force] : identifier[bool] = keyword[False] ,
identifier[noprune] : identifier[bool] = keyword[False]
)-> keyword[None] :
literal[string]
identifier[self] . identifier[__installation] . identifier[build] . identifier[uninstall] ( identifier[bug] . identifier[image] ,
identifier[force] = identifier[force] ,
identifier[noprune] = identifier[noprune] ) | def uninstall(self, bug: Bug, force: bool=False, noprune: bool=False) -> None:
"""
Uninstalls all Docker images associated with this bug.
See: `BuildManager.uninstall`
"""
self.__installation.build.uninstall(bug.image, force=force, noprune=noprune) |
def cmd(send, msg, _):
"""Convert a number to the roman numeral equivalent.
Syntax: {command} [number]
"""
if not msg:
msg = randrange(5000)
elif not msg.isdigit():
send("Invalid Number.")
return
send(gen_roman(int(msg))) | def function[cmd, parameter[send, msg, _]]:
constant[Convert a number to the roman numeral equivalent.
Syntax: {command} [number]
]
if <ast.UnaryOp object at 0x7da1b1ff1360> begin[:]
variable[msg] assign[=] call[name[randrange], parameter[constant[5000]]]
call[name[send], parameter[call[name[gen_roman], parameter[call[name[int], parameter[name[msg]]]]]]] | keyword[def] identifier[cmd] ( identifier[send] , identifier[msg] , identifier[_] ):
literal[string]
keyword[if] keyword[not] identifier[msg] :
identifier[msg] = identifier[randrange] ( literal[int] )
keyword[elif] keyword[not] identifier[msg] . identifier[isdigit] ():
identifier[send] ( literal[string] )
keyword[return]
identifier[send] ( identifier[gen_roman] ( identifier[int] ( identifier[msg] ))) | def cmd(send, msg, _):
"""Convert a number to the roman numeral equivalent.
Syntax: {command} [number]
"""
if not msg:
msg = randrange(5000) # depends on [control=['if'], data=[]]
elif not msg.isdigit():
send('Invalid Number.')
return # depends on [control=['if'], data=[]]
send(gen_roman(int(msg))) |
def toString(self, obj):
"""
Convert the given L{Identifier} to a string.
"""
return Box(shareID=obj.shareID.encode('utf-8'),
localpart=obj.localpart.encode('utf-8'),
domain=obj.domain.encode('utf-8')).serialize() | def function[toString, parameter[self, obj]]:
constant[
Convert the given L{Identifier} to a string.
]
return[call[call[name[Box], parameter[]].serialize, parameter[]]] | keyword[def] identifier[toString] ( identifier[self] , identifier[obj] ):
literal[string]
keyword[return] identifier[Box] ( identifier[shareID] = identifier[obj] . identifier[shareID] . identifier[encode] ( literal[string] ),
identifier[localpart] = identifier[obj] . identifier[localpart] . identifier[encode] ( literal[string] ),
identifier[domain] = identifier[obj] . identifier[domain] . identifier[encode] ( literal[string] )). identifier[serialize] () | def toString(self, obj):
"""
Convert the given L{Identifier} to a string.
"""
return Box(shareID=obj.shareID.encode('utf-8'), localpart=obj.localpart.encode('utf-8'), domain=obj.domain.encode('utf-8')).serialize() |
def CheckClientAccess(self, username, client_id):
"""Checks whether a given user can access given client."""
self._CheckAccess(
username, str(client_id),
rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT) | def function[CheckClientAccess, parameter[self, username, client_id]]:
constant[Checks whether a given user can access given client.]
call[name[self]._CheckAccess, parameter[name[username], call[name[str], parameter[name[client_id]]], name[rdf_objects].ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT]] | keyword[def] identifier[CheckClientAccess] ( identifier[self] , identifier[username] , identifier[client_id] ):
literal[string]
identifier[self] . identifier[_CheckAccess] (
identifier[username] , identifier[str] ( identifier[client_id] ),
identifier[rdf_objects] . identifier[ApprovalRequest] . identifier[ApprovalType] . identifier[APPROVAL_TYPE_CLIENT] ) | def CheckClientAccess(self, username, client_id):
"""Checks whether a given user can access given client."""
self._CheckAccess(username, str(client_id), rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT) |
def has_key(self, key):
"""Does the key exist?
This method will check to see if it has expired too.
"""
if key in self._dict:
try:
self[key]
return True
except ValueError:
return False
except KeyError:
return False
return False | def function[has_key, parameter[self, key]]:
constant[Does the key exist?
This method will check to see if it has expired too.
]
if compare[name[key] in name[self]._dict] begin[:]
<ast.Try object at 0x7da1b2782440>
return[constant[False]] | keyword[def] identifier[has_key] ( identifier[self] , identifier[key] ):
literal[string]
keyword[if] identifier[key] keyword[in] identifier[self] . identifier[_dict] :
keyword[try] :
identifier[self] [ identifier[key] ]
keyword[return] keyword[True]
keyword[except] identifier[ValueError] :
keyword[return] keyword[False]
keyword[except] identifier[KeyError] :
keyword[return] keyword[False]
keyword[return] keyword[False] | def has_key(self, key):
"""Does the key exist?
This method will check to see if it has expired too.
"""
if key in self._dict:
try:
self[key]
return True # depends on [control=['try'], data=[]]
except ValueError:
return False # depends on [control=['except'], data=[]]
except KeyError:
return False # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['key']]
return False |
def get_blurb(self, name):
"""Get the blurb for a GObject property."""
c_str = gobject_lib.g_param_spec_get_blurb(self._get_pspec(name))
return _to_string(c_str) | def function[get_blurb, parameter[self, name]]:
constant[Get the blurb for a GObject property.]
variable[c_str] assign[=] call[name[gobject_lib].g_param_spec_get_blurb, parameter[call[name[self]._get_pspec, parameter[name[name]]]]]
return[call[name[_to_string], parameter[name[c_str]]]] | keyword[def] identifier[get_blurb] ( identifier[self] , identifier[name] ):
literal[string]
identifier[c_str] = identifier[gobject_lib] . identifier[g_param_spec_get_blurb] ( identifier[self] . identifier[_get_pspec] ( identifier[name] ))
keyword[return] identifier[_to_string] ( identifier[c_str] ) | def get_blurb(self, name):
"""Get the blurb for a GObject property."""
c_str = gobject_lib.g_param_spec_get_blurb(self._get_pspec(name))
return _to_string(c_str) |
def get_by_name(self, name, namespace=None):
"""
name: can be either <namespace>/<dataset_name> or just <dataset_name>
namespace: if specified, will skip name parsing, defaults to current user's username
"""
if not namespace:
namespace, name = get_namespace_from_name(name)
if not namespace:
namespace = AuthConfigManager.get_access_token().username
try:
response = self.request('GET', '%s/%s/%s' % (self.url, namespace, name))
return Dataset.from_dict(response.json())
except NotFoundException:
return None | def function[get_by_name, parameter[self, name, namespace]]:
constant[
name: can be either <namespace>/<dataset_name> or just <dataset_name>
namespace: if specified, will skip name parsing, defaults to current user's username
]
if <ast.UnaryOp object at 0x7da1b0dc3580> begin[:]
<ast.Tuple object at 0x7da1b0dc2380> assign[=] call[name[get_namespace_from_name], parameter[name[name]]]
if <ast.UnaryOp object at 0x7da1b0dc0fd0> begin[:]
variable[namespace] assign[=] call[name[AuthConfigManager].get_access_token, parameter[]].username
<ast.Try object at 0x7da1b0dc2d40> | keyword[def] identifier[get_by_name] ( identifier[self] , identifier[name] , identifier[namespace] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[namespace] :
identifier[namespace] , identifier[name] = identifier[get_namespace_from_name] ( identifier[name] )
keyword[if] keyword[not] identifier[namespace] :
identifier[namespace] = identifier[AuthConfigManager] . identifier[get_access_token] (). identifier[username]
keyword[try] :
identifier[response] = identifier[self] . identifier[request] ( literal[string] , literal[string] %( identifier[self] . identifier[url] , identifier[namespace] , identifier[name] ))
keyword[return] identifier[Dataset] . identifier[from_dict] ( identifier[response] . identifier[json] ())
keyword[except] identifier[NotFoundException] :
keyword[return] keyword[None] | def get_by_name(self, name, namespace=None):
"""
name: can be either <namespace>/<dataset_name> or just <dataset_name>
namespace: if specified, will skip name parsing, defaults to current user's username
"""
if not namespace:
(namespace, name) = get_namespace_from_name(name) # depends on [control=['if'], data=[]]
if not namespace:
namespace = AuthConfigManager.get_access_token().username # depends on [control=['if'], data=[]]
try:
response = self.request('GET', '%s/%s/%s' % (self.url, namespace, name))
return Dataset.from_dict(response.json()) # depends on [control=['try'], data=[]]
except NotFoundException:
return None # depends on [control=['except'], data=[]] |
def grid_linspace(bounds, count):
"""
Return a grid spaced inside a bounding box with edges spaced using np.linspace.
Parameters
---------
bounds: (2,dimension) list of [[min x, min y, etc], [max x, max y, etc]]
count: int, or (dimension,) int, number of samples per side
Returns
-------
grid: (n, dimension) float, points in the specified bounds
"""
bounds = np.asanyarray(bounds, dtype=np.float64)
if len(bounds) != 2:
raise ValueError('bounds must be (2, dimension!')
count = np.asanyarray(count, dtype=np.int)
if count.shape == ():
count = np.tile(count, bounds.shape[1])
grid_elements = [np.linspace(*b, num=c) for b, c in zip(bounds.T, count)]
grid = np.vstack(np.meshgrid(*grid_elements)
).reshape(bounds.shape[1], -1).T
return grid | def function[grid_linspace, parameter[bounds, count]]:
constant[
Return a grid spaced inside a bounding box with edges spaced using np.linspace.
Parameters
---------
bounds: (2,dimension) list of [[min x, min y, etc], [max x, max y, etc]]
count: int, or (dimension,) int, number of samples per side
Returns
-------
grid: (n, dimension) float, points in the specified bounds
]
variable[bounds] assign[=] call[name[np].asanyarray, parameter[name[bounds]]]
if compare[call[name[len], parameter[name[bounds]]] not_equal[!=] constant[2]] begin[:]
<ast.Raise object at 0x7da18f810dc0>
variable[count] assign[=] call[name[np].asanyarray, parameter[name[count]]]
if compare[name[count].shape equal[==] tuple[[]]] begin[:]
variable[count] assign[=] call[name[np].tile, parameter[name[count], call[name[bounds].shape][constant[1]]]]
variable[grid_elements] assign[=] <ast.ListComp object at 0x7da18f8133d0>
variable[grid] assign[=] call[call[name[np].vstack, parameter[call[name[np].meshgrid, parameter[<ast.Starred object at 0x7da18f812f20>]]]].reshape, parameter[call[name[bounds].shape][constant[1]], <ast.UnaryOp object at 0x7da18f8113f0>]].T
return[name[grid]] | keyword[def] identifier[grid_linspace] ( identifier[bounds] , identifier[count] ):
literal[string]
identifier[bounds] = identifier[np] . identifier[asanyarray] ( identifier[bounds] , identifier[dtype] = identifier[np] . identifier[float64] )
keyword[if] identifier[len] ( identifier[bounds] )!= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[count] = identifier[np] . identifier[asanyarray] ( identifier[count] , identifier[dtype] = identifier[np] . identifier[int] )
keyword[if] identifier[count] . identifier[shape] ==():
identifier[count] = identifier[np] . identifier[tile] ( identifier[count] , identifier[bounds] . identifier[shape] [ literal[int] ])
identifier[grid_elements] =[ identifier[np] . identifier[linspace] (* identifier[b] , identifier[num] = identifier[c] ) keyword[for] identifier[b] , identifier[c] keyword[in] identifier[zip] ( identifier[bounds] . identifier[T] , identifier[count] )]
identifier[grid] = identifier[np] . identifier[vstack] ( identifier[np] . identifier[meshgrid] (* identifier[grid_elements] )
). identifier[reshape] ( identifier[bounds] . identifier[shape] [ literal[int] ],- literal[int] ). identifier[T]
keyword[return] identifier[grid] | def grid_linspace(bounds, count):
"""
Return a grid spaced inside a bounding box with edges spaced using np.linspace.
Parameters
---------
bounds: (2,dimension) list of [[min x, min y, etc], [max x, max y, etc]]
count: int, or (dimension,) int, number of samples per side
Returns
-------
grid: (n, dimension) float, points in the specified bounds
"""
bounds = np.asanyarray(bounds, dtype=np.float64)
if len(bounds) != 2:
raise ValueError('bounds must be (2, dimension!') # depends on [control=['if'], data=[]]
count = np.asanyarray(count, dtype=np.int)
if count.shape == ():
count = np.tile(count, bounds.shape[1]) # depends on [control=['if'], data=[]]
grid_elements = [np.linspace(*b, num=c) for (b, c) in zip(bounds.T, count)]
grid = np.vstack(np.meshgrid(*grid_elements)).reshape(bounds.shape[1], -1).T
return grid |
def signed_token_generator(private_pem, **kwargs):
"""
:param private_pem:
"""
def signed_token_generator(request):
request.claims = kwargs
return common.generate_signed_token(private_pem, request)
return signed_token_generator | def function[signed_token_generator, parameter[private_pem]]:
constant[
:param private_pem:
]
def function[signed_token_generator, parameter[request]]:
name[request].claims assign[=] name[kwargs]
return[call[name[common].generate_signed_token, parameter[name[private_pem], name[request]]]]
return[name[signed_token_generator]] | keyword[def] identifier[signed_token_generator] ( identifier[private_pem] ,** identifier[kwargs] ):
literal[string]
keyword[def] identifier[signed_token_generator] ( identifier[request] ):
identifier[request] . identifier[claims] = identifier[kwargs]
keyword[return] identifier[common] . identifier[generate_signed_token] ( identifier[private_pem] , identifier[request] )
keyword[return] identifier[signed_token_generator] | def signed_token_generator(private_pem, **kwargs):
"""
:param private_pem:
"""
def signed_token_generator(request):
request.claims = kwargs
return common.generate_signed_token(private_pem, request)
return signed_token_generator |
def closest_ds_partition(
table, ds, before=True, schema="default",
metastore_conn_id='metastore_default'):
"""
This function finds the date in a list closest to the target date.
An optional parameter can be given to get the closest before or after.
:param table: A hive table name
:type table: str
:param ds: A datestamp ``%Y-%m-%d`` e.g. ``yyyy-mm-dd``
:type ds: list[datetime.date]
:param before: closest before (True), after (False) or either side of ds
:type before: bool or None
:returns: The closest date
:rtype: str or None
>>> tbl = 'airflow.static_babynames_partitioned'
>>> closest_ds_partition(tbl, '2015-01-02')
'2015-01-01'
"""
from airflow.hooks.hive_hooks import HiveMetastoreHook
if '.' in table:
schema, table = table.split('.')
hh = HiveMetastoreHook(metastore_conn_id=metastore_conn_id)
partitions = hh.get_partitions(schema=schema, table_name=table)
if not partitions:
return None
part_vals = [list(p.values())[0] for p in partitions]
if ds in part_vals:
return ds
else:
parts = [datetime.datetime.strptime(pv, '%Y-%m-%d')
for pv in part_vals]
target_dt = datetime.datetime.strptime(ds, '%Y-%m-%d')
closest_ds = _closest_date(target_dt, parts, before_target=before)
return closest_ds.isoformat() | def function[closest_ds_partition, parameter[table, ds, before, schema, metastore_conn_id]]:
constant[
This function finds the date in a list closest to the target date.
An optional parameter can be given to get the closest before or after.
:param table: A hive table name
:type table: str
:param ds: A datestamp ``%Y-%m-%d`` e.g. ``yyyy-mm-dd``
:type ds: list[datetime.date]
:param before: closest before (True), after (False) or either side of ds
:type before: bool or None
:returns: The closest date
:rtype: str or None
>>> tbl = 'airflow.static_babynames_partitioned'
>>> closest_ds_partition(tbl, '2015-01-02')
'2015-01-01'
]
from relative_module[airflow.hooks.hive_hooks] import module[HiveMetastoreHook]
if compare[constant[.] in name[table]] begin[:]
<ast.Tuple object at 0x7da1b055a500> assign[=] call[name[table].split, parameter[constant[.]]]
variable[hh] assign[=] call[name[HiveMetastoreHook], parameter[]]
variable[partitions] assign[=] call[name[hh].get_partitions, parameter[]]
if <ast.UnaryOp object at 0x7da1b05597b0> begin[:]
return[constant[None]]
variable[part_vals] assign[=] <ast.ListComp object at 0x7da1b055ba30>
if compare[name[ds] in name[part_vals]] begin[:]
return[name[ds]] | keyword[def] identifier[closest_ds_partition] (
identifier[table] , identifier[ds] , identifier[before] = keyword[True] , identifier[schema] = literal[string] ,
identifier[metastore_conn_id] = literal[string] ):
literal[string]
keyword[from] identifier[airflow] . identifier[hooks] . identifier[hive_hooks] keyword[import] identifier[HiveMetastoreHook]
keyword[if] literal[string] keyword[in] identifier[table] :
identifier[schema] , identifier[table] = identifier[table] . identifier[split] ( literal[string] )
identifier[hh] = identifier[HiveMetastoreHook] ( identifier[metastore_conn_id] = identifier[metastore_conn_id] )
identifier[partitions] = identifier[hh] . identifier[get_partitions] ( identifier[schema] = identifier[schema] , identifier[table_name] = identifier[table] )
keyword[if] keyword[not] identifier[partitions] :
keyword[return] keyword[None]
identifier[part_vals] =[ identifier[list] ( identifier[p] . identifier[values] ())[ literal[int] ] keyword[for] identifier[p] keyword[in] identifier[partitions] ]
keyword[if] identifier[ds] keyword[in] identifier[part_vals] :
keyword[return] identifier[ds]
keyword[else] :
identifier[parts] =[ identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[pv] , literal[string] )
keyword[for] identifier[pv] keyword[in] identifier[part_vals] ]
identifier[target_dt] = identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[ds] , literal[string] )
identifier[closest_ds] = identifier[_closest_date] ( identifier[target_dt] , identifier[parts] , identifier[before_target] = identifier[before] )
keyword[return] identifier[closest_ds] . identifier[isoformat] () | def closest_ds_partition(table, ds, before=True, schema='default', metastore_conn_id='metastore_default'):
"""
This function finds the date in a list closest to the target date.
An optional parameter can be given to get the closest before or after.
:param table: A hive table name
:type table: str
:param ds: A datestamp ``%Y-%m-%d`` e.g. ``yyyy-mm-dd``
:type ds: list[datetime.date]
:param before: closest before (True), after (False) or either side of ds
:type before: bool or None
:returns: The closest date
:rtype: str or None
>>> tbl = 'airflow.static_babynames_partitioned'
>>> closest_ds_partition(tbl, '2015-01-02')
'2015-01-01'
"""
from airflow.hooks.hive_hooks import HiveMetastoreHook
if '.' in table:
(schema, table) = table.split('.') # depends on [control=['if'], data=['table']]
hh = HiveMetastoreHook(metastore_conn_id=metastore_conn_id)
partitions = hh.get_partitions(schema=schema, table_name=table)
if not partitions:
return None # depends on [control=['if'], data=[]]
part_vals = [list(p.values())[0] for p in partitions]
if ds in part_vals:
return ds # depends on [control=['if'], data=['ds']]
else:
parts = [datetime.datetime.strptime(pv, '%Y-%m-%d') for pv in part_vals]
target_dt = datetime.datetime.strptime(ds, '%Y-%m-%d')
closest_ds = _closest_date(target_dt, parts, before_target=before)
return closest_ds.isoformat() |
def parse(binary, **params):
"""Turns a JSON structure into a python object."""
encoding = params.get('charset', 'UTF-8')
return json.loads(binary, encoding=encoding) | def function[parse, parameter[binary]]:
constant[Turns a JSON structure into a python object.]
variable[encoding] assign[=] call[name[params].get, parameter[constant[charset], constant[UTF-8]]]
return[call[name[json].loads, parameter[name[binary]]]] | keyword[def] identifier[parse] ( identifier[binary] ,** identifier[params] ):
literal[string]
identifier[encoding] = identifier[params] . identifier[get] ( literal[string] , literal[string] )
keyword[return] identifier[json] . identifier[loads] ( identifier[binary] , identifier[encoding] = identifier[encoding] ) | def parse(binary, **params):
"""Turns a JSON structure into a python object."""
encoding = params.get('charset', 'UTF-8')
return json.loads(binary, encoding=encoding) |
def get_branches_permissions(self, project, repository, limit=25):
"""
Get branches permissions from a given repo
:param project:
:param repository:
:param limit:
:return:
"""
url = 'rest/branch-permissions/2.0/projects/{project}/repos/{repository}/restrictions'.format(project=project,
repository=repository)
params = {}
if limit:
params['limit'] = limit
return self.get(url, params=params) | def function[get_branches_permissions, parameter[self, project, repository, limit]]:
constant[
Get branches permissions from a given repo
:param project:
:param repository:
:param limit:
:return:
]
variable[url] assign[=] call[constant[rest/branch-permissions/2.0/projects/{project}/repos/{repository}/restrictions].format, parameter[]]
variable[params] assign[=] dictionary[[], []]
if name[limit] begin[:]
call[name[params]][constant[limit]] assign[=] name[limit]
return[call[name[self].get, parameter[name[url]]]] | keyword[def] identifier[get_branches_permissions] ( identifier[self] , identifier[project] , identifier[repository] , identifier[limit] = literal[int] ):
literal[string]
identifier[url] = literal[string] . identifier[format] ( identifier[project] = identifier[project] ,
identifier[repository] = identifier[repository] )
identifier[params] ={}
keyword[if] identifier[limit] :
identifier[params] [ literal[string] ]= identifier[limit]
keyword[return] identifier[self] . identifier[get] ( identifier[url] , identifier[params] = identifier[params] ) | def get_branches_permissions(self, project, repository, limit=25):
"""
Get branches permissions from a given repo
:param project:
:param repository:
:param limit:
:return:
"""
url = 'rest/branch-permissions/2.0/projects/{project}/repos/{repository}/restrictions'.format(project=project, repository=repository)
params = {}
if limit:
params['limit'] = limit # depends on [control=['if'], data=[]]
return self.get(url, params=params) |
def warped_gp_cubic_sine(max_iters=100):
    """
    A test replicating the cubic sine regression problem from
    Snelson's paper.

    Fits a warped GP (tanh warping function) and a standard GP to noisy,
    signed-cube-rooted sine data, prints the warped model, and plots both
    fits in latent and warped space.

    :param max_iters: maximum optimizer iterations per restart
    """
    # 151 random inputs in [-pi, pi) with Gaussian observation noise.
    X = (2 * np.pi) * np.random.random(151) - np.pi
    Y = np.sin(X) + np.random.normal(0, 0.2, 151)
    # Signed cube root compresses the observations (the warping target).
    Y = np.array([np.power(abs(y), float(1) / 3) * (1, -1)[y < 0] for y in Y])
    X = X[:, None]
    Y = Y[:, None]
    warp_k = GPy.kern.RBF(1)
    warp_f = GPy.util.warping_functions.TanhFunction(n_terms=2)
    warp_m = GPy.models.WarpedGP(X, Y, kernel=warp_k, warping_function=warp_f)
    # Raw string: '\.' is a regex escape for the parameter-name pattern,
    # not a Python string escape (plain '.*\.d' is a SyntaxWarning on 3.12+).
    warp_m[r'.*\.d'].constrain_fixed(1.0)
    m = GPy.models.GPRegression(X, Y)
    m.optimize_restarts(parallel=False, robust=True, num_restarts=5, max_iters=max_iters)
    warp_m.optimize_restarts(parallel=False, robust=True, num_restarts=5, max_iters=max_iters)
    print(warp_m)
    print(warp_m['.*warp.*'])
    warp_m.predict_in_warped_space = False
    warp_m.plot(title="Warped GP - Latent space")
    warp_m.predict_in_warped_space = True
    warp_m.plot(title="Warped GP - Warped space")
    m.plot(title="Standard GP")
    warp_m.plot_warping()
pb.show() | def function[warped_gp_cubic_sine, parameter[max_iters]]:
constant[
A test replicating the cubic sine regression problem from
Snelson's paper.
]
variable[X] assign[=] binary_operation[binary_operation[binary_operation[constant[2] * name[np].pi] * call[name[np].random.random, parameter[constant[151]]]] - name[np].pi]
variable[Y] assign[=] binary_operation[call[name[np].sin, parameter[name[X]]] + call[name[np].random.normal, parameter[constant[0], constant[0.2], constant[151]]]]
variable[Y] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b1caf7f0>]]
variable[X] assign[=] call[name[X]][tuple[[<ast.Slice object at 0x7da1b1caf250>, <ast.Constant object at 0x7da1b1caf220>]]]
variable[Y] assign[=] call[name[Y]][tuple[[<ast.Slice object at 0x7da1b26aefe0>, <ast.Constant object at 0x7da1b26adde0>]]]
variable[warp_k] assign[=] call[name[GPy].kern.RBF, parameter[constant[1]]]
variable[warp_f] assign[=] call[name[GPy].util.warping_functions.TanhFunction, parameter[]]
variable[warp_m] assign[=] call[name[GPy].models.WarpedGP, parameter[name[X], name[Y]]]
call[call[name[warp_m]][constant[.*\.d]].constrain_fixed, parameter[constant[1.0]]]
variable[m] assign[=] call[name[GPy].models.GPRegression, parameter[name[X], name[Y]]]
call[name[m].optimize_restarts, parameter[]]
call[name[warp_m].optimize_restarts, parameter[]]
call[name[print], parameter[name[warp_m]]]
call[name[print], parameter[call[name[warp_m]][constant[.*warp.*]]]]
name[warp_m].predict_in_warped_space assign[=] constant[False]
call[name[warp_m].plot, parameter[]]
name[warp_m].predict_in_warped_space assign[=] constant[True]
call[name[warp_m].plot, parameter[]]
call[name[m].plot, parameter[]]
call[name[warp_m].plot_warping, parameter[]]
call[name[pb].show, parameter[]] | keyword[def] identifier[warped_gp_cubic_sine] ( identifier[max_iters] = literal[int] ):
literal[string]
identifier[X] =( literal[int] * identifier[np] . identifier[pi] )* identifier[np] . identifier[random] . identifier[random] ( literal[int] )- identifier[np] . identifier[pi]
identifier[Y] = identifier[np] . identifier[sin] ( identifier[X] )+ identifier[np] . identifier[random] . identifier[normal] ( literal[int] , literal[int] , literal[int] )
identifier[Y] = identifier[np] . identifier[array] ([ identifier[np] . identifier[power] ( identifier[abs] ( identifier[y] ), identifier[float] ( literal[int] )/ literal[int] )*( literal[int] ,- literal[int] )[ identifier[y] < literal[int] ] keyword[for] identifier[y] keyword[in] identifier[Y] ])
identifier[X] = identifier[X] [:, keyword[None] ]
identifier[Y] = identifier[Y] [:, keyword[None] ]
identifier[warp_k] = identifier[GPy] . identifier[kern] . identifier[RBF] ( literal[int] )
identifier[warp_f] = identifier[GPy] . identifier[util] . identifier[warping_functions] . identifier[TanhFunction] ( identifier[n_terms] = literal[int] )
identifier[warp_m] = identifier[GPy] . identifier[models] . identifier[WarpedGP] ( identifier[X] , identifier[Y] , identifier[kernel] = identifier[warp_k] , identifier[warping_function] = identifier[warp_f] )
identifier[warp_m] [ literal[string] ]. identifier[constrain_fixed] ( literal[int] )
identifier[m] = identifier[GPy] . identifier[models] . identifier[GPRegression] ( identifier[X] , identifier[Y] )
identifier[m] . identifier[optimize_restarts] ( identifier[parallel] = keyword[False] , identifier[robust] = keyword[True] , identifier[num_restarts] = literal[int] , identifier[max_iters] = identifier[max_iters] )
identifier[warp_m] . identifier[optimize_restarts] ( identifier[parallel] = keyword[False] , identifier[robust] = keyword[True] , identifier[num_restarts] = literal[int] , identifier[max_iters] = identifier[max_iters] )
identifier[print] ( identifier[warp_m] )
identifier[print] ( identifier[warp_m] [ literal[string] ])
identifier[warp_m] . identifier[predict_in_warped_space] = keyword[False]
identifier[warp_m] . identifier[plot] ( identifier[title] = literal[string] )
identifier[warp_m] . identifier[predict_in_warped_space] = keyword[True]
identifier[warp_m] . identifier[plot] ( identifier[title] = literal[string] )
identifier[m] . identifier[plot] ( identifier[title] = literal[string] )
identifier[warp_m] . identifier[plot_warping] ()
identifier[pb] . identifier[show] () | def warped_gp_cubic_sine(max_iters=100):
"""
A test replicating the cubic sine regression problem from
Snelson's paper.
"""
X = 2 * np.pi * np.random.random(151) - np.pi
Y = np.sin(X) + np.random.normal(0, 0.2, 151)
Y = np.array([np.power(abs(y), float(1) / 3) * (1, -1)[y < 0] for y in Y])
X = X[:, None]
Y = Y[:, None]
warp_k = GPy.kern.RBF(1)
warp_f = GPy.util.warping_functions.TanhFunction(n_terms=2)
warp_m = GPy.models.WarpedGP(X, Y, kernel=warp_k, warping_function=warp_f)
warp_m['.*\\.d'].constrain_fixed(1.0)
m = GPy.models.GPRegression(X, Y)
m.optimize_restarts(parallel=False, robust=True, num_restarts=5, max_iters=max_iters)
warp_m.optimize_restarts(parallel=False, robust=True, num_restarts=5, max_iters=max_iters)
#m.optimize(max_iters=max_iters)
#warp_m.optimize(max_iters=max_iters)
print(warp_m)
print(warp_m['.*warp.*'])
warp_m.predict_in_warped_space = False
warp_m.plot(title='Warped GP - Latent space')
warp_m.predict_in_warped_space = True
warp_m.plot(title='Warped GP - Warped space')
m.plot(title='Standard GP')
warp_m.plot_warping()
pb.show() |
def join(self, a, *args):
        """
        Join given arguments into the same set. Accepts one or more arguments.

        ``self._mapping`` maps every element to the shared list holding all
        members of its set, so two elements belong to the same set exactly
        when they map to the same list object.
        """
        mapping = self._mapping
        # Start from a's set, creating a singleton set if a is unseen.
        set_a = mapping.setdefault(a, [a])
        for arg in args:
            set_b = mapping.get(arg)
            if set_b is None:
                # Unseen element: absorb it directly into a's set.
                set_a.append(arg)
                mapping[arg] = set_a
            elif set_b is not set_a:
                # Distinct sets: merge the smaller into the larger (weighted
                # union) so the re-pointing loop below stays short.
                if len(set_b) > len(set_a):
                    set_a, set_b = set_b, set_a
                set_a.extend(set_b)
                for elem in set_b:
mapping[elem] = set_a | def function[join, parameter[self, a]]:
constant[
Join given arguments into the same set. Accepts one or more arguments.
]
variable[mapping] assign[=] name[self]._mapping
variable[set_a] assign[=] call[name[mapping].setdefault, parameter[name[a], list[[<ast.Name object at 0x7da20c6e54e0>]]]]
for taget[name[arg]] in starred[name[args]] begin[:]
variable[set_b] assign[=] call[name[mapping].get, parameter[name[arg]]]
if compare[name[set_b] is constant[None]] begin[:]
call[name[set_a].append, parameter[name[arg]]]
call[name[mapping]][name[arg]] assign[=] name[set_a] | keyword[def] identifier[join] ( identifier[self] , identifier[a] ,* identifier[args] ):
literal[string]
identifier[mapping] = identifier[self] . identifier[_mapping]
identifier[set_a] = identifier[mapping] . identifier[setdefault] ( identifier[a] ,[ identifier[a] ])
keyword[for] identifier[arg] keyword[in] identifier[args] :
identifier[set_b] = identifier[mapping] . identifier[get] ( identifier[arg] )
keyword[if] identifier[set_b] keyword[is] keyword[None] :
identifier[set_a] . identifier[append] ( identifier[arg] )
identifier[mapping] [ identifier[arg] ]= identifier[set_a]
keyword[elif] identifier[set_b] keyword[is] keyword[not] identifier[set_a] :
keyword[if] identifier[len] ( identifier[set_b] )> identifier[len] ( identifier[set_a] ):
identifier[set_a] , identifier[set_b] = identifier[set_b] , identifier[set_a]
identifier[set_a] . identifier[extend] ( identifier[set_b] )
keyword[for] identifier[elem] keyword[in] identifier[set_b] :
identifier[mapping] [ identifier[elem] ]= identifier[set_a] | def join(self, a, *args):
"""
Join given arguments into the same set. Accepts one or more arguments.
"""
mapping = self._mapping
set_a = mapping.setdefault(a, [a])
for arg in args:
set_b = mapping.get(arg)
if set_b is None:
set_a.append(arg)
mapping[arg] = set_a # depends on [control=['if'], data=[]]
elif set_b is not set_a:
if len(set_b) > len(set_a):
(set_a, set_b) = (set_b, set_a) # depends on [control=['if'], data=[]]
set_a.extend(set_b)
for elem in set_b:
mapping[elem] = set_a # depends on [control=['for'], data=['elem']] # depends on [control=['if'], data=['set_b', 'set_a']] # depends on [control=['for'], data=['arg']] |
def apply_injectables(self, targets):
    """Given an iterable of `Target` instances, apply their transitive injectables."""
    # All distinct target classes present in `targets`.
    target_types = {type(t) for t in targets}
    # Union of every subsystem those target classes declare via subsystems().
    target_subsystem_deps = {s for s in itertools.chain(*(t.subsystems() for t in target_types))}
    for subsystem in target_subsystem_deps:
      # TODO: The is_initialized() check is primarily for tests and would be nice to do away with.
      if issubclass(subsystem, InjectablesMixin) and subsystem.is_initialized():
subsystem.global_instance().injectables(self) | def function[apply_injectables, parameter[self, targets]]:
constant[Given an iterable of `Target` instances, apply their transitive injectables.]
variable[target_types] assign[=] <ast.SetComp object at 0x7da1b1d6de10>
variable[target_subsystem_deps] assign[=] <ast.SetComp object at 0x7da1b1d6ee90>
for taget[name[subsystem]] in starred[name[target_subsystem_deps]] begin[:]
if <ast.BoolOp object at 0x7da1b1d6f2b0> begin[:]
call[call[name[subsystem].global_instance, parameter[]].injectables, parameter[name[self]]] | keyword[def] identifier[apply_injectables] ( identifier[self] , identifier[targets] ):
literal[string]
identifier[target_types] ={ identifier[type] ( identifier[t] ) keyword[for] identifier[t] keyword[in] identifier[targets] }
identifier[target_subsystem_deps] ={ identifier[s] keyword[for] identifier[s] keyword[in] identifier[itertools] . identifier[chain] (*( identifier[t] . identifier[subsystems] () keyword[for] identifier[t] keyword[in] identifier[target_types] ))}
keyword[for] identifier[subsystem] keyword[in] identifier[target_subsystem_deps] :
keyword[if] identifier[issubclass] ( identifier[subsystem] , identifier[InjectablesMixin] ) keyword[and] identifier[subsystem] . identifier[is_initialized] ():
identifier[subsystem] . identifier[global_instance] (). identifier[injectables] ( identifier[self] ) | def apply_injectables(self, targets):
"""Given an iterable of `Target` instances, apply their transitive injectables."""
target_types = {type(t) for t in targets}
target_subsystem_deps = {s for s in itertools.chain(*(t.subsystems() for t in target_types))}
for subsystem in target_subsystem_deps:
# TODO: The is_initialized() check is primarily for tests and would be nice to do away with.
if issubclass(subsystem, InjectablesMixin) and subsystem.is_initialized():
subsystem.global_instance().injectables(self) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['subsystem']] |
def trigger_event(self, module_name, event):
    """
    Trigger an event on a named module.

    :param module_name: name of the module that should receive the event;
        falsy values make this a silent no-op.
    :param event: event payload forwarded verbatim to the events thread.
    """
    # Only dispatch when a module is actually named; an empty/None name
    # means the event has no target and is dropped.
    if module_name:
self._py3_wrapper.events_thread.process_event(module_name, event) | def function[trigger_event, parameter[self, module_name, event]]:
constant[
Trigger an event on a named module.
]
if name[module_name] begin[:]
call[name[self]._py3_wrapper.events_thread.process_event, parameter[name[module_name], name[event]]] | keyword[def] identifier[trigger_event] ( identifier[self] , identifier[module_name] , identifier[event] ):
literal[string]
keyword[if] identifier[module_name] :
identifier[self] . identifier[_py3_wrapper] . identifier[events_thread] . identifier[process_event] ( identifier[module_name] , identifier[event] ) | def trigger_event(self, module_name, event):
"""
Trigger an event on a named module.
"""
if module_name:
self._py3_wrapper.events_thread.process_event(module_name, event) # depends on [control=['if'], data=[]] |
def getdebug(environ=os.environ, true_values=TRUE_VALUES):
    '''
    Get if app is expected to be run in debug mode looking at environment
    variables.

    The ``DEBUG`` variable is read case-insensitively and compared against
    ``true_values`` (a module-level collection of true-like strings defined
    elsewhere in this module).

    :param environ: environment dict-like object
    :type environ: collections.abc.Mapping
    :param true_values: lowercase strings that count as "true"
    :returns: True if debug contains a true-like string, False otherwise
    :rtype: bool
    '''
return environ.get('DEBUG', '').lower() in true_values | def function[getdebug, parameter[environ, true_values]]:
constant[
Get if app is expected to be ran in debug mode looking at environment
variables.
:param environ: environment dict-like object
:type environ: collections.abc.Mapping
:returns: True if debug contains a true-like string, False otherwise
:rtype: bool
]
return[compare[call[call[name[environ].get, parameter[constant[DEBUG], constant[]]].lower, parameter[]] in name[true_values]]] | keyword[def] identifier[getdebug] ( identifier[environ] = identifier[os] . identifier[environ] , identifier[true_values] = identifier[TRUE_VALUES] ):
literal[string]
keyword[return] identifier[environ] . identifier[get] ( literal[string] , literal[string] ). identifier[lower] () keyword[in] identifier[true_values] | def getdebug(environ=os.environ, true_values=TRUE_VALUES):
"""
Get if app is expected to be ran in debug mode looking at environment
variables.
:param environ: environment dict-like object
:type environ: collections.abc.Mapping
:returns: True if debug contains a true-like string, False otherwise
:rtype: bool
"""
return environ.get('DEBUG', '').lower() in true_values |
async def write_aldb(self, addr, mem_addr: int, mode: str, group: int,
                     target, data1=0x00, data2=0x00, data3=0x00):
        """Write a device All-Link record.

        Looks the device up by address, asks it to write the record, then
        polls once per second until its ALDB leaves the LOADING state.

        :param addr: device address (anything ``Address`` accepts)
        :param mem_addr: ALDB memory address of the record to write
        :param mode: record mode string (passed through to the device)
        :param group: All-Link group number
        :param target: target device address for the record
        :param data1: record data byte 1
        :param data2: record data byte 2
        :param data3: record data byte 3
        """
        dev_addr = Address(addr)
        target_addr = Address(target)
        # Presumably falsy/None when the address is unknown, in which case
        # the write is skipped -- verify plm.devices lookup semantics.
        device = self.plm.devices[dev_addr.id]
        if device:
            _LOGGING.debug('calling device write_aldb')
            device.write_aldb(mem_addr, mode, group, target_addr,
                              data1, data2, data3)
            # NOTE(review): the `loop=` argument to asyncio.sleep() was
            # removed in Python 3.10 -- confirm the supported Python range.
            await asyncio.sleep(1, loop=self.loop)
            # Poll until the device reports its ALDB is done loading.
            while device.aldb.status == ALDBStatus.LOADING:
                await asyncio.sleep(1, loop=self.loop)
self.print_device_aldb(addr) | <ast.AsyncFunctionDef object at 0x7da1b1a463b0> | keyword[async] keyword[def] identifier[write_aldb] ( identifier[self] , identifier[addr] , identifier[mem_addr] : identifier[int] , identifier[mode] : identifier[str] , identifier[group] : identifier[int] ,
identifier[target] , identifier[data1] = literal[int] , identifier[data2] = literal[int] , identifier[data3] = literal[int] ):
literal[string]
identifier[dev_addr] = identifier[Address] ( identifier[addr] )
identifier[target_addr] = identifier[Address] ( identifier[target] )
identifier[device] = identifier[self] . identifier[plm] . identifier[devices] [ identifier[dev_addr] . identifier[id] ]
keyword[if] identifier[device] :
identifier[_LOGGING] . identifier[debug] ( literal[string] )
identifier[device] . identifier[write_aldb] ( identifier[mem_addr] , identifier[mode] , identifier[group] , identifier[target_addr] ,
identifier[data1] , identifier[data2] , identifier[data3] )
keyword[await] identifier[asyncio] . identifier[sleep] ( literal[int] , identifier[loop] = identifier[self] . identifier[loop] )
keyword[while] identifier[device] . identifier[aldb] . identifier[status] == identifier[ALDBStatus] . identifier[LOADING] :
keyword[await] identifier[asyncio] . identifier[sleep] ( literal[int] , identifier[loop] = identifier[self] . identifier[loop] )
identifier[self] . identifier[print_device_aldb] ( identifier[addr] ) | async def write_aldb(self, addr, mem_addr: int, mode: str, group: int, target, data1=0, data2=0, data3=0):
"""Write a device All-Link record."""
dev_addr = Address(addr)
target_addr = Address(target)
device = self.plm.devices[dev_addr.id]
if device:
_LOGGING.debug('calling device write_aldb')
device.write_aldb(mem_addr, mode, group, target_addr, data1, data2, data3)
await asyncio.sleep(1, loop=self.loop)
while device.aldb.status == ALDBStatus.LOADING:
await asyncio.sleep(1, loop=self.loop) # depends on [control=['while'], data=[]]
self.print_device_aldb(addr) # depends on [control=['if'], data=[]] |
def _proxy(self):
        """
        Generate an instance context for the instance, the context is capable of
        performing various actions. All instance actions are proxied to the context

        :returns: MessageInteractionContext for this MessageInteractionInstance
        :rtype: twilio.rest.proxy.v1.service.session.participant.message_interaction.MessageInteractionContext
        """
        # Lazily build the context on first use and cache it on the
        # instance; subsequent calls reuse the cached object.
        if self._context is None:
            self._context = MessageInteractionContext(
                self._version,
                service_sid=self._solution['service_sid'],
                session_sid=self._solution['session_sid'],
                participant_sid=self._solution['participant_sid'],
                sid=self._solution['sid'],
            )
return self._context | def function[_proxy, parameter[self]]:
constant[
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: MessageInteractionContext for this MessageInteractionInstance
:rtype: twilio.rest.proxy.v1.service.session.participant.message_interaction.MessageInteractionContext
]
if compare[name[self]._context is constant[None]] begin[:]
name[self]._context assign[=] call[name[MessageInteractionContext], parameter[name[self]._version]]
return[name[self]._context] | keyword[def] identifier[_proxy] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_context] keyword[is] keyword[None] :
identifier[self] . identifier[_context] = identifier[MessageInteractionContext] (
identifier[self] . identifier[_version] ,
identifier[service_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[session_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[participant_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[sid] = identifier[self] . identifier[_solution] [ literal[string] ],
)
keyword[return] identifier[self] . identifier[_context] | def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: MessageInteractionContext for this MessageInteractionInstance
:rtype: twilio.rest.proxy.v1.service.session.participant.message_interaction.MessageInteractionContext
"""
if self._context is None:
self._context = MessageInteractionContext(self._version, service_sid=self._solution['service_sid'], session_sid=self._solution['session_sid'], participant_sid=self._solution['participant_sid'], sid=self._solution['sid']) # depends on [control=['if'], data=[]]
return self._context |
def live_weather(self, live_weather):
        """Print the current conditions summary in a pretty format.

        :param live_weather: forecast payload; ``['currently']['summary']``
            is extracted and handed to ``self.summary`` for display.
        """
        # Pull the human-readable description of current conditions and
        # delegate the pretty-printing to the summary helper.
        current_conditions = live_weather['currently']['summary']
        self.summary(current_conditions)
click.echo() | def function[live_weather, parameter[self, live_weather]]:
constant[Prints the live weather in a pretty format]
variable[summary] assign[=] call[call[name[live_weather]][constant[currently]]][constant[summary]]
call[name[self].summary, parameter[name[summary]]]
call[name[click].echo, parameter[]] | keyword[def] identifier[live_weather] ( identifier[self] , identifier[live_weather] ):
literal[string]
identifier[summary] = identifier[live_weather] [ literal[string] ][ literal[string] ]
identifier[self] . identifier[summary] ( identifier[summary] )
identifier[click] . identifier[echo] () | def live_weather(self, live_weather):
"""Prints the live weather in a pretty format"""
summary = live_weather['currently']['summary']
self.summary(summary)
click.echo() |
def _dump_cnt(self):
'''Dump counters to file'''
self._cnt['1h'].dump(os.path.join(self.data_path, 'scheduler.1h'))
self._cnt['1d'].dump(os.path.join(self.data_path, 'scheduler.1d'))
self._cnt['all'].dump(os.path.join(self.data_path, 'scheduler.all')) | def function[_dump_cnt, parameter[self]]:
constant[Dump counters to file]
call[call[name[self]._cnt][constant[1h]].dump, parameter[call[name[os].path.join, parameter[name[self].data_path, constant[scheduler.1h]]]]]
call[call[name[self]._cnt][constant[1d]].dump, parameter[call[name[os].path.join, parameter[name[self].data_path, constant[scheduler.1d]]]]]
call[call[name[self]._cnt][constant[all]].dump, parameter[call[name[os].path.join, parameter[name[self].data_path, constant[scheduler.all]]]]] | keyword[def] identifier[_dump_cnt] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_cnt] [ literal[string] ]. identifier[dump] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[data_path] , literal[string] ))
identifier[self] . identifier[_cnt] [ literal[string] ]. identifier[dump] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[data_path] , literal[string] ))
identifier[self] . identifier[_cnt] [ literal[string] ]. identifier[dump] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[data_path] , literal[string] )) | def _dump_cnt(self):
"""Dump counters to file"""
self._cnt['1h'].dump(os.path.join(self.data_path, 'scheduler.1h'))
self._cnt['1d'].dump(os.path.join(self.data_path, 'scheduler.1d'))
self._cnt['all'].dump(os.path.join(self.data_path, 'scheduler.all')) |
def disconnect_pv_clients(self, mris):
        # type: (List[str]) -> None
        """Disconnect anyone listening to any of the given mris"""
        for mri in mris:
            # pop() both retrieves and removes the mri's PV registry entry,
            # so no client bookkeeping survives the disconnect.
            for pv in self._pvs.pop(mri, {}).values():
                # Close pv with force destroy on, this will call
                # onLastDisconnect
pv.close(destroy=True, sync=True, timeout=1.0) | def function[disconnect_pv_clients, parameter[self, mris]]:
constant[Disconnect anyone listening to any of the given mris]
for taget[name[mri]] in starred[name[mris]] begin[:]
for taget[name[pv]] in starred[call[call[name[self]._pvs.pop, parameter[name[mri], dictionary[[], []]]].values, parameter[]]] begin[:]
call[name[pv].close, parameter[]] | keyword[def] identifier[disconnect_pv_clients] ( identifier[self] , identifier[mris] ):
literal[string]
keyword[for] identifier[mri] keyword[in] identifier[mris] :
keyword[for] identifier[pv] keyword[in] identifier[self] . identifier[_pvs] . identifier[pop] ( identifier[mri] ,{}). identifier[values] ():
identifier[pv] . identifier[close] ( identifier[destroy] = keyword[True] , identifier[sync] = keyword[True] , identifier[timeout] = literal[int] ) | def disconnect_pv_clients(self, mris):
# type: (List[str]) -> None
'Disconnect anyone listening to any of the given mris'
for mri in mris:
for pv in self._pvs.pop(mri, {}).values():
# Close pv with force destroy on, this will call
# onLastDisconnect
pv.close(destroy=True, sync=True, timeout=1.0) # depends on [control=['for'], data=['pv']] # depends on [control=['for'], data=['mri']] |
def clip_eta(eta, ord, eps):
  """
  Clip the perturbation ``eta`` into the eps-ball of the requested norm.

  PyTorch counterpart of ``clip_eta`` in utils_tf.

  :param eta: perturbation Tensor (modified in place for ord 1 and 2)
  :param ord: np.inf, 1, or 2 -- which norm ball to clip into
  :param eps: float radius of the norm ball
  """
  if ord not in [np.inf, 1, 2]:
    raise ValueError('ord must be np.inf, 1, or 2.')

  # Guards against dividing by a zero norm below.
  tiny = torch.tensor(1e-12, dtype=eta.dtype, device=eta.device)
  # Reduce over every axis except the batch axis 0.
  batch_dims = list(range(1, len(eta.size())))

  if ord == np.inf:
    # The inf-ball is just an elementwise interval clamp.
    eta = torch.clamp(eta, -eps, eps)
  else:
    if ord == 1:
      # TODO: per the original note, this is only an approximate L1 clip
      # (radial rescaling rather than a true projection).
      current = torch.max(
          tiny,
          torch.sum(torch.abs(eta), dim=batch_dims, keepdim=True)
      )
    elif ord == 2:
      current = torch.sqrt(torch.max(
          tiny,
          torch.sum(eta ** 2, dim=batch_dims, keepdim=True)
      ))
    # Shrink only examples whose norm exceeds eps (scale capped at 1).
    one = torch.tensor(1., dtype=eta.dtype, device=eta.device)
    scale = torch.min(one, eps / current)
    eta *= scale
return eta | def function[clip_eta, parameter[eta, ord, eps]]:
constant[
PyTorch implementation of the clip_eta in utils_tf.
:param eta: Tensor
:param ord: np.inf, 1, or 2
:param eps: float
]
if compare[name[ord] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Attribute object at 0x7da1b1f76a70>, <ast.Constant object at 0x7da1b1f77f70>, <ast.Constant object at 0x7da1b1f75030>]]] begin[:]
<ast.Raise object at 0x7da1b1f75660>
variable[avoid_zero_div] assign[=] call[name[torch].tensor, parameter[constant[1e-12]]]
variable[reduc_ind] assign[=] call[name[list], parameter[call[name[range], parameter[constant[1], call[name[len], parameter[call[name[eta].size, parameter[]]]]]]]]
if compare[name[ord] equal[==] name[np].inf] begin[:]
variable[eta] assign[=] call[name[torch].clamp, parameter[name[eta], <ast.UnaryOp object at 0x7da1b1f77910>, name[eps]]]
return[name[eta]] | keyword[def] identifier[clip_eta] ( identifier[eta] , identifier[ord] , identifier[eps] ):
literal[string]
keyword[if] identifier[ord] keyword[not] keyword[in] [ identifier[np] . identifier[inf] , literal[int] , literal[int] ]:
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[avoid_zero_div] = identifier[torch] . identifier[tensor] ( literal[int] , identifier[dtype] = identifier[eta] . identifier[dtype] , identifier[device] = identifier[eta] . identifier[device] )
identifier[reduc_ind] = identifier[list] ( identifier[range] ( literal[int] , identifier[len] ( identifier[eta] . identifier[size] ())))
keyword[if] identifier[ord] == identifier[np] . identifier[inf] :
identifier[eta] = identifier[torch] . identifier[clamp] ( identifier[eta] ,- identifier[eps] , identifier[eps] )
keyword[else] :
keyword[if] identifier[ord] == literal[int] :
identifier[norm] = identifier[torch] . identifier[max] (
identifier[avoid_zero_div] ,
identifier[torch] . identifier[sum] ( identifier[torch] . identifier[abs] ( identifier[eta] ), identifier[dim] = identifier[reduc_ind] , identifier[keepdim] = keyword[True] )
)
keyword[elif] identifier[ord] == literal[int] :
identifier[norm] = identifier[torch] . identifier[sqrt] ( identifier[torch] . identifier[max] (
identifier[avoid_zero_div] ,
identifier[torch] . identifier[sum] ( identifier[eta] ** literal[int] , identifier[dim] = identifier[reduc_ind] , identifier[keepdim] = keyword[True] )
))
identifier[factor] = identifier[torch] . identifier[min] (
identifier[torch] . identifier[tensor] ( literal[int] , identifier[dtype] = identifier[eta] . identifier[dtype] , identifier[device] = identifier[eta] . identifier[device] ),
identifier[eps] / identifier[norm]
)
identifier[eta] *= identifier[factor]
keyword[return] identifier[eta] | def clip_eta(eta, ord, eps):
"""
PyTorch implementation of the clip_eta in utils_tf.
:param eta: Tensor
:param ord: np.inf, 1, or 2
:param eps: float
"""
if ord not in [np.inf, 1, 2]:
raise ValueError('ord must be np.inf, 1, or 2.') # depends on [control=['if'], data=[]]
avoid_zero_div = torch.tensor(1e-12, dtype=eta.dtype, device=eta.device)
reduc_ind = list(range(1, len(eta.size())))
if ord == np.inf:
eta = torch.clamp(eta, -eps, eps) # depends on [control=['if'], data=[]]
else:
if ord == 1:
# TODO
# raise NotImplementedError("L1 clip is not implemented.")
norm = torch.max(avoid_zero_div, torch.sum(torch.abs(eta), dim=reduc_ind, keepdim=True)) # depends on [control=['if'], data=[]]
elif ord == 2:
norm = torch.sqrt(torch.max(avoid_zero_div, torch.sum(eta ** 2, dim=reduc_ind, keepdim=True))) # depends on [control=['if'], data=[]]
factor = torch.min(torch.tensor(1.0, dtype=eta.dtype, device=eta.device), eps / norm)
eta *= factor
return eta |
def verify_pubkey_sig(self, message, sig):
        '''
        Wraps the verify_signature method so we have
        additional checks.

        :param message: payload whose signature is being checked
        :param sig: base64-encoded signature of ``message``
        :rtype: bool
        :return: Success or failure of public key verification
        '''
        if self.opts['master_sign_key_name']:
            path = os.path.join(self.opts['pki_dir'],
                                self.opts['master_sign_key_name'] + '.pub')
            if os.path.isfile(path):
                # The signature travels base64-encoded; decode it before
                # handing it to the crypto layer.
                res = verify_signature(path,
                                       message,
                                       binascii.a2b_base64(sig))
            else:
                # Without the verification public key nothing can be
                # checked; fail closed.
                log.error(
                    'Verification public key %s does not exist. You need to '
                    'copy it from the master to the minions pki directory',
                    os.path.basename(path)
                )
                return False
            if res:
                log.debug(
                    'Successfully verified signature of master public key '
                    'with verification public key %s',
                    self.opts['master_sign_key_name'] + '.pub'
                )
                return True
            else:
                log.debug('Failed to verify signature of public key')
                return False
        else:
            # No key-pair name configured: verification is impossible.
            log.error(
                'Failed to verify the signature of the message because the '
                'verification key-pairs name is not defined. Please make '
                'sure that master_sign_key_name is defined.'
            )
return False | def function[verify_pubkey_sig, parameter[self, message, sig]]:
constant[
Wraps the verify_signature method so we have
additional checks.
:rtype: bool
:return: Success or failure of public key verification
]
if call[name[self].opts][constant[master_sign_key_name]] begin[:]
variable[path] assign[=] call[name[os].path.join, parameter[call[name[self].opts][constant[pki_dir]], binary_operation[call[name[self].opts][constant[master_sign_key_name]] + constant[.pub]]]]
if call[name[os].path.isfile, parameter[name[path]]] begin[:]
variable[res] assign[=] call[name[verify_signature], parameter[name[path], name[message], call[name[binascii].a2b_base64, parameter[name[sig]]]]]
if name[res] begin[:]
call[name[log].debug, parameter[constant[Successfully verified signature of master public key with verification public key %s], binary_operation[call[name[self].opts][constant[master_sign_key_name]] + constant[.pub]]]]
return[constant[True]] | keyword[def] identifier[verify_pubkey_sig] ( identifier[self] , identifier[message] , identifier[sig] ):
literal[string]
keyword[if] identifier[self] . identifier[opts] [ literal[string] ]:
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[opts] [ literal[string] ],
identifier[self] . identifier[opts] [ literal[string] ]+ literal[string] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[path] ):
identifier[res] = identifier[verify_signature] ( identifier[path] ,
identifier[message] ,
identifier[binascii] . identifier[a2b_base64] ( identifier[sig] ))
keyword[else] :
identifier[log] . identifier[error] (
literal[string]
literal[string] ,
identifier[os] . identifier[path] . identifier[basename] ( identifier[path] )
)
keyword[return] keyword[False]
keyword[if] identifier[res] :
identifier[log] . identifier[debug] (
literal[string]
literal[string] ,
identifier[self] . identifier[opts] [ literal[string] ]+ literal[string]
)
keyword[return] keyword[True]
keyword[else] :
identifier[log] . identifier[debug] ( literal[string] )
keyword[return] keyword[False]
keyword[else] :
identifier[log] . identifier[error] (
literal[string]
literal[string]
literal[string]
)
keyword[return] keyword[False] | def verify_pubkey_sig(self, message, sig):
"""
Wraps the verify_signature method so we have
additional checks.
:rtype: bool
:return: Success or failure of public key verification
"""
if self.opts['master_sign_key_name']:
path = os.path.join(self.opts['pki_dir'], self.opts['master_sign_key_name'] + '.pub')
if os.path.isfile(path):
res = verify_signature(path, message, binascii.a2b_base64(sig)) # depends on [control=['if'], data=[]]
else:
log.error('Verification public key %s does not exist. You need to copy it from the master to the minions pki directory', os.path.basename(path))
return False
if res:
log.debug('Successfully verified signature of master public key with verification public key %s', self.opts['master_sign_key_name'] + '.pub')
return True # depends on [control=['if'], data=[]]
else:
log.debug('Failed to verify signature of public key')
return False # depends on [control=['if'], data=[]]
else:
log.error('Failed to verify the signature of the message because the verification key-pairs name is not defined. Please make sure that master_sign_key_name is defined.')
return False |
def random_adjspecies_pair(maxlen=None, prevent_stutter=True):
"""
Return an ordered 2-tuple containing a species and a describer.
The letter-count of the pair is guarantee to not exceed `maxlen` if
it is given. If `prevent_stutter` is True, the last letter of the
first item of the pair will be different from the first letter of
the second item.
"""
while True:
pair = _random_adjspecies_pair()
if maxlen and len(''.join(pair)) > maxlen:
continue
if prevent_stutter and pair[0][-1] == pair[1][0]:
continue
return pair | def function[random_adjspecies_pair, parameter[maxlen, prevent_stutter]]:
constant[
Return an ordered 2-tuple containing a species and a describer.
The letter-count of the pair is guarantee to not exceed `maxlen` if
it is given. If `prevent_stutter` is True, the last letter of the
first item of the pair will be different from the first letter of
the second item.
]
while constant[True] begin[:]
variable[pair] assign[=] call[name[_random_adjspecies_pair], parameter[]]
if <ast.BoolOp object at 0x7da1b01a5a20> begin[:]
continue
if <ast.BoolOp object at 0x7da1b01a7cd0> begin[:]
continue
return[name[pair]] | keyword[def] identifier[random_adjspecies_pair] ( identifier[maxlen] = keyword[None] , identifier[prevent_stutter] = keyword[True] ):
literal[string]
keyword[while] keyword[True] :
identifier[pair] = identifier[_random_adjspecies_pair] ()
keyword[if] identifier[maxlen] keyword[and] identifier[len] ( literal[string] . identifier[join] ( identifier[pair] ))> identifier[maxlen] :
keyword[continue]
keyword[if] identifier[prevent_stutter] keyword[and] identifier[pair] [ literal[int] ][- literal[int] ]== identifier[pair] [ literal[int] ][ literal[int] ]:
keyword[continue]
keyword[return] identifier[pair] | def random_adjspecies_pair(maxlen=None, prevent_stutter=True):
"""
Return an ordered 2-tuple containing a species and a describer.
The letter-count of the pair is guarantee to not exceed `maxlen` if
it is given. If `prevent_stutter` is True, the last letter of the
first item of the pair will be different from the first letter of
the second item.
"""
while True:
pair = _random_adjspecies_pair()
if maxlen and len(''.join(pair)) > maxlen:
continue # depends on [control=['if'], data=[]]
if prevent_stutter and pair[0][-1] == pair[1][0]:
continue # depends on [control=['if'], data=[]]
return pair # depends on [control=['while'], data=[]] |
def _deprecated_register_to_python(self, cd, name, converter=None):
"""Register a conversion from OpenMath to Python
This function has two forms. A three-arguments one:
:param cd: A content dictionary name
:type cd: str
:param name: A symbol name
:type name: str
:param converter: A conversion function, or a Python object
:type: Callable, Any
Any object of type ``openmath.OMSymbol``, with content
dictionary equal to ``cd`` and name equal to ``name`` will be converted
using ``converter``. Also, any object of type ``openmath.OMApplication``
whose first child is an ``openmath.OMSymbol`` as above will be converted
using ``converter``. If ``converter`` is a callable, it will be called with the
OpenMath object as parameter; otherwise ``converter`` will be returned.
In the two-argument form
:param cd: A subclass of ``OMAny``
:type cd: type
:param name: A conversion function
:type name: Callable
Any object of type ``cd`` will be passed to ``name()``, and the
result will be returned. This forms is mainly to override default
conversions for basic OpenMath tags (OMInteger, OMString, etc.). It
is discouraged to use it for ``OMSymbol`` and ``OMApplication``.
"""
if converter is None:
if isclass(cd) and issubclass(cd, om.OMAny):
self._conv_to_py[cd] = name
else:
raise TypeError('Two-arguments form expects subclass of openmath.OMAny, found %r' % cd)
else:
if isinstance(cd, str) and isinstance(name, str):
self._conv_sym_to_py[(cd, name)] = converter
else:
raise TypeError('Three-arguments form expects string, found %r' % cd.__class__) | def function[_deprecated_register_to_python, parameter[self, cd, name, converter]]:
constant[Register a conversion from OpenMath to Python
This function has two forms. A three-arguments one:
:param cd: A content dictionary name
:type cd: str
:param name: A symbol name
:type name: str
:param converter: A conversion function, or a Python object
:type: Callable, Any
Any object of type ``openmath.OMSymbol``, with content
dictionary equal to ``cd`` and name equal to ``name`` will be converted
using ``converter``. Also, any object of type ``openmath.OMApplication``
whose first child is an ``openmath.OMSymbol`` as above will be converted
using ``converter``. If ``converter`` is a callable, it will be called with the
OpenMath object as parameter; otherwise ``converter`` will be returned.
In the two-argument form
:param cd: A subclass of ``OMAny``
:type cd: type
:param name: A conversion function
:type name: Callable
Any object of type ``cd`` will be passed to ``name()``, and the
result will be returned. This forms is mainly to override default
conversions for basic OpenMath tags (OMInteger, OMString, etc.). It
is discouraged to use it for ``OMSymbol`` and ``OMApplication``.
]
if compare[name[converter] is constant[None]] begin[:]
if <ast.BoolOp object at 0x7da1b0213a30> begin[:]
call[name[self]._conv_to_py][name[cd]] assign[=] name[name] | keyword[def] identifier[_deprecated_register_to_python] ( identifier[self] , identifier[cd] , identifier[name] , identifier[converter] = keyword[None] ):
literal[string]
keyword[if] identifier[converter] keyword[is] keyword[None] :
keyword[if] identifier[isclass] ( identifier[cd] ) keyword[and] identifier[issubclass] ( identifier[cd] , identifier[om] . identifier[OMAny] ):
identifier[self] . identifier[_conv_to_py] [ identifier[cd] ]= identifier[name]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] % identifier[cd] )
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[cd] , identifier[str] ) keyword[and] identifier[isinstance] ( identifier[name] , identifier[str] ):
identifier[self] . identifier[_conv_sym_to_py] [( identifier[cd] , identifier[name] )]= identifier[converter]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] % identifier[cd] . identifier[__class__] ) | def _deprecated_register_to_python(self, cd, name, converter=None):
"""Register a conversion from OpenMath to Python
This function has two forms. A three-arguments one:
:param cd: A content dictionary name
:type cd: str
:param name: A symbol name
:type name: str
:param converter: A conversion function, or a Python object
:type: Callable, Any
Any object of type ``openmath.OMSymbol``, with content
dictionary equal to ``cd`` and name equal to ``name`` will be converted
using ``converter``. Also, any object of type ``openmath.OMApplication``
whose first child is an ``openmath.OMSymbol`` as above will be converted
using ``converter``. If ``converter`` is a callable, it will be called with the
OpenMath object as parameter; otherwise ``converter`` will be returned.
In the two-argument form
:param cd: A subclass of ``OMAny``
:type cd: type
:param name: A conversion function
:type name: Callable
Any object of type ``cd`` will be passed to ``name()``, and the
result will be returned. This forms is mainly to override default
conversions for basic OpenMath tags (OMInteger, OMString, etc.). It
is discouraged to use it for ``OMSymbol`` and ``OMApplication``.
"""
if converter is None:
if isclass(cd) and issubclass(cd, om.OMAny):
self._conv_to_py[cd] = name # depends on [control=['if'], data=[]]
else:
raise TypeError('Two-arguments form expects subclass of openmath.OMAny, found %r' % cd) # depends on [control=['if'], data=[]]
elif isinstance(cd, str) and isinstance(name, str):
self._conv_sym_to_py[cd, name] = converter # depends on [control=['if'], data=[]]
else:
raise TypeError('Three-arguments form expects string, found %r' % cd.__class__) |
def _strip_space(x):
"""Strip the SPACE.element( ... ) part from a repr."""
r = repr(x)
space_repr = '{!r}.element('.format(x.space)
if r.startswith(space_repr) and r.endswith(')'):
r = r[len(space_repr):-1]
return r | def function[_strip_space, parameter[x]]:
constant[Strip the SPACE.element( ... ) part from a repr.]
variable[r] assign[=] call[name[repr], parameter[name[x]]]
variable[space_repr] assign[=] call[constant[{!r}.element(].format, parameter[name[x].space]]
if <ast.BoolOp object at 0x7da1b1e5db10> begin[:]
variable[r] assign[=] call[name[r]][<ast.Slice object at 0x7da1b1ec7340>]
return[name[r]] | keyword[def] identifier[_strip_space] ( identifier[x] ):
literal[string]
identifier[r] = identifier[repr] ( identifier[x] )
identifier[space_repr] = literal[string] . identifier[format] ( identifier[x] . identifier[space] )
keyword[if] identifier[r] . identifier[startswith] ( identifier[space_repr] ) keyword[and] identifier[r] . identifier[endswith] ( literal[string] ):
identifier[r] = identifier[r] [ identifier[len] ( identifier[space_repr] ):- literal[int] ]
keyword[return] identifier[r] | def _strip_space(x):
"""Strip the SPACE.element( ... ) part from a repr."""
r = repr(x)
space_repr = '{!r}.element('.format(x.space)
if r.startswith(space_repr) and r.endswith(')'):
r = r[len(space_repr):-1] # depends on [control=['if'], data=[]]
return r |
def gravatar(hash, size=100, rating='g', default='identicon', include_extension=False, force_default=False):
"""Pass email hash, return Gravatar URL. You can get email hash like this::
import hashlib
avatar_hash = hashlib.md5(email.lower().encode('utf-8')).hexdigest()
Visit https://en.gravatar.com/site/implement/images/ for more information.
:param hash: The email hash used to generate avatar URL.
:param size: The size of the avatar, default to 100 pixel.
:param rating: The rating of the avatar, default to ``g``
:param default: The type of default avatar, default to ``identicon``.
:param include_extension: Append a '.jpg' extension at the end of URL, default to ``False``.
:param force_default: Force to use default avatar, default to ``False``.
"""
if include_extension:
hash += '.jpg'
default = default or current_app.config['AVATARS_GRAVATAR_DEFAULT']
query_string = urlencode({'s': int(size), 'r': rating, 'd': default})
if force_default:
query_string += '&q=y'
return 'https://gravatar.com/avatar/' + hash + '?' + query_string | def function[gravatar, parameter[hash, size, rating, default, include_extension, force_default]]:
constant[Pass email hash, return Gravatar URL. You can get email hash like this::
import hashlib
avatar_hash = hashlib.md5(email.lower().encode('utf-8')).hexdigest()
Visit https://en.gravatar.com/site/implement/images/ for more information.
:param hash: The email hash used to generate avatar URL.
:param size: The size of the avatar, default to 100 pixel.
:param rating: The rating of the avatar, default to ``g``
:param default: The type of default avatar, default to ``identicon``.
:param include_extension: Append a '.jpg' extension at the end of URL, default to ``False``.
:param force_default: Force to use default avatar, default to ``False``.
]
if name[include_extension] begin[:]
<ast.AugAssign object at 0x7da18c4ce170>
variable[default] assign[=] <ast.BoolOp object at 0x7da18c4ce7d0>
variable[query_string] assign[=] call[name[urlencode], parameter[dictionary[[<ast.Constant object at 0x7da18c4ce9e0>, <ast.Constant object at 0x7da18c4cda50>, <ast.Constant object at 0x7da18c4cc880>], [<ast.Call object at 0x7da18c4ccaf0>, <ast.Name object at 0x7da18c4cf700>, <ast.Name object at 0x7da18c4ce5c0>]]]]
if name[force_default] begin[:]
<ast.AugAssign object at 0x7da18c4cf250>
return[binary_operation[binary_operation[binary_operation[constant[https://gravatar.com/avatar/] + name[hash]] + constant[?]] + name[query_string]]] | keyword[def] identifier[gravatar] ( identifier[hash] , identifier[size] = literal[int] , identifier[rating] = literal[string] , identifier[default] = literal[string] , identifier[include_extension] = keyword[False] , identifier[force_default] = keyword[False] ):
literal[string]
keyword[if] identifier[include_extension] :
identifier[hash] += literal[string]
identifier[default] = identifier[default] keyword[or] identifier[current_app] . identifier[config] [ literal[string] ]
identifier[query_string] = identifier[urlencode] ({ literal[string] : identifier[int] ( identifier[size] ), literal[string] : identifier[rating] , literal[string] : identifier[default] })
keyword[if] identifier[force_default] :
identifier[query_string] += literal[string]
keyword[return] literal[string] + identifier[hash] + literal[string] + identifier[query_string] | def gravatar(hash, size=100, rating='g', default='identicon', include_extension=False, force_default=False):
"""Pass email hash, return Gravatar URL. You can get email hash like this::
import hashlib
avatar_hash = hashlib.md5(email.lower().encode('utf-8')).hexdigest()
Visit https://en.gravatar.com/site/implement/images/ for more information.
:param hash: The email hash used to generate avatar URL.
:param size: The size of the avatar, default to 100 pixel.
:param rating: The rating of the avatar, default to ``g``
:param default: The type of default avatar, default to ``identicon``.
:param include_extension: Append a '.jpg' extension at the end of URL, default to ``False``.
:param force_default: Force to use default avatar, default to ``False``.
"""
if include_extension:
hash += '.jpg' # depends on [control=['if'], data=[]]
default = default or current_app.config['AVATARS_GRAVATAR_DEFAULT']
query_string = urlencode({'s': int(size), 'r': rating, 'd': default})
if force_default:
query_string += '&q=y' # depends on [control=['if'], data=[]]
return 'https://gravatar.com/avatar/' + hash + '?' + query_string |
def AddRow(self, *args):
''' Parms are a variable number of Elements '''
NumRows = len(self.Rows) # number of existing rows is our row number
CurrentRowNumber = NumRows # this row's number
CurrentRow = [] # start with a blank row and build up
# ------------------------- Add the elements to a row ------------------------- #
for i, element in enumerate(args): # Loop through list of elements and add them to the row
element.Position = (CurrentRowNumber, i)
element.ParentContainer = self
CurrentRow.append(element)
# ------------------------- Append the row to list of Rows ------------------------- #
self.Rows.append(CurrentRow) | def function[AddRow, parameter[self]]:
constant[ Parms are a variable number of Elements ]
variable[NumRows] assign[=] call[name[len], parameter[name[self].Rows]]
variable[CurrentRowNumber] assign[=] name[NumRows]
variable[CurrentRow] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c991420>, <ast.Name object at 0x7da20c991c60>]]] in starred[call[name[enumerate], parameter[name[args]]]] begin[:]
name[element].Position assign[=] tuple[[<ast.Name object at 0x7da20c990310>, <ast.Name object at 0x7da20c991270>]]
name[element].ParentContainer assign[=] name[self]
call[name[CurrentRow].append, parameter[name[element]]]
call[name[self].Rows.append, parameter[name[CurrentRow]]] | keyword[def] identifier[AddRow] ( identifier[self] ,* identifier[args] ):
literal[string]
identifier[NumRows] = identifier[len] ( identifier[self] . identifier[Rows] )
identifier[CurrentRowNumber] = identifier[NumRows]
identifier[CurrentRow] =[]
keyword[for] identifier[i] , identifier[element] keyword[in] identifier[enumerate] ( identifier[args] ):
identifier[element] . identifier[Position] =( identifier[CurrentRowNumber] , identifier[i] )
identifier[element] . identifier[ParentContainer] = identifier[self]
identifier[CurrentRow] . identifier[append] ( identifier[element] )
identifier[self] . identifier[Rows] . identifier[append] ( identifier[CurrentRow] ) | def AddRow(self, *args):
""" Parms are a variable number of Elements """
NumRows = len(self.Rows) # number of existing rows is our row number
CurrentRowNumber = NumRows # this row's number
CurrentRow = [] # start with a blank row and build up
# ------------------------- Add the elements to a row ------------------------- #
for (i, element) in enumerate(args): # Loop through list of elements and add them to the row
element.Position = (CurrentRowNumber, i)
element.ParentContainer = self
CurrentRow.append(element) # depends on [control=['for'], data=[]]
# ------------------------- Append the row to list of Rows ------------------------- #
self.Rows.append(CurrentRow) |
def set_servo_angle(self, goalangle, goaltime, led):
""" Sets the servo angle (in degrees)
Enable torque using torque_on function before calling this
Args:
goalangle (int): The desired angle in degrees, range -150 to 150
goaltime (int): the time taken to move from present
position to goalposition
led (int): the LED color
0x00 LED off
0x04 GREEN
0x08 BLUE
0x10 RED
"""
if (self.servomodel==0x06) or (self.servomodel == 0x04):
goalposition = scale(goalangle, -159.9, 159.6, 10627, 22129)
else:
goalposition = scale(goalangle, -150, 150, 21, 1002)
self.set_servo_position(goalposition, goaltime, led) | def function[set_servo_angle, parameter[self, goalangle, goaltime, led]]:
constant[ Sets the servo angle (in degrees)
Enable torque using torque_on function before calling this
Args:
goalangle (int): The desired angle in degrees, range -150 to 150
goaltime (int): the time taken to move from present
position to goalposition
led (int): the LED color
0x00 LED off
0x04 GREEN
0x08 BLUE
0x10 RED
]
if <ast.BoolOp object at 0x7da1b2528c70> begin[:]
variable[goalposition] assign[=] call[name[scale], parameter[name[goalangle], <ast.UnaryOp object at 0x7da1b2529360>, constant[159.6], constant[10627], constant[22129]]]
call[name[self].set_servo_position, parameter[name[goalposition], name[goaltime], name[led]]] | keyword[def] identifier[set_servo_angle] ( identifier[self] , identifier[goalangle] , identifier[goaltime] , identifier[led] ):
literal[string]
keyword[if] ( identifier[self] . identifier[servomodel] == literal[int] ) keyword[or] ( identifier[self] . identifier[servomodel] == literal[int] ):
identifier[goalposition] = identifier[scale] ( identifier[goalangle] ,- literal[int] , literal[int] , literal[int] , literal[int] )
keyword[else] :
identifier[goalposition] = identifier[scale] ( identifier[goalangle] ,- literal[int] , literal[int] , literal[int] , literal[int] )
identifier[self] . identifier[set_servo_position] ( identifier[goalposition] , identifier[goaltime] , identifier[led] ) | def set_servo_angle(self, goalangle, goaltime, led):
""" Sets the servo angle (in degrees)
Enable torque using torque_on function before calling this
Args:
goalangle (int): The desired angle in degrees, range -150 to 150
goaltime (int): the time taken to move from present
position to goalposition
led (int): the LED color
0x00 LED off
0x04 GREEN
0x08 BLUE
0x10 RED
"""
if self.servomodel == 6 or self.servomodel == 4:
goalposition = scale(goalangle, -159.9, 159.6, 10627, 22129) # depends on [control=['if'], data=[]]
else:
goalposition = scale(goalangle, -150, 150, 21, 1002)
self.set_servo_position(goalposition, goaltime, led) |
def load_template_srcmodule(self, stack, path):
"""
This function actually fills the stack with definitions coming from a template file
"""
srcmodule = _run_resources_file(path, stack)
# Process the loaded module and find the stack elements
elements = self.find_stack_elements(srcmodule)
elements = sorted(elements, key=lambda x: x[:-1])
# Assign a name to each element and add to our dictionaries
for (module_name, el_name, element) in elements:
full_name = self.generate_cfn_name(module_name, el_name)
self.name_stack_element(element, full_name)
self.add_stack_element(element) | def function[load_template_srcmodule, parameter[self, stack, path]]:
constant[
This function actually fills the stack with definitions coming from a template file
]
variable[srcmodule] assign[=] call[name[_run_resources_file], parameter[name[path], name[stack]]]
variable[elements] assign[=] call[name[self].find_stack_elements, parameter[name[srcmodule]]]
variable[elements] assign[=] call[name[sorted], parameter[name[elements]]]
for taget[tuple[[<ast.Name object at 0x7da1b2676500>, <ast.Name object at 0x7da1b2676cb0>, <ast.Name object at 0x7da1b2676d40>]]] in starred[name[elements]] begin[:]
variable[full_name] assign[=] call[name[self].generate_cfn_name, parameter[name[module_name], name[el_name]]]
call[name[self].name_stack_element, parameter[name[element], name[full_name]]]
call[name[self].add_stack_element, parameter[name[element]]] | keyword[def] identifier[load_template_srcmodule] ( identifier[self] , identifier[stack] , identifier[path] ):
literal[string]
identifier[srcmodule] = identifier[_run_resources_file] ( identifier[path] , identifier[stack] )
identifier[elements] = identifier[self] . identifier[find_stack_elements] ( identifier[srcmodule] )
identifier[elements] = identifier[sorted] ( identifier[elements] , identifier[key] = keyword[lambda] identifier[x] : identifier[x] [:- literal[int] ])
keyword[for] ( identifier[module_name] , identifier[el_name] , identifier[element] ) keyword[in] identifier[elements] :
identifier[full_name] = identifier[self] . identifier[generate_cfn_name] ( identifier[module_name] , identifier[el_name] )
identifier[self] . identifier[name_stack_element] ( identifier[element] , identifier[full_name] )
identifier[self] . identifier[add_stack_element] ( identifier[element] ) | def load_template_srcmodule(self, stack, path):
"""
This function actually fills the stack with definitions coming from a template file
"""
srcmodule = _run_resources_file(path, stack)
# Process the loaded module and find the stack elements
elements = self.find_stack_elements(srcmodule)
elements = sorted(elements, key=lambda x: x[:-1])
# Assign a name to each element and add to our dictionaries
for (module_name, el_name, element) in elements:
full_name = self.generate_cfn_name(module_name, el_name)
self.name_stack_element(element, full_name)
self.add_stack_element(element) # depends on [control=['for'], data=[]] |
def get_content_object_url(self):
"""Gets the absolute url for the content object."""
if (self.content_object and
hasattr(self.content_object, 'get_absolute_url')):
return self.content_object.get_absolute_url()
return None | def function[get_content_object_url, parameter[self]]:
constant[Gets the absolute url for the content object.]
if <ast.BoolOp object at 0x7da1b18024d0> begin[:]
return[call[name[self].content_object.get_absolute_url, parameter[]]]
return[constant[None]] | keyword[def] identifier[get_content_object_url] ( identifier[self] ):
literal[string]
keyword[if] ( identifier[self] . identifier[content_object] keyword[and]
identifier[hasattr] ( identifier[self] . identifier[content_object] , literal[string] )):
keyword[return] identifier[self] . identifier[content_object] . identifier[get_absolute_url] ()
keyword[return] keyword[None] | def get_content_object_url(self):
"""Gets the absolute url for the content object."""
if self.content_object and hasattr(self.content_object, 'get_absolute_url'):
return self.content_object.get_absolute_url() # depends on [control=['if'], data=[]]
return None |
def console_list_load_xp(
filename: str
) -> Optional[List[tcod.console.Console]]:
"""Return a list of consoles from a REXPaint `.xp` file."""
tcod_list = lib.TCOD_console_list_from_xp(filename.encode("utf-8"))
if tcod_list == ffi.NULL:
return None
try:
python_list = []
lib.TCOD_list_reverse(tcod_list)
while not lib.TCOD_list_is_empty(tcod_list):
python_list.append(
tcod.console.Console._from_cdata(lib.TCOD_list_pop(tcod_list))
)
return python_list
finally:
lib.TCOD_list_delete(tcod_list) | def function[console_list_load_xp, parameter[filename]]:
constant[Return a list of consoles from a REXPaint `.xp` file.]
variable[tcod_list] assign[=] call[name[lib].TCOD_console_list_from_xp, parameter[call[name[filename].encode, parameter[constant[utf-8]]]]]
if compare[name[tcod_list] equal[==] name[ffi].NULL] begin[:]
return[constant[None]]
<ast.Try object at 0x7da1b23451b0> | keyword[def] identifier[console_list_load_xp] (
identifier[filename] : identifier[str]
)-> identifier[Optional] [ identifier[List] [ identifier[tcod] . identifier[console] . identifier[Console] ]]:
literal[string]
identifier[tcod_list] = identifier[lib] . identifier[TCOD_console_list_from_xp] ( identifier[filename] . identifier[encode] ( literal[string] ))
keyword[if] identifier[tcod_list] == identifier[ffi] . identifier[NULL] :
keyword[return] keyword[None]
keyword[try] :
identifier[python_list] =[]
identifier[lib] . identifier[TCOD_list_reverse] ( identifier[tcod_list] )
keyword[while] keyword[not] identifier[lib] . identifier[TCOD_list_is_empty] ( identifier[tcod_list] ):
identifier[python_list] . identifier[append] (
identifier[tcod] . identifier[console] . identifier[Console] . identifier[_from_cdata] ( identifier[lib] . identifier[TCOD_list_pop] ( identifier[tcod_list] ))
)
keyword[return] identifier[python_list]
keyword[finally] :
identifier[lib] . identifier[TCOD_list_delete] ( identifier[tcod_list] ) | def console_list_load_xp(filename: str) -> Optional[List[tcod.console.Console]]:
"""Return a list of consoles from a REXPaint `.xp` file."""
tcod_list = lib.TCOD_console_list_from_xp(filename.encode('utf-8'))
if tcod_list == ffi.NULL:
return None # depends on [control=['if'], data=[]]
try:
python_list = []
lib.TCOD_list_reverse(tcod_list)
while not lib.TCOD_list_is_empty(tcod_list):
python_list.append(tcod.console.Console._from_cdata(lib.TCOD_list_pop(tcod_list))) # depends on [control=['while'], data=[]]
return python_list # depends on [control=['try'], data=[]]
finally:
lib.TCOD_list_delete(tcod_list) |
def middleware(func):
""" Executes routes.py route middleware """
@wraps(func)
def parse(*args, **kwargs):
""" get middleware from route, execute middleware in order """
middleware = copy.deepcopy(kwargs['middleware'])
kwargs.pop('middleware')
if request.method == "OPTIONS":
# return 200 json response for CORS
return JsonResponse(200)
if middleware is None:
return func(*args, **kwargs)
for mware in middleware:
ware = mware()
if ware.status is False:
return ware.response
return func(*args, **kwargs)
return parse | def function[middleware, parameter[func]]:
constant[ Executes routes.py route middleware ]
def function[parse, parameter[]]:
constant[ get middleware from route, execute middleware in order ]
variable[middleware] assign[=] call[name[copy].deepcopy, parameter[call[name[kwargs]][constant[middleware]]]]
call[name[kwargs].pop, parameter[constant[middleware]]]
if compare[name[request].method equal[==] constant[OPTIONS]] begin[:]
return[call[name[JsonResponse], parameter[constant[200]]]]
if compare[name[middleware] is constant[None]] begin[:]
return[call[name[func], parameter[<ast.Starred object at 0x7da207f014b0>]]]
for taget[name[mware]] in starred[name[middleware]] begin[:]
variable[ware] assign[=] call[name[mware], parameter[]]
if compare[name[ware].status is constant[False]] begin[:]
return[name[ware].response]
return[call[name[func], parameter[<ast.Starred object at 0x7da18c4cd120>]]]
return[name[parse]] | keyword[def] identifier[middleware] ( identifier[func] ):
literal[string]
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[parse] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[middleware] = identifier[copy] . identifier[deepcopy] ( identifier[kwargs] [ literal[string] ])
identifier[kwargs] . identifier[pop] ( literal[string] )
keyword[if] identifier[request] . identifier[method] == literal[string] :
keyword[return] identifier[JsonResponse] ( literal[int] )
keyword[if] identifier[middleware] keyword[is] keyword[None] :
keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[for] identifier[mware] keyword[in] identifier[middleware] :
identifier[ware] = identifier[mware] ()
keyword[if] identifier[ware] . identifier[status] keyword[is] keyword[False] :
keyword[return] identifier[ware] . identifier[response]
keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[parse] | def middleware(func):
""" Executes routes.py route middleware """
@wraps(func)
def parse(*args, **kwargs):
""" get middleware from route, execute middleware in order """
middleware = copy.deepcopy(kwargs['middleware'])
kwargs.pop('middleware')
if request.method == 'OPTIONS': # return 200 json response for CORS
return JsonResponse(200) # depends on [control=['if'], data=[]]
if middleware is None:
return func(*args, **kwargs) # depends on [control=['if'], data=[]]
for mware in middleware:
ware = mware()
if ware.status is False:
return ware.response # depends on [control=['if'], data=[]]
return func(*args, **kwargs) # depends on [control=['for'], data=['mware']]
return parse |
def copy_template(self, name=None):
"""Create empty copy of the current table, with copies of all
index definitions.
"""
ret = Table(self.table_name)
ret._indexes.update(dict((k, v.copy_template()) for k, v in self._indexes.items()))
ret(name)
return ret | def function[copy_template, parameter[self, name]]:
constant[Create empty copy of the current table, with copies of all
index definitions.
]
variable[ret] assign[=] call[name[Table], parameter[name[self].table_name]]
call[name[ret]._indexes.update, parameter[call[name[dict], parameter[<ast.GeneratorExp object at 0x7da1b24ad7e0>]]]]
call[name[ret], parameter[name[name]]]
return[name[ret]] | keyword[def] identifier[copy_template] ( identifier[self] , identifier[name] = keyword[None] ):
literal[string]
identifier[ret] = identifier[Table] ( identifier[self] . identifier[table_name] )
identifier[ret] . identifier[_indexes] . identifier[update] ( identifier[dict] (( identifier[k] , identifier[v] . identifier[copy_template] ()) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[_indexes] . identifier[items] ()))
identifier[ret] ( identifier[name] )
keyword[return] identifier[ret] | def copy_template(self, name=None):
"""Create empty copy of the current table, with copies of all
index definitions.
"""
ret = Table(self.table_name)
ret._indexes.update(dict(((k, v.copy_template()) for (k, v) in self._indexes.items())))
ret(name)
return ret |
def activate(self, tourfile=None, minsize=10000, backuptour=True):
        """
        Select contigs in the current partition. This is the setup phase of the
        algorithm, and supports two modes:
        - "de novo": This is useful at the start of a new run where no tours
          available. We select the strong contigs that have significant number
          of links to other contigs in the partition. We build a histogram of
          link density (# links per bp) and remove the contigs that appear as
          outliers. The orientations are derived from the matrix decomposition
          of the pairwise strandedness matrix O.
        - "hotstart": This is useful when there was a past run, with a given
          tourfile. In this case, the active contig list and orientations are
          derived from the last tour in the file.

        :param tourfile: path to a previous tour file; ``None`` (or a missing
            file) selects the "de novo" mode
        :param minsize: contigs below this size are dropped from the active
            set in "de novo" mode
        :param backuptour: if True, back up the tourfile after importing it
        :return: the selected tour as an ``array.array('i')`` of contig indices
        """
        # A tourfile that does not exist on disk is treated the same as no
        # tourfile at all, falling through to the "de novo" branch.
        if tourfile and (not op.exists(tourfile)):
            logging.debug("Tourfile `{}` not found".format(tourfile))
            tourfile = None
        if tourfile:
            # --- "hotstart": restore contigs and orientations from last tour ---
            logging.debug("Importing tourfile `{}`".format(tourfile))
            tour, tour_o = iter_last_tour(tourfile, self)
            self.active = set(tour)
            tig_to_idx = self.tig_to_idx
            # Translate contig names into integer indices.
            tour = [tig_to_idx[x] for x in tour]
            # Pair each contig index with its orientation sign (FF presumably
            # maps orientation symbols like '+'/'-' to +1/-1 — confirm against
            # the module-level FF), then sort by contig index so that the
            # resulting signs vector is ordered by index, not by tour position.
            signs = sorted([(x, FF[o]) for (x, o) in zip(tour, tour_o)])
            _, signs = zip(*signs)
            self.signs = np.array(signs, dtype=int)
            if backuptour:
                # Preserve the imported tourfile (backup() defined elsewhere).
                backup(tourfile)
            # Compact C int array of contig indices.
            tour = array.array('i', tour)
        else:
            # --- "de novo": select contigs by link density ---
            self.report_active()
            # Iteratively prune low-density outliers; each removal changes the
            # density distribution, so recompute until nothing else is removed.
            while True:
                logdensities = self.calculate_densities()
                lb, ub = outlier_cutoff(logdensities.values())
                logging.debug("Log10(link_densities) ~ [{}, {}]"
                              .format(lb, ub))
                # Only contigs that are BOTH below the lower density cutoff AND
                # small (< 10x minsize) are treated as removable outliers.
                remove = set(x for x, d in logdensities.items() if
                             (d < lb and self.tig_to_size[x] < minsize * 10))
                if remove:
                    self.active -= remove
                    self.report_active()
                else:
                    break
            # Final hard size filter on whatever survived the density pruning.
            logging.debug("Remove contigs with size < {}".format(minsize))
            self.active = set(x for x in self.active if
                              self.tig_to_size[x] >= minsize)
            # NOTE(review): the tour spans all N contigs, including ones just
            # removed from self.active — confirm downstream consumers filter
            # against self.active.
            tour = range(self.N)  # Use starting (random) order otherwise
            tour = array.array('i', tour)
        # Determine orientations
        self.flip_all(tour)
        self.report_active()
        self.tour = tour
return tour | def function[activate, parameter[self, tourfile, minsize, backuptour]]:
constant[
Select contigs in the current partition. This is the setup phase of the
algorithm, and supports two modes:
- "de novo": This is useful at the start of a new run where no tours
available. We select the strong contigs that have significant number
of links to other contigs in the partition. We build a histogram of
link density (# links per bp) and remove the contigs that appear as
outliers. The orientations are derived from the matrix decomposition
of the pairwise strandedness matrix O.
- "hotstart": This is useful when there was a past run, with a given
tourfile. In this case, the active contig list and orientations are
derived from the last tour in the file.
]
if <ast.BoolOp object at 0x7da18fe93bb0> begin[:]
call[name[logging].debug, parameter[call[constant[Tourfile `{}` not found].format, parameter[name[tourfile]]]]]
variable[tourfile] assign[=] constant[None]
if name[tourfile] begin[:]
call[name[logging].debug, parameter[call[constant[Importing tourfile `{}`].format, parameter[name[tourfile]]]]]
<ast.Tuple object at 0x7da18fe92740> assign[=] call[name[iter_last_tour], parameter[name[tourfile], name[self]]]
name[self].active assign[=] call[name[set], parameter[name[tour]]]
variable[tig_to_idx] assign[=] name[self].tig_to_idx
variable[tour] assign[=] <ast.ListComp object at 0x7da18fe92200>
variable[signs] assign[=] call[name[sorted], parameter[<ast.ListComp object at 0x7da18fe91ea0>]]
<ast.Tuple object at 0x7da18fe92860> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da2041d9360>]]
name[self].signs assign[=] call[name[np].array, parameter[name[signs]]]
if name[backuptour] begin[:]
call[name[backup], parameter[name[tourfile]]]
variable[tour] assign[=] call[name[array].array, parameter[constant[i], name[tour]]]
call[name[self].report_active, parameter[]]
name[self].tour assign[=] name[tour]
return[name[tour]] | keyword[def] identifier[activate] ( identifier[self] , identifier[tourfile] = keyword[None] , identifier[minsize] = literal[int] , identifier[backuptour] = keyword[True] ):
literal[string]
keyword[if] identifier[tourfile] keyword[and] ( keyword[not] identifier[op] . identifier[exists] ( identifier[tourfile] )):
identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[tourfile] ))
identifier[tourfile] = keyword[None]
keyword[if] identifier[tourfile] :
identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[tourfile] ))
identifier[tour] , identifier[tour_o] = identifier[iter_last_tour] ( identifier[tourfile] , identifier[self] )
identifier[self] . identifier[active] = identifier[set] ( identifier[tour] )
identifier[tig_to_idx] = identifier[self] . identifier[tig_to_idx]
identifier[tour] =[ identifier[tig_to_idx] [ identifier[x] ] keyword[for] identifier[x] keyword[in] identifier[tour] ]
identifier[signs] = identifier[sorted] ([( identifier[x] , identifier[FF] [ identifier[o] ]) keyword[for] ( identifier[x] , identifier[o] ) keyword[in] identifier[zip] ( identifier[tour] , identifier[tour_o] )])
identifier[_] , identifier[signs] = identifier[zip] (* identifier[signs] )
identifier[self] . identifier[signs] = identifier[np] . identifier[array] ( identifier[signs] , identifier[dtype] = identifier[int] )
keyword[if] identifier[backuptour] :
identifier[backup] ( identifier[tourfile] )
identifier[tour] = identifier[array] . identifier[array] ( literal[string] , identifier[tour] )
keyword[else] :
identifier[self] . identifier[report_active] ()
keyword[while] keyword[True] :
identifier[logdensities] = identifier[self] . identifier[calculate_densities] ()
identifier[lb] , identifier[ub] = identifier[outlier_cutoff] ( identifier[logdensities] . identifier[values] ())
identifier[logging] . identifier[debug] ( literal[string]
. identifier[format] ( identifier[lb] , identifier[ub] ))
identifier[remove] = identifier[set] ( identifier[x] keyword[for] identifier[x] , identifier[d] keyword[in] identifier[logdensities] . identifier[items] () keyword[if]
( identifier[d] < identifier[lb] keyword[and] identifier[self] . identifier[tig_to_size] [ identifier[x] ]< identifier[minsize] * literal[int] ))
keyword[if] identifier[remove] :
identifier[self] . identifier[active] -= identifier[remove]
identifier[self] . identifier[report_active] ()
keyword[else] :
keyword[break]
identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[minsize] ))
identifier[self] . identifier[active] = identifier[set] ( identifier[x] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[active] keyword[if]
identifier[self] . identifier[tig_to_size] [ identifier[x] ]>= identifier[minsize] )
identifier[tour] = identifier[range] ( identifier[self] . identifier[N] )
identifier[tour] = identifier[array] . identifier[array] ( literal[string] , identifier[tour] )
identifier[self] . identifier[flip_all] ( identifier[tour] )
identifier[self] . identifier[report_active] ()
identifier[self] . identifier[tour] = identifier[tour]
keyword[return] identifier[tour] | def activate(self, tourfile=None, minsize=10000, backuptour=True):
"""
Select contigs in the current partition. This is the setup phase of the
algorithm, and supports two modes:
- "de novo": This is useful at the start of a new run where no tours
available. We select the strong contigs that have significant number
of links to other contigs in the partition. We build a histogram of
link density (# links per bp) and remove the contigs that appear as
outliers. The orientations are derived from the matrix decomposition
of the pairwise strandedness matrix O.
- "hotstart": This is useful when there was a past run, with a given
tourfile. In this case, the active contig list and orientations are
derived from the last tour in the file.
"""
if tourfile and (not op.exists(tourfile)):
logging.debug('Tourfile `{}` not found'.format(tourfile))
tourfile = None # depends on [control=['if'], data=[]]
if tourfile:
logging.debug('Importing tourfile `{}`'.format(tourfile))
(tour, tour_o) = iter_last_tour(tourfile, self)
self.active = set(tour)
tig_to_idx = self.tig_to_idx
tour = [tig_to_idx[x] for x in tour]
signs = sorted([(x, FF[o]) for (x, o) in zip(tour, tour_o)])
(_, signs) = zip(*signs)
self.signs = np.array(signs, dtype=int)
if backuptour:
backup(tourfile) # depends on [control=['if'], data=[]]
tour = array.array('i', tour) # depends on [control=['if'], data=[]]
else:
self.report_active()
while True:
logdensities = self.calculate_densities()
(lb, ub) = outlier_cutoff(logdensities.values())
logging.debug('Log10(link_densities) ~ [{}, {}]'.format(lb, ub))
remove = set((x for (x, d) in logdensities.items() if d < lb and self.tig_to_size[x] < minsize * 10))
if remove:
self.active -= remove
self.report_active() # depends on [control=['if'], data=[]]
else:
break # depends on [control=['while'], data=[]]
logging.debug('Remove contigs with size < {}'.format(minsize))
self.active = set((x for x in self.active if self.tig_to_size[x] >= minsize))
tour = range(self.N) # Use starting (random) order otherwise
tour = array.array('i', tour)
# Determine orientations
self.flip_all(tour)
self.report_active()
self.tour = tour
return tour |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.