_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3
values | text stringlengths 75 19.8k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
def diff_roessler(value_array, a, c):
    """The Roessler attractor differential equation.

    :param value_array: 3d array containing the x, y, and z component values.
    :param a: Constant attractor parameter
    :param c: Constant attractor parameter
    :return: 3d array of the Roessler system evaluated at `value_array`
    """
    x, y, z = value_array[0], value_array[1], value_array[2]
    # The common parameter choice b == a is used here
    b = a
    return np.array([-y - z,
                     x + a * y,
                     b + z * (x - c)])
def compact_hdf5_file(filename, name=None, index=None, keep_backup=True):
    """Compresses an HDF5 file to reduce file size.

    The compression properties for the new file are taken from a given
    trajectory in the file. Simply calls ``ptrepack`` from the command line
    (see also https://pytables.github.io/usersguide/utilities.html#ptrepackdescr).

    Currently only supported under Linux, no guarantee for Windows usage.

    :param filename: Name of the file to compact
    :param name: The name of the trajectory from which the compression properties are taken
    :param index:
        Instead of a name you could also specify an index, i.e. -1 for the last
        trajectory in the file.
    :param keep_backup:
        If a backup version of the original file should be kept.
        The backup file is named as the original but `_backup` is appended to the end.
    :return: The return/error code of ptrepack
    """
    if name is None and index is None:
        index = -1
    # Load a skeleton trajectory just to read out the compression settings
    tmp_traj = load_trajectory(name, index, as_new=False, load_all=pypetconstants.LOAD_NOTHING,
                               force=True, filename=filename)
    service = tmp_traj.v_storage_service
    base, extension = os.path.splitext(filename)
    tmp_filename = base + '_tmp' + extension
    command = ['ptrepack', '-v',
               '--complib', service.complib,
               '--complevel', str(service.complevel),
               '--shuffle', str(int(service.shuffle)),
               '--fletcher32', str(int(service.fletcher32)),
               os.path.abspath(filename), os.path.abspath(tmp_filename)]
    print('Executing command `%s`' % ' '.join(command))
    retcode = subprocess.call(command)
    if retcode != 0:
        print('#### ERROR: Compacting `%s` failed with errorcode %s! ####' %
              (filename, str(retcode)))
    else:
        print('#### Compacting successful ####')
        print('Renaming files')
        if keep_backup:
            os.rename(filename, base + '_backup' + extension)
        else:
            os.remove(filename)
        os.rename(tmp_filename, filename)
        print('### Compacting and Renaming finished ####')
    return retcode
def _explored_parameters_in_group(traj, group_node):
    """Checks if one of the explored parameters is contained in `group_node`.

    :param traj: Trajectory container
    :param group_node: Group node
    :return: `True` or `False`
    """
    return any(param in group_node
               for param in traj.f_get_explored_parameters())
def _build_model_eqs(traj):
    """Computes model equations for the excitatory and inhibitory population.

    Equation objects are created by fusing `model.eqs` and `model.synaptic.eqs`,
    replacing `POST` by the population label and `PRE` by each presynaptic label
    ('i' for inhibitory, 'e' for excitatory), and resolving the synaptic
    time-constant variables.

    :return: Dictionary with 'i' equation object for inhibitory neurons and 'e' for excitatory
    """
    base_eqs = traj.model.eqs
    syn_eqs = traj.model.synaptic.eqs
    tau1 = traj.model.synaptic['tau1']
    post_eqs = {}
    for post in ['i', 'e']:
        variables_dict = {}
        fused_eqs = base_eqs.replace('POST', post)
        for pre in ['i', 'e']:
            # Append one synaptic block per presynaptic population
            fused_eqs += syn_eqs.replace('PRE', pre)
            tau2 = traj.model.synaptic['tau2_' + pre]
            variables_dict['invtau1_' + pre] = 1.0 / tau1
            variables_dict['invtau2_' + pre] = 1.0 / tau2
            variables_dict['normalization_' + pre] = (tau1 - tau2) / tau2
            variables_dict['tau1_' + pre] = tau1
            variables_dict['tau2_' + pre] = tau2
        variables_dict['tau_' + post] = traj.model['tau_' + post]
        post_eqs[post] = Equations(fused_eqs, **variables_dict)
    return post_eqs
def pre_build(self, traj, brian_list, network_dict):
    """Pre-builds the neuron groups.

    Pre-build is only performed if none of the relevant model parameters
    is explored.

    :param traj: Trajectory container
    :param brian_list:
        List of objects passed to BRIAN network constructor.
        Adds the inhibitory and excitatory neuron groups.
    :param network_dict:
        Dictionary of elements shared among the components.
        Adds 'neurons_i' (inhibitory group) and 'neurons_e' (excitatory group).
    """
    no_exploration = not _explored_parameters_in_group(traj, traj.parameters.model)
    self._pre_build = no_exploration
    if no_exploration:
        self._build_model(traj, brian_list, network_dict)
def build(self, traj, brian_list, network_dict):
    """Builds the neuron groups.

    Build is only performed if the neuron groups were not pre-built before.

    :param traj: Trajectory container
    :param brian_list:
        List of objects passed to BRIAN network constructor.
        Adds the inhibitory and excitatory neuron groups.
    :param network_dict:
        Dictionary of elements shared among the components.
        Adds 'neurons_i' (inhibitory group) and 'neurons_e' (excitatory group).
    """
    # getattr default covers both a missing and a falsy `_pre_build` flag
    if not getattr(self, '_pre_build', False):
        self._build_model(traj, brian_list, network_dict)
def _build_model(self, traj, brian_list, network_dict):
    """Builds the neuron groups from `traj`.

    Adds the neuron groups to `brian_list` and `network_dict`.
    """
    model = traj.parameters.model
    # Equations for both populations
    eqs_dict = self._build_model_eqs(traj)
    # Arguments shared by both neuron groups
    common_kwargs = dict(threshold=model.V_th,
                         reset=model.reset_func,
                         refractory=model.refractory,
                         method='Euler')
    neurons_i = NeuronGroup(N=model.N_i, model=eqs_dict['i'], **common_kwargs)
    neurons_e = NeuronGroup(N=model.N_e, model=eqs_dict['e'], **common_kwargs)
    # Random bias terms, uniform in [mu_min, mu_max)
    # (the order of the `rand` calls is kept for reproducibility)
    neurons_e.mu = rand(model.N_e) * (model.mu_e_max - model.mu_e_min) + model.mu_e_min
    neurons_i.mu = rand(model.N_i) * (model.mu_i_max - model.mu_i_min) + model.mu_i_min
    # Random initial membrane potentials
    neurons_e.V = rand(model.N_e)
    neurons_i.V = rand(model.N_i)
    # Register both groups
    brian_list.extend([neurons_i, neurons_e])
    network_dict['neurons_e'] = neurons_e
    network_dict['neurons_i'] = neurons_i
def pre_build(self, traj, brian_list, network_dict):
    """Pre-builds the connections.

    Pre-build is only performed if none of the relevant connection
    parameters is explored and the relevant neuron groups exist.

    :param traj: Trajectory container
    :param brian_list:
        List of objects passed to BRIAN network constructor.
        Adds connections, amount depends on clustering.
    :param network_dict:
        Dictionary of elements shared among the components.
        Expects 'neurons_i' and 'neurons_e'; adds connections.
    """
    no_exploration = not _explored_parameters_in_group(traj, traj.parameters.connections)
    groups_present = ('neurons_i' in network_dict and
                      'neurons_e' in network_dict)
    self._pre_build = no_exploration and groups_present
    if self._pre_build:
        self._build_connections(traj, brian_list, network_dict)
def build(self, traj, brian_list, network_dict):
    """Builds the connections.

    Build is only performed if the connections have not been pre-built.

    :param traj: Trajectory container
    :param brian_list:
        List of objects passed to BRIAN network constructor.
        Adds connections, amount depends on clustering.
    :param network_dict:
        Dictionary of elements shared among the components.
        Expects 'neurons_i' and 'neurons_e'; adds connections.
    """
    # getattr default covers both a missing and a falsy `_pre_build` flag
    if not getattr(self, '_pre_build', False):
        self._build_connections(traj, brian_list, network_dict)
def add_parameters(self, traj):
    """Adds the simulation duration parameters to the `traj` container."""
    initial = traj.f_add_parameter(
        Brian2Parameter, 'simulation.durations.initial_run', 500 * ms,
        comment='Initialisation run for more realistic measurement conditions.')
    initial.v_annotations.order = 0
    measurement = traj.f_add_parameter(
        Brian2Parameter, 'simulation.durations.measurement_run', 1500 * ms,
        comment='Measurement run that is considered for statistical evaluation')
    measurement.v_annotations.order = 1
def _compute_fano_factor(spike_res, neuron_id, time_window, start_time, end_time):
    """Computes the Fano Factor for one neuron.

    Spike counts are collected in consecutive, non-overlapping windows of
    length `time_window` between `start_time` and `end_time`; the Fano
    Factor is the variance of those counts divided by their mean.

    :param spike_res:
        Result containing the spike times (`t`) and neuron indices (`i`) of all neurons
    :param neuron_id:
        Index of neuron for which the FF is computed
    :param time_window:
        Length of the consecutive time windows to compute the FF
    :param start_time:
        Start time of measurement to consider
    :param end_time:
        End time of measurement to consider
    :return:
        Fano Factor (float), or 0.0 if mean firing activity is 0.
    :raises ValueError:
        If the interval cannot hold at least one time window.
    """
    # Explicit validation instead of `assert` (asserts vanish under `python -O`)
    if end_time < start_time + time_window:
        raise ValueError('The measurement interval must span at least one time window.')
    # Number of complete time bins fitting into the interval
    n_bins = int(np.floor((end_time - start_time) / time_window))
    binned_spikes = np.zeros(n_bins)
    # Spike times of the requested neuron only
    neuron_spikes = spike_res.t[spike_res.i == neuron_id]
    # `bin_idx` instead of `bin` to avoid shadowing the builtin
    for bin_idx in range(n_bins):
        lower_time = start_time + time_window * bin_idx
        upper_time = start_time + time_window * (bin_idx + 1)
        # Count spikes falling into the half-open interval [lower_time, upper_time)
        in_window = neuron_spikes[(neuron_spikes >= lower_time) &
                                  (neuron_spikes < upper_time)]
        binned_spikes[bin_idx] = len(in_window)
    avg = np.mean(binned_spikes)
    if avg > 0:
        return np.var(binned_spikes) / avg
    # Return a float for a consistent return type
    return 0.0
def _compute_mean_fano_factor(neuron_ids, spike_res, time_window, start_time, end_time):
    """Computes the average Fano Factor over many neurons.

    :param neuron_ids:
        List of neuron indices to average over
    :param spike_res:
        Result containing all the spikes
    :param time_window:
        Length of the consecutive time windows to compute the FF
    :param start_time:
        Start time of measurement to consider
    :param end_time:
        End time of measurement to consider
    :return:
        Average Fano Factor
    """
    single_ffs = [
        CNFanoFactorComputer._compute_fano_factor(
            spike_res, neuron_id, time_window, start_time, end_time)
        for neuron_id in neuron_ids]
    return np.mean(single_ffs)
def analyse(self, traj, network, current_subrun, subrun_list, network_dict):
    """Calculates the average Fano Factor of the network.

    The analysis is only performed once all subruns have finished,
    i.e. when `subrun_list` is empty.

    :param traj:
        Trajectory container.
        Expects `results.monitors.spikes_e` (SpikeMonitor data of excitatory neurons).
        Adds `results.statistics.mean_fano_factor` (average Fano Factor).
    :param network: The BRIAN network
    :param current_subrun: BrianParameter
    :param subrun_list: Upcoming subruns
    :param network_dict: Dictionary of items shared among components
    """
    if subrun_list:
        # Not the final subrun yet, nothing to analyse
        return
    spikes_e = traj.results.monitors.spikes_e
    statistics = traj.parameters.analysis.statistics
    durations = traj.parameters.simulation.durations
    start_time = durations.initial_run
    end_time = start_time + durations.measurement_run
    mean_ff = self._compute_mean_fano_factor(
        statistics.neuron_ids, spikes_e, statistics.time_window, start_time, end_time)
    traj.f_add_result('statistics.mean_fano_factor', mean_ff,
                      comment='Average Fano Factor over all exc neurons')
    print('R_ee: %f, Mean FF: %f' % (traj.R_ee, mean_ff))
def add_to_network(self, traj, network, current_subrun, subrun_list, network_dict):
    """Adds monitors to the network if the measurement run is carried out.

    :param traj: Trajectory container
    :param network: The BRIAN network
    :param current_subrun: BrianParameter
    :param subrun_list: List of coming subruns
    :param network_dict:
        Dictionary of items shared among the components.
        Expects 'neurons_e' (excitatory neuron group); adds 'monitors'
        (spike monitor plus state monitors of V, I_syn_e, and I_syn_i of
        some excitatory neurons, as specified in `neuron_records`).
    """
    # Monitors are only attached during the measurement subrun (order == 1)
    is_measurement_run = current_subrun.v_annotations.order == 1
    if is_measurement_run:
        self._add_monitors(traj, network, network_dict)
def _add_monitors(self, traj, network, network_dict):
    """Creates the monitors, attaches them to the network, and registers them
    in `network_dict`."""
    neurons_e = network_dict['neurons_e']
    # Spike times of the whole excitatory population
    self.spike_monitor = SpikeMonitor(neurons_e)
    # State monitors of selected excitatory neurons
    self.V_monitor = StateMonitor(neurons_e, 'V',
                                  record=list(traj.neuron_records))
    self.I_syn_e_monitor = StateMonitor(neurons_e, 'I_syn_e',
                                        record=list(traj.neuron_records))
    self.I_syn_i_monitor = StateMonitor(neurons_e, 'I_syn_i',
                                        record=list(traj.neuron_records))
    monitor_list = [self.spike_monitor, self.V_monitor,
                    self.I_syn_e_monitor, self.I_syn_i_monitor]
    # Add monitors to network and dictionary
    network.add(*monitor_list)
    network_dict['monitors'] = monitor_list
def _make_folder(self, traj):
    """Creates (if necessary) a subfolder for plots of the current run.

    :return: Absolute path of the plot folder
    """
    folder = os.path.abspath(
        os.path.join(traj.analysis.plot_folder, traj.v_name, traj.v_crun))
    if not os.path.isdir(folder):
        os.makedirs(folder)
    return folder
def _plot_result(self, traj, result_name):
    """Plots a state variable of several recorded neurons as stacked subplots
    of the current figure."""
    result = traj.f_get(result_name)
    varname = result.record_variables[0]
    values = result[varname]
    times = result.t
    recorded = result.record
    n_plots = len(recorded)
    for subplot_idx in range(n_plots):
        plt.subplot(n_plots, 1, subplot_idx + 1)
        plt.plot(times, values[subplot_idx, :])
        # Title on the first subplot, y-label on the second, x-label on the last
        if subplot_idx == 0:
            plt.title('%s' % varname)
        if subplot_idx == 1:
            plt.ylabel('%s' % (varname))
        if subplot_idx == n_plots - 1:
            plt.xlabel('t')
def _print_graphs(self, traj):
    """Makes some plots and stores them into subfolders"""
    print_folder = self._make_folder(traj)

    def _save_current(file_name, figure=None):
        # Saves the active plot (or the given figure) into the print folder
        target = os.path.join(print_folder, file_name)
        print('Current plot: %s ' % target)
        if figure is None:
            plt.savefig(target)
        else:
            figure.savefig(target)
        plt.close()

    # Raster plot drawn directly from the SpikeMonitor
    plt.figure()
    plt.scatter(self.spike_monitor.t, self.spike_monitor.i, s=1)
    plt.xlabel('t')
    plt.ylabel('Exc. Neurons')
    plt.title('Spike Raster Plot')
    _save_current('spike.png')
    # Membrane potentials
    fig = plt.figure()
    self._plot_result(traj, 'monitors.V')
    _save_current('V.png', fig)
    # Excitatory synaptic currents
    plt.figure()
    self._plot_result(traj, 'monitors.I_syn_e')
    _save_current('I_syn_e.png')
    # Inhibitory synaptic currents
    plt.figure()
    self._plot_result(traj, 'monitors.I_syn_i')
    _save_current('I_syn_i.png')
    if traj.analysis.show_plots:
        plt.show()
    else:
        plt.close('all')
def analyse(self, traj, network, current_subrun, subrun_list, network_dict):
    """Extracts monitor data and plots.

    Data extraction is only done once all subruns have been completed,
    i.e. `len(subrun_list) == 0`. First, results are extracted from the
    monitors and stored into `traj`; next, the extracted data is plotted.

    :param traj:
        Trajectory container; data from the monitors is added.
    :param network: The BRIAN network
    :param current_subrun: BrianParameter
    :param subrun_list: List of coming subruns
    :param network_dict: Dictionary of items shared among all components
    """
    if subrun_list:
        # Not the final subrun yet
        return
    monitor_results = [
        ('monitors.spikes_e', self.spike_monitor,
         'The spiketimes of the excitatory population'),
        ('monitors.V', self.V_monitor,
         'Membrane voltage of four neurons from 2 clusters'),
        ('monitors.I_syn_e', self.I_syn_e_monitor,
         'I_syn_e of four neurons from 2 clusters'),
        ('monitors.I_syn_i', self.I_syn_i_monitor,
         'I_syn_i of four neurons from 2 clusters')]
    for result_name, monitor, comment in monitor_results:
        traj.f_add_result(Brian2MonitorResult, result_name, monitor, comment=comment)
    print('Plotting')
    if traj.parameters.analysis.make_plots:
        self._print_graphs(traj)
def get_batch():
    """Parses the batch id from the command line arguments.

    Looks for a ``--batch <int>`` option in ``sys.argv`` and returns its
    value; defaults to 0 if the option is absent.

    :return: The batch index (int)
    """
    # `longopts` is documented as a list of strings; passing a bare string
    # only works by accident of the getopt implementation.
    optlist, args = getopt.getopt(sys.argv[1:], '', longopts=['batch='])
    batch = 0
    for o, a in optlist:
        if o == '--batch':
            batch = int(a)
            print('Found batch %d' % batch)
    return batch
def explore_batch(traj, batch):
    """Chooses exploration according to `batch`.

    Batch ``b`` explores sigma in ``[10*b, 10*b + 1, ..., 10*b + 9]``,
    e.g. batch 0 explores [0.0, 1.0, ..., 9.0] and batch 1
    explores [10.0, 11.0, ..., 19.0].
    """
    lower = 10.0 * batch
    sigma_values = np.arange(lower, 10.0 * (batch + 1), 1.0).tolist()
    traj.f_explore({'sigma': sigma_values})
def vars(self):
    """Alternative naming, you can use `node.vars.name` instead of `node.v_name`"""
    # Lazily created and cached on first access
    cached = self._vars
    if cached is None:
        cached = self._vars = NNTreeNodeVars(self)
    return cached
def func(self):
    """Alternative naming, you can use `node.func.name` instead of `node.f_func`"""
    # Lazily created and cached on first access
    cached = self._func
    if cached is None:
        cached = self._func = NNTreeNodeFunc(self)
    return cached
def _rename(self, full_name):
    """Renames the tree node.

    Keeps the previous short name if `full_name` is empty.
    """
    self._full_name = full_name
    if not full_name:
        return
    # The short name is everything after the last dot
    self._name = full_name.rsplit('.', 1)[-1]
def _set_details(self, depth, branch, run_branch):
    """Sets some details for internal handling."""
    self._depth, self._branch, self._run_branch = depth, branch, run_branch
def _node_to_msg(store_load, node):
    """Maps a given node and a store_load constant to the message that is understood by
    the storage service.
    """
    if store_load == REMOVE:
        # Removal uses the same message for leaves and groups
        return pypetconstants.DELETE
    if store_load in (STORE, LOAD):
        return pypetconstants.LEAF if node.v_is_leaf else pypetconstants.GROUP
def _remove_subtree(self, start_node, name, predicate=None):
    """Removes a subtree from the trajectory tree.

    Does not delete stuff from disk only from RAM.

    :param start_node: The parent node from where to start
    :param name: Name of child which will be deleted and recursively all nodes below the child
    :param predicate:
        Predicate that can be used to compute for individual nodes if they should be removed
        ``True`` or kept ``False``.
    :return: ``True`` if the child was removed, ``False`` otherwise
    """
    def _delete_from_children(node, child_name):
        # Drops the child from the parent's generic child dict and from
        # whichever kind-specific dict (groups or leaves) it lives in.
        del node._children[child_name]
        if child_name in node._groups:
            del node._groups[child_name]
        elif child_name in node._leaves:
            del node._leaves[child_name]
        else:
            raise RuntimeError('You shall not pass!')

    def _remove_subtree_inner(node, predicate):
        # Depth-first removal; returns True if `node` itself was deleted,
        # False if it was kept (predicate refused, or a group still has
        # surviving children).
        if not predicate(node):
            return False
        elif node.v_is_group:
            # Iterate over copies of the key lists because the dicts are
            # mutated during iteration
            for name_ in itools.chain(list(node._leaves.keys()),
                                      list(node._groups.keys())):
                child_ = node._children[name_]
                child_deleted = _remove_subtree_inner(child_, predicate)
                if child_deleted:
                    _delete_from_children(node, name_)
                del child_
            for link_ in list(node._links.keys()):
                node.f_remove_link(link_)
            # The group itself is only deleted if all children were removed
            if len(node._children) == 0:
                self._delete_node(node)
                return True
            else:
                return False
        else:
            self._delete_node(node)
            return True

    if name in start_node._links:
        # Links are simply removed, never recursed into
        start_node.f_remove_link(name)
    else:
        child = start_node._children[name]
        if predicate is None:
            # Default predicate removes every node
            predicate = lambda x: True
        if _remove_subtree_inner(child, predicate):
            _delete_from_children(start_node, name)
            del child
            return True
        else:
            return False
def _delete_node(self, node):
    """Deletes a single node from the tree.

    Removes all references to the node.

    Note that the 'parameters', 'results', 'derived_parameters', and 'config' groups
    hanging directly below root cannot be deleted. Also the root node itself cannot be
    deleted. (This would cause a tremendous wave of uncontrollable self destruction, which
    would finally lead to the Apocalypse!)
    """
    full_name = node.v_full_name
    root = self._root_instance
    if full_name == '':
        # You cannot delete root
        return
    if node.v_is_leaf:
        # Remove the leaf from whichever top-level registry holds it
        if full_name in root._parameters:
            del root._parameters[full_name]
        elif full_name in root._config:
            del root._config[full_name]
        elif full_name in root._derived_parameters:
            del root._derived_parameters[full_name]
        elif full_name in root._results:
            del root._results[full_name]
        elif full_name in root._other_leaves:
            del root._other_leaves[full_name]
        if full_name in root._explored_parameters:
            if root._stored:
                # We always keep the explored parameters in case the trajectory was stored
                root._explored_parameters[full_name] = None
            else:
                del root._explored_parameters[full_name]
            if len(root._explored_parameters) == 0:
                root.f_shrink()
        del self._flat_leaf_storage_dict[full_name]
    else:
        del root._all_groups[full_name]
        if full_name in root._run_parent_groups:
            del root._run_parent_groups[full_name]
    # Delete all links to the node
    if full_name in root._linked_by:
        linking = root._linked_by[full_name]
        for linking_name in list(linking.keys()):
            linking_group, link_set = linking[linking_name]
            for link in list(link_set):
                linking_group.f_remove_link(link)
    if (node.v_location, node.v_name) in self._root_instance._new_nodes:
        del self._root_instance._new_nodes[(node.v_location, node.v_name)]
    # Finally remove all references in the dictionaries for fast search
    self._remove_from_nodes_and_leaves(node)
    # Remove circular references
    node._vars = None
    node._func = None
def _remove_node_or_leaf(self, instance, recursive=False):
    """Removes a single node from the tree.

    Only from RAM not from hdf5 file!

    :param instance: The node to be deleted
    :param recursive: If group nodes with children should be deleted
    """
    # Walk down from root along the node's full name
    names = deque(instance.v_full_name.split('.'))
    self._remove_along_branch(self._root_instance, names, recursive)
def _remove_along_branch(self, actual_node, split_name, recursive=False):
    """Removes a given node from the tree.

    Starts from a given node and walks recursively down the tree to the location of the node
    we want to remove.

    We need to walk from a start node in case we want to check on the way back whether we got
    empty group nodes due to deletion.

    :param actual_node: Current node
    :param split_name: DEQUE of names to get the next nodes.
    :param recursive:
        To also delete all children of a group node
    :return: True if node was deleted, otherwise False
    """
    # If the names list is empty, we have reached the node we want to delete.
    if len(split_name) == 0:
        if actual_node.v_is_group and actual_node.f_has_children():
            if recursive:
                # Iterate over a copy of the keys because children are
                # removed during iteration
                for child in list(actual_node._children.keys()):
                    actual_node.f_remove_child(child, recursive=True)
            else:
                raise TypeError('Cannot remove group `%s` it contains children. Please '
                                'remove with `recursive=True`.' % actual_node.v_full_name)
        self._delete_node(actual_node)
        return True
    # Otherwise get the next node by using the first name in the list
    name = split_name.popleft()
    if name in actual_node._links:
        if len(split_name)>0:
            # A link may only be the final element of the path
            raise RuntimeError('You cannot remove nodes while hopping over links!')
        actual_node.f_remove_link(name)
    else:
        child = actual_node._children[name]
        # Recurse first; only drop the references here if the child was
        # actually deleted further down
        if self._remove_along_branch(child, split_name, recursive=recursive):
            del actual_node._children[name]
            if name in actual_node._groups:
                del actual_node._groups[name]
            elif name in actual_node._leaves:
                del actual_node._leaves[name]
            else:
                raise RuntimeError('You shall not pass!')
            del child
    return False
def _translate_shortcut(self, name):
    """Maps a given shortcut to corresponding name

    * 'run_X' or 'r_X' to 'run_XXXXXXXXX'

    * 'par' to 'parameters'

    * 'dpar' to 'derived_parameters'

    * 'res' to 'results'

    * 'conf' to 'config'

    :return: True or False and the mapped name.
    """
    # Plain integers directly address a run
    if isinstance(name, int):
        return True, self._root_instance.f_wildcard('$', name)
    # Run shortcuts: 'run_X'/'r_X' map to '$', 'runtoset_X'/'rts_X' to '$set'
    for prefixes, wildcard in ((('run_', 'r_'), '$'),
                               (('runtoset_', 'rts_'), '$set')):
        if name.startswith(prefixes):
            parts = name.split('_')
            if len(parts) == 2:
                index = parts[1]
                if index.isdigit():
                    return True, self._root_instance.f_wildcard(wildcard, int(index))
                if index == 'A':
                    return True, self._root_instance.f_wildcard(wildcard, -1)
    if name in SHORTCUT_SET:
        translations = {'par': 'parameters',
                        'dpar': 'derived_parameters',
                        'res': 'results',
                        'conf': 'config'}
        if name in translations:
            return True, translations[name]
        raise RuntimeError('You shall not pass!')
    return False, name
def _add_prefix(self, split_names, start_node, group_type_name):
    """Adds the correct sub branch prefix to a given name.

    Usually the prefix is the full name of the parent node. In case items are added
    directly to the trajectory the prefixes are chosen according to the matching subbranch.

    For example, this could be 'parameters' for parameters or 'results.run_00000004' for
    results added to the fifth single run.

    :param split_names:
        List of names of the new node (e.g. ``['mynewgroupA', 'mynewgroupB', 'myresult']``).
    :param start_node:
        Parent node under which the new node should be added.
    :param group_type_name:
        Type name of subbranch the item belongs to
        (e.g. 'PARAMETER_GROUP', 'RESULT_GROUP' etc).
    :return: The name list with the added prefix.
    """
    root = self._root_instance
    # If the start node of our insertion is root or one below root
    # we might need to add prefixes.
    # In case of derived parameters and results we also need to add prefixes containing the
    # subbranch and the current run in case of a single run.
    # For instance, a prefix could be 'results.runs.run_00000007'.
    prepend = []
    if start_node.v_depth < 3 and not group_type_name == GROUP:
        if start_node.v_depth == 0:
            if group_type_name == DERIVED_PARAMETER_GROUP:
                if split_names[0] == 'derived_parameters':
                    return split_names
                else:
                    prepend += ['derived_parameters']
            elif group_type_name == RESULT_GROUP:
                if split_names[0] == 'results':
                    return split_names
                else:
                    prepend += ['results']
            elif group_type_name == CONFIG_GROUP:
                if split_names[0] == 'config':
                    return split_names
                else:
                    prepend += ['config']
            elif group_type_name == PARAMETER_GROUP:
                if split_names[0] == 'parameters':
                    # BUG FIX: this branch used to return `split_names[0]` (a
                    # string); all sibling branches return the full list.
                    return split_names
                else:
                    prepend += ['parameters']
            else:
                raise RuntimeError('Why are you here?')
        # Check if we have to add a prefix containing the current run
        if root._is_run and root._auto_run_prepend:
            dummy = root.f_wildcard('$', -1)
            crun = root.f_wildcard('$')
            if any(name in root._run_information for name in split_names):
                # The name already addresses a specific run
                pass
            elif any(name == dummy for name in split_names):
                # The name already contains the run wildcard placeholder
                pass
            elif (group_type_name == RESULT_GROUP or
                  group_type_name == DERIVED_PARAMETER_GROUP):
                if start_node.v_depth == 0:
                    prepend += ['runs', crun]
                elif start_node.v_depth == 1:
                    if len(split_names) == 1 and split_names[0] == 'runs':
                        return split_names
                    else:
                        prepend += ['runs', crun]
                elif start_node.v_depth == 2 and start_node.v_name == 'runs':
                    prepend += [crun]
    if prepend:
        split_names = prepend + split_names
    return split_names
def _determine_types(start_node, first_name, add_leaf, add_link):
    """Determines the type constants for generic additions.

    :return: Tuple of (group type name, type name of the added item)
    """
    # Under root the subbranch is given by the first name, otherwise by
    # the start node's branch
    where = first_name if start_node.v_is_root else start_node._branch
    type_tuple = SUBTREE_MAPPING[where] if where in SUBTREE_MAPPING else (GROUP, LEAF)
    if add_link:
        return type_tuple[0], LINK
    if add_leaf:
        return type_tuple
    return type_tuple[0], type_tuple[0]
    def _add_generic(self, start_node, type_name, group_type_name, args, kwargs,
                     add_prefix=True, check_naming=True):
        """Adds a given item to the tree irrespective of the subtree.

        Infers the subtree from the arguments.

        :param start_node: The parental node the adding was initiated from

        :param type_name:

            The type of the new instance. Whether it is a parameter, parameter group, config,
            config group, etc. See the name of the corresponding constants at the top of this
            python module.

        :param group_type_name:

            Type of the subbranch. i.e. whether the item is added to the 'parameters',
            'results' etc. These subbranch types are named as the group names
            (e.g. 'PARAMETER_GROUP') in order to have less constants.
            For all constants used see beginning of this python module.

        :param args:

            Arguments specifying how the item is added.

            If len(args)==1 and the argument is the a given instance of a result or parameter,
            this one is added to the tree.

            Otherwise it is checked if the first argument is a class specifying how to
            construct a new item and the second argument is the name of the new class.
            If the first argument is not a class but a string, the string is assumed to be
            the name of the new instance.

            Additional args are later on used for the construction of the instance.

        :param kwargs:

            Additional keyword arguments that might be handed over to the instance constructor.

        :param add_prefix:

            If a prefix group, i.e. `results`, `config`, etc. should be added

        :param check_naming:

            If it should be checked for correct namings, can be set to ``False`` if data is loaded
            and we know that all names are correct.

        :return: The new added instance

        :raises:
            ValueError if no name can be inferred or the name is not admissible;
            TypeError if config/parameter data is added during a single run.
        """
        args = list(args)
        create_new = True
        name = ''
        instance = None
        constructor = None

        add_link = type_name == LINK

        # First check if the item is already a given instance or we want to add a link
        if add_link:
            # Link additions always arrive as an ``(link_name, target_instance)`` pair
            name = args[0]
            instance = args[1]
            create_new = False
        elif len(args) == 1 and len(kwargs) == 0:
            item = args[0]
            # EAFP: anything exposing `v_full_name` is treated as a ready-made leaf/group
            try:
                name = item.v_full_name
                instance = item
                create_new = False
            except AttributeError:
                pass

        # If the item is not an instance yet, check if args[0] is a class and args[1] is
        # a string describing the new name of the instance.
        # If args[0] is not a class it is assumed to be the name of the new instance.
        if create_new:
            if len(args) > 0 and inspect.isclass(args[0]):
                constructor = args.pop(0)
            if len(args) > 0 and isinstance(args[0], str):
                name = args.pop(0)
            elif 'name' in kwargs:
                name = kwargs.pop('name')
            elif 'full_name' in kwargs:
                name = kwargs.pop('full_name')
            else:
                raise ValueError('Could not determine a name of the new item you want to add. '
                                 'Either pass the name as positional argument or as a keyword '
                                 'argument `name`.')

        split_names = name.split('.')
        if check_naming:
            # Expand shortcuts (e.g. `par` -> `parameters`) and `$` wildcards per name part
            for idx, name in enumerate(split_names):
                translated_shortcut, name = self._translate_shortcut(name)
                replaced, name = self._replace_wildcards(name)
                if translated_shortcut or replaced:
                    split_names[idx] = name

            # First check if the naming of the new item is appropriate
            faulty_names = self._check_names(split_names, start_node)
            if faulty_names:
                full_name = '.'.join(split_names)
                raise ValueError(
                    'Your Parameter/Result/Node `%s` contains the following not admissible names: '
                    '%s please choose other names.' % (full_name, faulty_names))

        if add_link:
            # Links have extra constraints: a real in-tree target, not root, not reserved
            if instance is None:
                raise ValueError('You must provide an instance to link to!')
            if instance.v_is_root:
                raise ValueError('You cannot create a link to the root node')
            if start_node.v_is_root and name in SUBTREE_MAPPING:
                raise ValueError('`%s` is a reserved name for a group under root.' % name)
            if not self._root_instance.f_contains(instance, with_links=False, shortcuts=False):
                raise ValueError('You can only link to items within the trajectory tree!')

        # Check if the name fulfils the prefix conditions, if not change the name accordingly.
        if add_prefix:
            split_names = self._add_prefix(split_names, start_node, group_type_name)

        if group_type_name == GROUP:
            add_leaf = type_name != group_type_name and not add_link
            # If this is equal we add a group node
            group_type_name, type_name = self._determine_types(start_node, split_names[0],
                                                               add_leaf, add_link)

        # Check if we are allowed to add the data
        if self._root_instance._is_run and type_name in SENSITIVE_TYPES:
            raise TypeError('You are not allowed to add config or parameter data or groups '
                            'during a single run.')

        return self._add_to_tree(start_node, split_names, type_name, group_type_name, instance,
                                 constructor, args, kwargs)
    def _add_to_tree(self, start_node, split_names, type_name, group_type_name,
                     instance, constructor, args, kwargs):
        """Adds a new item to the tree.

        The item can be an already given instance or it is created new.

        :param start_node:

            Parental node the adding of the item was initiated from.

        :param split_names:

            List of names of the new item

        :param type_name:

            Type of item 'RESULT', 'RESULT_GROUP', 'PARAMETER', etc. See name of constants
            at beginning of the python module.

        :param group_type_name:

            Name of the subbranch the item is added to 'RESULT_GROUP', 'PARAMETER_GROUP' etc.
            See name of constants at beginning of this python module.

        :param instance:

            Here an already given instance can be passed. If instance should be created new
            pass None.

        :param constructor:

            If instance should be created new pass a constructor class. If None is passed
            the standard constructor for the instance is chosen.

        :param args:

            Additional arguments passed to instance construction

        :param kwargs:

            Additional keyword arguments passed to instance construction

        :return: The new added instance

        :raises: ValueError if naming of the new item is invalid
        """
        # Then walk iteratively from the start node as specified by the new name and create
        # new empty groups on the fly
        try:
            act_node = start_node
            last_idx = len(split_names) - 1
            add_link = type_name == LINK
            link_added = False
            # last_name = start_node.v_crun
            for idx, name in enumerate(split_names):
                if name not in act_node._children:
                    if idx == last_idx:
                        # Final path component: create the actual link/leaf/group
                        if add_link:
                            new_node = self._create_link(act_node, name, instance)
                            link_added = True
                        elif group_type_name != type_name:
                            # We are at the end of the chain and we add a leaf node
                            new_node = self._create_any_param_or_result(act_node,
                                                                        name,
                                                                        type_name,
                                                                        instance,
                                                                        constructor,
                                                                        args, kwargs)
                            self._flat_leaf_storage_dict[new_node.v_full_name] = new_node
                        else:
                            # We add a group as desired
                            new_node = self._create_any_group(act_node, name,
                                                              group_type_name,
                                                              instance,
                                                              constructor,
                                                              args, kwargs)
                    else:
                        # We add a group on the fly
                        new_node = self._create_any_group(act_node, name,
                                                          group_type_name)
                    if name in self._root_instance._run_information:
                        # Remember parents of `run_XXXXXXXX` groups for later lookups
                        self._root_instance._run_parent_groups[act_node.v_full_name] = act_node
                    if self._root_instance._is_run:
                        # During a single run, track additions so they can be merged back
                        if link_added:
                            self._root_instance._new_links[(act_node.v_full_name, name)] = \
                                (act_node, new_node)
                        else:
                            self._root_instance._new_nodes[(act_node.v_full_name, name)] = \
                                (act_node, new_node)
                else:
                    if name in act_node._links:
                        raise AttributeError('You cannot hop over links when adding '
                                             'data to the tree. '
                                             'There is a link called `%s` under `%s`.' %
                                             (name, act_node.v_full_name))
                    if idx == last_idx:
                        if self._root_instance._no_clobber:
                            # With `v_no_clobber` the existing node is kept and returned
                            self._logger.warning('You already have a group/instance/link `%s` '
                                                 'under `%s`. '
                                                 'However, you set `v_no_clobber=True`, '
                                                 'so I will ignore your addition of '
                                                 'data.' % (name, act_node.v_full_name))
                        else:
                            raise AttributeError('You already have a group/instance/link `%s` '
                                                 'under `%s`' % (name, act_node.v_full_name))
                act_node = act_node._children[name]
            return act_node
        except:
            # Deliberate bare except: log which addition failed, then re-raise unchanged
            self._logger.error('Failed adding `%s` under `%s`.' %
                               (name, start_node.v_full_name))
            raise
q274035 | NaturalNamingInterface._create_link | test | def _create_link(self, act_node, name, instance):
"""Creates a link and checks if names are appropriate
"""
act_node._links[name] = instance
act_node._children[name] = instance
full_name = instance.v_full_name
if full_name not in self._root_instance._linked_by:
self._root_instance._linked_by[full_name] = {}
linking = self._root_instance._linked_by[full_name]
if act_node.v_full_name not in linking:
linking[act_node.v_full_name] = (act_node, set())
linking[act_node.v_full_name][1].add(name)
if name not in self._links_count:
self._links_count[name] = 0
self._links_count[name] = self._links_count[name] + 1
self._logger.debug('Added link `%s` under `%s` pointing '
'to `%s`.' % (name, act_node.v_full_name,
instance.v_full_name))
return instance | python | {
"resource": ""
} |
q274036 | NaturalNamingInterface._check_names | test | def _check_names(self, split_names, parent_node=None):
"""Checks if a list contains strings with invalid names.
Returns a description of the name violations. If names are correct the empty
string is returned.
:param split_names: List of strings
:param parent_node:
The parental node from where to start (only applicable for node names)
"""
faulty_names = ''
if parent_node is not None and parent_node.v_is_root and split_names[0] == 'overview':
faulty_names = '%s `overview` cannot be added directly under the root node ' \
'this is a reserved keyword,' % (faulty_names)
for split_name in split_names:
if len(split_name) == 0:
faulty_names = '%s `%s` contains no characters, please use at least 1,' % (
faulty_names, split_name)
elif split_name.startswith('_'):
faulty_names = '%s `%s` starts with a leading underscore,' % (
faulty_names, split_name)
elif re.match(CHECK_REGEXP, split_name) is None:
faulty_names = '%s `%s` contains non-admissible characters ' \
'(use only [A-Za-z0-9_-]),' % \
(faulty_names, split_name)
elif '$' in split_name:
if split_name not in self._root_instance._wildcard_keys:
faulty_names = '%s `%s` contains `$` but has no associated ' \
'wildcard function,' % (faulty_names, split_name)
elif split_name in self._not_admissible_names:
warnings.warn('`%s` is a method/attribute of the '
'trajectory/treenode/naminginterface, you may not be '
'able to access it via natural naming but only by using '
'`[]` square bracket notation. ' % split_name,
category=SyntaxWarning)
elif split_name in self._python_keywords:
warnings.warn('`%s` is a python keyword, you may not be '
'able to access it via natural naming but only by using '
'`[]` square bracket notation. ' % split_name,
category=SyntaxWarning)
name = split_names[-1]
if len(name) >= pypetconstants.HDF5_STRCOL_MAX_NAME_LENGTH:
faulty_names = '%s `%s` is too long the name can only have %d characters but it has ' \
'%d,' % \
(faulty_names, name, len(name),
pypetconstants.HDF5_STRCOL_MAX_NAME_LENGTH)
return faulty_names | python | {
"resource": ""
} |
q274037 | NaturalNamingInterface._create_any_group | test | def _create_any_group(self, parent_node, name, type_name, instance=None, constructor=None,
args=None, kwargs=None):
"""Generically creates a new group inferring from the `type_name`."""
if args is None:
args = []
if kwargs is None:
kwargs = {}
full_name = self._make_full_name(parent_node.v_full_name, name)
if instance is None:
if constructor is None:
if type_name == RESULT_GROUP:
constructor = ResultGroup
elif type_name == PARAMETER_GROUP:
constructor = ParameterGroup
elif type_name == CONFIG_GROUP:
constructor = ConfigGroup
elif type_name == DERIVED_PARAMETER_GROUP:
constructor = DerivedParameterGroup
elif type_name == GROUP:
constructor = NNGroupNode
else:
raise RuntimeError('You shall not pass!')
instance = self._root_instance._construct_instance(constructor, full_name,
*args, **kwargs)
else:
instance._rename(full_name)
# Check if someone tries to add a particular standard group to a branch where
# it does not belong:
if type_name == RESULT_GROUP:
if type(instance) in (NNGroupNode,
ParameterGroup,
ConfigGroup,
DerivedParameterGroup):
raise TypeError('You cannot add a `%s` type of group under results' %
str(type(instance)))
elif type_name == PARAMETER_GROUP:
if type(instance) in (NNGroupNode,
ResultGroup,
ConfigGroup,
DerivedParameterGroup):
raise TypeError('You cannot add a `%s` type of group under parameters' %
str(type(instance)))
elif type_name == CONFIG_GROUP:
if type(instance) in (NNGroupNode,
ParameterGroup,
ResultGroup,
DerivedParameterGroup):
raise TypeError('You cannot add a `%s` type of group under config' %
str(type(instance)))
elif type_name == DERIVED_PARAMETER_GROUP:
if type(instance) in (NNGroupNode,
ParameterGroup,
ConfigGroup,
ResultGroup):
raise TypeError('You cannot add a `%s` type of group under derived '
'parameters' % str(type(instance)))
elif type_name == GROUP:
if type(instance) in (ResultGroup,
ParameterGroup,
ConfigGroup,
DerivedParameterGroup):
raise TypeError('You cannot add a `%s` type of group under other data' %
str(type(instance)))
else:
raise RuntimeError('You shall not pass!')
self._set_details_tree_node(parent_node, name, instance)
instance._nn_interface = self
self._root_instance._all_groups[instance.v_full_name] = instance
self._add_to_nodes_and_leaves(instance)
parent_node._children[name] = instance
parent_node._groups[name] = instance
return instance | python | {
"resource": ""
} |
    def _create_any_param_or_result(self, parent_node, name, type_name, instance, constructor,
                                    args, kwargs):
        """Generically creates a novel parameter or result instance inferring from the `type_name`.

        If the instance is already supplied it is NOT constructed new.

        :param parent_node:

            Parent trajectory node

        :param name:

            Name of the new result or parameter. Here the name no longer contains colons.

        :param type_name:

            Whether it is a parameter below parameters, config, derived parameters or whether
            it is a result.

        :param instance:

            The instance if it has been constructed somewhere else, otherwise None.

        :param constructor:

            A constructor used if instance needs to be constructed. If None the current standard
            constructor is chosen.

        :param args:

            Additional arguments passed to the constructor

        :param kwargs:

            Additional keyword arguments passed to the constructor

        :return: The new instance

        :raises: AttributeError if an item with the same full name already exists
        """
        root = self._root_instance
        full_name = self._make_full_name(parent_node.v_full_name, name)

        if instance is None:
            # Pick the root's standard constructor matching the subtree type
            if constructor is None:
                if type_name == RESULT:
                    constructor = root._standard_result
                elif type_name in [PARAMETER, CONFIG, DERIVED_PARAMETER]:
                    constructor = root._standard_parameter
                else:
                    constructor = root._standard_leaf
            instance = root._construct_instance(constructor, full_name, *args, **kwargs)
        else:
            instance._rename(full_name)

        self._set_details_tree_node(parent_node, name, instance)

        where_dict = self._map_type_to_dict(type_name)
        # Re-read the full name; `_rename`/`_set_details` may have normalized it
        full_name = instance._full_name

        if full_name in where_dict:
            raise AttributeError(full_name + ' is already part of trajectory,')

        if type_name != RESULT and full_name in root._changed_default_parameters:
            # Apply a default-value change that was requested before the parameter existed
            self._logger.info(
                'You have marked parameter %s for change before, so here you go!' %
                full_name)

            change_args, change_kwargs = root._changed_default_parameters.pop(full_name)
            instance.f_set(*change_args, **change_kwargs)

        where_dict[full_name] = instance
        self._add_to_nodes_and_leaves(instance)

        parent_node._children[name] = instance
        parent_node._leaves[name] = instance

        if full_name in self._root_instance._explored_parameters:
            instance._explored = True  # Mark this parameter as explored.
            self._root_instance._explored_parameters[full_name] = instance

        self._logger.debug('Added `%s` to trajectory.' % full_name)

        return instance
q274039 | NaturalNamingInterface._set_details_tree_node | test | def _set_details_tree_node(self, parent_node, name, instance):
"""Renames a given `instance` based on `parent_node` and `name`.
Adds meta information like depth as well.
"""
depth = parent_node._depth + 1
if parent_node.v_is_root:
branch = name # We add below root
else:
branch = parent_node._branch
if name in self._root_instance._run_information:
run_branch = name
else:
run_branch = parent_node._run_branch
instance._set_details(depth, branch, run_branch) | python | {
"resource": ""
} |
    def _iter_nodes(self, node, recursive=False, max_depth=float('inf'),
                    with_links=True, in_search=False, predicate=None):
        """Returns an iterator over nodes hanging below a given start node.

        :param node:

            Start node

        :param recursive:

            Whether recursively also iterate over the children of the start node's children

        :param max_depth:

            Maximum depth to search for; ``None`` is treated like infinity

        :param in_search:

            if it is used during get search and if detailed info should be returned;
            if ``True`` the iterator yields ``(depth, name, node)`` tuples instead
            of plain nodes

        :param with_links:

            If links should be considered

        :param predicate:

            A predicate to filter nodes. May also be a list/tuple of run names or
            run indices (``-1`` meaning the latest run); then only nodes belonging
            to those runs (or to no run at all) pass.

        :return: Iterator
        """
        def _run_predicate(x, run_name_set):
            # Nodes outside any run branch ('trajectory') always pass
            branch = x.v_run_branch
            return branch == 'trajectory' or branch in run_name_set

        if max_depth is None:
            max_depth = float('inf')

        if predicate is None:
            predicate = lambda x: True
        elif isinstance(predicate, (tuple, list)):
            # Create a predicate from a list of run names or run indices
            run_list = predicate
            run_name_set = set()
            for item in run_list:
                if item == -1:
                    # -1 denotes the most recent run
                    run_name_set.add(self._root_instance.f_wildcard('$', -1))
                elif isinstance(item, int):
                    run_name_set.add(self._root_instance.f_idx_to_run(item))
                else:
                    run_name_set.add(item)
            predicate = lambda x: _run_predicate(x, run_name_set)

        if recursive:
            return NaturalNamingInterface._recursive_traversal_bfs(node,
                                                                   self._root_instance._linked_by,
                                                                   max_depth, with_links,
                                                                   in_search, predicate)
        else:
            iterator = (x for x in self._make_child_iterator(node, with_links) if
                        predicate(x[2]))
            if in_search:
                return iterator  # Here we return tuples: (depth, name, object)
            else:
                return (x[2] for x in iterator)
q274041 | NaturalNamingInterface._make_child_iterator | test | def _make_child_iterator(node, with_links, current_depth=0):
"""Returns an iterator over a node's children.
In case of using a trajectory as a run (setting 'v_crun') some sub branches
that do not belong to the run are blinded out.
"""
cdp1 = current_depth + 1
if with_links:
iterator = ((cdp1, x[0], x[1]) for x in node._children.items())
else:
leaves = ((cdp1, x[0], x[1]) for x in node._leaves.items())
groups = ((cdp1, y[0], y[1]) for y in node._groups.items())
iterator = itools.chain(groups, leaves)
return iterator | python | {
"resource": ""
} |
q274042 | NaturalNamingInterface._recursive_traversal_bfs | test | def _recursive_traversal_bfs(node, linked_by=None,
max_depth=float('inf'),
with_links=True, in_search=False, predicate=None):
"""Iterator function traversing the tree below `node` in breadth first search manner.
If `run_name` is given only sub branches of this run are considered and the rest is
blinded out.
"""
if predicate is None:
predicate = lambda x: True
iterator_queue = IteratorChain([(0, node.v_name, node)])
#iterator_queue = iter([(0, node.v_name, node)])
start = True
visited_linked_nodes = set([])
while True:
try:
depth, name, item = next(iterator_queue)
full_name = item._full_name
if start or predicate(item):
if full_name in visited_linked_nodes:
if in_search:
# We need to return the node again to check if a link to the node
# has to be found
yield depth, name, item
elif depth <= max_depth:
if start:
start = False
else:
if in_search:
yield depth, name, item
else:
yield item
if full_name in linked_by:
visited_linked_nodes.add(full_name)
if not item._is_leaf and depth < max_depth:
child_iterator = NaturalNamingInterface._make_child_iterator(item,
with_links,
current_depth=depth)
iterator_queue.add(child_iterator)
#iterator_queue = itools.chain(iterator_queue, child_iterator)
except StopIteration:
break | python | {
"resource": ""
} |
    def _very_fast_search(self, node, key, max_depth, with_links, crun):
        """Fast search for a node in the tree.

        The tree is not traversed but the reference dictionaries are searched.

        :param node:

            Parent node to start from

        :param key:

            Name of node to find

        :param max_depth:

            Maximum depth.

        :param with_links:

            If we work with links than we can only be sure to found the node in case we
            have a single match. Otherwise the other match might have been linked as well.

        :param crun:

            If given only nodes belonging to this particular run are searched and the rest
            is blinded out.

        :return:

            The found node and its depth, or ``None`` (implicitly) if `key` is used
            as a link name anywhere in the tree or no candidate is reachable from
            `node`.

        :raises:

            TooManyGroupsError:

                If search cannot performed fast enough, an alternative search method is needed.

            NotUniqueNodeError:

                If several nodes match the key criterion

        """
        if key in self._links_count:
            # `key` also names a link somewhere; the fast path cannot disambiguate
            return

        parent_full_name = node.v_full_name
        starting_depth = node.v_depth
        candidate_dict = self._get_candidate_dict(key, crun)

        # If there are to many potential candidates sequential search might be too slow
        if with_links:
            upper_bound = 1
        else:
            upper_bound = FAST_UPPER_BOUND
        if len(candidate_dict) > upper_bound:
            raise pex.TooManyGroupsError('Too many nodes')

        # Next check if the found candidates could be reached from the parent node
        result_node = None
        for goal_name in candidate_dict:

            # Check if we have found a matching node
            if goal_name.startswith(parent_full_name):

                candidate = candidate_dict[goal_name]
                if candidate.v_depth - starting_depth <= max_depth:
                    # In case of several solutions raise an error:
                    if result_node is not None:
                        raise pex.NotUniqueNodeError('Node `%s` has been found more than once, '
                                                     'full name of first occurrence is `%s` and of'
                                                     'second `%s`'
                                                     % (key, goal_name, result_node.v_full_name))
                    result_node = candidate

        if result_node is not None:
            return result_node, result_node.v_depth
    def _search(self, node, key, max_depth=float('inf'), with_links=True, crun=None):
        """ Searches for an item in the tree below `node`

        :param node:

            The parent node below which the search is performed

        :param key:

            Name to search for. Can be the short name, the full name or parts of it

        :param max_depth:

            maximum search depth.

        :param with_links:

            If links should be considered

        :param crun:

            Used for very fast search if we know we operate in a single run branch

        :return:

            The found node and the depth it was found for;
            ``(None, inf)`` if nothing matched.
        """
        # If we find it directly there is no need for an exhaustive search
        if key in node._children and (with_links or key not in node._links):
            return node._children[key], 1

        # First the very fast search is tried that does not need tree traversal.
        # If it is inconclusive (too many candidates or ambiguous) we silently
        # fall through to the exhaustive traversal below.
        try:
            result = self._very_fast_search(node, key, max_depth, with_links, crun)
            if result:
                return result
        except pex.TooManyGroupsError:
            pass
        except pex.NotUniqueNodeError:
            pass

        # Slowly traverse the entire tree
        nodes_iterator = self._iter_nodes(node, recursive=True,
                                          max_depth=max_depth, in_search=True,
                                          with_links=with_links)
        result_node = None
        result_depth = float('inf')
        for depth, name, child in nodes_iterator:

            if depth > result_depth:
                # We can break here because we enter a deeper stage of the tree and we
                # cannot find matching node of the same depth as the one we found
                break

            if key == name:
                # If result_node is not None means that we care about uniqueness and the search
                # has found more than a single solution.
                if result_node is not None:
                    raise pex.NotUniqueNodeError('Node `%s` has been found more than once within '
                                                 'the same depth %d. '
                                                 'Full name of first occurrence is `%s` and of '
                                                 'second `%s`'
                                                 % (key, child.v_depth, result_node.v_full_name,
                                                    child.v_full_name))

                result_node = child
                result_depth = depth

        return result_node, result_depth
    def _backwards_search(self, start_node, split_name, max_depth=float('inf'), shortcuts=True):
        """ Performs a backwards search from the terminal node back to the start node

        :param start_node:

            The node from where search starts, or here better way where backwards search should
            end.

        :param split_name:

            List of names

        :param max_depth:

            Maximum search depth where to look for

        :param shortcuts:

            If shortcuts are allowed

        :return: List of matching nodes (may be empty)
        """
        result_list = []  # Result list of all found items
        full_name_set = set()  # Set containing full names of all found items to avoid
        # finding items twice due to links

        colon_name = '.'.join(split_name)
        key = split_name[-1]
        candidate_dict = self._get_candidate_dict(key, None, use_upper_bound=False)
        parent_full_name = start_node.v_full_name

        split_length = len(split_name)

        for candidate_name in candidate_dict:
            # Check if candidate startswith the parent's name
            candidate = candidate_dict[candidate_name]
            if key != candidate.v_name or candidate.v_full_name in full_name_set:
                # If this is not the case we do have link, that we need to skip
                continue

            if candidate_name.startswith(parent_full_name):
                if parent_full_name != '':
                    # Strip the parent's prefix (plus the separating dot)
                    reduced_candidate_name = candidate_name[len(parent_full_name) + 1:]
                else:
                    reduced_candidate_name = candidate_name

                candidate_split_name = reduced_candidate_name.split('.')
                if len(candidate_split_name) > max_depth:
                    # NOTE(review): `break` assumes candidates are ordered by depth;
                    # if `candidate_dict` ordering is arbitrary this could skip valid
                    # shallower candidates -- confirm the ordering guarantee.
                    break

                if len(split_name) == 1 or reduced_candidate_name.endswith(colon_name):
                    # Exact match of the (possibly colon separated) tail
                    result_list.append(candidate)
                    full_name_set.add(candidate.v_full_name)
                elif shortcuts:
                    # Allow hopping over groups: all requested names must occur
                    # in the candidate's path in the requested order
                    candidate_set = set(candidate_split_name)
                    climbing = True
                    for name in split_name:
                        if name not in candidate_set:
                            climbing = False
                            break

                    if climbing:
                        count = 0
                        candidate_length = len(candidate_split_name)
                        for idx in range(candidate_length):
                            if idx + split_length - count > candidate_length:
                                # Remaining path too short to match the rest
                                break
                            if split_name[count] == candidate_split_name[idx]:
                                count += 1
                            if count == len(split_name):
                                result_list.append(candidate)
                                full_name_set.add(candidate.v_full_name)
                                break

        return result_list
q274046 | NNGroupNode.kids | test | def kids(self):
"""Alternative naming, you can use `node.kids.name` instead of `node.name`
for easier tab completion."""
if self._kids is None:
self._kids = NNTreeNodeKids(self)
return self._kids | python | {
"resource": ""
} |
q274047 | NNGroupNode._add_group_from_storage | test | def _add_group_from_storage(self, args, kwargs):
"""Can be called from storage service to create a new group to bypass name checking"""
return self._nn_interface._add_generic(self,
type_name=GROUP,
group_type_name=GROUP,
args=args,
kwargs=kwargs,
add_prefix=False,
check_naming=False) | python | {
"resource": ""
} |
q274048 | NNGroupNode._add_leaf_from_storage | test | def _add_leaf_from_storage(self, args, kwargs):
"""Can be called from storage service to create a new leaf to bypass name checking"""
return self._nn_interface._add_generic(self,
type_name=LEAF,
group_type_name=GROUP,
args=args, kwargs=kwargs,
add_prefix=False,
check_naming=False) | python | {
"resource": ""
} |
    def f_dir_data(self):
        """Returns a list of all children names.

        If auto-loading is enabled on the root node, the immediate children are
        (best-effort) loaded from storage first so the listing is complete.
        """
        if (self._nn_interface is not None and
                self._nn_interface._root_instance is not None
                and self.v_root.v_auto_load):
            try:
                if self.v_is_root:
                    # Skip meta/run info for speed; only the skeleton one level deep
                    self.f_load(recursive=True, max_depth=1,
                                load_data=pypetconstants.LOAD_SKELETON,
                                with_meta_data=False,
                                with_run_information=False)
                else:
                    self.f_load(recursive=True, max_depth=1, load_data=pypetconstants.LOAD_SKELETON)
            except Exception as exc:
                # Deliberate best-effort: loading is only a convenience here, any
                # failure (e.g. no storage attached) falls back to in-memory children
                pass
        return list(self._children.keys())
q274050 | NNGroupNode._debug | test | def _debug(self):
"""Creates a dummy object containing the whole tree to make unfolding easier.
This method is only useful for debugging purposes.
If you use an IDE and want to unfold the trajectory tree, you always need to
open the private attribute `_children`. Use to this function to create a new
object that contains the tree structure in its attributes.
Manipulating the returned object does not change the original tree!
"""
class Bunch(object):
"""Dummy container class"""
pass
debug_tree = Bunch()
if not self.v_annotations.f_is_empty():
debug_tree.v_annotations = self.v_annotations
if not self.v_comment == '':
debug_tree.v_comment = self.v_comment
for leaf_name in self._leaves:
leaf = self._leaves[leaf_name]
setattr(debug_tree, leaf_name, leaf)
for link_name in self._links:
linked_node = self._links[link_name]
setattr(debug_tree, link_name, 'Link to `%s`' % linked_node.v_full_name)
for group_name in self._groups:
group = self._groups[group_name]
setattr(debug_tree, group_name, group._debug())
return debug_tree | python | {
"resource": ""
} |
q274051 | NNGroupNode.f_get_parent | test | def f_get_parent(self):
"""Returns the parent of the node.
Raises a TypeError if current node is root.
"""
if self.v_is_root:
raise TypeError('Root does not have a parent')
elif self.v_location == '':
return self.v_root
else:
return self.v_root.f_get(self.v_location, fast_access=False, shortcuts=False) | python | {
"resource": ""
} |
q274052 | NNGroupNode.f_add_group | test | def f_add_group(self, *args, **kwargs):
"""Adds an empty generic group under the current node.
You can add to a generic group anywhere you want. So you are free to build
your parameter tree with any structure. You do not necessarily have to follow the
four subtrees `config`, `parameters`, `derived_parameters`, `results`.
If you are operating within these subtrees this simply calls the corresponding adding
function.
Be aware that if you are within a single run and you add items not below a group
`run_XXXXXXXX` that you have to manually
save the items. Otherwise they will be lost after the single run is completed.
"""
return self._nn_interface._add_generic(self, type_name=GROUP,
group_type_name=GROUP,
args=args, kwargs=kwargs, add_prefix=False) | python | {
"resource": ""
} |
q274053 | NNGroupNode.f_add_link | test | def f_add_link(self, name_or_item, full_name_or_item=None):
"""Adds a link to an existing node.
Can be called as ``node.f_add_link(other_node)`` this will add a link the `other_node`
with the link name as the name of the node.
Or can be called as ``node.f_add_link(name, other_node)`` to add a link to the
`other_node` and the given `name` of the link.
In contrast to addition of groups and leaves, colon separated names
are **not** allowed, i.e. ``node.f_add_link('mygroup.mylink', other_node)``
does not work.
"""
if isinstance(name_or_item, str):
name = name_or_item
if isinstance(full_name_or_item, str):
instance = self.v_root.f_get(full_name_or_item)
else:
instance = full_name_or_item
else:
instance = name_or_item
name = instance.v_name
return self._nn_interface._add_generic(self, type_name=LINK,
group_type_name=GROUP, args=(name, instance),
kwargs={},
add_prefix=False) | python | {
"resource": ""
} |
q274054 | NNGroupNode.f_remove_link | test | def f_remove_link(self, name):
""" Removes a link from from the current group node with a given name.
Does not delete the link from the hard drive. If you want to do this,
checkout :func:`~pypet.trajectory.Trajectory.f_delete_links`
"""
if name not in self._links:
raise ValueError('No link with name `%s` found under `%s`.' % (name, self._full_name))
self._nn_interface._remove_link(self, name) | python | {
"resource": ""
} |
q274055 | NNGroupNode.f_add_leaf | test | def f_add_leaf(self, *args, **kwargs):
"""Adds an empty generic leaf under the current node.
You can add to a generic leaves anywhere you want. So you are free to build
your trajectory tree with any structure. You do not necessarily have to follow the
four subtrees `config`, `parameters`, `derived_parameters`, `results`.
If you are operating within these subtrees this simply calls the corresponding adding
function.
Be aware that if you are within a single run and you add items not below a group
`run_XXXXXXXX` that you have to manually
save the items. Otherwise they will be lost after the single run is completed.
"""
return self._nn_interface._add_generic(self, type_name=LEAF,
group_type_name=GROUP,
args=args, kwargs=kwargs,
add_prefix=False) | python | {
"resource": ""
} |
q274056 | NNGroupNode.f_remove | test | def f_remove(self, recursive=True, predicate=None):
"""Recursively removes the group and all it's children.
:param recursive:
If removal should be applied recursively. If not, node can only be removed
if it has no children.
:param predicate:
In case of recursive removal, you can selectively remove nodes in the tree.
Predicate which can evaluate for each node to ``True`` in order to remove the node or
``False`` if the node should be kept. Leave ``None`` if you want to remove all nodes.
"""
parent = self.f_get_parent()
parent.f_remove_child(self.v_name, recursive=recursive, predicate=predicate) | python | {
"resource": ""
} |
q274057 | NNGroupNode.f_remove_child | test | def f_remove_child(self, name, recursive=False, predicate=None):
"""Removes a child of the group.
Note that groups and leaves are only removed from the current trajectory in RAM.
If the trajectory is stored to disk, this data is not affected. Thus, removing children
can be only be used to free RAM memory!
If you want to free memory on disk via your storage service,
use :func:`~pypet.trajectory.Trajectory.f_delete_items` of your trajectory.
:param name:
Name of child, naming by grouping is NOT allowed ('groupA.groupB.childC'),
child must be direct successor of current node.
:param recursive:
Must be true if child is a group that has children. Will remove
the whole subtree in this case. Otherwise a Type Error is thrown.
:param predicate:
Predicate which can evaluate for each node to ``True`` in order to remove the node or
``False`` if the node should be kept. Leave ``None`` if you want to remove all nodes.
:raises:
TypeError if recursive is false but there are children below the node.
ValueError if child does not exist.
"""
if name not in self._children:
raise ValueError('Your group `%s` does not contain the child `%s`.' %
(self.v_full_name, name))
else:
child = self._children[name]
if (name not in self._links and
not child.v_is_leaf and
child.f_has_children() and
not recursive):
raise TypeError('Cannot remove child. It is a group with children. Use'
' f_remove with ``recursive = True``')
else:
self._nn_interface._remove_subtree(self, name, predicate) | python | {
"resource": ""
} |
q274058 | NNGroupNode.f_contains | test | def f_contains(self, item, with_links=True, shortcuts=False, max_depth=None):
"""Checks if the node contains a specific parameter or result.
It is checked if the item can be found via the
:func:`~pypet.naturalnaming.NNGroupNode.f_get` method.
:param item: Parameter/Result name or instance.
If a parameter or result instance is supplied it is also checked if
the provided item and the found item are exactly the same instance, i.e.
`id(item)==id(found_item)`.
:param with_links:
If links are considered.
:param shortcuts:
Shortcuts is `False` the name you supply must
be found in the tree WITHOUT hopping over nodes in between.
If `shortcuts=False` and you supply a
non colon separated (short) name, than the name must be found
in the immediate children of your current node.
Otherwise searching via shortcuts is allowed.
:param max_depth:
If shortcuts is `True` than the maximum search depth
can be specified. `None` means no limit.
:return: True or False
"""
# Check if an instance or a name was supplied by the user
try:
search_string = item.v_full_name
parent_full_name = self.v_full_name
if not search_string.startswith(parent_full_name):
return False
if parent_full_name != '':
search_string = search_string[len(parent_full_name) + 1:]
else:
search_string = search_string
shortcuts = False # if we search for a particular item we do not allow shortcuts
except AttributeError:
search_string = item
item = None
if search_string == '':
return False # To allow to search for nodes wit name = '', which are never part
# of the trajectory
try:
result = self.f_get(search_string,
shortcuts=shortcuts, max_depth=max_depth, with_links=with_links)
except AttributeError:
return False
if item is not None:
return id(item) == id(result)
else:
return True | python | {
"resource": ""
} |
q274059 | NNGroupNode.f_get_default | test | def f_get_default(self, name, default=None, fast_access=True, with_links=True,
shortcuts=True, max_depth=None, auto_load=False):
""" Similar to `f_get`, but returns the default value if `name` is not found in the
trajectory.
This function uses the `f_get` method and will return the default value
in case `f_get` raises an AttributeError or a DataNotInStorageError.
Other errors are not handled.
In contrast to `f_get`, fast access is True by default.
"""
try:
return self.f_get(name, fast_access=fast_access,
shortcuts=shortcuts,
max_depth=max_depth,
auto_load=auto_load,
with_links=with_links)
except (AttributeError, pex.DataNotInStorageError):
return default | python | {
"resource": ""
} |
q274060 | NNGroupNode.f_get_children | test | def f_get_children(self, copy=True):
"""Returns a children dictionary.
:param copy:
Whether the group's original dictionary or a shallow copy is returned.
If you want the real dictionary please do not modify it at all!
:returns: Dictionary of nodes
"""
if copy:
return self._children.copy()
else:
return self._children | python | {
"resource": ""
} |
q274061 | NNGroupNode.f_get_groups | test | def f_get_groups(self, copy=True):
"""Returns a dictionary of groups hanging immediately below this group.
:param copy:
Whether the group's original dictionary or a shallow copy is returned.
If you want the real dictionary please do not modify it at all!
:returns: Dictionary of nodes
"""
if copy:
return self._groups.copy()
else:
return self._groups | python | {
"resource": ""
} |
q274062 | NNGroupNode.f_get_leaves | test | def f_get_leaves(self, copy=True):
"""Returns a dictionary of all leaves hanging immediately below this group.
:param copy:
Whether the group's original dictionary or a shallow copy is returned.
If you want the real dictionary please do not modify it at all!
:returns: Dictionary of nodes
"""
if copy:
return self._leaves.copy()
else:
return self._leaves | python | {
"resource": ""
} |
q274063 | NNGroupNode.f_get_links | test | def f_get_links(self, copy=True):
"""Returns a link dictionary.
:param copy:
Whether the group's original dictionary or a shallow copy is returned.
If you want the real dictionary please do not modify it at all!
:returns: Dictionary of nodes
"""
if copy:
return self._links.copy()
else:
return self._links | python | {
"resource": ""
} |
q274064 | NNGroupNode.f_store_child | test | def f_store_child(self, name, recursive=False, store_data=pypetconstants.STORE_DATA,
max_depth=None):
"""Stores a child or recursively a subtree to disk.
:param name:
Name of child to store. If grouped ('groupA.groupB.childC') the path along the way
to last node in the chain is stored. Shortcuts are NOT allowed!
:param recursive:
Whether recursively all children's children should be stored too.
:param store_data:
For how to choose 'store_data' see :ref:`more-on-storing`.
:param max_depth:
In case `recursive` is `True`, you can specify the maximum depth to store
data relative from current node. Leave `None` if you don't want to limit
the depth.
:raises: ValueError if the child does not exist.
"""
if not self.f_contains(name, shortcuts=False):
raise ValueError('Your group `%s` does not (directly) contain the child `%s`. '
'Please not that shortcuts are not allowed for `f_store_child`.' %
(self.v_full_name, name))
traj = self._nn_interface._root_instance
storage_service = traj.v_storage_service
storage_service.store(pypetconstants.TREE, self, name,
trajectory_name=traj.v_name,
recursive=recursive,
store_data=store_data,
max_depth=max_depth) | python | {
"resource": ""
} |
q274065 | NNGroupNode.f_store | test | def f_store(self, recursive=True, store_data=pypetconstants.STORE_DATA,
max_depth=None):
"""Stores a group node to disk
:param recursive:
Whether recursively all children should be stored too. Default is ``True``.
:param store_data:
For how to choose 'store_data' see :ref:`more-on-storing`.
:param max_depth:
In case `recursive` is `True`, you can specify the maximum depth to store
data relative from current node. Leave `None` if you don't want to limit
the depth.
"""
traj = self._nn_interface._root_instance
storage_service = traj.v_storage_service
storage_service.store(pypetconstants.GROUP, self,
trajectory_name=traj.v_name,
recursive=recursive,
store_data=store_data,
max_depth=max_depth) | python | {
"resource": ""
} |
q274066 | NNGroupNode.f_load_child | test | def f_load_child(self, name, recursive=False, load_data=pypetconstants.LOAD_DATA,
max_depth=None):
"""Loads a child or recursively a subtree from disk.
:param name:
Name of child to load. If grouped ('groupA.groupB.childC') the path along the way
to last node in the chain is loaded. Shortcuts are NOT allowed!
:param recursive:
Whether recursively all nodes below the last child should be loaded, too.
Note that links are never evaluated recursively. Only the linked node
will be loaded if it does not exist in the tree, yet. Any nodes or links
of this linked node are not loaded.
:param load_data:
Flag how to load the data.
For how to choose 'load_data' see :ref:`more-on-loading`.
:param max_depth:
In case `recursive` is `True`, you can specify the maximum depth to load
load data relative from current node. Leave `None` if you don't want to limit
the depth.
:returns:
The loaded child, in case of grouping ('groupA.groupB.childC') the last
node (here 'childC') is returned.
"""
traj = self._nn_interface._root_instance
storage_service = traj.v_storage_service
storage_service.load(pypetconstants.TREE, self, name,
trajectory_name=traj.v_name,
load_data=load_data,
recursive=recursive,
max_depth=max_depth)
return self.f_get(name, shortcuts=False) | python | {
"resource": ""
} |
q274067 | NNGroupNode.f_load | test | def f_load(self, recursive=True, load_data=pypetconstants.LOAD_DATA,
max_depth=None):
"""Loads a group from disk.
:param recursive:
Default is ``True``.
Whether recursively all nodes below the current node should be loaded, too.
Note that links are never evaluated recursively. Only the linked node
will be loaded if it does not exist in the tree, yet. Any nodes or links
of this linked node are not loaded.
:param load_data:
Flag how to load the data.
For how to choose 'load_data' see :ref:`more-on-loading`.
:param max_depth:
In case `recursive` is `True`, you can specify the maximum depth to load
load data relative from current node.
:returns:
The node itself.
"""
traj = self._nn_interface._root_instance
storage_service = traj.v_storage_service
storage_service.load(pypetconstants.GROUP, self,
trajectory_name=traj.v_name,
load_data=load_data,
recursive=recursive,
max_depth=max_depth)
return self | python | {
"resource": ""
} |
q274068 | ParameterGroup.f_add_parameter_group | test | def f_add_parameter_group(self, *args, **kwargs):
"""Adds an empty parameter group under the current node.
Can be called with ``f_add_parameter_group('MyName', 'this is an informative comment')``
or ``f_add_parameter_group(name='MyName', comment='This is an informative comment')``
or with a given new group instance:
``f_add_parameter_group(ParameterGroup('MyName', comment='This is a comment'))``.
Adds the full name of the current node as prefix to the name of the group.
If current node is the trajectory (root), the prefix `'parameters'`
is added to the full name.
The `name` can also contain subgroups separated via colons, for example:
`name=subgroup1.subgroup2.subgroup3`. These other parent groups will be automatically
created.
"""
return self._nn_interface._add_generic(self, type_name=PARAMETER_GROUP,
group_type_name=PARAMETER_GROUP,
args=args, kwargs=kwargs) | python | {
"resource": ""
} |
q274069 | ParameterGroup.f_add_parameter | test | def f_add_parameter(self, *args, **kwargs):
""" Adds a parameter under the current node.
There are two ways to add a new parameter either by adding a parameter instance:
>>> new_parameter = Parameter('group1.group2.myparam', data=42, comment='Example!')
>>> traj.f_add_parameter(new_parameter)
Or by passing the values directly to the function, with the name being the first
(non-keyword!) argument:
>>> traj.f_add_parameter('group1.group2.myparam', 42, comment='Example!')
If you want to create a different parameter than the standard parameter, you can
give the constructor as the first (non-keyword!) argument followed by the name
(non-keyword!):
>>> traj.f_add_parameter(PickleParameter,'group1.group2.myparam', data=42, comment='Example!')
The full name of the current node is added as a prefix to the given parameter name.
If the current node is the trajectory the prefix `'parameters'` is added to the name.
Note, all non-keyword and keyword parameters apart from the optional constructor
are passed on as is to the constructor.
Moreover, you always should specify a default data value of a parameter,
even if you want to explore it later.
"""
return self._nn_interface._add_generic(self, type_name=PARAMETER,
group_type_name=PARAMETER_GROUP,
args=args, kwargs=kwargs) | python | {
"resource": ""
} |
q274070 | ResultGroup.f_add_result_group | test | def f_add_result_group(self, *args, **kwargs):
"""Adds an empty result group under the current node.
Adds the full name of the current node as prefix to the name of the group.
If current node is a single run (root) adds the prefix `'results.runs.run_08%d%'` to the
full name where `'08%d'` is replaced by the index of the current run.
The `name` can also contain subgroups separated via colons, for example:
`name=subgroup1.subgroup2.subgroup3`. These other parent groups will be automatically
be created.
"""
return self._nn_interface._add_generic(self, type_name=RESULT_GROUP,
group_type_name=RESULT_GROUP,
args=args, kwargs=kwargs) | python | {
"resource": ""
} |
q274071 | ResultGroup.f_add_result | test | def f_add_result(self, *args, **kwargs):
"""Adds a result under the current node.
There are two ways to add a new result either by adding a result instance:
>>> new_result = Result('group1.group2.myresult', 1666, x=3, y=4, comment='Example!')
>>> traj.f_add_result(new_result)
Or by passing the values directly to the function, with the name being the first
(non-keyword!) argument:
>>> traj.f_add_result('group1.group2.myresult', 1666, x=3, y=3,comment='Example!')
If you want to create a different result than the standard result, you can
give the constructor as the first (non-keyword!) argument followed by the name
(non-keyword!):
>>> traj.f_add_result(PickleResult,'group1.group2.myresult', 1666, x=3, y=3, comment='Example!')
Additional arguments (here `1666`) or keyword arguments (here `x=3, y=3`) are passed
onto the constructor of the result.
Adds the full name of the current node as prefix to the name of the result.
If current node is a single run (root) adds the prefix `'results.runs.run_08%d%'` to the
full name where `'08%d'` is replaced by the index of the current run.
"""
return self._nn_interface._add_generic(self, type_name=RESULT,
group_type_name=RESULT_GROUP,
args=args, kwargs=kwargs) | python | {
"resource": ""
} |
q274072 | DerivedParameterGroup.f_add_derived_parameter_group | test | def f_add_derived_parameter_group(self, *args, **kwargs):
"""Adds an empty derived parameter group under the current node.
Adds the full name of the current node as prefix to the name of the group.
If current node is a single run (root) adds the prefix `'derived_parameters.runs.run_08%d%'`
to the full name where `'08%d'` is replaced by the index of the current run.
The `name` can also contain subgroups separated via colons, for example:
`name=subgroup1.subgroup2.subgroup3`. These other parent groups will be automatically
be created.
"""
return self._nn_interface._add_generic(self, type_name=DERIVED_PARAMETER_GROUP,
group_type_name=DERIVED_PARAMETER_GROUP,
args=args, kwargs=kwargs) | python | {
"resource": ""
} |
q274073 | DerivedParameterGroup.f_add_derived_parameter | test | def f_add_derived_parameter(self, *args, **kwargs):
"""Adds a derived parameter under the current group.
Similar to
:func:`~pypet.naturalnaming.ParameterGroup.f_add_parameter`
Naming prefixes are added as in
:func:`~pypet.naturalnaming.DerivedParameterGroup.f_add_derived_parameter_group`
"""
return self._nn_interface._add_generic(self, type_name=DERIVED_PARAMETER,
group_type_name=DERIVED_PARAMETER_GROUP,
args=args, kwargs=kwargs) | python | {
"resource": ""
} |
q274074 | ConfigGroup.f_add_config_group | test | def f_add_config_group(self, *args, **kwargs):
"""Adds an empty config group under the current node.
Adds the full name of the current node as prefix to the name of the group.
If current node is the trajectory (root), the prefix `'config'` is added to the full name.
The `name` can also contain subgroups separated via colons, for example:
`name=subgroup1.subgroup2.subgroup3`. These other parent groups will be automatically
be created.
"""
return self._nn_interface._add_generic(self, type_name=CONFIG_GROUP,
group_type_name=CONFIG_GROUP,
args=args, kwargs=kwargs) | python | {
"resource": ""
} |
q274075 | ConfigGroup.f_add_config | test | def f_add_config(self, *args, **kwargs):
"""Adds a config parameter under the current group.
Similar to
:func:`~pypet.naturalnaming.ParameterGroup.f_add_parameter`.
If current group is the trajectory the prefix `'config'` is added to the name.
"""
return self._nn_interface._add_generic(self, type_name=CONFIG,
group_type_name=CONFIG_GROUP,
args=args, kwargs=kwargs) | python | {
"resource": ""
} |
q274076 | eval_one_max | test | def eval_one_max(traj, individual):
"""The fitness function"""
traj.f_add_result('$set.$.individual', list(individual))
fitness = sum(individual)
traj.f_add_result('$set.$.fitness', fitness)
traj.f_store()
return (fitness,) | python | {
"resource": ""
} |
q274077 | add_commit_variables | test | def add_commit_variables(traj, commit):
"""Adds commit information to the trajectory."""
git_time_value = time.strftime('%Y_%m_%d_%Hh%Mm%Ss', time.localtime(commit.committed_date))
git_short_name = str(commit.hexsha[0:7])
git_commit_name = 'commit_%s_' % git_short_name
git_commit_name = 'git.' + git_commit_name + git_time_value
if not traj.f_contains('config.'+git_commit_name, shortcuts=False):
git_commit_name += '.'
# Add the hexsha
traj.f_add_config(git_commit_name+'hexsha', commit.hexsha,
comment='SHA-1 hash of commit')
# Add the description string
traj.f_add_config(git_commit_name+'name_rev', commit.name_rev,
comment='String describing the commits hex sha based on '
'the closest Reference')
# Add unix epoch
traj.f_add_config(git_commit_name+'committed_date',
commit.committed_date, comment='Date of commit as unix epoch seconds')
# Add commit message
traj.f_add_config(git_commit_name+'message', str(commit.message),
comment='The commit message') | python | {
"resource": ""
} |
q274078 | make_git_commit | test | def make_git_commit(environment, git_repository, user_message, git_fail):
""" Makes a commit and returns if a new commit was triggered and the SHA_1 code of the commit.
If `git_fail` is `True` program fails instead of triggering a new commit given
not committed changes. Then a `GitDiffError` is raised.
"""
# Import GitPython, we do it here to allow also users not having GitPython installed
# to use the normal environment
# Open the repository
repo = git.Repo(git_repository)
index = repo.index
traj = environment.v_trajectory
# Create the commit message and append the trajectory name and comment
if traj.v_comment:
commentstr = ', Comment: `%s`' % traj.v_comment
else:
commentstr = ''
if user_message:
user_message += ' -- '
message = '%sTrajectory: `%s`, Time: `%s`, %s' % \
(user_message, traj.v_name, traj.v_time, commentstr)
# Detect changes:
diff = index.diff(None)
if diff:
if git_fail:
# User requested fail instead of a new commit
raise pex.GitDiffError('Found not committed changes!')
# Make the commit
repo.git.add('-u')
commit = index.commit(message)
new_commit = True
else:
# Take old commit
commit = repo.commit(None)
new_commit = False
# Add the commit info to the trajectory
add_commit_variables(traj, commit)
return new_commit, commit.hexsha | python | {
"resource": ""
} |
q274079 | flatten_dictionary | test | def flatten_dictionary(nested_dict, separator):
"""Flattens a nested dictionary.
New keys are concatenations of nested keys with the `separator` in between.
"""
flat_dict = {}
for key, val in nested_dict.items():
if isinstance(val, dict):
new_flat_dict = flatten_dictionary(val, separator)
for flat_key, inval in new_flat_dict.items():
new_key = key + separator + flat_key
flat_dict[new_key] = inval
else:
flat_dict[key] = val
return flat_dict | python | {
"resource": ""
} |
q274080 | nest_dictionary | test | def nest_dictionary(flat_dict, separator):
""" Nests a given flat dictionary.
Nested keys are created by splitting given keys around the `separator`.
"""
nested_dict = {}
for key, val in flat_dict.items():
split_key = key.split(separator)
act_dict = nested_dict
final_key = split_key.pop()
for new_key in split_key:
if not new_key in act_dict:
act_dict[new_key] = {}
act_dict = act_dict[new_key]
act_dict[final_key] = val
return nested_dict | python | {
"resource": ""
} |
q274081 | progressbar | test | def progressbar(index, total, percentage_step=10, logger='print', log_level=logging.INFO,
reprint=True, time=True, length=20, fmt_string=None, reset=False):
"""Plots a progress bar to the given `logger` for large for loops.
To be used inside a for-loop at the end of the loop:
.. code-block:: python
for irun in range(42):
my_costly_job() # Your expensive function
progressbar(index=irun, total=42, reprint=True) # shows a growing progressbar
There is no initialisation of the progressbar necessary before the for-loop.
The progressbar will be reset automatically if used in another for-loop.
:param index: Current index of for-loop
:param total: Total size of for-loop
:param percentage_step: Steps with which the bar should be plotted
:param logger:
Logger to write to - with level INFO. If string 'print' is given, the print statement is
used. Use ``None`` if you don't want to print or log the progressbar statement.
:param log_level: Log level with which to log.
:param reprint:
If no new line should be plotted but carriage return (works only for printing)
:param time: If the remaining time should be estimated and displayed
:param length: Length of the bar in `=` signs.
:param fmt_string:
A string which contains exactly one `%s` in order to incorporate the progressbar.
If such a string is given, ``fmt_string % progressbar`` is printed/logged.
:param reset:
If the progressbar should be restarted. If progressbar is called with a lower
index than the one before, the progressbar is automatically restarted.
:return:
The progressbar string or `None` if the string has not been updated.
"""
return _progressbar(index=index, total=total, percentage_step=percentage_step,
logger=logger, log_level=log_level, reprint=reprint,
time=time, length=length, fmt_string=fmt_string, reset=reset) | python | {
"resource": ""
} |
q274082 | _get_argspec | test | def _get_argspec(func):
"""Helper function to support both Python versions"""
if inspect.isclass(func):
func = func.__init__
if not inspect.isfunction(func):
# Init function not existing
return [], False
parameters = inspect.signature(func).parameters
args = []
uses_starstar = False
for par in parameters.values():
if (par.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD or
par.kind == inspect.Parameter.KEYWORD_ONLY):
args.append(par.name)
elif par.kind == inspect.Parameter.VAR_KEYWORD:
uses_starstar = True
return args, uses_starstar | python | {
"resource": ""
} |
q274083 | get_matching_kwargs | test | def get_matching_kwargs(func, kwargs):
"""Takes a function and keyword arguments and returns the ones that can be passed."""
args, uses_startstar = _get_argspec(func)
if uses_startstar:
return kwargs.copy()
else:
matching_kwargs = dict((k, kwargs[k]) for k in args if k in kwargs)
return matching_kwargs | python | {
"resource": ""
} |
q274084 | format_time | test | def format_time(timestamp):
"""Formats timestamp to human readable format"""
format_string = '%Y_%m_%d_%Hh%Mm%Ss'
formatted_time = datetime.datetime.fromtimestamp(timestamp).strftime(format_string)
return formatted_time | python | {
"resource": ""
} |
q274085 | port_to_tcp | test | def port_to_tcp(port=None):
"""Returns local tcp address for a given `port`, automatic port if `None`"""
#address = 'tcp://' + socket.gethostbyname(socket.getfqdn())
domain_name = socket.getfqdn()
try:
addr_list = socket.getaddrinfo(domain_name, None)
except Exception:
addr_list = socket.getaddrinfo('127.0.0.1', None)
family, socktype, proto, canonname, sockaddr = addr_list[0]
host = convert_ipv6(sockaddr[0])
address = 'tcp://' + host
if port is None:
port = ()
if not isinstance(port, int):
# determine port automatically
context = zmq.Context()
try:
socket_ = context.socket(zmq.REP)
socket_.ipv6 = is_ipv6(address)
port = socket_.bind_to_random_port(address, *port)
except Exception:
print('Could not connect to {} using {}'.format(address, addr_list))
pypet_root_logger = logging.getLogger('pypet')
pypet_root_logger.exception('Could not connect to {}'.format(address))
raise
socket_.close()
context.term()
return address + ':' + str(port) | python | {
"resource": ""
} |
q274086 | racedirs | test | def racedirs(path):
"""Like os.makedirs but takes care about race conditions"""
if os.path.isfile(path):
raise IOError('Path `%s` is already a file not a directory')
while True:
try:
if os.path.isdir(path):
# only break if full path has been created or exists
break
os.makedirs(path)
except EnvironmentError as exc:
# Part of the directory path already exist
if exc.errno != 17:
# This error won't be any good
raise | python | {
"resource": ""
} |
q274087 | _Progressbar._reset | test | def _reset(self, index, total, percentage_step, length):
"""Resets to the progressbar to start a new one"""
self._start_time = datetime.datetime.now()
self._start_index = index
self._current_index = index
self._percentage_step = percentage_step
self._total = float(total)
self._total_minus_one = total - 1
self._length = length
self._norm_factor = total * percentage_step / 100.0
self._current_interval = int((index + 1.0) / self._norm_factor) | python | {
"resource": ""
} |
q274088 | _Progressbar._get_remaining | test | def _get_remaining(self, index):
"""Calculates remaining time as a string"""
try:
current_time = datetime.datetime.now()
time_delta = current_time - self._start_time
try:
total_seconds = time_delta.total_seconds()
except AttributeError:
# for backwards-compatibility
# Python 2.6 does not support `total_seconds`
total_seconds = ((time_delta.microseconds +
(time_delta.seconds +
time_delta.days * 24 * 3600) * 10 ** 6) / 10.0 ** 6)
remaining_seconds = int((self._total - self._start_index - 1.0) *
total_seconds / float(index - self._start_index) -
total_seconds)
remaining_delta = datetime.timedelta(seconds=remaining_seconds)
remaining_str = ', remaining: ' + str(remaining_delta)
except ZeroDivisionError:
remaining_str = ''
return remaining_str | python | {
"resource": ""
} |
q274089 | Annotations.f_to_dict | test | def f_to_dict(self, copy=True):
"""Returns annotations as dictionary.
:param copy: Whether to return a shallow copy or the real thing (aka _dict).
"""
if copy:
return self._dict.copy()
else:
return self._dict | python | {
"resource": ""
} |
q274090 | Annotations.f_remove | test | def f_remove(self, key):
"""Removes `key` from annotations"""
key = self._translate_key(key)
try:
del self._dict[key]
except KeyError:
raise AttributeError('Your annotations do not contain %s' % key) | python | {
"resource": ""
} |
q274091 | Annotations.f_ann_to_str | test | def f_ann_to_str(self):
"""Returns all annotations lexicographically sorted as a concatenated string."""
resstr = ''
for key in sorted(self._dict.keys()):
resstr += '%s=%s; ' % (key, str(self._dict[key]))
return resstr[:-2] | python | {
"resource": ""
} |
q274092 | make_ordinary_result | test | def make_ordinary_result(result, key, trajectory=None, reload=True):
"""Turns a given shared data item into a an ordinary one.
:param result: Result container with shared data
:param key: The name of the shared data
:param trajectory:
The trajectory, only needed if shared data has
no access to the trajectory, yet.
:param reload: If data should be reloaded after conversion
:return: The result
"""
shared_data = result.f_get(key)
if trajectory is not None:
shared_data.traj = trajectory
shared_data._request_data('make_ordinary')
result.f_remove(key)
if reload:
trajectory.f_load_item(result, load_data=pypetconstants.OVERWRITE_DATA)
return result | python | {
"resource": ""
} |
q274093 | make_shared_result | test | def make_shared_result(result, key, trajectory, new_class=None):
"""Turns an ordinary data item into a shared one.
Removes the old result from the trajectory and replaces it.
Empties the given result.
:param result: The result containing ordinary data
:param key: Name of ordinary data item
:param trajectory: Trajectory container
:param new_class:
Class of new shared data item.
Leave `None` for automatic detection.
:return: The `result`
"""
data = result.f_get(key)
if new_class is None:
if isinstance(data, ObjectTable):
new_class = SharedTable
elif isinstance(data, pd.DataFrame):
new_class = SharedPandasFrame
elif isinstance(data, (tuple, list)):
new_class = SharedArray
elif isinstance(data, (np.ndarray, np.matrix)):
new_class = SharedCArray
else:
raise RuntimeError('Your data `%s` is not understood.' % key)
shared_data = new_class(result.f_translate_key(key), result, trajectory=trajectory)
result[key] = shared_data
shared_data._request_data('make_shared')
return result | python | {
"resource": ""
} |
q274094 | SharedData.create_shared_data | test | def create_shared_data(self, **kwargs):
"""Creates shared data on disk with a StorageService on disk.
Needs to be called before shared data can be used later on.
Actual arguments of ``kwargs`` depend on the type of data to be
created. For instance, creating an array one can use the keyword
``obj`` to pass a numpy array (``obj=np.zeros((10,20,30))``).
Whereas for a PyTables table may need a description dictionary
(``description={'column_1': pt.StringCol(2, pos=0),'column_2': pt.FloatCol( pos=1)}``)
Refer to the PyTables documentation on how to create tables.
"""
if 'flag' not in kwargs:
kwargs['flag'] = self.FLAG
if 'data' in kwargs:
kwargs['obj'] = kwargs.pop('data')
if 'trajectory' in kwargs:
self.traj = kwargs.pop('trajectory')
if 'traj' in kwargs:
self.traj = kwargs.pop('traj')
if 'name' in kwargs:
self.name = kwargs.pop['name']
if 'parent' in kwargs:
self.parent = kwargs.pop('parent')
if self.name is not None:
self.parent[self.name] = self
return self._request_data('create_shared_data', kwargs=kwargs) | python | {
"resource": ""
} |
q274095 | SharedData._request_data | test | def _request_data(self, request, args=None, kwargs=None):
"""Interface with the underlying storage.
Passes request to the StorageService that performs the appropriate action.
For example, given a shared table ``t``.
``t.remove_row(4)`` is parsed into ``request='remove_row', args=(4,)`` and
passed onto the storage service. In case of the HDF5StorageService,
this is again translated back into ``hdf5_table_node.remove_row(4)``.
"""
return self._storage_service.store(pypetconstants.ACCESS_DATA,
self.parent.v_full_name,
self.name,
request, args, kwargs,
trajectory_name=self.traj.v_name) | python | {
"resource": ""
} |
q274096 | SharedData.get_data_node | test | def get_data_node(self):
"""Returns the actula node of the underlying data.
In case one uses HDF5 this will be the HDF5 leaf node.
"""
if not self._storage_service.is_open:
warnings.warn('You requesting the data item but your store is not open, '
'the item itself will be closed, too!',
category=RuntimeWarning)
return self._request_data('__thenode__') | python | {
"resource": ""
} |
q274097 | SharedResult._supports | test | def _supports(self, item):
"""Checks if outer data structure is supported."""
result = super(SharedResult, self)._supports(item)
result = result or type(item) in SharedResult.SUPPORTED_DATA
return result | python | {
"resource": ""
} |
q274098 | SharedResult.create_shared_data | test | def create_shared_data(self, name=None, **kwargs):
"""Calls the corresponding function of the shared data item"""
if name is None:
item = self.f_get()
else:
item = self.f_get(name)
return item.create_shared_data(**kwargs) | python | {
"resource": ""
} |
q274099 | manipulate_multiproc_safe | test | def manipulate_multiproc_safe(traj):
""" Target function that manipulates the trajectory.
Stores the current name of the process into the trajectory and
**overwrites** previous settings.
:param traj:
Trajectory container with multiprocessing safe storage service
"""
# Manipulate the data in the trajectory
traj.last_process_name = mp.current_process().name
# Store the manipulated data
traj.results.f_store(store_data=3) | python | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.