Fragments of spike-sorting methods: spike reloading, alignment-point initialization, spatial localization, feature extraction, and SPC cluster-file I/O.
The earliest fragments are the most damaged: a rolling-window test `np.where((rollwin2D(rd, width) ...).all(axis=1))` that finds the positions where a condition holds across a full window of `width` samples, a pair of `len(odinndis)` checks, a conversion between sample counts and time, `intround(dnt * self.tres)`, and an 'F' progress flag emitted with `printflush('F', end='')`.
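Though its surrounding method is lost, the rolling-window idiom itself is recoverable. A sketch, using NumPy's `sliding_window_view` as a stand-in for the `rollwin2D` helper named in the fragment:

```python
import numpy as np
from numpy.lib.stride_tricks import sliding_window_view

def full_run_starts(cond, width):
    """Start indices where the 1D boolean array `cond` holds True for
    `width` consecutive samples; mirrors
    np.where(rollwin2D(rd, width).all(axis=1))."""
    windows = sliding_window_view(cond, width)  # shape (n - width + 1, width)
    return np.where(windows.all(axis=1))[0]
```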
`reload_spikes_and_templates(self, sids, usemeanchans=False)` reloads the waveforms of the given spike IDs via `self.reload_spikes(sids, usemeanchans=usemeanchans)`, then rebuilds the mean-waveform template of every neuron that owns one of those spikes: it takes `np.unique(self.spikes['nid'][sids])` and calls `neuron.update_wave()` on each.
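A minimal sketch of that method, assuming (not shown in the fragments) that `self.neurons` maps neuron IDs to neuron objects:

```python
import numpy as np

def reload_spikes_and_templates(self, sids, usemeanchans=False):
    """Reload waveform data for spikes `sids`, then update the mean
    waveform (template) of each neuron those spikes belong to."""
    self.reload_spikes(sids, usemeanchans=usemeanchans)
    # only neurons owning at least one reloaded spike need updating:
    for nid in np.unique(self.spikes['nid'][sids]):
        neuron = self.neurons[nid]  # assumed {nid: neuron} mapping
        neuron.update_wave()
```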
`init_spike_alignment(self)` prints 'Setting initial spike alignment points' and, for each neuron in `self.neurons.values()`, fetches the template via `neuron.get_wave()` and finds the per-channel trough and peak sample indices, `mintis = nwave.data.argmin(axis=1)` and `maxtis = nwave.data.argmax(axis=1)`, stacked as `np.column_stack([mintis, maxtis])`. Whichever extremum is more consistent in time across channels, chosen by `np.argmin([mintis.std(), maxtis.std()])`, becomes that neuron's alignment point. The method then walks `zip(self.spikes, self.wavedata)`, printing the current sid periodically and '.' otherwise (`printflush(sid, end='')`, `printflush('.', end='')`); for each spike it asserts `(chans == neuronchans).all()` so the spike's channels match its neuron's, reads the two extremum sample indices on the max channel as `t0i = int(s['tis'][maxchani, 0])` and `t1i = int(s['tis'][maxchani, 1])` along with their separation `abs(t1i - t0i)`, converts the waveform values at those samples to microvolts with `AD2uV(wd[maxchani, t0i])` (and likewise at `t1i`), and stores the peak-to-peak amplitude `abs(s['V1'] - s['V0'])`. A bare `print()` ends the progress line.
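A sketch of the alignment-point choice. The selection rule, align on whichever extremum has the smaller timing spread across channels, follows directly from the `np.argmin([mintis.std(), maxtis.std()])` fragment; the function wrapper is mine:

```python
import numpy as np

def choose_alignment(template):
    """For a template of shape (nchans, nt), pick trough (0) or
    peak (1) as the alignment point: whichever extremum occurs at
    the most consistent sample index across channels."""
    mintis = template.argmin(axis=1)  # per-channel trough indices
    maxtis = template.argmax(axis=1)  # per-channel peak indices
    tis = np.column_stack([mintis, maxtis])  # (nchans, 2)
    aligni = np.argmin([mintis.std(), maxtis.std()])
    return tis, aligni  # aligni: 0 = align on trough, 1 = on peak
```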
`spatially_localize_spikes(self, sortwin, method='fit')` announces itself with `print('Running spatial localization on all %d spikes' % self.nspikes)` and times the run with `time.clock()`. Iterating over `zip(self.spikes, self.wavedata)` with the same sid/'.' progress output, it gathers each spike's waveform rows with `core.rowtake()` (or its Cython counterpart `util.rowtake_cy()`), maps spike channels onto detector channel indices with `det.chans.searchsorted(chans)`, and reduces the waveform to one weight per array row (channel), roughly `w = np.float32(wd[np.arange(s['nchans']), ...])` followed by `abs(w).sum(axis=1)`. With `method='fit'` the weights are fed to the model fit `weights2f(f, w, x, y, maxchani)`; the spatial-mean alternative uses `weights2spatialmean(w, x, y)`; any other value trips `'Unknown method %r' % method`.

A fit with implausible parameters, apparently judged by `dist((x0, y0), ...)` against the channel positions, is rejected: the spike is flagged with `printflush('X', end='')`, logged with `det.log("Reject spike %d at t=%d based on fit params" % (sid, spiket))` where `spiket = intround(s['t'])`, moved out of its neuron by `sortwin.MoveSpikes2List(neuron, [sid], update=False)`, and relocalized with `weights2spatialmean(w, x, y)`. A per-spike lock radius `lockr = min(det.lockrx*s['sx'], det.inclr)`, clamped to at least 1 by `max(lockr, 1)`, then decides which channels stay locked to the spike: candidates are prefiltered by vertical offset with `np.where(np.abs(y - y0) ...)`, copied via `ylockchaniis.copy()`, and kept only when `dist((x[ylockchanii], y[ylockchanii]), (x0, y0))` falls inside the radius, leaving `len(lockchans)` locked channels. The method closes by printing `'Spatial localization of spikes took %.3f s'` with the elapsed `time.clock()` difference.
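The spatial-mean fallback is a standard center-of-mass estimate. A sketch, with the body reconstructed (the fragments give only the `weights2spatialmean(w, x, y)` call):

```python
import numpy as np

def weights2spatialmean(w, x, y):
    """Estimate a spike's position (x0, y0) as the weighted mean of
    its channel coordinates. w: per-channel weights (e.g. summed
    rectified amplitudes); x, y: channel positions in um."""
    w = np.abs(np.asarray(w, dtype=np.float64))
    wsum = w.sum()
    if wsum == 0:  # degenerate spike: fall back to unweighted mean
        return x.mean(), y.mean()
    return (w * x).sum() / wsum, (w * y).sum() / wsum
```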
`get_component_matrix(self, dims=None, weighting=None)` builds the feature matrix to cluster on: it fetches `X = self.get_param_matrix(dims=dims)`, then optionally reweights it, using `mdp.nodes.FastICANode()` when `weighting.lower()` names ICA and `mdp.nodes.PCANode()` when it names PCA; either way the node is fit with `node.train(X)` and the projected matrix comes back from `node.execute(X)`.
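A minimal sketch of that flow using the MDP library the fragments call into (`mdp.nodes`); the exact 'ica'/'pca' strings are assumptions consistent with the two node types:

```python
import mdp  # Modular toolkit for Data Processing
import numpy as np

def component_matrix(X, weighting=None):
    """Optionally project the (nspikes, nparams) matrix X onto ICA
    or PCA components before clustering."""
    if weighting is None:
        return X
    if weighting.lower() == 'ica':
        node = mdp.nodes.FastICANode()
    elif weighting.lower() == 'pca':
        node = mdp.nodes.PCANode()
    else:
        raise ValueError('unknown weighting %r' % weighting)
    node.train(X)           # fit the decomposition to the data
    return node.execute(X)  # project X onto the learned components
```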
`get_ids(self, cids, spikes)` groups spike IDs (plural) by cluster ID: it coerces `cids = np.asarray(cids)`, shifts them to be 0-based by subtracting `cids.min()`, collects `uniquecids = set(cids)` with `nclusters = len(uniquecids)`, seeds `dict(zip(uniquecids, [[] for i in range(nclusters)]))` with one empty list per cluster, and then appends `spike['id']` to the matching list for each pair in `zip(spikes, cids)`.
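A self-contained sketch of that grouping, assuming `spikes` is a record array with an 'id' field:

```python
import numpy as np

def get_ids(cids, spikes):
    """Map each 0-based cluster ID to the list of spike IDs in it."""
    cids = np.asarray(cids)
    cids = cids - cids.min()  # make cluster IDs 0-based
    uniquecids = set(cids)
    nclusters = len(uniquecids)
    # one list per cluster; building them in a listcomp avoids the
    # shared-list trap of dict.fromkeys(uniquecids, []):
    cids2sids = dict(zip(uniquecids, [[] for i in range(nclusters)]))
    for spike, cid in zip(spikes, cids):
        cids2sids[cid].append(spike['id'])
    return cids2sids
```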
`write_spc_input(self)` dumps the component matrix for SPC (superparamagnetic clustering): it takes `self.get_component_matrix()`, timestamps the files from `str(datetime.datetime.now())` with the fractional seconds split off at '.' and the result made filename-safe by `dt.replace(' ', '_')` and `dt.replace(':', '.')`, then forms `os.path.join(spykedir, 'spc', dt+'.dat')` for the input and `os.path.join(spykedir, 'spc', dt+'.dg_01.lab')` for the labels SPC will write back, with `spykedir = os.path.dirname(__file__)`. The matrix goes out one whitespace-separated row per spike, `params.tofile(f, sep=' ', format='%.6f')` followed by `f.write('\n')`, and the file is closed.
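A sketch of the writer, following the fragments nearly line for line (a `with` block stands in for the explicit `open`/`close` pair):

```python
import datetime
import os

def write_spc_input(self):
    """Write the component matrix to a timestamped .dat file for SPC."""
    X = self.get_component_matrix()
    spykedir = os.path.dirname(__file__)
    dt = str(datetime.datetime.now())  # e.g. '2024-01-02 13:45:07.123456'
    dt = dt.split('.')[0]              # drop fractional seconds
    dt = dt.replace(' ', '_')
    dt = dt.replace(':', '.')          # ':' is unsafe in filenames
    self.spcdatfname = os.path.join(spykedir, 'spc', dt + '.dat')
    # SPC writes its cluster labels back to this path:
    self.spclabfname = os.path.join(spykedir, 'spc', dt + '.dg_01.lab')
    with open(self.spcdatfname, 'w') as f:
        for params in X:  # one whitespace-separated feature row per spike
            params.tofile(f, sep=' ', format='%.6f')
            f.write('\n')
```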
`parse_spc_lab_file(self, fname=None)` reads SPC's .lab output, which lists a cluster number (0-based) for every spin (datapoint) at each temperature, and returns `(Ts, cids)`. It fetches `self.get_spikes_sortedby('id')` so rows line up with spike IDs, and when no filename is passed it prompts with a file dialog whose wildcard offers all files (*.*) and .lab files (*.lab), via the usual `dlg.ShowModal()`, `dlg.GetPath()`, `dlg.Destroy()` sequence.
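A sketch of that prompt, assuming wxPython (the `ShowModal`/`GetPath`/`Destroy` trio matches its `wx.FileDialog` API):

```python
import wx

def ask_lab_fname(parent=None):
    """Prompt for an SPC .lab results file; return the chosen path,
    or None if the dialog is cancelled."""
    wildcard = "All files (*.*)|*.*|.lab files (*.lab)|*.lab"
    dlg = wx.FileDialog(parent, message="Choose an SPC .lab file",
                        wildcard=wildcard, style=wx.FD_OPEN)
    fname = None
    if dlg.ShowModal() == wx.ID_OK:  # user confirmed a selection
        fname = dlg.GetPath()
    dlg.Destroy()  # wx dialogs must be destroyed explicitly
    return fname
```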