Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
"""
# NOTE(review): fragment of a larger function -- the enclosing `def` line and
# everything after the `if HAVE_FIM:` header are missing from this chunk, and
# the original indentation appears to have been stripped. Comments below only
# describe what these lines visibly do; names like `data`, `report`, `binsize`,
# `winlen`, `min_spikes`, `max_spikes`, `max_occ` are parameters/locals defined
# outside the visible span -- TODO confirm against the full source.
# Check that data is a list of SpikeTrains
if not all([isinstance(elem, neo.SpikeTrain) for elem in data]):
raise TypeError(
'data must be a list of SpikeTrains')
# Check that all spiketrains have same t_start and same t_stop
if not all([st.t_start == data[0].t_start for st in data]) or not all(
[st.t_stop == data[0].t_stop for st in data]):
raise AttributeError(
'All spiketrains must have the same t_start and t_stop')
# Reject any `report` value outside the three supported modes.
# NOTE(review): the message text reads "'3d#,'" -- the comma placement inside
# the quotes looks like a typo in the original; confirm intended wording
# before changing (it is runtime behavior, not a comment).
if report not in ['a', '#', '3d#']:
raise ValueError(
"report has to assume of the following values:" +
" 'a', '#' and '3d#,' got {} instead".format(report))
# Binning the data and clipping (binary matrix)
# Bin all spike trains at width `binsize`, clip to a boolean sparse matrix,
# and convert to COO layout for downstream processing.
binary_matrix = conv.BinnedSpikeTrain(
data, binsize).to_sparse_bool_array().tocoo()
# Computing the context and the binary matrix encoding the relation between
# objects (window positions) and attributes (spikes,
# indexed with a number equal to neuron idx*winlen+bin idx)
# NOTE(review): `_build_context` is a helper defined elsewhere in the file;
# its exact contract is not visible here -- the triple it returns is used as
# (context, transactions, relation-matrix) below.
context, transactions, rel_matrix = _build_context(binary_matrix, winlen)
# By default, set the maximum pattern size to the maximum number of
# spikes in a window
# (per-row sums of rel_matrix give spike counts per window; the default is
# at least min_spikes + 1 so the search range is non-empty).
if max_spikes is None:
max_spikes = np.max((int(np.max(np.sum(rel_matrix, axis=1))),
min_spikes + 1))
# By default, set maximum number of occurrences to number of non-empty
# windows
# (rows of rel_matrix with a positive sum are windows containing spikes).
if max_occ is None:
max_occ = int(np.sum(np.sum(rel_matrix, axis=1) > 0))
# Check if fim.so available and use it
# NOTE(review): the body of this branch is missing -- the chunk is truncated
# immediately after the header line.
if HAVE_FIM:
# NOTE(review): this fragment starts mid-statement -- the next line is the
# tail of a call (presumably a warnings.warn about window size) whose opening
# lines are missing from this chunk.
winsize_bintime * binsize))
# Warn when `winstep` is not an integer multiple of `binsize`, i.e. the
# requested step cannot be represented exactly on the bin grid.
# NOTE(review): the message says "window size" although the condition checks
# the window *step* -- looks like a copy-paste slip in the original; confirm
# before altering, since the string is runtime behavior.
if winstep_bintime * binsize != winstep:
warnings.warn(
"ratio between winstep and binsize is not integer -- "
"the actual number for window size is " + str(
winstep_bintime * binsize))
# num_tr: number of trials; N: presumably number of neurons per trial --
# TODO confirm the axis meaning of `data` against the missing signature.
num_tr, N = np.shape(data)[:2]
n_bins = int((t_stop - t_start) / binsize)
# Per-trial binned spike matrix: (trials, neurons, bins).
mat_tr_unit_spt = np.zeros((len(data), N, n_bins))
for tr, sts in enumerate(data):
sts = list(sts)
bs = conv.BinnedSpikeTrain(
sts, t_start=t_start, t_stop=t_stop, binsize=binsize)
# Only the clipped (0/1) representation is supported here.
if binary is True:
mat = bs.to_bool_array()
else:
raise ValueError(
"The method only works on the zero_one matrix at the moment")
mat_tr_unit_spt[tr] = mat
# Allocate per-window outputs: three 1-D arrays plus per-window rates.
num_win = len(t_winpos)
Js_win, n_exp_win, n_emp_win = (np.zeros(num_win) for _ in range(3))
rate_avg = np.zeros((num_win, N))
indices_win = {}
for i in range(num_tr):
indices_win['trial' + str(i)] = []
# NOTE(review): the body of this loop is missing -- the chunk is truncated
# right after the header line.
for i, win_pos in enumerate(t_winpos_bintime):
# NOTE(review): the lines below are the tail of a docstring whose opening
# quotes and the enclosing `def` line are outside this chunk; only the
# closing ''' is visible. The code after it is also truncated (the chunk
# jumps into a different fragment after the rate-profile comment).
the cumulative probability matrix. pmat[i, j] represents the
estimated probability of having an overlap between bins i and j
STRICTLY LOWER THAN the observed overlap, under the null hypothesis
of independence of the input spike trains.
x_edges : numpy.ndarray
edges of the bins used for the horizontal axis of pmat. If pmat is
a matrix of shape (n, n), x_edges has length n+1
y_edges : numpy.ndarray
edges of the bins used for the vertical axis of pmat. If pmat is
a matrix of shape (n, n), y_edges has length n+1
'''
# Bin the spike trains
# Each axis gets its own binning window: when a start time is given, the
# stop is start + dt; when it is None, both bounds are left to
# BinnedSpikeTrain's defaults.
t_stop_x = None if t_start_x is None else t_start_x + dt
t_stop_y = None if t_start_y is None else t_start_y + dt
bsts_x = conv.BinnedSpikeTrain(
spiketrains, binsize=binsize, t_start=t_start_x, t_stop=t_stop_x)
bsts_y = conv.BinnedSpikeTrain(
spiketrains, binsize=binsize, t_start=t_start_y, t_stop=t_stop_y)
bsts_x_matrix = bsts_x.to_bool_array()
bsts_y_matrix = bsts_y.to_bool_array()
# Check that the duration and nr. neurons is identical between the two axes
# NOTE(review): the error message mentions only durations, but the shape
# comparison also covers the neuron count -- wording could be confirmed.
if bsts_x_matrix.shape != bsts_y_matrix.shape:
raise ValueError(
'Different spike train durations along the x and y axis!')
# Define the firing rate profiles
# If rates are to be estimated, create the rate profiles as Quantity
# objects obtained by boxcar-kernel convolution
# NOTE(review): fragment of a method -- the `def` line and the docstring
# opening are missing; only the Parameters/Returns tail of the docstring and
# the body are visible. The body reads `self.covariance_bin_size` and
# `self.units`, so this belongs to a class not shown here.
# NOTE(review): the documented parameters (`simulation_end`,
# `neo_spiketrains`) do not match the names the visible code uses
# (`spiketrains`); `simulation_end` is never referenced in the visible body.
# Confirm against the full signature.
----------
simulation_end : float
The simulation end time.
neo_spiketrains : list
A list of Neo spiketrains.
Returns
-------
time : None
values : 2D array
The pairwise covariances.
"""
# Empty input: nothing to bin, return a (None, None) pair so callers can
# still unpack two values.
if len(spiketrains) == 0:
return None, None
# Bin at `covariance_bin_size` (scaled by the instance's units) and compute
# the pairwise covariance matrix; the first tuple slot is always None.
binned_sts = elephant.conversion.BinnedSpikeTrain(spiketrains,
binsize=self.covariance_bin_size*self.units)
covariance = elephant.spike_train_correlation.covariance(binned_sts)
return None, covariance
# NOTE(review): fragment -- `max_tstart`/`min_tstop` and the enclosing `def`
# are computed/declared above this chunk; the `else:` at the end is truncated
# before its body. Indentation appears stripped.
# Use the largest t_start across spike trains; warn if they differ.
t_start = max_tstart
if not all([max_tstart == t.t_start for t in spiketrains]):
warnings.warn(
"Spiketrains have different t_start values -- "
"using maximum t_start as t_start.")
if t_stop is None:
# Find the internal range for t_stop
# NOTE(review): `if min_tstop:` is a truthiness test, so a legitimate
# t_stop of exactly 0 would fall through to the else branch -- confirm
# whether that is intended in the original.
if min_tstop:
t_stop = min_tstop
if not all([min_tstop == t.t_stop for t in spiketrains]):
warnings.warn(
"Spiketrains have different t_stop values -- "
"using minimum t_stop as t_stop.")
else:
# Derive the shared stop time from the inputs (second element of the
# helper's return value) when no precomputed min_tstop is available.
min_tstop = conv._get_start_stop_from_input(spiketrains)[1]
t_stop = min_tstop
if not all([min_tstop == t.t_stop for t in spiketrains]):
warnings.warn(
"Spiketrains have different t_stop values -- "
"using minimum t_stop as t_stop.")
# Restrict every spike train to the common [t_start, t_stop] range.
sts_cut = [st.time_slice(t_start=t_start, t_stop=t_stop) for st in
spiketrains]
# Bin the spike trains and sum across columns
bs = conv.BinnedSpikeTrain(sts_cut, t_start=t_start, t_stop=t_stop,
binsize=binsize)
# Clipped histogram: count, per bin, how many trains spiked at least once.
if binary:
bin_hist = bs.to_sparse_bool_array().sum(axis=0)
# NOTE(review): the else-branch body is missing -- truncated here.
else:
# NOTE(review): fragment -- the first line below is the tail of a raise
# statement (its opening lines, the t_start validation, and the enclosing
# `def` are missing from this chunk). `trains`, `ids`, `min_tstop`,
# `_signals_same_tstop`, `binsize`, `t_start`, `t_stop` come from the
# missing portion.
'than the specified t_start value')
else:
start = t_start
# Set stopping time of binning
if t_stop is None:
stop = _signals_same_tstop(trains)
# A t_stop beyond the earliest train end would bin past available data.
elif t_stop > min_tstop:
raise ValueError(
'Some SpikeTrains have a smaller t_stop ' +
'than the specified t_stop value')
else:
stop = t_stop
# Bin the spike trains and take for each of them the ids of filled bins
binned = conv.BinnedSpikeTrain(
trains, binsize=binsize, t_start=start, t_stop=stop)
Nbins = binned.num_bins
# `spike_indices` gives, per train, the indices of bins containing spikes.
filled_bins = binned.spike_indices
# Compute and return the transaction list
# One transaction per time bin: the list of train ids that fired in it.
return [[train_id for train_id, b in zip(ids, filled_bins)
if bin_id in b] for bin_id in range(Nbins)]